diff --git a/.codecov.yml b/.codecov.yml deleted file mode 100644 index 9d85a301aa..0000000000 --- a/.codecov.yml +++ /dev/null @@ -1,3 +0,0 @@ -codecov: - branch: dev - # strict_yaml_branch: master # Enable this if we want to use the yml file in master to dictate the reports for all branches \ No newline at end of file diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000..c60668e82c --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +# General + +# Backend +server/build/libs + +# UI +**/node_modules +ui/build \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000000..6ee796ba44 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,37 @@ +--- +name: Bug report +about: Create a report to help us improve +title: "" +labels: 'type: bug' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**Details** +Conductor version: +Persistence implementation: Cassandra, Postgres, MySQL, Dynomite etc +Queue implementation: Postgres, MySQL, Dynoqueues etc +Lock: Redis or Zookeeper? +Workflow definition: +Task definition: +Event handler definition: + + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000..3ba13e0cec --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1 @@ +blank_issues_enabled: false diff --git a/.github/ISSUE_TEMPLATE/documentation.md b/.github/ISSUE_TEMPLATE/documentation.md new file mode 100644 index 0000000000..790cd31e1e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/documentation.md @@ -0,0 +1,12 @@ +--- +name: Documentation +about: Something in the documentation that needs improvement +title: "[DOC]: " +labels: 'type: docs' +assignees: '' + +--- + +## What are you missing in the docs + +## Proposed text diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000..659e4a8cd2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Propose a new feature +title: "[FEATURE]: " +labels: 'type: feature' +assignees: '' + +--- + +Please read our [contributor guide](https://github.com/Netflix/conductor/blob/main/CONTRIBUTING.md) before creating an issue. +Also consider discussing your idea on the [discussion forum](https://github.com/Netflix/conductor/discussions) first. 
+ +## Describe the Feature Request +_A clear and concise description of what the feature request is._ + +## Describe Preferred Solution +_A clear and concise description of what you want to happen._ + +## Describe Alternatives +_A clear and concise description of any alternative solutions or features you've considered._ diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 0000000000..764c49d645 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,10 @@ +--- +name: Question +about: Ask a question +title: "[QUESTION]: " +labels: question +assignees: '' + +--- + +Please do not create issues for questions. Use the [discussion forum](https://github.com/Netflix/conductor/discussions) instead! diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..ef87e0b826 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +version: 2 +updates: + - package-ecosystem: "gradle" + directory: "/" + schedule: + interval: "weekly" + reviewers: + - "aravindanr" + - "jxu-nflx" + - "apanicker-nflx" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..d1c1e70c81 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,20 @@ +Pull Request type +---- + +- [ ] Bugfix +- [ ] Feature +- [ ] Refactoring (no functional changes, no api changes) +- [ ] Build related changes (Please run `./gradlew generateLock saveLock` to refresh dependencies) +- [ ] WHOSUSING.md +- [ ] Other (please describe): + +Changes in this PR +---- + +_Describe the new behavior from this PR, and why it's needed_ +Issue # + +Alternatives considered +---- + +_Describe alternative implementation you have considered_ diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 0000000000..6aaea44868 --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,37 @@ +template: | + ## What’s Changed + + $CHANGES + +name-template: 'v$RESOLVED_VERSION' +tag-template: 'v$RESOLVED_VERSION' + +categories: + - title: 'IMPORTANT' + label: 'type: important' + - title: 'New' + label: 'type: feature' + - title: 'Bug Fixes' + label: 'type: bug' + - title: 'Refactor' + label: 'type: maintenance' + - title: 'Documentation' + label: 'type: docs' + - title: 'Dependency Updates' + label: 'type: dependencies' + +version-resolver: + minor: + labels: + - 'type: important' + + patch: + labels: + - 'type: bug' + - 'type: maintenance' + - 'type: docs' + - 'type: dependencies' + - 'type: feature' + +exclude-labels: + - 'skip-changelog' diff --git a/.github/workflows/publish-jar.yml b/.github/workflows/publish-jar.yml new file mode 100644 index 0000000000..fd5dc6d833 --- /dev/null +++ b/.github/workflows/publish-jar.yml @@ -0,0 +1,34 @@ +name: Build and publish JAR + +on: + push: + branches: + - master + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Set up Zulu JDK 11 + uses: actions/setup-java@v2 + with: + java-version: '11' + distribution: 'zulu' + - name: Build with Gradle + run: ./gradlew build -x :conductor-contribs:test -x :conductor-test-harness:test --scan --stacktrace + - name: Create build tag + run: | + echo "::set-output name=TAG::$(git describe --abbrev=0 --tags --exclude '*-build.*')-build.${{ github.run_number }}+${{ github.sha }}" + id: tag + - name: Upload conductor-server JAR + 
uses: ncipollo/release-action@v1 + with: + artifacts: "server/build/libs/conductor-server-*-boot.jar" + commit: ${{ github.sha }} + tag: ${{ steps.tag.outputs.TAG }} + token: ${{ github.token }} + diff --git a/.gitignore b/.gitignore index eb2ac67732..f79fb99daf 100644 --- a/.gitignore +++ b/.gitignore @@ -1,23 +1,34 @@ -**/node_modules -**/npm-debug.log -ui/dist -ui/package-lock.json +# Java Build .gradle -.project +.classpath +dump.rdb +out bin -build -client/python/conductor.egg-info +target +buildscan.log +/docs/site + +# Python +/polyglot-clients/python/conductor.egg-info *.pyc -.classpath -docs/site -site -ui/.settings + +# OS & IDE +.DS_Store .settings -dump.rdb +.vscode .idea +.project *.iml -out/ -bin/ -target/ -.DS_Store -target/ + +# JS & UI Related +node_modules +/ui/build + +# publishing secrets +secrets/signing-key + +# local builds +lib/ +build/ +*/build/ + diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 42a494c299..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ -language: java -jdk: -- oraclejdk8 - -after_success: - - bash <(curl -s https://codecov.io/bash) -install: true -script: "./buildViaTravis.sh" -git: - depth: 150 -addons: - apt: - packages: - - redis-server -cache: - directories: - - "$HOME/.gradle/caches" -env: - global: - - secure: Klmmz8ctX995+JBE2BEnR0CQxe9IVMblIHrQMAim/j3Jdw9rgPX4H4UdDgNbt3yEbFOk54R/2g9eJst2hJVXzpsMiArFLo77HwEwl2cHUPgpCzhQLpp5nyw2e/giKBqaYqN99tzA4P/VsDL9EZR6JBdprG5pega2IdNj9pc9Q/QnG7OEwk/PU4CAhcL6iVvQVqmXf9wwgI8SH+6IKa1BVCzs5GQF3Hxtzk9tNIJAh0f/FMTyax6nLHY0cNJdu9ky75BS4X1BYIYRisSxcHKRLGaYBY6JGzWtba3p1o4hmKw6PKtd8f/aOeOLyZTOePGUVrBIn9WXG/R3UvvZgtkcv/0tq7sMjKRF5sDHetlgXTsPmXvM4RzxJYUx7JQKVdAmFKyRiUFtDFjUQKwbF2gdGSQQd9daZLm4X7KGb8tjiTCB9xSQg7wwbCUJrfGZbicA0sZ8y9Lu+fEbrmzkWqSpy+/I2P2gurKMLdM2y9FwWhcV4mN4u/U9r9B/jJHedm7pVoZBxVs32C6uhkEGeCBxy01Kr9h2EVyT/oZvzo0we2Le9pnQvJ/sNUnujbrxRHC/hWF1sZ2aBeitDLp+b7SK+IqiyX9yt1w5Y/1aduKOyttiRUNgpl1Ytg+cp0z1oz9uSe7JrGL1HSdb0zQg0Fw0ZgGcg+rwe8DyWFJYCjlAbD0= - - secure: WNhtLjX0pLoFdxJ5I5C4E98pPNgFnrpz5nuTc+DBNfX29Ip9wD3/L8+RU7GYr+da+f3ALdboOxaYo5akx6nwlQHGfdhMEHE5SDejS2gAktuKcLEH8WPrp4nXqUnrlpfwnQ7DXB/k01XB9yN4fagEiRFEApB2z0kOv/Q1va5d3M9TrFxT28RFsZnnUWy23VzgCjulqzWZchI24Ra2kWiAjif2EWjwAn3ZD/JKYGZghhtFcAHETg1kz65/AXHOnSFXCntED7AONmSXnpCxuk/5p7CqBoKfJ7IXXT8e98ajmrNvlBx/dBkAepspislc2hgLUSbHAjzuqwdQnz2MGm/XE/lMhhVU5uCDaPDnih9MgH/0ewjJtAc1uTKoc2NtjjzoBCTYhaFCSHzJuSWoi5NIVp9dvAM+thfTcm+/buNdGHkrg+1eLwiItwSHK8R1nBAvk76AqHRR7FdJVi919t+KjveiEd7ez1w9iWnaYh3kycHQCgIwQ9LXfzrR8sLULNq4KNManrL/iuDcEyRG/q6SDfaq5X/i5MFhZ9PuCSorrve7sRKiM4ncxwk2vwrRrqOuqps8QFKLAaf/KvBCdn/FE5vhh++/f3/9oMD6Mc8KqcsTYZFnAm/EKn2236dKBrRI8a2pSQQsMKaoGKOTAmaW2jV9+8j4ODMLi9qWQ3bOumY= - - secure: brK+NGTogesfjqwHso/dK5wqO2LxEUo9gEtUZADr9UlFDzcIU6JomGjeZzeaCsOBlFbZ4p0IIRfivGCi7yegQPWad3cAlyoAQ3+0b+ZxiumbJu5SVVr32x5NxfHNvnW9zIqFIOA9A6GjNq2AkdfBrr9bAerPYc8RjbU4PkH/+CM4HDrx2m/6eXrEbtElCi7IfdRLH9wu6D9/2ANdpK7bCjY2S9sMBvDUsUzGmcoUnJBdInjPYxL1tmAQlAMgWW8E/vKVdyjKq6JsYpwuVnztHlMryrXVQglwrbXtB0gl4Qvqdv0kXAlTf76wQsViEOIvoJV63o/cnFG2lZbVAJ+JGE7cCRaQpIzDf0il5XDkF86XQjaqWpfeEQu7CNj2yjXItn/2q2HaMu4uoEQwQSifRo0n44S7WOSkrZcYly4/Hax9SjiVvDDimlVqp0fURNpo41SMtlW0jXWIYEstft+0vWtPpwzHd9mWEqCQiXkDoAYpjPpfQFpcwFLAi+JO+4Y1Yhuw8NBMHTIDOzjbEwRo06yO/9pYICmg34a1mVAOTdAhpXR8HfTbFlTd80Xm3kYLmrOZrj5yWvP1+XgLDnMFEqw3nHYHQYGWKaVWs5OfYlkhWrpinMVdciLJEp20fUudiSfO7zcbjOXbN5Gg7E0X9kdIbKG9/h+m9buHJuD8QPA= - - secure: 
W03DuzGYB2tpW6cJJgeFwG7urNPxSbNrrDk3kOApDb216woJ9BVSBGF1Jhhje6o7yYK9k2C2z02ulMNRnfkZ4Zt5WrsiD5zljXKM9G5BOy4zMVesEj93hRq99pfMiidH4pd6N1SZpFCeybxHIIuGHl43lCeDlgxxvpavsnoRwwDLGeRdiMlSB2uChAa9j0CmPr28cYB1r1iXpQPyOjgApI7TzRB42+j+pR0GmZWdCUbKpUPeyg13jQ3d4udgRSPG7b2jUTdrSiVkOD71d/25tmLNWygt2O+mUfp6cKDxZrYpD+V6MFIxHd5AWg9Z3KY/QBUizPKAvpKNDQ8pVj6yqsePYShl4IpTUhzbeFkATvSNXZyzSlmHXkAwkO7Gb8yOOvFqbH8cSqfXqNtjBIoP8WnA4caeY1ZCQ8ec0IpIc3nqng/lTk89hJ+vlmmj1h2G0Yh7syaNxNd7+yno5BXoLXlobACPMUYeHifEjtzcngM9i91m9yFviv6n6WGTnbSz4QTB0Pr5IEzIrOAudBPS4MijR/PmsgEa5l1tiCSWiTf2VJwMcB7g5tAzZqGX/wp4C6A/gbfPUutZBbeVnFCzGP5f9R0QtOOZm07cmN1IoO9+uBvPI62K3TQefgIF0/XKfiRhGKnhwdgZl5RZwN0WkAPVEjoWYXY4QSAZg99H88E= - - secure: VAHbP/8nTAIl2UuT++C/BfSfBDxJPZOEgbCQcCyUpHsFa8SdstuB5Le6VZYaAzcs7wR9WFIHP6+llJyg76p1OhxHC/iG+5QFSqKSkA+RkPyBAmtNTw+Pt5i/0MMxNbBrlogPvFoGe9/wighYQKNwK3In431PSh5n4sEiXPc4XVSzaP+Qxpd1g4VQwQV950JTx97QMLwnR1RNNz/LhBaisE7XdTM561znmqhcRmfGZY7dlhdZxMp+60ngutIZUfSekFLY2nYecoWZv6kEMBxEMnnGBYamCUy856TIVgzGAtD5VScSiRxkwawBKN1OsgvEfwxg/duCTZ9GkQ1LFwxjNDX7bVUo3DsjlqteyJ8n1bh3oYlKgFN6XRiC/Tz8fh66N94AFM8+dc9aJFyBlPBPW1MxxjS+4Y9l3cHxTvyoixguKSHdOypT2PdWkWWSIPGE6j6S33sUJyJuuA/Eq4pG4bd1OfXcjdw+/UJlkLsb3p+ojPhlFqDtRlFTLeS2Mz565EOs/jTzUjuQFNrz3f4Ht+1JpWq9To8KjHzRelRxWR183cikTD9SCDRTQlBlMXcMJHXAasssU5BFr6ZprulKI4UNiU0b3CCVlofDiL/Zd/788TDyqCX/pqI/YmK62zP/EWxOZTCdbfbYetu/+b4c5z//ygfLbw2j3bmtB8ojnE0= - - secure: jiEHSPnbGaejrl6I9Aj4ZOmunzwBtLtnYLggB6W2KBVj115QLRTr2E/SkXrHINWLksV98oPs8J6E6v/LSJ7YwMQssyPmO2UjhakFNZCZpUIYeo+l8vP9LKRZhTbhav9dOG80RUIXUzqJl48GjaFrChYzdzNSXEwBhVqS/cPbEkfxZ+bPnPsuUseLjd/pFbn09CJduqhUWqv9OzjVa0cTjnVGIBDoqWp69p5M2Q8Kpf4wMsZ/gn1oww20YE/XpDrxo1bZyNLbPwsqRSK5lnwG8uqgohkFYAJfIzoriXK74pEPqqp99zmAIO8otdKeEVU6EA6NoK6LzAUa/6l8sa2cxcxNU6bbVEC/IbAWQYWGRDrUa0fNWYaNF/2aMSKbXCgH/KQQnBR8laVlNhhXArxUJGBaygSrLPL12l53tSAXPoPD6jYABtkPPkW95jyp4Zu7LrmjRCNJN/qMXl/DOl306WKzBHnftBeeICsFw6AEkoSHIEIrEJpk/jN1uLWhoOmE6o7sEn6mwVhq4/DqqCGnZZez6RwwqQ2Hiq2Agf7LXEzt5lfm3dKkaxVw4mFuieMWcxmrXYEe9MtrYwdUzssse/p5x2a+SeDgoSg2w17ZNoTUJD6ZSgxMuYJEIPzXgISqZh+ln3ZO0+Raa5yVALhrVY/FCKCuPhwDESE9i65MVlY= diff --git a/CHANGELOG.md b/CHANGELOG.md index e69de29bb2..beb97c8efc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -0,0 +1,369 @@ +Conductor has been upgraded to use the SpringBoot framework and requires Java11 or above. +#### NOTE: The java clients (conductor-client, conductor-client-spring, conductor-grpc-client) are still compiled using Java8 to ensure backward compatibility and smoother migration. 
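To make the property tables below concrete, here is a small before/after sketch of a server `application.properties`. The keys come straight from the mapping tables in this changelog; the values (persistence type, host list) are placeholders chosen only for illustration and will differ per deployment.

```properties
# Pre-3.0 keys (for reference, see the "Old" columns in the tables below):
#   db=dynomite
#   workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c
#   workflow.indexing.enabled=true
#   workflow.elasticsearch.url=localhost:9300
#   decider.sweep.frequency.seconds=30

# Equivalent 3.0+ Spring Boot style keys (the "New" columns below):
conductor.db.type=dynomite
conductor.redis.hosts=dyno1:8102:us-east-1c
conductor.indexing.enabled=true
conductor.elasticsearch.url=localhost:9300
conductor.app.sweepFrequency=30s
```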
+ +## Removals/Deprecations +- Removed support for EmbeddedElasticSearch +- Removed deprecated constructors in DynoQueueDAO +- Removed deprecated methods in the Worker interface +- Removed OAuth Support in HTTP task (Looking for contributions for OAuth/OAuth2.0) +- Removed deprecated fields and methods in the Workflow object +- Removed deprecated fields and methods in the Task object +- Removed deprecated fields and methods in the WorkflowTask object + +Removed unused methods from QueueDAO: +- List pop(String, int, int, long) +- List pollMessages(String, int, int, long) + +Removed APIs: +- GET /tasks/in_progress/{tasktype} +- GET /tasks/in_progress/{workflowId}/{taskRefName} +- POST /tasks/{taskId}/ack +- POST /tasks/queue/requeue +- DELETE /queue/{taskType}/{taskId} + + +- GET /event/queues +- GET /event/queues/providers + + +- void restart(String) in workflow client +- List getPendingTasksByType(String, String, Integer) in task client +- Task getPendingTaskForWorkflow(String, String) in task client +- boolean preAck(Task) in Worker +- int getPollCount() in Worker + +## What's changed +Changes to configurations: + +### `azureblob-storage` module: + +| Old | New | Default | +| --- | --- | --- | +| workflow.external.payload.storage.azure_blob.connection_string | conductor.external-payload-storage.azureblob.connectionString | null | +| workflow.external.payload.storage.azure_blob.container_name | conductor.external-payload-storage.azureblob.containerName | conductor-payloads | +| workflow.external.payload.storage.azure_blob.endpoint | conductor.external-payload-storage.azureblob.endpoint | null | +| workflow.external.payload.storage.azure_blob.sas_token | conductor.external-payload-storage.azureblob.sasToken | null | +| workflow.external.payload.storage.azure_blob.signedurlexpirationseconds | conductor.external-payload-storage.azureblob.signedUrlExpirationDuration | 5s | +| workflow.external.payload.storage.azure_blob.workflow_input_path | conductor.external-payload-storage.azureblob.workflowInputPath | workflow/input/ | +| workflow.external.payload.storage.azure_blob.workflow_output_path | conductor.external-payload-storage.azureblob.workflowOutputPath | workflow/output/ | +| workflow.external.payload.storage.azure_blob.task_input_path | conductor.external-payload-storage.azureblob.taskInputPath | task/input/ | +| workflow.external.payload.storage.azure_blob.task_output_path | conductor.external-payload-storage.azureblob.taskOutputPath | task/output/ | + +### `cassandra-persistence` module: + +| Old | New | Default | +| --- | --- | --- | +| workflow.cassandra.host | conductor.cassandra.hostAddress | 127.0.0.1 | +| workflow.cassandra.port | conductor.cassandra.port | 9142 | +| workflow.cassandra.cluster | conductor.cassandra.cluster | "" | +| workflow.cassandra.keyspace | conductor.cassandra.keyspace | conductor | +| workflow.cassandra.shard.size | conductor.cassandra.shardSize | 100 | +| workflow.cassandra.replication.strategy | conductor.cassandra.replicationStrategy | SimpleStrategy | +| workflow.cassandra.replication.factor.key | conductor.cassandra.replicationFactorKey | replication_factor | +| workflow.cassandra.replication.factor.value | conductor.cassandra.replicationFactorValue | 3 | +| workflow.cassandra.read.consistency.level | conductor.cassandra.readConsistencyLevel | LOCAL_QUORUM | +| workflow.cassandra.write.consistency.level | conductor.cassandra.writeConsistencyLevel | LOCAL_QUORUM | +| conductor.taskdef.cache.refresh.time.seconds | 
conductor.cassandra.taskDefCacheRefreshInterval | 60s | +| conductor.eventhandler.cache.refresh.time.seconds | conductor.cassandra.eventHandlerCacheRefreshInterval | 60s | +| workflow.event.execution.persistence.ttl.seconds | conductor.cassandra.eventExecutionPersistenceTTL | 0s | + +### `contribs` module: + +| Old | New | Default | +| --- | --- | --- | +| workflow.archival.ttl.seconds | conductor.workflow-status-listener.archival.ttlDuration | 0s | +| workflow.archival.delay.queue.worker.thread.count | conductor.workflow-status-listener.archival.delayQueueWorkerThreadCount | 5 | +| workflow.archival.delay.seconds | conductor.workflow-status-listener.archival.delaySeconds | 60 | +| | | +| workflowstatuslistener.publisher.success.queue | conductor.workflow-status-listener.queue-publisher.successQueue | _callbackSuccessQueue | +| workflowstatuslistener.publisher.failure.queue | conductor.workflow-status-listener.queue-publisher.failureQueue | _callbackFailureQueue | +| | | | +| com.netflix.conductor.contribs.metrics.LoggingMetricsModule.reportPeriodSeconds | conductor.metrics-logger.reportInterval | 30s | +| | | | +| workflow.event.queues.amqp.batchSize | conductor.event-queues.amqp.batchSize | 1 | +| workflow.event.queues.amqp.pollTimeInMs | conductor.event-queues.amqp.pollTimeDuration | 100ms | +| workflow.event.queues.amqp.hosts | conductor.event-queues.amqp.hosts | localhost | +| workflow.event.queues.amqp.username | conductor.event-queues.amqp.username | guest | +| workflow.event.queues.amqp.password | conductor.event-queues.amqp.password | guest | +| workflow.event.queues.amqp.virtualHost | conductor.event-queues.amqp.virtualHost | / | +| workflow.event.queues.amqp.port | conductor.event-queues.amqp.port.port | 5672 | +| workflow.event.queues.amqp.connectionTimeout | conductor.event-queues.amqp.connectionTimeout | 60000ms | +| workflow.event.queues.amqp.useNio | conductor.event-queues.amqp.useNio | false | +| workflow.event.queues.amqp.durable | conductor.event-queues.amqp.durable | true | +| workflow.event.queues.amqp.exclusive | conductor.event-queues.amqp.exclusive | false | +| workflow.event.queues.amqp.autoDelete | conductor.event-queues.amqp.autoDelete | false | +| workflow.event.queues.amqp.contentType | conductor.event-queues.amqp.contentType | application/json | +| workflow.event.queues.amqp.contentEncoding | conductor.event-queues.amqp.contentEncoding | UTF-8 | +| workflow.event.queues.amqp.amqp_exchange | conductor.event-queues.amqp.exchangeType | topic | +| workflow.event.queues.amqp.deliveryMode | conductor.event-queues.amqp.deliveryMode | 2 | +| workflow.listener.queue.useExchange | conductor.event-queues.amqp.useExchange | true | +| workflow.listener.queue.prefix | conductor.event-queues.amqp.listenerQueuePrefix | "" | +| | | | +| io.nats.streaming.clusterId | conductor.event-queues.nats-stream.clusterId | test-cluster | +| io.nats.streaming.durableName | conductor.event-queues.nats-stream.durableName | null | +| io.nats.streaming.url | conductor.event-queues.nats-stream.url | nats://localhost:4222 | +| | | | +| workflow.event.queues.sqs.batchSize | conductor.event-queues.sqs.batchSize | 1 | +| workflow.event.queues.sqs.pollTimeInMS | conductor.event-queues.sqs.pollTimeDuration | 100ms | +| workflow.event.queues.sqs.visibilityTimeoutInSeconds | conductor.event-queues.sqs.visibilityTimeout | 60s | +| workflow.listener.queue.prefix | conductor.event-queues.sqs.listenerQueuePrefix | "" | +| workflow.listener.queue.authorizedAccounts | 
conductor.event-queues.sqs.authorizedAccounts | "" | +| | | | +| workflow.external.payload.storage.s3.bucket | conductor.external-payload-storage.s3.bucketName | conductor_payloads | +| workflow.external.payload.storage.s3.signedurlexpirationseconds | conductor.external-payload-storage.s3.signedUrlExpirationDuration | 5s | +| workflow.external.payload.storage.s3.region | conductor.external-payload-storage.s3.region | us-east-1 | +| | | | +| http.task.read.timeout | conductor.tasks.http.readTimeout | 150ms | +| http.task.connect.timeout | conductor.tasks.http.connectTimeout | 100ms | +| | | | +| kafka.publish.request.timeout.ms | conductor.tasks.kafka-publish.requestTimeout | 100ms | +| kafka.publish.max.block.ms | conductor.tasks.kafka-publish.maxBlock | 500ms | +| kafka.publish.producer.cache.size | conductor.tasks.kafka-publish.cacheSize | 10 | +| kafka.publish.producer.cache.time.ms | conductor.tasks.kafka-publish.cacheTime | 120000ms | + +### `core` module: + +| Old | New | Default | +| --- | --- | --- | +| environment | _removed_ | | +| STACK | conductor.app.stack | test | +| APP_ID | conductor.app.appId | conductor | +| workflow.executor.service.max.threads | conductor.app.executorServiceMaxThreadCount | 50 | +| decider.sweep.frequency.seconds | conductor.app.sweepFrequency | 30s | +| workflow.sweeper.thread.count | conductor.app.sweeperThreadCount | 5 | +| workflow.event.processor.thread.count | conductor.app.eventProcessorThreadCount | 2 | +| workflow.event.message.indexing.enabled | conductor.app.eventMessageIndexingEnabled | true | +| workflow.event.execution.indexing.enabled | conductor.app.eventExecutionIndexingEnabled | true | +| workflow.decider.locking.enabled | conductor.app.workflowExecutionLockEnabled | false | +| workflow.locking.lease.time.ms | conductor.app.lockLeaseTime | 60000ms | +| workflow.locking.time.to.try.ms | conductor.app.lockTimeToTry | 500ms | +| tasks.active.worker.lastpoll | conductor.app.activeWorkerLastPollTimeout | 10s | +| task.queue.message.postponeSeconds | conductor.app.taskExecutionPostponeDuration | 60s | +| workflow.taskExecLog.indexing.enabled | conductor.app.taskExecLogIndexingEnabled | true | +| async.indexing.enabled | conductor.app.asyncIndexingEnabled | false | +| workflow.system.task.worker.thread.count | conductor.app.systemTaskWorkerThreadCount | # available processors * 2 | +| workflow.system.task.worker.callback.seconds | conductor.app.systemTaskWorkerCallbackDuration | 30s | +| workflow.system.task.worker.poll.interval | conductor.app.systemTaskWorkerPollInterval | 50s | +| workflow.system.task.worker.executionNameSpace | conductor.app.systemTaskWorkerExecutionNamespace | "" | +| workflow.isolated.system.task.worker.thread.count | conductor.app.isolatedSystemTaskWorkerThreadCount | 1 | +| workflow.system.task.queue.pollCount | conductor.app.systemTaskMaxPollCount | 1 | +| async.update.short.workflow.duration.seconds | conductor.app.asyncUpdateShortRunningWorkflowDuration | 30s | +| async.update.delay.seconds | conductor.app.asyncUpdateDelay | 60s | +| summary.input.output.json.serialization.enabled | conductor.app.summary-input-output-json-serialization.enabled | false | +| workflow.owner.email.mandatory | conductor.app.ownerEmailMandatory | true | +| workflow.repairservice.enabled | conductor.app.workflowRepairServiceEnabled | false | +| workflow.event.queue.scheduler.poll.thread.count | conductor.app.eventSchedulerPollThreadCount | # CPU cores | +| workflow.dyno.queues.pollingInterval | conductor.app.eventQueuePollInterval | 100ms 
| +| workflow.dyno.queues.pollCount | conductor.app.eventQueuePollCount | 10 | +| workflow.dyno.queues.longPollTimeout | conductor.app.eventQueueLongPollTimeout | 1000ms | +| conductor.workflow.input.payload.threshold.kb | conductor.app.workflowInputPayloadSizeThreshold | 5120KB | +| conductor.max.workflow.input.payload.threshold.kb | conductor.app.maxWorkflowInputPayloadSizeThreshold | 10240KB | +| conductor.workflow.output.payload.threshold.kb | conductor.app.workflowOutputPayloadSizeThreshold | 5120KB | +| conductor.max.workflow.output.payload.threshold.kb | conductor.app.maxWorkflowOutputPayloadSizeThreshold | 10240KB | +| conductor.task.input.payload.threshold.kb | conductor.app.taskInputPayloadSizeThreshold | 3072KB | +| conductor.max.task.input.payload.threshold.kb | conductor.app.maxTaskInputPayloadSizeThreshold | 10240KB | +| conductor.task.output.payload.threshold.kb | conductor.app.taskOutputPayloadSizeThreshold | 3072KB | +| conductor.max.task.output.payload.threshold.kb | conductor.app.maxTaskOutputPayloadSizeThreshold | 10240KB | +| conductor.max.workflow.variables.payload.threshold.kb | conductor.app.maxWorkflowVariablesPayloadSizeThreshold | 256KB | +| | | | +| workflow.isolated.system.task.enable | conductor.app.isolatedSystemTaskEnabled | false | +| workflow.isolated.system.task.poll.time.secs | conductor.app.isolatedSystemTaskQueuePollInterval | 10s | +| | | | +| workflow.task.pending.time.threshold.minutes | conductor.app.taskPendingTimeThreshold | 60m | +| | | | +| workflow.monitor.metadata.refresh.counter | conductor.workflow-monitor.metadataRefreshInterval | 10 | +| workflow.monitor.stats.freq.seconds | conductor.workflow-monitor.statsFrequency | 60s | + +### `es6-persistence` module: + +| Old | New | Default | +| --- | --- | --- | +| workflow.elasticsearch.version | conductor.elasticsearch.version | 6 | +| workflow.elasticsearch.url | conductor.elasticsearch.url | localhost:9300 | +| workflow.elasticsearch.index.name | conductor.elasticsearch.indexPrefix | conductor | +| workflow.elasticsearch.tasklog.index.name | _removed_ | | +| workflow.elasticsearch.cluster.health.color | conductor.elasticsearch.clusterHealthColor | green | +| workflow.elasticsearch.archive.search.batchSize | _removed_ | | +| workflow.elasticsearch.index.batchSize | conductor.elasticsearch.indexBatchSize | 1 | +| workflow.elasticsearch.async.dao.worker.queue.size | conductor.elasticsearch.asyncWorkerQueueSize | 100 | +| workflow.elasticsearch.async.dao.max.pool.size | conductor.elasticsearch.asyncMaxPoolSize | 12 | +| workflow.elasticsearch.async.buffer.flush.timeout.seconds | conductor.elasticsearch.asyncBufferFlushTimeout | 10s | +| workflow.elasticsearch.index.shard.count | conductor.elasticsearch.indexShardCount | 5 | +| workflow.elasticsearch.index.replicas.count | conductor.elasticsearch.indexReplicasCount | 1 | +| tasklog.elasticsearch.query.size | conductor.elasticsearch.taskLogResultLimit | 10 | +| workflow.elasticsearch.rest.client.connectionRequestTimeout.milliseconds | conductor.elasticsearch.restClientConnectionRequestTimeout | -1 | +| workflow.elasticsearch.auto.index.management.enabled | conductor.elasticsearch.autoIndexManagementEnabled | true | +| workflow.elasticsearch.document.type.override | conductor.elasticsearch.documentTypeOverride | "" | + +### `es7-persistence` module: + +| Old | New | Default | +| --- | --- | --- | +| workflow.elasticsearch.version | conductor.elasticsearch.version | 7 | +| workflow.elasticsearch.url | conductor.elasticsearch.url | localhost:9300 | +| 
workflow.elasticsearch.index.name | conductor.elasticsearch.indexPrefix | conductor | +| workflow.elasticsearch.tasklog.index.name | _removed_ | | +| workflow.elasticsearch.cluster.health.color | conductor.elasticsearch.clusterHealthColor | green | +| workflow.elasticsearch.archive.search.batchSize | _removed_ | | +| workflow.elasticsearch.index.batchSize | conductor.elasticsearch.indexBatchSize | 1 | +| workflow.elasticsearch.async.dao.worker.queue.size | conductor.elasticsearch.asyncWorkerQueueSize | 100 | +| workflow.elasticsearch.async.dao.max.pool.size | conductor.elasticsearch.asyncMaxPoolSize | 12 | +| workflow.elasticsearch.async.buffer.flush.timeout.seconds | conductor.elasticsearch.asyncBufferFlushTimeout | 10s | +| workflow.elasticsearch.index.shard.count | conductor.elasticsearch.indexShardCount | 5 | +| workflow.elasticsearch.index.replicas.count | conductor.elasticsearch.indexReplicasCount | 1 | +| tasklog.elasticsearch.query.size | conductor.elasticsearch.taskLogResultLimit | 10 | +| workflow.elasticsearch.rest.client.connectionRequestTimeout.milliseconds | conductor.elasticsearch.restClientConnectionRequestTimeout | -1 | +| workflow.elasticsearch.auto.index.management.enabled | conductor.elasticsearch.autoIndexManagementEnabled | true | +| workflow.elasticsearch.document.type.override | conductor.elasticsearch.documentTypeOverride | "" | +| workflow.elasticsearch.basic.auth.username | conductor.elasticsearch.username | "" | +| workflow.elasticsearch.basic.auth.password | conductor.elasticsearch.password | "" | + +### `grpc-server` module: + +| Old | New | Default | +| --- | --- | --- | +| conductor.grpc.server.port | conductor.grpc-server.port | 8090 | +| conductor.grpc.server.reflectionEnabled | conductor.grpc-server.reflectionEnabled | true | + +### `mysql-persistence` module (v3.0.0 - v3.0.5): + +| Old | New | Default | +| --- | --- | --- | +| jdbc.url | conductor.mysql.jdbcUrl | jdbc:mysql://localhost:3306/conductor | +| jdbc.username | conductor.mysql.jdbcUsername | conductor | +| jdbc.password | conductor.mysql.jdbcPassword | password | +| flyway.enabled | conductor.mysql.flywayEnabled | true | +| flyway.table | conductor.mysql.flywayTable | null | +| conductor.mysql.connection.pool.size.max | conductor.mysql.connectionPoolMaxSize | -1 | +| conductor.mysql.connection.pool.idle.min | conductor.mysql.connectionPoolMinIdle | -1 | +| conductor.mysql.connection.lifetime.max | conductor.mysql.connectionMaxLifetime | 30m | +| conductor.mysql.connection.idle.timeout | conductor.mysql.connectionIdleTimeout | 10m | +| conductor.mysql.connection.timeout | conductor.mysql.connectionTimeout | 30s | +| conductor.mysql.transaction.isolation.level | conductor.mysql.transactionIsolationLevel | "" | +| conductor.mysql.autocommit | conductor.mysql.autoCommit | false | +| conductor.taskdef.cache.refresh.time.seconds | conductor.mysql.taskDefCacheRefreshInterval | 60s | + +### `mysql-persistence` module (v3.0.5+): + +| Old | New | +| --- | --- | +| jdbc.url | spring.datasource.url | +| jdbc.username | spring.datasource.username | +| jdbc.password | spring.datasource.password | +| flyway.enabled | spring.flyway.enabled | +| flyway.table | spring.flyway.table | +| conductor.mysql.connection.pool.size.max | spring.datasource.hikari.maximum-pool-size | +| conductor.mysql.connection.pool.idle.min | spring.datasource.hikari.minimum-idle | +| conductor.mysql.connection.lifetime.max | spring.datasource.hikari.max-lifetime | +| conductor.mysql.connection.idle.timeout | 
spring.datasource.hikari.idle-timeout | +| conductor.mysql.connection.timeout | spring.datasource.hikari.connection-timeout | +| conductor.mysql.transaction.isolation.level | spring.datasource.hikari.transaction-isolation | +| conductor.mysql.autocommit | spring.datasource.hikari.auto-commit | +| conductor.taskdef.cache.refresh.time.seconds | conductor.mysql.taskDefCacheRefreshInterval | + +* for more properties and default values: https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#application-properties.data.spring.datasource.hikari + +### `postgres-persistence` module (v3.0.0 - v3.0.5): + +| Old | New | Default | +| --- | --- | --- | +| jdbc.url | conductor.postgres.jdbcUrl | jdbc:postgresql://localhost:5432/conductor | +| jdbc.username | conductor.postgres.jdbcUsername | conductor | +| jdbc.password | conductor.postgres.jdbcPassword | password | +| flyway.enabled | conductor.postgres.flywayEnabled | true | +| flyway.table | conductor.postgres.flywayTable | null | +| conductor.postgres.connection.pool.size.max | conductor.postgres.connectionPoolMaxSize | -1 | +| conductor.postgres.connection.pool.idle.min | conductor.postgres.connectionPoolMinIdle | -1 | +| conductor.postgres.connection.lifetime.max | conductor.postgres.connectionMaxLifetime | 30m | +| conductor.postgres.connection.idle.timeout | conductor.postgres.connectionIdleTimeout | 10m | +| conductor.postgres.connection.timeout | conductor.postgres.connectionTimeout | 30s | +| conductor.postgres.transaction.isolation.level | conductor.postgres.transactionIsolationLevel | "" | +| conductor.postgres.autocommit | conductor.postgres.autoCommit | false | +| conductor.taskdef.cache.refresh.time.seconds | conductor.postgres.taskDefCacheRefreshInterval | 60s | + +### `postgres-persistence` module (v3.0.5+): + +| Old | New | +| --- | --- | +| jdbc.url | spring.datasource.url | +| jdbc.username | spring.datasource.username | +| jdbc.password | spring.datasource.password | +| flyway.enabled | spring.flyway.enabled | +| flyway.table | spring.flyway.table | +| conductor.postgres.connection.pool.size.max | spring.datasource.hikari.maximum-pool-size | +| conductor.postgres.connection.pool.idle.min | spring.datasource.hikari.minimum-idle | +| conductor.postgres.connection.lifetime.max | spring.datasource.hikari.max-lifetime | +| conductor.postgres.connection.idle.timeout | spring.datasource.hikari.idle-timeout | +| conductor.postgres.connection.timeout | spring.datasource.hikari.connection-timeout | +| conductor.postgres.transaction.isolation.level | spring.datasource.hikari.transaction-isolation | +| conductor.postgres.autocommit | spring.datasource.hikari.auto-commit | +| conductor.taskdef.cache.refresh.time.seconds | conductor.postgres.taskDefCacheRefreshInterval | + +* for more properties and default values: https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#application-properties.data.spring.datasource.hikari + +### `redis-lock` module: + +| Old | New | Default | +| --- | --- | --- | +| workflow.redis.locking.server.type | conductor.redis-lock.serverType | single | +| workflow.redis.locking.server.address | conductor.redis-lock.serverAddress | redis://127.0.0.1:6379 | +| workflow.redis.locking.server.password | conductor.redis-lock.serverPassword | null | +| workflow.redis.locking.server.master.name | conductor.redis-lock.serverMasterName | master | +| workflow.decider.locking.namespace | conductor.redis-lock.namespace | "" | +| workflow.decider.locking.exceptions.ignore | 
conductor.redis-lock.ignoreLockingExceptions | false | + +### `redis-persistence` module: + +| Old | New | Default | +| --- | --- | --- | +| EC2_REGION | conductor.redis.dataCenterRegion | us-east-1 | +| EC2_AVAILABILITY_ZONE | conductor.redis.availabilityZone | us-east-1c | +| workflow.dynomite.cluster | _removed_ | +| workflow.dynomite.cluster.name | conductor.redis.clusterName | "" | +| workflow.dynomite.cluster.hosts | conductor.redis.hosts | null | +| workflow.namespace.prefix | conductor.redis.workflowNamespacePrefix | null | +| workflow.namespace.queue.prefix | conductor.redis.queueNamespacePrefix | null | +| workflow.dyno.keyspace.domain | conductor.redis.keyspaceDomain | null | +| workflow.dynomite.connection.maxConnsPerHost | conductor.redis.maxConnectionsPerHost | 10 | +| workflow.dynomite.connection.max.retry.attempt | conductor.redis.maxRetryAttempts | 0 | +| workflow.dynomite.connection.max.timeout.exhausted.ms | conductor.redis.maxTimeoutWhenExhausted | 800ms | +| queues.dynomite.nonQuorum.port | conductor.redis.queuesNonQuorumPort | 22122 | +| workflow.dyno.queue.sharding.strategy | conductor.redis.queueShardingStrategy | roundRobin | +| conductor.taskdef.cache.refresh.time.seconds | conductor.redis.taskDefCacheRefreshInterval | 60s | +| workflow.event.execution.persistence.ttl.seconds | conductor.redis.eventExecutionPersistenceTTL | 60s | + +### `zookeeper-lock` module: + +| Old | New | Default | +| --- | --- | --- | +| workflow.zookeeper.lock.connection | conductor.zookeeper-lock.connectionString | localhost:2181 | +| workflow.zookeeper.lock.sessionTimeoutMs | conductor.zookeeper-lock.sessionTimeout | 60000ms | +| workflow.zookeeper.lock.connectionTimeoutMs | conductor.zookeeper-lock.connectionTimeout | 15000ms | +| workflow.decider.locking.namespace | conductor.zookeeper-lock.namespace | "" | + +### Component configuration: + +| Old | New | Default | +| --- | --- | --- | +| db | conductor.db.type | "" | +| workflow.indexing.enabled | conductor.indexing.enabled | true | +| conductor.disable.async.workers | conductor.system-task-workers.enabled | true | +| decider.sweep.disable | conductor.workflow-reconciler.enabled | true | +| conductor.grpc.server.enabled | conductor.grpc-server.enabled | false | +| workflow.external.payload.storage | conductor.external-payload-storage.type | dummy | +| workflow.default.event.processor.enabled | conductor.default-event-processor.enabled | true | +| workflow.events.default.queue.type | conductor.default-event-queue.type | sqs | +| workflow.status.listener.type | conductor.workflow-status-listener.type | stub | +| workflow.decider.locking.server | conductor.workflow-execution-lock.type | noop_lock | +| | | | +| workflow.default.event.queue.enabled | conductor.event-queues.default.enabled | true | +| workflow.sqs.event.queue.enabled | conductor.event-queues.sqs.enabled | false | +| workflow.amqp.event.queue.enabled | conductor.event-queues.amqp.enabled | false | +| workflow.nats.event.queue.enabled | conductor.event-queues.nats.enabled | false | +| workflow.nats_stream.event.queue.enabled | conductor.event-queues.nats-stream.enabled | false | +| | | | +| - | conductor.metrics-logger.enabled | false | +| - | conductor.metrics-prometheus.enabled | false | +| - | conductor.metrics-datadog.enable | false | +| - | conductor.metrics-datadog.api-key | | + diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..f8076bc629 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,49 @@ +# Contributor Covenant Code of 
Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at netflixoss@netflix.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 49b8d25c08..2fb6797762 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,24 +1,72 @@ -# Contributing to Conductor +Thanks for your interest in Conductor! +This guide helps to find the most efficient way to contribute, ask questions, and report issues. -We are following the Gitflow workflow. The active development branch is [dev](https://github.com/Netflix/conductor/tree/dev), the stable branch is [master](https://github.com/Netflix/conductor/tree/master). +Code of conduct +----- -Contributions will be accepted to the [dev](https://github.com/Netflix/conductor/tree/dev) only. +Please review our [code of conduct](CODE_OF_CONDUCT.md). -## How to provide a patch for a new feature +I have a question! +----- -1. If it is a major feature, please create an [Issue]( https://github.com/Netflix/conductor/issues ) and discuss with the project leaders. +We have a dedicated [discussion forum](https://github.com/Netflix/conductor/discussions) for asking "how to" questions and to discuss ideas. The discussion forum is a great place to start if you're considering creating a feature request or work on a Pull Request. +*Please do not create issues to ask questions.* -2. If in step 1 you get an acknowledge from the project leaders, use the - following procedure to submit a patch: +I want to contribute! +------ - a. Fork Dynomite on github ( http://help.github.com/fork-a-repo/ ) +We welcome Pull Requests and already had many outstanding community contributions! +Creating and reviewing Pull Requests take considerable time. This section helps you set up for a smooth Pull Request experience. - b. Create a topic branch (git checkout -b my_branch) +The stable branch is [main](https://github.com/Netflix/conductor/tree/main). - c. Push to your branch (git push origin my_branch) +Please create pull requests for your contributions against [main](https://github.com/Netflix/conductor/tree/main) only. - d. Initiate a pull request on github ( http://help.github.com/send-pull-requests/ ) +It's a great idea to discuss the new feature you're considering on the [discussion forum](https://github.com/Netflix/conductor/discussions) before writing any code. There are often different ways you can implement a feature. Getting some discussion about different options helps shape the best solution. When starting directly with a Pull Request, there is the risk of having to make considerable changes. Sometimes that is the best approach, though! Showing an idea with code can be very helpful; be aware that it might be throw-away work. Some of our best Pull Requests came out of multiple competing implementations, which helped shape it to perfection. - e. Done :) +Also, consider that not every feature is a good fit for Conductor. A few things to consider are: -For minor fixes just open a pull request to the [dev]( https://github.com/Netflix/conductor/tree/dev ) branch on Github. +* Is it increasing complexity for the user, or might it be confusing? 
+* Does it, in any way, break backward compatibility (this is seldom acceptable) +* Does it require new dependencies (this is rarely acceptable for core modules) +* Should the feature be opt-in or enabled by default. For integration with a new Queuing recipe or persistence module, a separate module which can be optionally enabled is the right choice. +* Should the feature be implemented in the main Conductor repository, or would it be better to set up a separate repository? Especially for integration with other systems, a separate repository is often the right choice because the life-cycle of it will be different. + +Of course, for more minor bug fixes and improvements, the process can be more light-weight. + +We'll try to be responsive to Pull Requests. Do keep in mind that because of the inherently distributed nature of open source projects, responses to a PR might take some time because of time zones, weekends, and other things we may be working on. + +I want to report an issue +----- + +If you found a bug, it is much appreciated if you create an issue. Please include clear instructions on how to reproduce the issue, or even better, include a test case on a branch. Make sure to come up with a descriptive title for the issue because this helps while organizing issues. + +I have a great idea for a new feature +---- +Many features in Conductor have come from ideas from the community. If you think something is missing or certain use cases could be supported better, let us know! You can do so by opening a discussion on the [discussion forum](https://github.com/Netflix/conductor/discussions). Provide as much relevant context to why and when the feature would be helpful. Providing context is especially important for "Support XYZ" issues since we might not be familiar with what "XYZ" is and why it's useful. If you have an idea of how to implement the feature, include that as well. + +Once we have decided on a direction, it's time to summarize the idea by creating a new issue. + +## Code Style +We use [spotless](https://github.com/diffplug/spotless) to enforce consistent code style for the project, so make sure to run `gradlew spotlessApply` to fix any violations after code changes. + +## License + +By contributing your code, you agree to license your contribution under the terms of the APLv2: https://github.com/Netflix/conductor/blob/master/LICENSE + +All files are released with the Apache 2.0 license, and the following license header will be automatically added to your new file if none present: + +``` +/** + * Copyright $YEAR Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +``` diff --git a/LICENSE b/LICENSE index ad410e1130..6a1d025d83 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ Apache License same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright {yyyy} {name of copyright owner} + Copyright {yyyy} Netflix, Inc. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index bee4e46d16..e3aabd1364 100644 --- a/README.md +++ b/README.md @@ -2,92 +2,109 @@ ## Conductor -Conductor is an _orchestration_ engine that runs in the cloud. +Conductor is a _workflow orchestration_ engine that runs in the cloud. - -[![Download](https://api.bintray.com/packages/netflixoss/maven/conductor/images/download.svg)](https://bintray.com/netflixoss/maven/conductor/_latestVersion) +[![Github release](https://img.shields.io/github/v/release/Netflix/conductor.svg)](https://GitHub.com/Netflix/conductor/releases) +[![CI](https://github.com/Netflix/conductor/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/Netflix/conductor/actions/workflows/ci.yml) [![License](https://img.shields.io/github/license/Netflix/conductor.svg)](http://www.apache.org/licenses/LICENSE-2.0) -[![Issues](https://img.shields.io/github/issues/Netflix/conductor.svg)](https://github.com/Netflix/conductor/issues) [![NetflixOSS Lifecycle](https://img.shields.io/osslifecycle/Netflix/conductor.svg)]() ## Builds -Conductor builds are run on Travis CI [here](https://travis-ci.org/Netflix/conductor). - -| Branch | Build | Coverage (coveralls.io) | Coverage (codecov.io) | -|:------:|:-------------------------------------------------------------------------------------------------------------:|:------------------------------------------------------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------------------------------:| -| master | [![Build Status](https://travis-ci.org/Netflix/conductor.svg?branch=master)](https://travis-ci.org/Netflix/conductor) | [![Coverage Status](https://coveralls.io/repos/github/Netflix/conductor/badge.svg?branch=master)](https://coveralls.io/github/Netflix/conductor?branch=master) | [![codecov](https://codecov.io/gh/Netflix/conductor/branch/master/graph/badge.svg)](https://codecov.io/gh/Netflix/conductor/branch/master) | -| dev | [![Build Status](https://travis-ci.org/Netflix/conductor.svg?branch=dev)](https://travis-ci.org/Netflix/conductor) | [![Coverage Status](https://coveralls.io/repos/github/Netflix/conductor/badge.svg?branch=dev)](https://coveralls.io/github/Netflix/conductor?branch=dev) | [![codecov](https://codecov.io/gh/Netflix/conductor/branch/dev/graph/badge.svg)](https://codecov.io/gh/Netflix/conductor/branch/dev) | - -## Documentation & Getting Started -[http://netflix.github.io/conductor/](http://netflix.github.io/conductor/) - -[Getting Started](http://netflix.github.io/conductor/intro) guide. - -## Get Conductor -Binaries are available from Maven Central and jcenter. 
- -Below are the various artifacts published: - -|Artifact|Description| -|-----------|---------------| -|conductor-common|Common models used by various conductor modules| -|conductor-core|Core Conductor module| -|conductor-redis-persistence|Persistence using Redis/Dynomite| -|conductor-es5-persistence|Indexing using Elasticsearch 5.X| -|conductor-jersey|Jersey JAX-RS resources for the core services| -|conductor-ui|node.js based UI for Conductor| -|conductor-contribs|Optional contrib package that holds extended workflow tasks and support for SQS| -|conductor-client|Java client for Conductor that includes helpers for running a worker tasks| -|conductor-server|Self contained Jetty server| -|conductor-test-harness|Used for building test harness and an in-memory kitchensink demo| - -## Building -To build the server, use the following dependencies in your classpath: - -* conductor-common -* conductor-core -* conductor-jersey -* conductor-redis-persistence (_unless using your own persistence module_) -* conductor-es5-persistence (_unless using your own index module_) -* conductor-contribs (_optional_) - - -### Deploying Jersey JAX-RS resources -Add the following packages to classpath scan: - -```java -com.netflix.conductor.server.resources -com.netflix.workflow.contribs.queue -``` -Conductor relies on the guice (4.0+) for the dependency injection. -Persistence has a guice module to wire up appropriate interfaces: - -```java -com.netflix.conductor.dao.RedisWorkflowModule -``` +The latest version is [![Github release](https://img.shields.io/github/v/release/Netflix/conductor.svg)](https://GitHub.com/Netflix/conductor/releases). +| Branch | Build | +|:------:|:-------------------------------------------------------------------------------------------------------------:| +| main | [![CI](https://github.com/Netflix/conductor/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/Netflix/conductor/actions/workflows/ci.yml) | +| 2.31 | [![Build Status](https://travis-ci.com/Netflix/conductor.svg?branch=2.31)](https://travis-ci.com/Netflix/conductor) | + + +## Getting Started - Building & Running Conductor +### Docker +The easiest way to get started is with Docker containers. Please follow the instructions [here](https://github.com/Netflix/conductor/tree/main/docker). The server and UI can also be built from source separately. + +### Conductor Server From Source +Conductor Server is a [Spring Boot](https://spring.io/projects/spring-boot) project and follows all applicable conventions. First, ensure that Java JDK 11+ is installed. + +#### Development +The server can be started locally by running `./gradlew bootRun` from the project root. This will start up Conductor with an in-memory persistence and queue implementation. It is not recommended for production use but can come in handy for quickly evaluating what Conductor's all about. For actual production use-cases, please use one of the supported persistence and queue implementations. + +You can verify the development server is up by navigating to `http://localhost:8080` in a browser. + +#### Production Build +Running `./gradlew build` from the project root builds the project into the `/build` directory. Note that Docker is a requirement for tests to run, and thus a requirement to build even if you are building +outside of a Docker container. If you do not have Docker installed you can run `./gradlew build -x test` to skip tests. 
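Once the development server is up, one way to exercise it is through the Java client published by this repo (see the Published Artifacts table below). A minimal sketch follows, assuming a workflow definition named `greetings` has already been registered; the workflow name, input, and root URI are illustrative assumptions, not something shipped by the server.

```java
import java.util.Collections;

import com.netflix.conductor.client.http.WorkflowClient;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.run.Workflow;

public class StartWorkflowExample {

    public static void main(String[] args) {
        // Point the client at the local development server started with `./gradlew bootRun`
        WorkflowClient workflowClient = new WorkflowClient();
        workflowClient.setRootURI("http://localhost:8080/api/");

        // "greetings" is a hypothetical, already-registered workflow definition
        StartWorkflowRequest request = new StartWorkflowRequest();
        request.setName("greetings");
        request.setVersion(1);
        request.setInput(Collections.singletonMap("name", "Conductor"));

        String workflowId = workflowClient.startWorkflow(request);
        System.out.println("Started workflow: " + workflowId);

        // Fetch the execution, including its tasks, to inspect its current state
        Workflow workflow = workflowClient.getWorkflow(workflowId, true);
        System.out.println("Status: " + workflow.getStatus());
    }
}
```

The same `WorkflowClient` works against any Conductor deployment; only the root URI changes.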
+ + +#### Pre-built JAR +A [pre-built](https://artifacts.netflix.net/netflixoss/com/netflix/conductor/conductor-server/) executable jar is available that can be downloaded and run using: + +`java -jar conductor-server-*-boot.jar` + +### Conductor UI from Source + +The UI is a standard `create-react-app` React Single Page Application (SPA). To get started, with Node 14 and `yarn` installed, first run `yarn install` from within the `/ui` directory to retrieve package dependencies. + +There is no need to "build" the project unless you require compiled assets to host on a production web server. If the latter is true, the project can be built with the command `yarn build`. + +To run the UI on the bundled development server, run `yarn run start`. Navigate your browser to `http://localhost:5000`. The Conductor server must already be running on port 8080. + + +## Documentation +[Documentation](http://netflix.github.io/conductor/) +[Roadmap](https://github.com/Netflix/conductor/wiki/Roadmap) +[Getting Started Guide](https://netflix.github.io/conductor/gettingstarted/basicconcepts/). + +## Published Artifacts +Binaries are available from the [Netflix OSS Maven](https://artifacts.netflix.net/netflixoss/com/netflix/conductor/) repository or the [Maven Central Repository](https://search.maven.org/search?q=g:com.netflix.conductor). + +| Artifact | Description | +| ----------- | --------------- | +| conductor-common | Common models used by various conductor modules | +| conductor-core | Core Conductor module | +| conductor-redis-persistence | Persistence and queue using Redis/Dynomite | +| conductor-cassandra-persistence | Persistence using Cassandra | +| conductor-mysql-persistence | Persistence and queue using MySQL | +| conductor-postgres-persistence | Persistence and queue using Postgres | +| conductor-es6-persistence | Indexing using Elasticsearch 6.X | +| conductor-rest | Spring MVC resources for the core services | +| conductor-ui | node.js based UI for Conductor | +| conductor-contribs | Optional contrib package that holds extended workflow tasks and support for SQS, AMQP, etc. | +| conductor-client | Java client for Conductor that includes helpers for running worker tasks | +| conductor-client-spring | Client starter kit for Spring | +| conductor-server | Spring Boot Web Application | +| conductor-azureblob-storage | External payload storage implementation using AzureBlob | +| conductor-redis-lock | Workflow execution lock implementation using Redis | +| conductor-zookeeper-lock | Workflow execution lock implementation using Zookeeper | +| conductor-grpc | Protobuf models used by the server and client | +| conductor-grpc-client | gRPC client to interact with the gRPC server | +| conductor-grpc-server | gRPC server Application | +| conductor-test-harness | Integration and regression tests | + ## Database Requirements * The default persistence used is [Dynomite](https://github.com/Netflix/dynomite) * For queues, we are relying on [dyno-queues](https://github.com/Netflix/dyno-queues) -* The indexing backend is [Elasticsearch](https://www.elastic.co/) (5.x) +* The indexing backend is [Elasticsearch](https://www.elastic.co/) (6.x) ## Other Requirements -* JDK 1.8+ -* Servlet Container +* JDK 11+ +* UI requires Node 14 to build. Earlier Node versions may work but are untested. + +## Community +[Discussion Forum](https://github.com/Netflix/conductor/discussions) Please use the forum to ask questions, discuss ideas, and join the community.
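The `conductor-client` artifact in the table above provides the helpers for running worker tasks. As a rough sketch of that polling pattern (the task type `greet`, the thread count, and the local root URI are assumptions made for this example, not definitions from this repository):

```java
import java.util.Collections;

import com.netflix.conductor.client.automator.TaskRunnerConfigurer;
import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.client.worker.Worker;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;

public class SampleWorkerMain {

    public static void main(String[] args) {
        TaskClient taskClient = new TaskClient();
        taskClient.setRootURI("http://localhost:8080/api/"); // local Conductor server

        // A worker that handles a hypothetical task type named "greet"
        Worker worker = new Worker() {
            @Override
            public String getTaskDefName() {
                return "greet";
            }

            @Override
            public TaskResult execute(Task task) {
                TaskResult result = new TaskResult(task);
                result.getOutputData().put("greeting", "Hello from a sample worker");
                result.setStatus(TaskResult.Status.COMPLETED);
                return result;
            }
        };

        // Poll for work on a small thread pool; init() starts the polling loop
        TaskRunnerConfigurer configurer =
                new TaskRunnerConfigurer.Builder(taskClient, Collections.singletonList(worker))
                        .withThreadCount(2)
                        .build();
        configurer.init();
    }
}
```

A worker like this polls the server for tasks of its type, executes them, and reports the `TaskResult` back; scaling is mostly a matter of thread count and the number of worker processes.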
+ +[Access here other Conductor related projects made by the community!](/RELATED.md) - Backup tool, Cron like workflow starter, Docker containers... ## Get Support -Conductor is maintained by Media Workflow Infrastructure team at Netflix. Use github issue tracking for any support request. +Conductor is maintained by Media Workflow Infrastructure team at Netflix. Use github issue tracking for filing issues and [Discussion Forum](https://github.com/Netflix/conductor/discussions) for any other questions, ideas or support requests. ## Contributions -Whether it is a small doc correction, bug fix or adding new module to support some crazy feature, contributions are highly appreciated. We just ask to follow standard oss guidelines. And to reiterate, please check with us before spending too much time, only to find later that someone else is already working on similar feature. - -`dev` branch is the current working branch, while `master` branch is current stable branch. Please send your PR's to `dev` branch, making sure that it builds on your local system successfully. Also, please make sure all the conflicts are resolved. +Whether it is a small documentation correction, bug fix or new features, contributions are highly appreciated. We just ask to follow standard oss guidelines. [Discussion Forum](https://github.com/Netflix/conductor/discussions) is a good place to ask questions, discuss new features and explore ideas. Please check with us before spending too much time, only to find later that someone else is already working on a similar feature. -Feel free to create an issue with a label: question, with any questions or requests for help. +`main` branch is the current working branch, while `2.31` branch is the latest stable 2.x branch. Please send your PR's to `main` branch, making sure that it builds on your local system successfully. Also, please make sure all the conflicts are resolved. ## License -Copyright 2018 Netflix, Inc. +Copyright 2021 Netflix, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/RELATED.md b/RELATED.md new file mode 100644 index 0000000000..19ad4ef6b3 --- /dev/null +++ b/RELATED.md @@ -0,0 +1,64 @@ +# Community projects related to Conductor + + +## Microservices operations + +* https://github.com/flaviostutz/schellar - Schellar is a scheduler tool for instantiating Conductor workflows from time to time, mostly like a cron job, but with transport of input/output variables between calls. + +* https://github.com/flaviostutz/backtor - Backtor is a backup scheduler tool that uses Conductor workers to handle backup operations and decide when to expire backups (ex.: keep backup 3 days, 2 weeks, 2 months, 1 semester) + +* https://github.com/cquon/conductor-tools - Conductor CLI for launching workflows, polling tasks, listing running tasks etc + + +## Conductor deployment + +* https://github.com/flaviostutz/conductor-server - Docker container for running Conductor with Prometheus metrics plugin installed and some tweaks to ease provisioning of workflows from json files embedded to the container + +* https://github.com/flaviostutz/conductor-ui - Docker container for running Conductor UI so that you can easily scale UI independently + +* https://github.com/flaviostutz/elasticblast - "Elasticsearch to Bleve" bridge tailored for running Conductor on top of Bleve indexer. The footprint of Elasticsearch may cost too much for small deployments on Cloud environment. 
+ +* https://github.com/mohelsaka/conductor-prometheus-metrics - Conductor plugin for exposing Prometheus metrics over the path '/metrics' + +## OAuth2.0 Security Configuration +Forked Repository - [Conductor (Secure)](https://github.com/maheshyaddanapudi/conductor/tree/oauth2) + +[OAuth2.0 Role Based Security!](https://github.com/maheshyaddanapudi/conductor/blob/oauth2/SECURITY.md) - Spring Security with easy configuration to secure the Conductor server APIs. + +Docker image published to [Docker Hub](https://hub.docker.com/repository/docker/conductorboot/server) + +## Conductor Worker utilities + +* https://github.com/ggrcha/conductor-go-client - Conductor Golang client for writing workers in Golang + +* https://github.com/courosh12/conductor-dotnet-client - Conductor .NET client for writing workers in .NET + * https://github.com/TwoUnderscorez/serilog-sinks-conductor-task-log - Serilog sink for sending worker log events to Netflix Conductor + +* https://github.com/davidwadden/conductor-workers - Various ready-made Conductor workers for common operations on some platforms (e.g. Jira, GitHub, Concourse) + +## Conductor Web UI + +* https://github.com/maheshyaddanapudi/conductor-ng-ui - Angular-based Conductor Workflow Management UI + +## Conductor Persistence + +### Mongo Persistence + +* https://github.com/maheshyaddanapudi/conductor/tree/mongo_persistence - With the option to use Mongo Database as the persistence unit. + * Mongo Persistence / Option to use Mongo Database as the persistence unit. + * Docker Compose example with a MongoDB container. + +### Oracle Persistence + +* https://github.com/maheshyaddanapudi/conductor/tree/oracle_persistence - With the option to use Oracle Database as the persistence unit. + * Oracle Persistence / Option to use Oracle Database as the persistence unit: version > 12.2 - tested well with 19c + * Docker Compose example with an Oracle container. + +## Schedule Conductor Workflow +* https://github.com/jas34/scheduledwf - It solves the following problem statements: + * At times there are use cases in which we need to run some tasks/jobs only at a scheduled time. + * In a microservice architecture, maintaining schedulers in various microservices is a pain. + * We should have a central, dedicated service that can do scheduling for us and provide a trigger to microservices at the expected time. +* It offers an additional module `io.github.jas34.scheduledwf.config.ScheduledWfServerModule` built on the existing core +of Conductor and does not require deployment of any additional service. +For more details, refer to [Schedule Conductor Workflows](https://jas34.github.io/scheduledwf) and [Capability In Conductor To Schedule Workflows](https://github.com/Netflix/conductor/discussions/2256) \ No newline at end of file diff --git a/WHOSUSING.md b/WHOSUSING.md new file mode 100644 index 0000000000..34b4fcea80 --- /dev/null +++ b/WHOSUSING.md @@ -0,0 +1,8 @@ + +## Who uses Conductor? + +We would like to keep track of who's using Conductor. Please send a pull request with your company name and GitHub handle.
+ +* [Netflix](www.netflix.com) [[@aravindanr](https://github.com/aravindanr)] +* [Florida Blue](www.bcbsfl.com) [[@rickfish](https://github.com/rickfish)] + diff --git a/annotations-processor/README.md b/annotations-processor/README.md new file mode 100644 index 0000000000..667a2f8ba4 --- /dev/null +++ b/annotations-processor/README.md @@ -0,0 +1,33 @@ +# Annotation Processor for Code Gen + +- Original Author: Vicent Martí - https://github.com/vmg +- Original Repo: https://github.com/vmg/protogen + +This module is strictly for code generation tasks during builds based on annotations. +Currently supports `protogen` + +### Usage + +See example below + +### Example + +This is an actual example of this module which is implemented in common/build.gradle + +```groovy +task protogen(dependsOn: jar, type: JavaExec) { + classpath configurations.annotationsProcessorCodegen + main = 'com.netflix.conductor.annotationsprocessor.protogen.ProtoGenTask' + args( + "conductor.proto", + "com.netflix.conductor.proto", + "github.com/netflix/conductor/client/gogrpc/conductor/model", + "${rootDir}/grpc/src/main/proto", + "${rootDir}/grpc/src/main/java/com/netflix/conductor/grpc", + "com.netflix.conductor.grpc", + jar.archivePath, + "com.netflix.conductor.common", + ) +} +``` + diff --git a/annotations-processor/build.gradle b/annotations-processor/build.gradle new file mode 100644 index 0000000000..efa2ab715d --- /dev/null +++ b/annotations-processor/build.gradle @@ -0,0 +1,24 @@ + +sourceSets { + example +} + +dependencies { + implementation project(':conductor-annotations') + compile 'com.google.guava:guava:25.1-jre' + compile 'com.squareup:javapoet:1.11.+' + compile 'com.github.jknack:handlebars:4.0.+' + compile 'com.google.protobuf:protobuf-java:3.5.1' + compile 'javax.annotation:javax.annotation-api:1.3.2' + compile gradleApi() + + exampleCompile sourceSets.main.output + exampleCompile project(':conductor-annotations') +} + +task exampleJar(type: Jar) { + archiveFileName = 'example.jar' + from sourceSets.example.output.classesDirs +} + +testClasses.finalizedBy(exampleJar) \ No newline at end of file diff --git a/annotations-processor/dependencies.lock b/annotations-processor/dependencies.lock new file mode 100644 index 0000000000..bb108d41f7 --- /dev/null +++ b/annotations-processor/dependencies.lock @@ -0,0 +1,1074 @@ +{ + "annotationProcessor": { + "org.springframework.boot:spring-boot-configuration-processor": { + "locked": "2.3.12.RELEASE" + } + }, + "compileClasspath": { + "com.github.jknack:handlebars": { + "locked": "4.0.7" + }, + "com.google.code.findbugs:jsr305": { + "locked": "3.0.2", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.errorprone:error_prone_annotations": { + "locked": "2.1.3", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.guava:guava": { + "locked": "25.1-jre" + }, + "com.google.j2objc:j2objc-annotations": { + "locked": "1.1", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.protobuf:protobuf-java": { + "locked": "3.5.1" + }, + "com.netflix.conductor:conductor-annotations": { + "project": true + }, + "com.squareup:javapoet": { + "locked": "1.11.1" + }, + "javax.annotation:javax.annotation-api": { + "locked": "1.3.2" + }, + "org.antlr:antlr4-runtime": { + "locked": "4.7.1", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.apache.commons:commons-lang3": { + "locked": "3.10", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.apache.logging.log4j:log4j-api": { + "locked": "2.17.0", + 
"transitive": [ + "org.apache.logging.log4j:log4j-core", + "org.apache.logging.log4j:log4j-jul", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-core": { + "locked": "2.17.0", + "transitive": [ + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-jul": { + "locked": "2.17.0" + }, + "org.apache.logging.log4j:log4j-slf4j-impl": { + "locked": "2.17.0" + }, + "org.apache.logging.log4j:log4j-web": { + "locked": "2.17.0" + }, + "org.checkerframework:checker-qual": { + "locked": "2.0.0", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.codehaus.mojo:animal-sniffer-annotations": { + "locked": "1.14", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.mozilla:rhino": { + "locked": "1.7.7", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.slf4j:slf4j-api": { + "locked": "1.7.30", + "transitive": [ + "com.github.jknack:handlebars", + "org.apache.logging.log4j:log4j-slf4j-impl" + ] + } + }, + "exampleCompileClasspath": { + "com.netflix.conductor:conductor-annotations": { + "project": true + } + }, + "exampleRuntimeClasspath": { + "com.netflix.conductor:conductor-annotations": { + "project": true + }, + "org.apache.logging.log4j:log4j-api": { + "locked": "2.13.3", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.apache.logging.log4j:log4j-core", + "org.apache.logging.log4j:log4j-jul", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-core": { + "locked": "2.13.3", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-jul": { + "locked": "2.13.3", + "transitive": [ + "com.netflix.conductor:conductor-annotations" + ] + }, + "org.apache.logging.log4j:log4j-slf4j-impl": { + "locked": "2.13.3", + "transitive": [ + "com.netflix.conductor:conductor-annotations" + ] + }, + "org.apache.logging.log4j:log4j-web": { + "locked": "2.13.3", + "transitive": [ + "com.netflix.conductor:conductor-annotations" + ] + }, + "org.slf4j:slf4j-api": { + "locked": "1.7.30", + "transitive": [ + "org.apache.logging.log4j:log4j-slf4j-impl" + ] + } + }, + "runtimeClasspath": { + "com.github.jknack:handlebars": { + "locked": "4.0.7" + }, + "com.google.code.findbugs:jsr305": { + "locked": "3.0.2", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.errorprone:error_prone_annotations": { + "locked": "2.1.3", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.guava:guava": { + "locked": "25.1-jre" + }, + "com.google.j2objc:j2objc-annotations": { + "locked": "1.1", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.protobuf:protobuf-java": { + "locked": "3.5.1" + }, + "com.netflix.conductor:conductor-annotations": { + "project": true + }, + "com.squareup:javapoet": { + "locked": "1.11.1" + }, + "javax.annotation:javax.annotation-api": { + "locked": "1.3.2" + }, + "org.antlr:antlr4-runtime": { + "locked": "4.7.1", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.apache.commons:commons-lang3": { + "locked": "3.10", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.apache.logging.log4j:log4j-api": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.apache.logging.log4j:log4j-core", + "org.apache.logging.log4j:log4j-jul", + 
"org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-core": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-jul": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations" + ] + }, + "org.apache.logging.log4j:log4j-slf4j-impl": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations" + ] + }, + "org.apache.logging.log4j:log4j-web": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations" + ] + }, + "org.checkerframework:checker-qual": { + "locked": "2.0.0", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.codehaus.mojo:animal-sniffer-annotations": { + "locked": "1.14", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.mozilla:rhino": { + "locked": "1.7.7", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.slf4j:slf4j-api": { + "locked": "1.7.30", + "transitive": [ + "com.github.jknack:handlebars", + "org.apache.logging.log4j:log4j-slf4j-impl" + ] + } + }, + "testCompileClasspath": { + "com.github.jknack:handlebars": { + "locked": "4.0.7" + }, + "com.google.code.findbugs:jsr305": { + "locked": "3.0.2", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.errorprone:error_prone_annotations": { + "locked": "2.1.3", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.guava:guava": { + "locked": "25.1-jre" + }, + "com.google.j2objc:j2objc-annotations": { + "locked": "1.1", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.protobuf:protobuf-java": { + "locked": "3.5.1" + }, + "com.jayway.jsonpath:json-path": { + "locked": "2.4.0", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "com.netflix.conductor:conductor-annotations": { + "project": true + }, + "com.squareup:javapoet": { + "locked": "1.11.1" + }, + "com.vaadin.external.google:android-json": { + "locked": "0.0.20131108.vaadin1", + "transitive": [ + "org.skyscreamer:jsonassert" + ] + }, + "jakarta.activation:jakarta.activation-api": { + "locked": "1.2.2", + "transitive": [ + "jakarta.xml.bind:jakarta.xml.bind-api" + ] + }, + "jakarta.annotation:jakarta.annotation-api": { + "locked": "1.3.5", + "transitive": [ + "org.springframework.boot:spring-boot-starter" + ] + }, + "jakarta.xml.bind:jakarta.xml.bind-api": { + "locked": "2.3.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "javax.annotation:javax.annotation-api": { + "locked": "1.3.2" + }, + "junit:junit": { + "locked": "4.13.2", + "transitive": [ + "org.junit.vintage:junit-vintage-engine" + ] + }, + "net.bytebuddy:byte-buddy": { + "locked": "1.10.22", + "transitive": [ + "org.mockito:mockito-core" + ] + }, + "net.bytebuddy:byte-buddy-agent": { + "locked": "1.10.22", + "transitive": [ + "org.mockito:mockito-core" + ] + }, + "net.minidev:accessors-smart": { + "locked": "2.3.1", + "transitive": [ + "net.minidev:json-smart" + ] + }, + "net.minidev:json-smart": { + "locked": "2.3.1", + "transitive": [ + "com.jayway.jsonpath:json-path" + ] + }, + "org.antlr:antlr4-runtime": { + "locked": "4.7.1", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.apache.commons:commons-lang3": { + "locked": "3.10", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + 
"org.apache.logging.log4j:log4j-api": { + "locked": "2.17.0", + "transitive": [ + "org.apache.logging.log4j:log4j-core", + "org.apache.logging.log4j:log4j-jul", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-core": { + "locked": "2.17.0", + "transitive": [ + "org.apache.logging.log4j:log4j-web", + "org.springframework.boot:spring-boot-starter-log4j2" + ] + }, + "org.apache.logging.log4j:log4j-jul": { + "locked": "2.17.0", + "transitive": [ + "org.springframework.boot:spring-boot-starter-log4j2" + ] + }, + "org.apache.logging.log4j:log4j-slf4j-impl": { + "locked": "2.17.0", + "transitive": [ + "org.springframework.boot:spring-boot-starter-log4j2" + ] + }, + "org.apache.logging.log4j:log4j-web": { + "locked": "2.17.0" + }, + "org.apiguardian:apiguardian-api": { + "locked": "1.1.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons", + "org.junit.platform:junit-platform-engine", + "org.junit.vintage:junit-vintage-engine" + ] + }, + "org.assertj:assertj-core": { + "locked": "3.16.1", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.checkerframework:checker-qual": { + "locked": "2.0.0", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.codehaus.mojo:animal-sniffer-annotations": { + "locked": "1.14", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.hamcrest:hamcrest": { + "locked": "2.2", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.junit.jupiter:junit-jupiter": { + "locked": "5.6.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.junit.jupiter:junit-jupiter-api": { + "locked": "5.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-params" + ] + }, + "org.junit.jupiter:junit-jupiter-params": { + "locked": "5.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.platform:junit-platform-commons": { + "locked": "1.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.junit.platform:junit-platform-engine": { + "locked": "1.6.3", + "transitive": [ + "org.junit.vintage:junit-vintage-engine" + ] + }, + "org.junit.vintage:junit-vintage-engine": { + "locked": "5.6.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.junit:junit-bom": { + "locked": "5.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons", + "org.junit.platform:junit-platform-engine", + "org.junit.vintage:junit-vintage-engine" + ] + }, + "org.mockito:mockito-core": { + "locked": "3.3.3", + "transitive": [ + "org.mockito:mockito-junit-jupiter", + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.mockito:mockito-junit-jupiter": { + "locked": "3.3.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.mozilla:rhino": { + "locked": "1.7.7", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.objenesis:objenesis": { + "locked": "2.6", + "transitive": [ + "org.mockito:mockito-core" + ] + }, + "org.opentest4j:opentest4j": { + "locked": "1.2.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + 
"org.junit.platform:junit-platform-engine" + ] + }, + "org.ow2.asm:asm": { + "locked": "5.0.4", + "transitive": [ + "net.minidev:accessors-smart" + ] + }, + "org.skyscreamer:jsonassert": { + "locked": "1.5.0", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.slf4j:jul-to-slf4j": { + "locked": "1.7.30", + "transitive": [ + "org.springframework.boot:spring-boot-starter-log4j2", + "org.springframework.boot:spring-boot-starter-logging" + ] + }, + "org.slf4j:slf4j-api": { + "locked": "1.7.30", + "transitive": [ + "com.github.jknack:handlebars", + "com.jayway.jsonpath:json-path", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.slf4j:jul-to-slf4j" + ] + }, + "org.springframework.boot:spring-boot": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-autoconfigure", + "org.springframework.boot:spring-boot-starter", + "org.springframework.boot:spring-boot-test", + "org.springframework.boot:spring-boot-test-autoconfigure" + ] + }, + "org.springframework.boot:spring-boot-autoconfigure": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter", + "org.springframework.boot:spring-boot-test-autoconfigure" + ] + }, + "org.springframework.boot:spring-boot-starter": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.springframework.boot:spring-boot-starter-log4j2": { + "locked": "2.3.12.RELEASE" + }, + "org.springframework.boot:spring-boot-starter-logging": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter" + ] + }, + "org.springframework.boot:spring-boot-starter-test": { + "locked": "2.3.12.RELEASE" + }, + "org.springframework.boot:spring-boot-test": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test", + "org.springframework.boot:spring-boot-test-autoconfigure" + ] + }, + "org.springframework.boot:spring-boot-test-autoconfigure": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.springframework:spring-aop": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-context" + ] + }, + "org.springframework:spring-beans": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-aop", + "org.springframework:spring-context" + ] + }, + "org.springframework:spring-context": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot" + ] + }, + "org.springframework:spring-core": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot", + "org.springframework.boot:spring-boot-starter", + "org.springframework.boot:spring-boot-starter-test", + "org.springframework:spring-aop", + "org.springframework:spring-beans", + "org.springframework:spring-context", + "org.springframework:spring-expression", + "org.springframework:spring-test" + ] + }, + "org.springframework:spring-expression": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-context" + ] + }, + "org.springframework:spring-jcl": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-core" + ] + }, + "org.springframework:spring-test": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.xmlunit:xmlunit-core": { + "locked": "2.7.0", + "transitive": [ + 
"org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.yaml:snakeyaml": { + "locked": "1.26", + "transitive": [ + "org.springframework.boot:spring-boot-starter" + ] + } + }, + "testRuntimeClasspath": { + "com.github.jknack:handlebars": { + "locked": "4.0.7" + }, + "com.google.code.findbugs:jsr305": { + "locked": "3.0.2", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.errorprone:error_prone_annotations": { + "locked": "2.1.3", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.guava:guava": { + "locked": "25.1-jre" + }, + "com.google.j2objc:j2objc-annotations": { + "locked": "1.1", + "transitive": [ + "com.google.guava:guava" + ] + }, + "com.google.protobuf:protobuf-java": { + "locked": "3.5.1" + }, + "com.jayway.jsonpath:json-path": { + "locked": "2.4.0", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "com.netflix.conductor:conductor-annotations": { + "project": true + }, + "com.squareup:javapoet": { + "locked": "1.11.1" + }, + "com.vaadin.external.google:android-json": { + "locked": "0.0.20131108.vaadin1", + "transitive": [ + "org.skyscreamer:jsonassert" + ] + }, + "jakarta.activation:jakarta.activation-api": { + "locked": "1.2.2", + "transitive": [ + "jakarta.xml.bind:jakarta.xml.bind-api" + ] + }, + "jakarta.annotation:jakarta.annotation-api": { + "locked": "1.3.5", + "transitive": [ + "org.springframework.boot:spring-boot-starter" + ] + }, + "jakarta.xml.bind:jakarta.xml.bind-api": { + "locked": "2.3.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "javax.annotation:javax.annotation-api": { + "locked": "1.3.2" + }, + "junit:junit": { + "locked": "4.13.2", + "transitive": [ + "org.junit.vintage:junit-vintage-engine" + ] + }, + "net.bytebuddy:byte-buddy": { + "locked": "1.10.22", + "transitive": [ + "org.mockito:mockito-core" + ] + }, + "net.bytebuddy:byte-buddy-agent": { + "locked": "1.10.22", + "transitive": [ + "org.mockito:mockito-core" + ] + }, + "net.minidev:accessors-smart": { + "locked": "2.3.1", + "transitive": [ + "net.minidev:json-smart" + ] + }, + "net.minidev:json-smart": { + "locked": "2.3.1", + "transitive": [ + "com.jayway.jsonpath:json-path" + ] + }, + "org.antlr:antlr4-runtime": { + "locked": "4.7.1", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.apache.commons:commons-lang3": { + "locked": "3.10", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.apache.logging.log4j:log4j-api": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.apache.logging.log4j:log4j-core", + "org.apache.logging.log4j:log4j-jul", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web" + ] + }, + "org.apache.logging.log4j:log4j-core": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.apache.logging.log4j:log4j-web", + "org.springframework.boot:spring-boot-starter-log4j2" + ] + }, + "org.apache.logging.log4j:log4j-jul": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.springframework.boot:spring-boot-starter-log4j2" + ] + }, + "org.apache.logging.log4j:log4j-slf4j-impl": { + "locked": "2.17.0", + "transitive": [ + "com.netflix.conductor:conductor-annotations", + "org.springframework.boot:spring-boot-starter-log4j2" + ] + }, + "org.apache.logging.log4j:log4j-web": { + "locked": "2.17.0", + "transitive": [ + 
"com.netflix.conductor:conductor-annotations" + ] + }, + "org.apiguardian:apiguardian-api": { + "locked": "1.1.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons", + "org.junit.platform:junit-platform-engine", + "org.junit.vintage:junit-vintage-engine" + ] + }, + "org.assertj:assertj-core": { + "locked": "3.16.1", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.checkerframework:checker-qual": { + "locked": "2.0.0", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.codehaus.mojo:animal-sniffer-annotations": { + "locked": "1.14", + "transitive": [ + "com.google.guava:guava" + ] + }, + "org.hamcrest:hamcrest": { + "locked": "2.2", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.junit.jupiter:junit-jupiter": { + "locked": "5.6.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.junit.jupiter:junit-jupiter-api": { + "locked": "5.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params", + "org.mockito:mockito-junit-jupiter" + ] + }, + "org.junit.jupiter:junit-jupiter-engine": { + "locked": "5.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.jupiter:junit-jupiter-params": { + "locked": "5.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter" + ] + }, + "org.junit.platform:junit-platform-commons": { + "locked": "1.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.junit.platform:junit-platform-engine": { + "locked": "1.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.vintage:junit-vintage-engine" + ] + }, + "org.junit.vintage:junit-vintage-engine": { + "locked": "5.6.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.junit:junit-bom": { + "locked": "5.6.3", + "transitive": [ + "org.junit.jupiter:junit-jupiter", + "org.junit.jupiter:junit-jupiter-api", + "org.junit.jupiter:junit-jupiter-engine", + "org.junit.jupiter:junit-jupiter-params", + "org.junit.platform:junit-platform-commons", + "org.junit.platform:junit-platform-engine", + "org.junit.vintage:junit-vintage-engine" + ] + }, + "org.mockito:mockito-core": { + "locked": "3.3.3", + "transitive": [ + "org.mockito:mockito-junit-jupiter", + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.mockito:mockito-junit-jupiter": { + "locked": "3.3.3", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.mozilla:rhino": { + "locked": "1.7.7", + "transitive": [ + "com.github.jknack:handlebars" + ] + }, + "org.objenesis:objenesis": { + "locked": "2.6", + "transitive": [ + "org.mockito:mockito-core" + ] + }, + "org.opentest4j:opentest4j": { + "locked": "1.2.0", + "transitive": [ + "org.junit.jupiter:junit-jupiter-api", + "org.junit.platform:junit-platform-engine" + ] + }, + "org.ow2.asm:asm": { + "locked": "5.0.4", + "transitive": [ + "net.minidev:accessors-smart" + ] + }, + "org.skyscreamer:jsonassert": { + "locked": "1.5.0", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.slf4j:jul-to-slf4j": { + "locked": "1.7.30", + "transitive": [ + "org.springframework.boot:spring-boot-starter-log4j2", + 
"org.springframework.boot:spring-boot-starter-logging" + ] + }, + "org.slf4j:slf4j-api": { + "locked": "1.7.30", + "transitive": [ + "com.github.jknack:handlebars", + "com.jayway.jsonpath:json-path", + "org.apache.logging.log4j:log4j-slf4j-impl", + "org.slf4j:jul-to-slf4j" + ] + }, + "org.springframework.boot:spring-boot": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-autoconfigure", + "org.springframework.boot:spring-boot-starter", + "org.springframework.boot:spring-boot-test", + "org.springframework.boot:spring-boot-test-autoconfigure" + ] + }, + "org.springframework.boot:spring-boot-autoconfigure": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter", + "org.springframework.boot:spring-boot-test-autoconfigure" + ] + }, + "org.springframework.boot:spring-boot-starter": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.springframework.boot:spring-boot-starter-log4j2": { + "locked": "2.3.12.RELEASE" + }, + "org.springframework.boot:spring-boot-starter-logging": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter" + ] + }, + "org.springframework.boot:spring-boot-starter-test": { + "locked": "2.3.12.RELEASE" + }, + "org.springframework.boot:spring-boot-test": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test", + "org.springframework.boot:spring-boot-test-autoconfigure" + ] + }, + "org.springframework.boot:spring-boot-test-autoconfigure": { + "locked": "2.3.12.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.springframework:spring-aop": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-context" + ] + }, + "org.springframework:spring-beans": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-aop", + "org.springframework:spring-context" + ] + }, + "org.springframework:spring-context": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot" + ] + }, + "org.springframework:spring-core": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot", + "org.springframework.boot:spring-boot-starter", + "org.springframework.boot:spring-boot-starter-test", + "org.springframework:spring-aop", + "org.springframework:spring-beans", + "org.springframework:spring-context", + "org.springframework:spring-expression", + "org.springframework:spring-test" + ] + }, + "org.springframework:spring-expression": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-context" + ] + }, + "org.springframework:spring-jcl": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework:spring-core" + ] + }, + "org.springframework:spring-test": { + "locked": "5.2.15.RELEASE", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.xmlunit:xmlunit-core": { + "locked": "2.7.0", + "transitive": [ + "org.springframework.boot:spring-boot-starter-test" + ] + }, + "org.yaml:snakeyaml": { + "locked": "1.26", + "transitive": [ + "org.springframework.boot:spring-boot-starter" + ] + } + } +} \ No newline at end of file diff --git a/annotations-processor/src/example/java/com/example/Example.java b/annotations-processor/src/example/java/com/example/Example.java new file mode 100644 index 0000000000..b3c7befe83 --- /dev/null +++ 
b/annotations-processor/src/example/java/com/example/Example.java @@ -0,0 +1,25 @@ +/* + * Copyright 2022 Netflix, Inc. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.example; + +import com.netflix.conductor.annotations.protogen.ProtoField; +import com.netflix.conductor.annotations.protogen.ProtoMessage; + +@ProtoMessage +public class Example { + @ProtoField(id = 1) + public String name; + + @ProtoField(id = 2) + public Long count; +} diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/AbstractMessage.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/AbstractMessage.java new file mode 100644 index 0000000000..bc92d901f7 --- /dev/null +++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/AbstractMessage.java @@ -0,0 +1,134 @@ +/* + * Copyright 2022 Netflix, Inc. + *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import com.netflix.conductor.annotations.protogen.ProtoEnum;
+import com.netflix.conductor.annotations.protogen.ProtoMessage;
+import com.netflix.conductor.annotationsprocessor.protogen.types.MessageType;
+import com.netflix.conductor.annotationsprocessor.protogen.types.TypeMapper;
+
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeSpec;
+
+public abstract class AbstractMessage {
+ protected Class<?> clazz;
+ protected MessageType type;
+ protected List<Field> fields = new ArrayList<>();
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen;
+
+import javax.lang.model.element.Modifier;
+
+import com.netflix.conductor.annotationsprocessor.protogen.types.MessageType;
+
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeName;
+import com.squareup.javapoet.TypeSpec;
+
+public class Enum extends AbstractMessage {
+ public enum MapType {
+ FROM_PROTO("fromProto"),
+ TO_PROTO("toProto");
+
+ private final String methodName;
+
+ MapType(String m) {
+ methodName = m;
+ }
+
+ public String getMethodName() {
+ return methodName;
+ }
+ }
+
+ public Enum(Class cls, MessageType parent) {
+ super(cls, parent);
+
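+ // Each enum constant becomes a proto enum value; proto indices are assigned positionally, starting at 0.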
+ int protoIndex = 0;
+ for (java.lang.reflect.Field field : cls.getDeclaredFields()) {
+ if (field.isEnumConstant()) fields.add(new EnumField(protoIndex++, field));
+ }
+ }
+
+ @Override
+ public String getProtoClass() {
+ return "enum";
+ }
+
+ private MethodSpec javaMap(MapType mt, TypeName from, TypeName to) {
+ MethodSpec.Builder method = MethodSpec.methodBuilder(mt.getMethodName());
+ method.addModifiers(Modifier.PUBLIC);
+ method.returns(to);
+ method.addParameter(from, "from");
+
+ method.addStatement("$T to", to);
+ method.beginControlFlow("switch (from)");
+
+ for (Field field : fields) {
+ String fromName = (mt == MapType.TO_PROTO) ? field.getName() : field.getProtoName();
+ String toName = (mt == MapType.TO_PROTO) ? field.getProtoName() : field.getName();
+ method.addStatement("case $L: to = $T.$L; break", fromName, to, toName);
+ }
+
+ method.addStatement(
+ "default: throw new $T(\"Unexpected enum constant: \" + from)",
+ IllegalArgumentException.class);
+ method.endControlFlow();
+ method.addStatement("return to");
+ return method.build();
+ }
+
+ @Override
+ protected void javaMapFromProto(TypeSpec.Builder type) {
+ type.addMethod(
+ javaMap(
+ MapType.FROM_PROTO,
+ this.type.getJavaProtoType(),
+ TypeName.get(this.clazz)));
+ }
+
+ @Override
+ protected void javaMapToProto(TypeSpec.Builder type) {
+ type.addMethod(
+ javaMap(MapType.TO_PROTO, TypeName.get(this.clazz), this.type.getJavaProtoType()));
+ }
+
+ public class EnumField extends Field {
+ protected EnumField(int index, java.lang.reflect.Field field) {
+ super(index, field);
+ }
+
+ @Override
+ public String getProtoTypeDeclaration() {
+ return String.format("%s = %d", getProtoName(), getProtoIndex());
+ }
+ }
+}
diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Message.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Message.java
new file mode 100644
index 0000000000..9dfaf28832
--- /dev/null
+++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/Message.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen;
+
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.lang.model.element.Modifier;
+
+import com.netflix.conductor.annotations.protogen.ProtoField;
+import com.netflix.conductor.annotations.protogen.ProtoMessage;
+import com.netflix.conductor.annotationsprocessor.protogen.types.AbstractType;
+import com.netflix.conductor.annotationsprocessor.protogen.types.MessageType;
+import com.netflix.conductor.annotationsprocessor.protogen.types.TypeMapper;
+
+import com.squareup.javapoet.ClassName;
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeSpec;
+
+public class Message extends AbstractMessage {
+ public Message(Class<?> cls, MessageType parent) {
+ super(cls, parent);
+
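+ // Only fields annotated with @ProtoField take part in the mapping; the annotation's id() becomes the proto field number.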
+ for (java.lang.reflect.Field field : clazz.getDeclaredFields()) {
+ ProtoField ann = field.getAnnotation(ProtoField.class);
+ if (ann == null) continue;
+
+ fields.add(new MessageField(ann.id(), field));
+ }
+ }
+
+ protected ProtoMessage getAnnotation() {
+ return (ProtoMessage) this.clazz.getAnnotation(ProtoMessage.class);
+ }
+
+ @Override
+ public String getProtoClass() {
+ return "message";
+ }
+
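+ // Generates the toProto() mapper: creates a protobuf Builder and lets each annotated field's AbstractType emit its own copy statement; skipped when the @ProtoMessage annotation disables toProto() or marks a wrapper.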
+ @Override
+ protected void javaMapToProto(TypeSpec.Builder type) {
+ if (!getAnnotation().toProto() || getAnnotation().wrapper()) return;
+
+ ClassName javaProtoType = (ClassName) this.type.getJavaProtoType();
+ MethodSpec.Builder method = MethodSpec.methodBuilder("toProto");
+ method.addModifiers(Modifier.PUBLIC);
+ method.returns(javaProtoType);
+ method.addParameter(this.clazz, "from");
+
+ method.addStatement(
+ "$T to = $T.newBuilder()", javaProtoType.nestedClass("Builder"), javaProtoType);
+
+ for (Field field : this.fields) {
+ if (field instanceof MessageField) {
+ AbstractType fieldType = ((MessageField) field).getAbstractType();
+ fieldType.mapToProto(field.getName(), method);
+ }
+ }
+
+ method.addStatement("return to.build()");
+ type.addMethod(method.build());
+ }
+
+ @Override
+ protected void javaMapFromProto(TypeSpec.Builder type) {
+ if (!getAnnotation().fromProto() || getAnnotation().wrapper()) return;
+
+ MethodSpec.Builder method = MethodSpec.methodBuilder("fromProto");
+ method.addModifiers(Modifier.PUBLIC);
+ method.returns(this.clazz);
+ method.addParameter(this.type.getJavaProtoType(), "from");
+
+ method.addStatement("$T to = new $T()", this.clazz, this.clazz);
+
+ for (Field field : this.fields) {
+ if (field instanceof MessageField) {
+ AbstractType fieldType = ((MessageField) field).getAbstractType();
+ fieldType.mapFromProto(field.getName(), method);
+ }
+ }
+
+ method.addStatement("return to");
+ type.addMethod(method.build());
+ }
+
+ public static class MessageField extends Field {
+ protected AbstractType type;
+
+ protected MessageField(int index, java.lang.reflect.Field field) {
+ super(index, field);
+ }
+
+ public AbstractType getAbstractType() {
+ if (type == null) {
+ type = TypeMapper.INSTANCE.get(field.getGenericType());
+ }
+ return type;
+ }
+
+ private static Pattern CAMEL_CASE_RE = Pattern.compile("(?<=[a-z])[A-Z]");
+
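+ // Converts a camelCase Java field name into the snake_case name used in the generated .proto declaration.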
+ private static String toUnderscoreCase(String input) {
+ Matcher m = CAMEL_CASE_RE.matcher(input);
+ StringBuilder sb = new StringBuilder();
+ while (m.find()) {
+ m.appendReplacement(sb, "_" + m.group());
+ }
+ m.appendTail(sb);
+ return sb.toString().toLowerCase();
+ }
+
+ @Override
+ public String getProtoTypeDeclaration() {
+ return String.format(
+ "%s %s = %d",
+ getAbstractType().getProtoType(), toUnderscoreCase(getName()), getProtoIndex());
+ }
+
+ @Override
+ public void getDependencies(Set
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import com.netflix.conductor.annotationsprocessor.protogen.types.TypeMapper;
+
+import com.squareup.javapoet.ClassName;
+
+public class ProtoFile {
+ public static String PROTO_SUFFIX = "Pb";
+
+ private ClassName baseClass;
+ private AbstractMessage message;
+ private String filePath;
+
+ private String protoPackageName;
+ private String javaPackageName;
+ private String goPackageName;
+
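+ // Each annotated class maps to one .proto file under model/ and a generated "<ClassName>Pb" mapper class in the target Java package.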
+ public ProtoFile(
+ Class<?> object,
+ String protoPackageName,
+ String javaPackageName,
+ String goPackageName) {
+ this.protoPackageName = protoPackageName;
+ this.javaPackageName = javaPackageName;
+ this.goPackageName = goPackageName;
+
+ String className = object.getSimpleName() + PROTO_SUFFIX;
+ this.filePath = "model/" + object.getSimpleName().toLowerCase() + ".proto";
+ this.baseClass = ClassName.get(this.javaPackageName, className);
+ this.message = new Message(object, TypeMapper.INSTANCE.baseClass(baseClass, filePath));
+ }
+
+ public String getJavaClassName() {
+ return baseClass.simpleName();
+ }
+
+ public String getFilePath() {
+ return filePath;
+ }
+
+ public String getProtoPackageName() {
+ return protoPackageName;
+ }
+
+ public String getJavaPackageName() {
+ return javaPackageName;
+ }
+
+ public String getGoPackageName() {
+ return goPackageName;
+ }
+
+ public AbstractMessage getMessage() {
+ return message;
+ }
+
+ public Set
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.*;
+
+import javax.annotation.Generated;
+import javax.lang.model.element.Modifier;
+
+import com.netflix.conductor.annotations.protogen.ProtoMessage;
+
+import com.github.jknack.handlebars.EscapingStrategy;
+import com.github.jknack.handlebars.Handlebars;
+import com.github.jknack.handlebars.Template;
+import com.github.jknack.handlebars.io.ClassPathTemplateLoader;
+import com.github.jknack.handlebars.io.TemplateLoader;
+import com.google.common.reflect.ClassPath;
+import com.squareup.javapoet.AnnotationSpec;
+import com.squareup.javapoet.JavaFile;
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeSpec;
+
+public class ProtoGen {
+ private static final String GENERATOR_NAME =
+ "com.netflix.conductor.annotationsprocessor.protogen";
+
+ private String protoPackageName;
+ private String javaPackageName;
+ private String goPackageName;
+ private List
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen;
+
+import java.io.File;
+import java.io.IOException;
+
+public class ProtoGenTask {
+ private String protoPackage;
+ private String javaPackage;
+ private String goPackage;
+
+ private File protosDir;
+ private File mapperDir;
+ private String mapperPackage;
+
+ private File sourceJar;
+ private String sourcePackage;
+
+ public String getProtoPackage() {
+ return protoPackage;
+ }
+
+ public void setProtoPackage(String protoPackage) {
+ this.protoPackage = protoPackage;
+ }
+
+ public String getJavaPackage() {
+ return javaPackage;
+ }
+
+ public void setJavaPackage(String javaPackage) {
+ this.javaPackage = javaPackage;
+ }
+
+ public String getGoPackage() {
+ return goPackage;
+ }
+
+ public void setGoPackage(String goPackage) {
+ this.goPackage = goPackage;
+ }
+
+ public File getProtosDir() {
+ return protosDir;
+ }
+
+ public void setProtosDir(File protosDir) {
+ this.protosDir = protosDir;
+ }
+
+ public File getMapperDir() {
+ return mapperDir;
+ }
+
+ public void setMapperDir(File mapperDir) {
+ this.mapperDir = mapperDir;
+ }
+
+ public String getMapperPackage() {
+ return mapperPackage;
+ }
+
+ public void setMapperPackage(String mapperPackage) {
+ this.mapperPackage = mapperPackage;
+ }
+
+ public File getSourceJar() {
+ return sourceJar;
+ }
+
+ public void setSourceJar(File sourceJar) {
+ this.sourceJar = sourceJar;
+ }
+
+ public String getSourcePackage() {
+ return sourcePackage;
+ }
+
+ public void setSourcePackage(String sourcePackage) {
+ this.sourcePackage = sourcePackage;
+ }
+
+ public void generate() {
+ ProtoGen generator = new ProtoGen(protoPackage, javaPackage, goPackage);
+ try {
+ generator.processPackage(sourceJar, sourcePackage);
+ generator.writeMapper(mapperDir, mapperPackage);
+ generator.writeProtos(protosDir);
+ } catch (IOException e) {
+ System.err.printf("protogen: failed with %s\n", e);
+ }
+ }
+
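+ // Arguments are positional: proto package, java package, go package, protos dir, mapper dir, mapper package, source jar, source package.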
+ public static void main(String[] args) {
+ if (args == null || args.length < 8) {
+ throw new RuntimeException(
+ "protogen configuration incomplete, please provide all required (8) inputs");
+ }
+ ProtoGenTask task = new ProtoGenTask();
+ int argsId = 0;
+ task.setProtoPackage(args[argsId++]);
+ task.setJavaPackage(args[argsId++]);
+ task.setGoPackage(args[argsId++]);
+ task.setProtosDir(new File(args[argsId++]));
+ task.setMapperDir(new File(args[argsId++]));
+ task.setMapperPackage(args[argsId++]);
+ task.setSourceJar(new File(args[argsId++]));
+ task.setSourcePackage(args[argsId]);
+ System.out.println("Running protogen with arguments: " + task);
+ task.generate();
+ System.out.println("protogen completed.");
+ }
+
+ @Override
+ public String toString() {
+ return "ProtoGenTask{"
+ + "protoPackage='"
+ + protoPackage
+ + '\''
+ + ", javaPackage='"
+ + javaPackage
+ + '\''
+ + ", goPackage='"
+ + goPackage
+ + '\''
+ + ", protosDir="
+ + protosDir
+ + ", mapperDir="
+ + mapperDir
+ + ", mapperPackage='"
+ + mapperPackage
+ + '\''
+ + ", sourceJar="
+ + sourceJar
+ + ", sourcePackage='"
+ + sourcePackage
+ + '\''
+ + '}';
+ }
+}
diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/AbstractType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/AbstractType.java
new file mode 100644
index 0000000000..fbfa8e72c7
--- /dev/null
+++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/AbstractType.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.Type;
+import java.util.Set;
+
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeName;
+
+public abstract class AbstractType {
+ Type javaType;
+ TypeName javaProtoType;
+
+ AbstractType(Type javaType, TypeName javaProtoType) {
+ this.javaType = javaType;
+ this.javaProtoType = javaProtoType;
+ }
+
+ public Type getJavaType() {
+ return javaType;
+ }
+
+ public TypeName getJavaProtoType() {
+ return javaProtoType;
+ }
+
+ public abstract String getProtoType();
+
+ public abstract TypeName getRawJavaType();
+
+ public abstract void mapToProto(String field, MethodSpec.Builder method);
+
+ public abstract void mapFromProto(String field, MethodSpec.Builder method);
+
+ public abstract void getDependencies(Set
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.Type;
+import java.util.Set;
+
+import javax.lang.model.element.Modifier;
+
+import com.squareup.javapoet.ClassName;
+import com.squareup.javapoet.MethodSpec;
+
+public class ExternMessageType extends MessageType {
+ private String externProtoType;
+
+ public ExternMessageType(
+ Type javaType, ClassName javaProtoType, String externProtoType, String protoFilePath) {
+ super(javaType, javaProtoType, protoFilePath);
+ this.externProtoType = externProtoType;
+ }
+
+ @Override
+ public String getProtoType() {
+ return externProtoType;
+ }
+
+ @Override
+ public void generateAbstractMethods(Set
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.Set;
+
+import com.squareup.javapoet.ClassName;
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeName;
+
+abstract class GenericType extends AbstractType {
+ public GenericType(Type type) {
+ super(type, null);
+ }
+
+ protected Class getRawType() {
+ ParameterizedType tt = (ParameterizedType) this.getJavaType();
+ return (Class) tt.getRawType();
+ }
+
+ protected AbstractType resolveGenericParam(int idx) {
+ ParameterizedType tt = (ParameterizedType) this.getJavaType();
+ Type[] types = tt.getActualTypeArguments();
+
+ AbstractType abstractType = TypeMapper.INSTANCE.get(types[idx]);
+ if (abstractType instanceof GenericType) {
+ return WrappedType.wrap((GenericType) abstractType);
+ }
+ return abstractType;
+ }
+
+ public abstract String getWrapperSuffix();
+
+ public abstract AbstractType getValueType();
+
+ public abstract TypeName resolveJavaProtoType();
+
+ @Override
+ public TypeName getRawJavaType() {
+ return ClassName.get(getRawType());
+ }
+
+ @Override
+ public void getDependencies(Set
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.Type;
+import java.util.stream.Collectors;
+
+import com.squareup.javapoet.ClassName;
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.ParameterizedTypeName;
+import com.squareup.javapoet.TypeName;
+
+public class ListType extends GenericType {
+ private AbstractType valueType;
+
+ public ListType(Type type) {
+ super(type);
+ }
+
+ @Override
+ public String getWrapperSuffix() {
+ return "List";
+ }
+
+ @Override
+ public AbstractType getValueType() {
+ if (valueType == null) {
+ valueType = resolveGenericParam(0);
+ }
+ return valueType;
+ }
+
+ @Override
+ public void mapToProto(String field, MethodSpec.Builder method) {
+ AbstractType subtype = getValueType();
+ if (subtype instanceof ScalarType) {
+ method.addStatement(
+ "to.$L( from.$L() )",
+ protoMethodName("addAll", field),
+ javaMethodName("get", field));
+ } else {
+ method.beginControlFlow(
+ "for ($T elem : from.$L())",
+ subtype.getJavaType(),
+ javaMethodName("get", field));
+ method.addStatement("to.$L( toProto(elem) )", protoMethodName("add", field));
+ method.endControlFlow();
+ }
+ }
+
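+ // Copies a repeated proto field back onto the POJO: string lists are collected into the configured collection type, other scalar lists are assigned directly, and message lists are converted element-wise via fromProto().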
+ @Override
+ public void mapFromProto(String field, MethodSpec.Builder method) {
+ AbstractType subtype = getValueType();
+ Type entryType = subtype.getJavaType();
+ Class collector = TypeMapper.PROTO_LIST_TYPES.get(getRawType());
+
+ if (subtype instanceof ScalarType) {
+ if (entryType.equals(String.class)) {
+ method.addStatement(
+ "to.$L( from.$L().stream().collect($T.toCollection($T::new)) )",
+ javaMethodName("set", field),
+ protoMethodName("get", field) + "List",
+ Collectors.class,
+ collector);
+ } else {
+ method.addStatement(
+ "to.$L( from.$L() )",
+ javaMethodName("set", field),
+ protoMethodName("get", field) + "List");
+ }
+ } else {
+ method.addStatement(
+ "to.$L( from.$L().stream().map(this::fromProto).collect($T.toCollection($T::new)) )",
+ javaMethodName("set", field),
+ protoMethodName("get", field) + "List",
+ Collectors.class,
+ collector);
+ }
+ }
+
+ @Override
+ public TypeName resolveJavaProtoType() {
+ return ParameterizedTypeName.get(
+ (ClassName) getRawJavaType(), getValueType().getJavaProtoType());
+ }
+
+ @Override
+ public String getProtoType() {
+ return "repeated " + getValueType().getProtoType();
+ }
+}
diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MapType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MapType.java
new file mode 100644
index 0000000000..fe642fdecc
--- /dev/null
+++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MapType.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.Type;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.squareup.javapoet.ClassName;
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.ParameterizedTypeName;
+import com.squareup.javapoet.TypeName;
+
+public class MapType extends GenericType {
+ private AbstractType keyType;
+ private AbstractType valueType;
+
+ public MapType(Type type) {
+ super(type);
+ }
+
+ @Override
+ public String getWrapperSuffix() {
+ return "Map";
+ }
+
+ @Override
+ public AbstractType getValueType() {
+ if (valueType == null) {
+ valueType = resolveGenericParam(1);
+ }
+ return valueType;
+ }
+
+ public AbstractType getKeyType() {
+ if (keyType == null) {
+ keyType = resolveGenericParam(0);
+ }
+ return keyType;
+ }
+
+ @Override
+ public void mapToProto(String field, MethodSpec.Builder method) {
+ AbstractType valueType = getValueType();
+ if (valueType instanceof ScalarType) {
+ method.addStatement(
+ "to.$L( from.$L() )",
+ protoMethodName("putAll", field),
+ javaMethodName("get", field));
+ } else {
+ TypeName typeName =
+ ParameterizedTypeName.get(
+ Map.Entry.class,
+ getKeyType().getJavaType(),
+ getValueType().getJavaType());
+ method.beginControlFlow(
+ "for ($T pair : from.$L().entrySet())", typeName, javaMethodName("get", field));
+ method.addStatement(
+ "to.$L( pair.getKey(), toProto( pair.getValue() ) )",
+ protoMethodName("put", field));
+ method.endControlFlow();
+ }
+ }
+
+ @Override
+ public void mapFromProto(String field, MethodSpec.Builder method) {
+ AbstractType valueType = getValueType();
+ if (valueType instanceof ScalarType) {
+ method.addStatement(
+ "to.$L( from.$L() )",
+ javaMethodName("set", field),
+ protoMethodName("get", field) + "Map");
+ } else {
+ Type keyType = getKeyType().getJavaType();
+ Type valueTypeJava = getValueType().getJavaType();
+ TypeName valueTypePb = getValueType().getJavaProtoType();
+
+ ParameterizedTypeName entryType =
+ ParameterizedTypeName.get(
+ ClassName.get(Map.Entry.class), TypeName.get(keyType), valueTypePb);
+ ParameterizedTypeName mapType =
+ ParameterizedTypeName.get(Map.class, keyType, valueTypeJava);
+ ParameterizedTypeName hashMapType =
+ ParameterizedTypeName.get(HashMap.class, keyType, valueTypeJava);
+ String mapName = field + "Map";
+
+ method.addStatement("$T $L = new $T()", mapType, mapName, hashMapType);
+ method.beginControlFlow(
+ "for ($T pair : from.$L().entrySet())",
+ entryType,
+ protoMethodName("get", field) + "Map");
+ method.addStatement("$L.put( pair.getKey(), fromProto( pair.getValue() ) )", mapName);
+ method.endControlFlow();
+ method.addStatement("to.$L($L)", javaMethodName("set", field), mapName);
+ }
+ }
+
+ @Override
+ public TypeName resolveJavaProtoType() {
+ return ParameterizedTypeName.get(
+ (ClassName) getRawJavaType(),
+ getKeyType().getJavaProtoType(),
+ getValueType().getJavaProtoType());
+ }
+
+ @Override
+ public String getProtoType() {
+ AbstractType keyType = getKeyType();
+ AbstractType valueType = getValueType();
+ if (!(keyType instanceof ScalarType)) {
+ throw new IllegalArgumentException(
+ "cannot map non-scalar map key: " + this.getJavaType());
+ }
+ return String.format("map<%s, %s>", keyType.getProtoType(), valueType.getProtoType());
+ }
+}
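
The non-scalar branch of `MapType#mapFromProto` above builds an intermediate map before calling the POJO setter. Below is a minimal, runnable sketch of that shape under hypothetical key/value types; `fromProto` is a stand-in for the generated converter.

```java
import java.util.HashMap;
import java.util.Map;

public class MapMappingSketch {
    // Stand-in for the generated fromProto(...) overload for the map's value type.
    static String fromProto(int protoValue) {
        return "value-" + protoValue;
    }

    public static void main(String[] args) {
        // Stands in for from.getXMap() on the generated proto message.
        Map<String, Integer> protoMap = Map.of("a", 1, "b", 2);

        // Same shape as the generated code: fill an intermediate HashMap entry by entry,
        // converting each value, then hand it to the POJO setter.
        Map<String, String> pojoMap = new HashMap<>();
        for (Map.Entry<String, Integer> pair : protoMap.entrySet()) {
            pojoMap.put(pair.getKey(), fromProto(pair.getValue()));
        }

        System.out.println(pojoMap); // e.g. {a=value-1, b=value-2}
    }
}
```
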
diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MessageType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MessageType.java
new file mode 100644
index 0000000000..d572287733
--- /dev/null
+++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/MessageType.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.Type;
+import java.util.List;
+import java.util.Set;
+
+import com.squareup.javapoet.ClassName;
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeName;
+
+public class MessageType extends AbstractType {
+ private String protoFilePath;
+
+ public MessageType(Type javaType, ClassName javaProtoType, String protoFilePath) {
+ super(javaType, javaProtoType);
+ this.protoFilePath = protoFilePath;
+ }
+
+ @Override
+ public String getProtoType() {
+ List<String> classNames = ((ClassName) getJavaProtoType()).simpleNames();
+ return String.join(".", classNames);
+ }
+}
diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ScalarType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ScalarType.java
new file mode 100644
--- /dev/null
+++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/ScalarType.java
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.Type;
+import java.util.Set;
+
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeName;
+
+public class ScalarType extends AbstractType {
+ private String protoType;
+
+ public ScalarType(Type javaType, TypeName javaProtoType, String protoType) {
+ super(javaType, javaProtoType);
+ this.protoType = protoType;
+ }
+
+ @Override
+ public String getProtoType() {
+ return protoType;
+ }
+
+ @Override
+ public TypeName getRawJavaType() {
+ return getJavaProtoType();
+ }
+
+ @Override
+ public void mapFromProto(String field, MethodSpec.Builder method) {
+ method.addStatement(
+ "to.$L( from.$L() )", javaMethodName("set", field), protoMethodName("get", field));
+ }
+
+ private boolean isNullableType() {
+ final Type jt = getJavaType();
+ return jt.equals(Boolean.class)
+ || jt.equals(Byte.class)
+ || jt.equals(Character.class)
+ || jt.equals(Short.class)
+ || jt.equals(Integer.class)
+ || jt.equals(Long.class)
+ || jt.equals(Double.class)
+ || jt.equals(Float.class)
+ || jt.equals(String.class);
+ }
+
+ @Override
+ public void mapToProto(String field, MethodSpec.Builder method) {
+ final boolean nullable = isNullableType();
+ String getter =
+ (getJavaType().equals(boolean.class) || getJavaType().equals(Boolean.class))
+ ? javaMethodName("is", field)
+ : javaMethodName("get", field);
+
+ if (nullable) method.beginControlFlow("if (from.$L() != null)", getter);
+
+ method.addStatement("to.$L( from.$L() )", protoMethodName("set", field), getter);
+
+ if (nullable) method.endControlFlow();
+ }
+
+ @Override
+ public void getDependencies(Set<String> dependencies) {}
+}
diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/TypeMapper.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/TypeMapper.java
new file mode 100644
--- /dev/null
+++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/TypeMapper.java
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.*;
+
+import com.google.protobuf.Any;
+import com.squareup.javapoet.ClassName;
+import com.squareup.javapoet.TypeName;
+
+public class TypeMapper {
+ static Map<Class, Class> PROTO_LIST_TYPES = new HashMap<>();
+}
diff --git a/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/WrappedType.java b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/WrappedType.java
new file mode 100644
--- /dev/null
+++ b/annotations-processor/src/main/java/com/netflix/conductor/annotationsprocessor/protogen/types/WrappedType.java
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen.types;
+
+import java.lang.reflect.Type;
+import java.util.Set;
+
+import javax.lang.model.element.Modifier;
+
+import com.squareup.javapoet.MethodSpec;
+import com.squareup.javapoet.TypeName;
+
+public class WrappedType extends AbstractType {
+ private AbstractType realType;
+ private MessageType wrappedType;
+
+ public static WrappedType wrap(GenericType realType) {
+ Type valueType = realType.getValueType().getJavaType();
+ if (!(valueType instanceof Class))
+ throw new IllegalArgumentException("cannot wrap primitive type: " + valueType);
+
+ String className = ((Class) valueType).getSimpleName() + realType.getWrapperSuffix();
+ MessageType wrappedType = TypeMapper.INSTANCE.get(className);
+ if (wrappedType == null)
+ throw new IllegalArgumentException("missing wrapper class: " + className);
+ return new WrappedType(realType, wrappedType);
+ }
+
+ public WrappedType(AbstractType realType, MessageType wrappedType) {
+ super(realType.getJavaType(), wrappedType.getJavaProtoType());
+ this.realType = realType;
+ this.wrappedType = wrappedType;
+ }
+
+ @Override
+ public String getProtoType() {
+ return wrappedType.getProtoType();
+ }
+
+ @Override
+ public TypeName getRawJavaType() {
+ return realType.getRawJavaType();
+ }
+
+ @Override
+ public void mapToProto(String field, MethodSpec.Builder method) {
+ wrappedType.mapToProto(field, method);
+ }
+
+ @Override
+ public void mapFromProto(String field, MethodSpec.Builder method) {
+ wrappedType.mapFromProto(field, method);
+ }
+
+ @Override
+ public void getDependencies(Set<String> dependencies) {
+ wrappedType.getDependencies(dependencies);
+ }
+}
diff --git a/annotations-processor/src/test/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTest.java b/annotations-processor/src/test/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTest.java
new file mode 100644
--- /dev/null
+++ b/annotations-processor/src/test/java/com/netflix/conductor/annotationsprocessor/protogen/ProtoGenTest.java
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotationsprocessor.protogen;
+
+import java.io.File;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import com.google.common.collect.Lists;
+import com.google.common.io.Files;
+import com.google.common.io.Resources;
+
+import static org.junit.Assert.*;
+
+public class ProtoGenTest {
+ private static final Charset charset = StandardCharsets.UTF_8;
+
+ @Rule public TemporaryFolder folder = new TemporaryFolder();
+
+ @Test
+ public void happyPath() throws Exception {
+ File rootDir = folder.getRoot();
+ String protoPackage = "protoPackage";
+ String javaPackage = "abc.protogen.example";
+ String goPackage = "goPackage";
+ String sourcePackage = "com.example";
+ String mapperPackage = "mapperPackage";
+
+ File jarFile = new File("./build/libs/example.jar");
+ assertTrue(jarFile.exists());
+
+ File mapperDir = new File(rootDir, "mapperDir");
+ mapperDir.mkdirs();
+
+ File protosDir = new File(rootDir, "protosDir");
+ protosDir.mkdirs();
+
+ File modelDir = new File(protosDir, "model");
+ modelDir.mkdirs();
+
+ ProtoGen generator = new ProtoGen(protoPackage, javaPackage, goPackage);
+ generator.processPackage(jarFile, sourcePackage);
+ generator.writeMapper(mapperDir, mapperPackage);
+ generator.writeProtos(protosDir);
+
+ List<File> generatedMappers = Lists.newArrayList(mapperDir.listFiles());
+ assertFalse(generatedMappers.isEmpty());
+ }
+}
diff --git a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoEnum.java b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoEnum.java
new file mode 100644
--- /dev/null
+++ b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoEnum.java
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotations.protogen;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * ProtoEnum annotates an enum type that will be exposed via the GRPC API as a native Protocol
+ * Buffers enum.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ProtoEnum {}
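
A minimal usage sketch, with a purely hypothetical enum name and constants:

```java
import com.netflix.conductor.annotations.protogen.ProtoEnum;

// Marking the enum with @ProtoEnum tells protogen to emit a matching Protocol Buffers enum.
@ProtoEnum
public enum ExampleStatus {
    SCHEDULED,
    RUNNING,
    COMPLETED
}
```
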
diff --git a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoField.java b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoField.java
new file mode 100644
index 0000000000..25ab478c8c
--- /dev/null
+++ b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoField.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotations.protogen;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * ProtoField annotates a field inside a struct with metadata on how to expose it in its
+ * corresponding Protocol Buffers struct. For a field to be exposed in a ProtoBuf struct, the
+ * containing struct must also be annotated with a {@link ProtoMessage} or {@link ProtoEnum} tag.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface ProtoField {
+ /**
+ * Mandatory. Sets the Protocol Buffer ID for this specific field. Once a field has been
+ * annotated with a given ID, the ID can never change to a different value or the resulting
+ * Protocol Buffer struct will not be backwards compatible.
+ *
+ * @return the numeric ID for the field
+ */
+ int id();
+}
diff --git a/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoMessage.java b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoMessage.java
new file mode 100644
index 0000000000..d66e4aa435
--- /dev/null
+++ b/annotations/src/main/java/com/netflix/conductor/annotations/protogen/ProtoMessage.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2022 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.annotations.protogen;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * ProtoMessage annotates a given Java class so it becomes exposed via the GRPC API as a native
+ * Protocol Buffers struct. The annotated class must be a POJO.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ProtoMessage {
+ /**
+ * Sets whether the generated mapping code will contain a helper to translate the POJO for this
+ * class into the equivalent ProtoBuf object.
+ *
+ * @return whether this class will generate a mapper to ProtoBuf objects
+ */
+ boolean toProto() default true;
+
+ /**
+ * Sets whether the generated mapping code will contain a helper to translate the ProtoBuf
+ * object for this class into the equivalent POJO.
+ *
+ * @return whether this class will generate a mapper from ProtoBuf objects
+ */
+ boolean fromProto() default true;
+
+ /**
+ * Sets whether this is a wrapper class that will be used to encapsulate complex nested type
+ * interfaces. Wrapper classes are not directly exposed by the ProtoBuf API and must be mapped
+ * manually.
+ *
+ * @return whether this is a wrapper class
+ */
+ boolean wrapper() default false;
+}
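
A minimal sketch of the two annotations applied together on a hypothetical POJO (names and ids are illustrative): the class-level `@ProtoMessage` asks protogen for a message plus to/from mappers, and each exposed field carries a stable `@ProtoField` id.

```java
import com.netflix.conductor.annotations.protogen.ProtoField;
import com.netflix.conductor.annotations.protogen.ProtoMessage;

@ProtoMessage
public class ExampleTaskSummary {

    @ProtoField(id = 1)
    private String taskId;

    @ProtoField(id = 2)
    private long startTime;

    // Getters and setters are used by the generated mapping code.
    public String getTaskId() {
        return taskId;
    }

    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    public long getStartTime() {
        return startTime;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }
}
```
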
diff --git a/azureblob-storage/README.md b/azureblob-storage/README.md
new file mode 100644
index 0000000000..33a39349c0
--- /dev/null
+++ b/azureblob-storage/README.md
@@ -0,0 +1,44 @@
+# Azure Blob External Storage Module
+
+This module uses Azure Blob Storage to store and retrieve workflow/task input/output payloads that
+exceed the thresholds defined by the properties named `conductor.[workflow|task].[input|output].payload.threshold.kb`.
+
+**Warning** The Azure Java SDK uses libraries already present in `conductor`, such as `jackson` and `netty`.
+You may run into deprecation issues or version conflicts and need to adapt the code if this module is not maintained along with `conductor`.
+It has only been tested with **v12.2.0**.
+
+## Configuration
+
+### Usage
+
+See the [External Payload Storage](https://netflix.github.io/conductor/externalpayloadstorage/#azure-blob-storage) documentation.
+
+### Example
+
+```properties
+conductor.additional.modules=com.netflix.conductor.azureblob.AzureBlobModule
+es.set.netty.runtime.available.processors=false
+
+workflow.external.payload.storage=AZURE_BLOB
+workflow.external.payload.storage.azure_blob.connection_string=DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;EndpointSuffix=localhost
+workflow.external.payload.storage.azure_blob.signedurlexpirationseconds=360
+```
+
+## Testing
+
+You can use [Azurite](https://github.com/Azure/Azurite) to emulate Azure Storage locally.
+
+### Troubleshooting
+
+* When using **es5 persistence** you will receive a `java.lang.IllegalStateException` because the Netty library calls `setAvailableProcessors` twice. To resolve this issue, set the following system property:
+
+```
+es.set.netty.runtime.available.processors=false
+```
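+
+For example, the property can be passed as a JVM system property when starting the server (the jar path below is a placeholder):
+
+```
+java -Des.set.netty.runtime.available.processors=false -jar <conductor-server>.jar
+```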
+
+If you want to change the default HTTP client of the Azure SDK, you can use `okhttp` instead of `netty`.
+To do so, add the following [dependency](https://github.com/Azure/azure-sdk-for-java/tree/master/sdk/storage/azure-storage-blob#default-http-client).
+
+```
+com.azure:azure-core-http-okhttp:${compatible version}
+```
diff --git a/azureblob-storage/build.gradle b/azureblob-storage/build.gradle
new file mode 100644
index 0000000000..3b85566a1c
--- /dev/null
+++ b/azureblob-storage/build.gradle
@@ -0,0 +1,8 @@
+dependencies {
+ implementation project(':conductor-common')
+ implementation project(':conductor-core')
+ compileOnly 'org.springframework.boot:spring-boot-starter'
+
+ implementation "com.azure:azure-storage-blob:${revAzureStorageBlobSdk}"
+ implementation "org.apache.commons:commons-lang3"
+}
diff --git a/azureblob-storage/dependencies.lock b/azureblob-storage/dependencies.lock
new file mode 100644
index 0000000000..d5855b0831
--- /dev/null
+++ b/azureblob-storage/dependencies.lock
@@ -0,0 +1,2043 @@
+{
+ "annotationProcessor": {
+ "org.springframework.boot:spring-boot-configuration-processor": {
+ "locked": "2.3.12.RELEASE"
+ }
+ },
+ "compileClasspath": {
+ "com.azure:azure-core": {
+ "locked": "1.5.1",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "com.azure:azure-storage-blob",
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-core-http-netty": {
+ "locked": "1.5.2",
+ "transitive": [
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-storage-blob": {
+ "locked": "12.7.0"
+ },
+ "com.azure:azure-storage-common": {
+ "locked": "12.7.0",
+ "transitive": [
+ "com.azure:azure-storage-blob"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-core": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-databind": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.fasterxml.woodstox:woodstox-core": {
+ "locked": "6.2.3",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.netflix.conductor:conductor-common": {
+ "project": true
+ },
+ "com.netflix.conductor:conductor-core": {
+ "project": true
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-codec-http": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-handler-proxy",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-http2": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-socks": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-handler-proxy": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-tcnative-boringssl-static": {
+ "locked": "2.0.39.Final",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-transport-native-epoll": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-transport-native-unix-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-transport-native-epoll"
+ ]
+ },
+ "io.projectreactor.netty:reactor-netty": {
+ "locked": "0.9.20.RELEASE",
+ "transitive": [
+ "com.azure:azure-core-http-netty"
+ ]
+ },
+ "io.projectreactor:reactor-core": {
+ "locked": "3.3.17.RELEASE",
+ "transitive": [
+ "com.azure:azure-core",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10"
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0"
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0"
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0"
+ },
+ "org.codehaus.woodstox:stax2-api": {
+ "locked": "4.2.1",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.woodstox:woodstox-core"
+ ]
+ },
+ "org.reactivestreams:reactive-streams": {
+ "locked": "1.0.3",
+ "transitive": [
+ "io.projectreactor:reactor-core"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
+ },
+ "org.slf4j:slf4j-api": {
+ "locked": "1.7.30",
+ "transitive": [
+ "com.azure:azure-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j"
+ ]
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ }
+ },
+ "runtimeClasspath": {
+ "com.azure:azure-core": {
+ "locked": "1.5.1",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "com.azure:azure-storage-blob",
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-core-http-netty": {
+ "locked": "1.5.2",
+ "transitive": [
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-storage-blob": {
+ "locked": "12.7.0"
+ },
+ "com.azure:azure-storage-common": {
+ "locked": "12.7.0",
+ "transitive": [
+ "com.azure:azure-storage-blob"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-core": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-databind": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.fasterxml.woodstox:woodstox-core": {
+ "locked": "6.2.3",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.github.rholder:guava-retrying": {
+ "locked": "2.0.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-common"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.protobuf:protobuf-java": {
+ "locked": "3.13.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.jayway.jsonpath:json-path": {
+ "locked": "2.4.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.netflix.conductor:conductor-annotations": {
+ "project": true,
+ "transitive": [
+ "com.netflix.conductor:conductor-common"
+ ]
+ },
+ "com.netflix.conductor:conductor-common": {
+ "project": true,
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.netflix.conductor:conductor-core": {
+ "project": true
+ },
+ "com.netflix.spectator:spectator-api": {
+ "locked": "0.122.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.spotify:completable-futures": {
+ "locked": "0.3.3",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-codec-http": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-handler-proxy",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-http2": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-socks": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-handler-proxy": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-tcnative-boringssl-static": {
+ "locked": "2.0.39.Final",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-transport-native-epoll": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-transport-native-unix-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-transport-native-epoll"
+ ]
+ },
+ "io.projectreactor.netty:reactor-netty": {
+ "locked": "0.9.20.RELEASE",
+ "transitive": [
+ "com.azure:azure-core-http-netty"
+ ]
+ },
+ "io.projectreactor:reactor-core": {
+ "locked": "3.3.17.RELEASE",
+ "transitive": [
+ "com.azure:azure-core",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.reactivex:rxjava": {
+ "locked": "1.3.8",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-core",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.apache.bval:bval-jsr": {
+ "locked": "2.0.5",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "org.codehaus.woodstox:stax2-api": {
+ "locked": "4.2.1",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.woodstox:woodstox-core"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "net.minidev:accessors-smart"
+ ]
+ },
+ "org.reactivestreams:reactive-streams": {
+ "locked": "1.0.3",
+ "transitive": [
+ "io.projectreactor:reactor-core"
+ ]
+ },
+ "org.slf4j:slf4j-api": {
+ "locked": "1.7.30",
+ "transitive": [
+ "com.azure:azure-core",
+ "com.jayway.jsonpath:json-path",
+ "com.netflix.spectator:spectator-api",
+ "org.apache.logging.log4j:log4j-slf4j-impl"
+ ]
+ }
+ },
+ "testCompileClasspath": {
+ "com.azure:azure-core": {
+ "locked": "1.5.1",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "com.azure:azure-storage-blob",
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-core-http-netty": {
+ "locked": "1.5.2",
+ "transitive": [
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-storage-blob": {
+ "locked": "12.7.0"
+ },
+ "com.azure:azure-storage-common": {
+ "locked": "12.7.0",
+ "transitive": [
+ "com.azure:azure-storage-blob"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-core": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-databind": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.fasterxml.woodstox:woodstox-core": {
+ "locked": "6.2.3",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.jayway.jsonpath:json-path": {
+ "locked": "2.4.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "com.netflix.conductor:conductor-common": {
+ "project": true
+ },
+ "com.netflix.conductor:conductor-core": {
+ "project": true
+ },
+ "com.vaadin.external.google:android-json": {
+ "locked": "0.0.20131108.vaadin1",
+ "transitive": [
+ "org.skyscreamer:jsonassert"
+ ]
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-codec-http": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-handler-proxy",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-http2": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-socks": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-handler-proxy": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-tcnative-boringssl-static": {
+ "locked": "2.0.39.Final",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-transport-native-epoll": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-transport-native-unix-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-transport-native-epoll"
+ ]
+ },
+ "io.projectreactor.netty:reactor-netty": {
+ "locked": "0.9.20.RELEASE",
+ "transitive": [
+ "com.azure:azure-core-http-netty"
+ ]
+ },
+ "io.projectreactor:reactor-core": {
+ "locked": "3.3.17.RELEASE",
+ "transitive": [
+ "com.azure:azure-core",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "junit:junit": {
+ "locked": "4.13.2",
+ "transitive": [
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "net.bytebuddy:byte-buddy": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.bytebuddy:byte-buddy-agent": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10"
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-web",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0"
+ },
+ "org.apiguardian:apiguardian-api": {
+ "locked": "1.1.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.assertj:assertj-core": {
+ "locked": "3.16.1",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.codehaus.woodstox:stax2-api": {
+ "locked": "4.2.1",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.woodstox:woodstox-core"
+ ]
+ },
+ "org.hamcrest:hamcrest": {
+ "locked": "2.2",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-api": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-params"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-params": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.platform:junit-platform-commons": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-engine": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.junit.vintage:junit-vintage-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit:junit-bom": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.mockito:mockito-core": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.mockito:mockito-junit-jupiter",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.mockito:mockito-junit-jupiter": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.objenesis:objenesis": {
+ "locked": "2.6",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "org.opentest4j:opentest4j": {
+ "locked": "1.2.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "net.minidev:accessors-smart"
+ ]
+ },
+ "org.reactivestreams:reactive-streams": {
+ "locked": "1.0.3",
+ "transitive": [
+ "io.projectreactor:reactor-core"
+ ]
+ },
+ "org.skyscreamer:jsonassert": {
+ "locked": "1.5.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2",
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
+ },
+ "org.slf4j:slf4j-api": {
+ "locked": "1.7.30",
+ "transitive": [
+ "com.azure:azure-core",
+ "com.jayway.jsonpath:json-path",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j"
+ ]
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-log4j2": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-test": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-test": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-test-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression",
+ "org.springframework:spring-test"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.springframework:spring-test": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.xmlunit:xmlunit-core": {
+ "locked": "2.7.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ }
+ },
+ "testRuntimeClasspath": {
+ "com.azure:azure-core": {
+ "locked": "1.5.1",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "com.azure:azure-storage-blob",
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-core-http-netty": {
+ "locked": "1.5.2",
+ "transitive": [
+ "com.azure:azure-storage-common"
+ ]
+ },
+ "com.azure:azure-storage-blob": {
+ "locked": "12.7.0"
+ },
+ "com.azure:azure-storage-common": {
+ "locked": "12.7.0",
+ "transitive": [
+ "com.azure:azure-storage-blob"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-core": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-databind": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.fasterxml.woodstox:woodstox-core": {
+ "locked": "6.2.3",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml"
+ ]
+ },
+ "com.github.rholder:guava-retrying": {
+ "locked": "2.0.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-common"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.protobuf:protobuf-java": {
+ "locked": "3.13.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.jayway.jsonpath:json-path": {
+ "locked": "2.4.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "com.netflix.conductor:conductor-annotations": {
+ "project": true,
+ "transitive": [
+ "com.netflix.conductor:conductor-common"
+ ]
+ },
+ "com.netflix.conductor:conductor-common": {
+ "project": true,
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.netflix.conductor:conductor-core": {
+ "project": true
+ },
+ "com.netflix.spectator:spectator-api": {
+ "locked": "0.122.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.spotify:completable-futures": {
+ "locked": "0.3.3",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.vaadin.external.google:android-json": {
+ "locked": "0.0.20131108.vaadin1",
+ "transitive": [
+ "org.skyscreamer:jsonassert"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-codec-http": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-handler-proxy",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-http2": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-codec-socks": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler-proxy"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-handler-proxy": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-tcnative-boringssl-static": {
+ "locked": "2.0.39.Final",
+ "transitive": [
+ "com.azure:azure-core"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-codec-http",
+ "io.netty:netty-codec-http2",
+ "io.netty:netty-codec-socks",
+ "io.netty:netty-handler",
+ "io.netty:netty-handler-proxy",
+ "io.netty:netty-transport-native-epoll",
+ "io.netty:netty-transport-native-unix-common"
+ ]
+ },
+ "io.netty:netty-transport-native-epoll": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.netty:netty-transport-native-unix-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.azure:azure-core-http-netty",
+ "io.netty:netty-transport-native-epoll"
+ ]
+ },
+ "io.projectreactor.netty:reactor-netty": {
+ "locked": "0.9.20.RELEASE",
+ "transitive": [
+ "com.azure:azure-core-http-netty"
+ ]
+ },
+ "io.projectreactor:reactor-core": {
+ "locked": "3.3.17.RELEASE",
+ "transitive": [
+ "com.azure:azure-core",
+ "io.projectreactor.netty:reactor-netty"
+ ]
+ },
+ "io.reactivex:rxjava": {
+ "locked": "1.3.8",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-core",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "junit:junit": {
+ "locked": "4.13.2",
+ "transitive": [
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "net.bytebuddy:byte-buddy": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.bytebuddy:byte-buddy-agent": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.apache.bval:bval-jsr": {
+ "locked": "2.0.5",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "org.apiguardian:apiguardian-api": {
+ "locked": "1.1.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.assertj:assertj-core": {
+ "locked": "3.16.1",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "org.codehaus.woodstox:stax2-api": {
+ "locked": "4.2.1",
+ "transitive": [
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-xml",
+ "com.fasterxml.woodstox:woodstox-core"
+ ]
+ },
+ "org.hamcrest:hamcrest": {
+ "locked": "2.2",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-api": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.mockito:mockito-junit-jupiter"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-params": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.platform:junit-platform-commons": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-engine": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.junit.vintage:junit-vintage-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit:junit-bom": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.mockito:mockito-core": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.mockito:mockito-junit-jupiter",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.mockito:mockito-junit-jupiter": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.objenesis:objenesis": {
+ "locked": "2.6",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "org.opentest4j:opentest4j": {
+ "locked": "1.2.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "net.minidev:accessors-smart"
+ ]
+ },
+ "org.reactivestreams:reactive-streams": {
+ "locked": "1.0.3",
+ "transitive": [
+ "io.projectreactor:reactor-core"
+ ]
+ },
+ "org.skyscreamer:jsonassert": {
+ "locked": "1.5.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2",
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
+ },
+ "org.slf4j:slf4j-api": {
+ "locked": "1.7.30",
+ "transitive": [
+ "com.azure:azure-core",
+ "com.jayway.jsonpath:json-path",
+ "com.netflix.spectator:spectator-api",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j"
+ ]
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-log4j2": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-test": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-test": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-test-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression",
+ "org.springframework:spring-test"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.springframework:spring-test": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.xmlunit:xmlunit-core": {
+ "locked": "2.7.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/config/AzureBlobConfiguration.java b/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/config/AzureBlobConfiguration.java
new file mode 100644
index 0000000000..b36d299728
--- /dev/null
+++ b/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/config/AzureBlobConfiguration.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.azureblob.config;
+
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import com.netflix.conductor.azureblob.storage.AzureBlobPayloadStorage;
+import com.netflix.conductor.common.utils.ExternalPayloadStorage;
+
+@Configuration(proxyBeanMethods = false)
+@EnableConfigurationProperties(AzureBlobProperties.class)
+@ConditionalOnProperty(name = "conductor.external-payload-storage.type", havingValue = "azureblob")
+public class AzureBlobConfiguration {
+
+ @Bean
+ public ExternalPayloadStorage azureBlobExternalPayloadStorage(AzureBlobProperties properties) {
+ return new AzureBlobPayloadStorage(properties);
+ }
+}
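A minimal sketch of how this conditional wiring could be exercised with Spring Boot's ApplicationContextRunner (from spring-boot-test). The test class name here is hypothetical, and the endpoint value reuses the local Azurite endpoint that appears in AzureBlobPayloadStorageTest further down in this diff; the bean is contributed only when conductor.external-payload-storage.type is set to azureblob.

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;

import com.netflix.conductor.azureblob.config.AzureBlobConfiguration;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;

public class AzureBlobConfigurationSketchTest {

    private final ApplicationContextRunner contextRunner =
            new ApplicationContextRunner().withUserConfiguration(AzureBlobConfiguration.class);

    @Test
    public void beanIsRegisteredOnlyWhenStorageTypeIsAzureblob() {
        // With the property set, the conditional configuration contributes the storage bean.
        contextRunner
                .withPropertyValues(
                        "conductor.external-payload-storage.type=azureblob",
                        "conductor.external-payload-storage.azureblob.endpoint=http://127.0.0.1:10000/")
                .run(context -> assertThat(context).hasSingleBean(ExternalPayloadStorage.class));

        // Without it, the condition does not match and no bean is created.
        contextRunner.run(
                context -> assertThat(context).doesNotHaveBean(ExternalPayloadStorage.class));
    }
}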
diff --git a/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/config/AzureBlobProperties.java b/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/config/AzureBlobProperties.java
new file mode 100644
index 0000000000..9a1f4fbf96
--- /dev/null
+++ b/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/config/AzureBlobProperties.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.azureblob.config;
+
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.boot.convert.DurationUnit;
+
+@ConfigurationProperties("conductor.external-payload-storage.azureblob")
+public class AzureBlobProperties {
+
+ /** The connection string to be used to connect to Azure Blob storage */
+ private String connectionString = null;
+
+ /** The name of the container where the payloads will be stored */
+ private String containerName = "conductor-payloads";
+
+ /** The endpoint to be used to connect to Azure Blob storage */
+ private String endpoint = null;
+
+ /** The sas token to be used for authenticating requests */
+ private String sasToken = null;
+
+ /** The time for which the shared access signature is valid */
+ @DurationUnit(ChronoUnit.SECONDS)
+ private Duration signedUrlExpirationDuration = Duration.ofSeconds(5);
+
+ /** The path at which the workflow inputs will be stored */
+ private String workflowInputPath = "workflow/input/";
+
+ /** The path at which the workflow outputs will be stored */
+ private String workflowOutputPath = "workflow/output/";
+
+ /** The path at which the task inputs will be stored */
+ private String taskInputPath = "task/input/";
+
+ /** The path at which the task outputs will be stored */
+ private String taskOutputPath = "task/output/";
+
+ public String getConnectionString() {
+ return connectionString;
+ }
+
+ public void setConnectionString(String connectionString) {
+ this.connectionString = connectionString;
+ }
+
+ public String getContainerName() {
+ return containerName;
+ }
+
+ public void setContainerName(String containerName) {
+ this.containerName = containerName;
+ }
+
+ public String getEndpoint() {
+ return endpoint;
+ }
+
+ public void setEndpoint(String endpoint) {
+ this.endpoint = endpoint;
+ }
+
+ public String getSasToken() {
+ return sasToken;
+ }
+
+ public void setSasToken(String sasToken) {
+ this.sasToken = sasToken;
+ }
+
+ public Duration getSignedUrlExpirationDuration() {
+ return signedUrlExpirationDuration;
+ }
+
+ public void setSignedUrlExpirationDuration(Duration signedUrlExpirationDuration) {
+ this.signedUrlExpirationDuration = signedUrlExpirationDuration;
+ }
+
+ public String getWorkflowInputPath() {
+ return workflowInputPath;
+ }
+
+ public void setWorkflowInputPath(String workflowInputPath) {
+ this.workflowInputPath = workflowInputPath;
+ }
+
+ public String getWorkflowOutputPath() {
+ return workflowOutputPath;
+ }
+
+ public void setWorkflowOutputPath(String workflowOutputPath) {
+ this.workflowOutputPath = workflowOutputPath;
+ }
+
+ public String getTaskInputPath() {
+ return taskInputPath;
+ }
+
+ public void setTaskInputPath(String taskInputPath) {
+ this.taskInputPath = taskInputPath;
+ }
+
+ public String getTaskOutputPath() {
+ return taskOutputPath;
+ }
+
+ public void setTaskOutputPath(String taskOutputPath) {
+ this.taskOutputPath = taskOutputPath;
+ }
+}
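A minimal sketch of configuring these properties programmatically; the class name and values are illustrative, and the endpoint mirrors the Azurite endpoint used in the tests below. Note that @DurationUnit(ChronoUnit.SECONDS) only affects Spring's property binding; in code a Duration is passed directly, and the default expiration is 5 seconds.

import java.time.Duration;

import com.netflix.conductor.azureblob.config.AzureBlobProperties;

public class AzureBlobPropertiesSketch {

    public static void main(String[] args) {
        AzureBlobProperties properties = new AzureBlobProperties();

        // Point the storage at a local Azurite-style endpoint instead of a real account.
        properties.setEndpoint("http://127.0.0.1:10000/");

        // "conductor-payloads" is already the default container name; shown here for clarity.
        properties.setContainerName("conductor-payloads");

        // Override the default 5-second expiration of the pre-signed URLs.
        properties.setSignedUrlExpirationDuration(Duration.ofSeconds(60));

        System.out.println(properties.getSignedUrlExpirationDuration()); // prints PT1M
    }
}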
diff --git a/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/storage/AzureBlobPayloadStorage.java b/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/storage/AzureBlobPayloadStorage.java
new file mode 100644
index 0000000000..ea9e1aeb42
--- /dev/null
+++ b/azureblob-storage/src/main/java/com/netflix/conductor/azureblob/storage/AzureBlobPayloadStorage.java
@@ -0,0 +1,228 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.azureblob.storage;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.UncheckedIOException;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.conductor.azureblob.config.AzureBlobProperties;
+import com.netflix.conductor.common.run.ExternalStorageLocation;
+import com.netflix.conductor.common.utils.ExternalPayloadStorage;
+import com.netflix.conductor.core.exception.ApplicationException;
+import com.netflix.conductor.core.utils.IDGenerator;
+
+import com.azure.core.exception.UnexpectedLengthException;
+import com.azure.core.util.Context;
+import com.azure.storage.blob.BlobContainerClient;
+import com.azure.storage.blob.BlobContainerClientBuilder;
+import com.azure.storage.blob.models.BlobHttpHeaders;
+import com.azure.storage.blob.models.BlobStorageException;
+import com.azure.storage.blob.sas.BlobSasPermission;
+import com.azure.storage.blob.sas.BlobServiceSasSignatureValues;
+import com.azure.storage.blob.specialized.BlockBlobClient;
+import com.azure.storage.common.Utility;
+import com.azure.storage.common.implementation.credentials.SasTokenCredential;
+
+/**
+ * An implementation of {@link ExternalPayloadStorage} using Azure Blob for storing large JSON
+ * payload data.
+ *
+ * @see Azure Java SDK
+ */
+public class AzureBlobPayloadStorage implements ExternalPayloadStorage {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(AzureBlobPayloadStorage.class);
+ private static final String CONTENT_TYPE = "application/json";
+
+ private final String workflowInputPath;
+ private final String workflowOutputPath;
+ private final String taskInputPath;
+ private final String taskOutputPath;
+
+ private final BlobContainerClient blobContainerClient;
+ private final long expirationSec;
+ private final SasTokenCredential sasTokenCredential;
+
+ public AzureBlobPayloadStorage(AzureBlobProperties properties) {
+ workflowInputPath = properties.getWorkflowInputPath();
+ workflowOutputPath = properties.getWorkflowOutputPath();
+ taskInputPath = properties.getTaskInputPath();
+ taskOutputPath = properties.getTaskOutputPath();
+ expirationSec = properties.getSignedUrlExpirationDuration().getSeconds();
+ String connectionString = properties.getConnectionString();
+ String containerName = properties.getContainerName();
+ String endpoint = properties.getEndpoint();
+ String sasToken = properties.getSasToken();
+
+ BlobContainerClientBuilder blobContainerClientBuilder = new BlobContainerClientBuilder();
+ if (connectionString != null) {
+ blobContainerClientBuilder.connectionString(connectionString);
+ sasTokenCredential = null;
+ } else if (endpoint != null) {
+ blobContainerClientBuilder.endpoint(endpoint);
+ if (sasToken != null) {
+ sasTokenCredential = SasTokenCredential.fromSasTokenString(sasToken);
+ blobContainerClientBuilder.sasToken(sasTokenCredential.getSasToken());
+ } else {
+ sasTokenCredential = null;
+ }
+ } else {
+ String msg = "Missing property for connectionString OR endpoint";
+ LOGGER.error(msg);
+ throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, msg);
+ }
+ blobContainerClient = blobContainerClientBuilder.containerName(containerName).buildClient();
+ }
+
+ /**
+ * @param operation the type of {@link Operation} to be performed
+ * @param payloadType the {@link PayloadType} that is being accessed
+ * @return an {@link ExternalStorageLocation} object that contains the pre-signed URL and the
+ * Azure blob name for the JSON payload

+ */
+ @Override
+ public ExternalStorageLocation getLocation(
+ Operation operation, PayloadType payloadType, String path) {
+ try {
+ ExternalStorageLocation externalStorageLocation = new ExternalStorageLocation();
+
+ String objectKey;
+ if (StringUtils.isNotBlank(path)) {
+ objectKey = path;
+ } else {
+ objectKey = getObjectKey(payloadType);
+ }
+ externalStorageLocation.setPath(objectKey);
+
+ BlockBlobClient blockBlobClient =
+ blobContainerClient.getBlobClient(objectKey).getBlockBlobClient();
+ String blobUrl = Utility.urlDecode(blockBlobClient.getBlobUrl());
+
+ if (sasTokenCredential != null) {
+ blobUrl = blobUrl + "?" + sasTokenCredential.getSasToken();
+ } else {
+ BlobSasPermission blobSASPermission = new BlobSasPermission();
+ if (operation.equals(Operation.READ)) {
+ blobSASPermission.setReadPermission(true);
+ } else if (operation.equals(Operation.WRITE)) {
+ blobSASPermission.setWritePermission(true);
+ blobSASPermission.setCreatePermission(true);
+ }
+ BlobServiceSasSignatureValues blobServiceSasSignatureValues =
+ new BlobServiceSasSignatureValues(
+ OffsetDateTime.now(ZoneOffset.UTC).plusSeconds(expirationSec),
+ blobSASPermission);
+ blobUrl =
+ blobUrl + "?" + blockBlobClient.generateSas(blobServiceSasSignatureValues);
+ }
+
+ externalStorageLocation.setUri(blobUrl);
+ return externalStorageLocation;
+ } catch (BlobStorageException e) {
+ String msg = "Error communicating with Azure";
+ LOGGER.error(msg, e);
+ throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, msg, e);
+ }
+ }
+
+ /**
+ * Uploads the payload to the given azure blob name. It is expected that the caller retrieves
+ * the blob name using {@link #getLocation(Operation, PayloadType, String)} before making this
+ * call.
+ *
+ * @param path the name of the blob to be uploaded
+ * @param payload an {@link InputStream} containing the json payload which is to be uploaded
+ * @param payloadSize the size of the json payload in bytes
+ */
+ @Override
+ public void upload(String path, InputStream payload, long payloadSize) {
+ try {
+ BlockBlobClient blockBlobClient =
+ blobContainerClient.getBlobClient(path).getBlockBlobClient();
+ BlobHttpHeaders blobHttpHeaders = new BlobHttpHeaders().setContentType(CONTENT_TYPE);
+ blockBlobClient.uploadWithResponse(
+ payload,
+ payloadSize,
+ blobHttpHeaders,
+ null,
+ null,
+ null,
+ null,
+ null,
+ Context.NONE);
+ } catch (BlobStorageException | UncheckedIOException | UnexpectedLengthException e) {
+ String msg = "Error communicating with Azure";
+ LOGGER.error(msg, e);
+ throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, msg, e);
+ }
+ }
+
+ /**
+ * Downloads the payload stored in an azure blob.
+ *
+ * @param path the path of the blob
+ * @return an input stream containing the contents of the object. The caller is expected to
+ * close the input stream.
+ */
+ @Override
+ public InputStream download(String path) {
+ try {
+ BlockBlobClient blockBlobClient =
+ blobContainerClient.getBlobClient(path).getBlockBlobClient();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ // Avoid another call to the api to get the blob size
+ // ByteArrayOutputStream outputStream = new
+ // ByteArrayOutputStream(blockBlobClient.getProperties().value().blobSize());
+ blockBlobClient.download(outputStream);
+ return new ByteArrayInputStream(outputStream.toByteArray());
+ } catch (BlobStorageException | UncheckedIOException | NullPointerException e) {
+ String msg = "Error communicating with Azure";
+ LOGGER.error(msg, e);
+ throw new ApplicationException(ApplicationException.Code.BACKEND_ERROR, msg, e);
+ }
+ }
+
+ /**
+ * Build path on external storage. Copied from S3PayloadStorage.
+ *
+ * @param payloadType the {@link PayloadType} which will determine the base path of the object
+ * @return External Storage path
+ */
+ private String getObjectKey(PayloadType payloadType) {
+ StringBuilder stringBuilder = new StringBuilder();
+ switch (payloadType) {
+ case WORKFLOW_INPUT:
+ stringBuilder.append(workflowInputPath);
+ break;
+ case WORKFLOW_OUTPUT:
+ stringBuilder.append(workflowOutputPath);
+ break;
+ case TASK_INPUT:
+ stringBuilder.append(taskInputPath);
+ break;
+ case TASK_OUTPUT:
+ stringBuilder.append(taskOutputPath);
+ break;
+ }
+ stringBuilder.append(IDGenerator.generate()).append(".json");
+ return stringBuilder.toString();
+ }
+}
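A minimal end-to-end usage sketch of the class above, assuming the blob endpoint is actually reachable. The class name and payload are invented here, and the connection string is the same dummy Azurite-style one used in AzureBlobPayloadStorageTest below; getLocation only needs the account key to sign the URL locally, whereas upload and download issue real requests against the endpoint.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import com.netflix.conductor.azureblob.config.AzureBlobProperties;
import com.netflix.conductor.azureblob.storage.AzureBlobPayloadStorage;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;

public class AzureBlobPayloadStorageSketch {

    public static void main(String[] args) throws Exception {
        AzureBlobProperties properties = new AzureBlobProperties();
        properties.setConnectionString(
                "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;"
                        + "AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;"
                        + "BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;EndpointSuffix=localhost");

        AzureBlobPayloadStorage storage = new AzureBlobPayloadStorage(properties);

        // Generate a blob path plus a pre-signed URL for writing a workflow input payload.
        ExternalStorageLocation location =
                storage.getLocation(
                        ExternalPayloadStorage.Operation.WRITE,
                        ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT,
                        null);
        System.out.println(location.getPath() + " -> " + location.getUri());

        // Upload a small JSON document to that path (requires the blob endpoint to be reachable).
        byte[] payload = "{\"hello\":\"world\"}".getBytes(StandardCharsets.UTF_8);
        storage.upload(location.getPath(), new ByteArrayInputStream(payload), payload.length);

        // Read it back; per the Javadoc above, the caller closes the returned stream.
        try (InputStream downloaded = storage.download(location.getPath())) {
            System.out.println(new String(downloaded.readAllBytes(), StandardCharsets.UTF_8));
        }
    }
}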
diff --git a/azureblob-storage/src/test/java/com/netflix/conductor/azureblob/storage/AzureBlobPayloadStorageTest.java b/azureblob-storage/src/test/java/com/netflix/conductor/azureblob/storage/AzureBlobPayloadStorageTest.java
new file mode 100644
index 0000000000..5ce1bb49cc
--- /dev/null
+++ b/azureblob-storage/src/test/java/com/netflix/conductor/azureblob/storage/AzureBlobPayloadStorageTest.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.azureblob.storage;
+
+import java.time.Duration;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import com.netflix.conductor.azureblob.config.AzureBlobProperties;
+import com.netflix.conductor.common.run.ExternalStorageLocation;
+import com.netflix.conductor.common.utils.ExternalPayloadStorage;
+import com.netflix.conductor.core.exception.ApplicationException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class AzureBlobPayloadStorageTest {
+
+ private AzureBlobProperties properties;
+
+ @Before
+ public void setUp() {
+ properties = mock(AzureBlobProperties.class);
+ when(properties.getConnectionString()).thenReturn(null);
+ when(properties.getContainerName()).thenReturn("conductor-payloads");
+ when(properties.getEndpoint()).thenReturn(null);
+ when(properties.getSasToken()).thenReturn(null);
+ when(properties.getSignedUrlExpirationDuration()).thenReturn(Duration.ofSeconds(5));
+ when(properties.getWorkflowInputPath()).thenReturn("workflow/input/");
+ when(properties.getWorkflowOutputPath()).thenReturn("workflow/output/");
+ when(properties.getTaskInputPath()).thenReturn("task/input/");
+ when(properties.getTaskOutputPath()).thenReturn("task/output/");
+ }
+
+ /** Dummy credentials. The Azure SDK doesn't work with Azurite since it cleans parameters. */
+ private final String azuriteConnectionString =
+ "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;EndpointSuffix=localhost";
+
+ @Rule public ExpectedException expectedException = ExpectedException.none();
+
+ @Test
+ public void testNoStorageAccount() {
+ expectedException.expect(ApplicationException.class);
+ new AzureBlobPayloadStorage(properties);
+ }
+
+ @Test
+ public void testUseConnectionString() {
+ when(properties.getConnectionString()).thenReturn(azuriteConnectionString);
+ new AzureBlobPayloadStorage(properties);
+ }
+
+ @Test
+ public void testUseEndpoint() {
+ String azuriteEndpoint = "http://127.0.0.1:10000/";
+ when(properties.getEndpoint()).thenReturn(azuriteEndpoint);
+ new AzureBlobPayloadStorage(properties);
+ }
+
+ @Test
+ public void testGetLocationFixedPath() {
+ when(properties.getConnectionString()).thenReturn(azuriteConnectionString);
+ AzureBlobPayloadStorage azureBlobPayloadStorage = new AzureBlobPayloadStorage(properties);
+ String path = "somewhere";
+ ExternalStorageLocation externalStorageLocation =
+ azureBlobPayloadStorage.getLocation(
+ ExternalPayloadStorage.Operation.READ,
+ ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT,
+ path);
+ assertNotNull(externalStorageLocation);
+ assertEquals(path, externalStorageLocation.getPath());
+ assertNotNull(externalStorageLocation.getUri());
+ }
+
+ private void testGetLocation(
+ AzureBlobPayloadStorage azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation operation,
+ ExternalPayloadStorage.PayloadType payloadType,
+ String expectedPath) {
+ ExternalStorageLocation externalStorageLocation =
+ azureBlobPayloadStorage.getLocation(operation, payloadType, null);
+ assertNotNull(externalStorageLocation);
+ assertNotNull(externalStorageLocation.getPath());
+ assertTrue(externalStorageLocation.getPath().startsWith(expectedPath));
+ assertNotNull(externalStorageLocation.getUri());
+ assertTrue(externalStorageLocation.getUri().contains(expectedPath));
+ }
+
+ @Test
+ public void testGetAllLocations() {
+ when(properties.getConnectionString()).thenReturn(azuriteConnectionString);
+ AzureBlobPayloadStorage azureBlobPayloadStorage = new AzureBlobPayloadStorage(properties);
+
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.READ,
+ ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT,
+ properties.getWorkflowInputPath());
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.READ,
+ ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT,
+ properties.getWorkflowOutputPath());
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.READ,
+ ExternalPayloadStorage.PayloadType.TASK_INPUT,
+ properties.getTaskInputPath());
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.READ,
+ ExternalPayloadStorage.PayloadType.TASK_OUTPUT,
+ properties.getTaskOutputPath());
+
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.WRITE,
+ ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT,
+ properties.getWorkflowInputPath());
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.WRITE,
+ ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT,
+ properties.getWorkflowOutputPath());
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.WRITE,
+ ExternalPayloadStorage.PayloadType.TASK_INPUT,
+ properties.getTaskInputPath());
+ testGetLocation(
+ azureBlobPayloadStorage,
+ ExternalPayloadStorage.Operation.WRITE,
+ ExternalPayloadStorage.PayloadType.TASK_OUTPUT,
+ properties.getTaskOutputPath());
+ }
+}
diff --git a/build.gradle b/build.gradle
index 9596096ccf..6fb5ea22fb 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,50 +1,57 @@
buildscript {
- repositories {
- jcenter()
+ repositories {
+ mavenCentral()
maven {
- url "https://artifacts.elastic.co/maven"
+ url "https://plugins.gradle.org/m2/"
}
}
-
dependencies {
- classpath 'com.netflix.nebula:gradle-extra-configurations-plugin:4.0.1'
- classpath 'org.apache.ant:ant:1.9.7'
+ classpath 'com.netflix.nebula:gradle-extra-configurations-plugin:5.0.3'
+ // revElasticSearch7 in dependencies.gradle needs to be updated when spring is upgraded
+ classpath 'org.springframework.boot:spring-boot-gradle-plugin:2.3.12.RELEASE'
+ classpath 'com.diffplug.spotless:spotless-plugin-gradle:5.+'
}
}
+
plugins {
- id 'nebula.netflixoss' version '5.1.1'
- id "io.spring.dependency-management" version "1.0.4.RELEASE"
+ id 'io.spring.dependency-management' version '1.0.9.RELEASE'
+ id 'java'
+ id 'application'
+ id 'jacoco'
+ id 'nebula.netflixoss' version '9.2.2'
+ id 'org.sonarqube' version '3.1.1'
id 'com.github.kt3k.coveralls' version '2.8.2'
}
+/*
+ * Copyright 2021 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
// Establish version and status
ext.githubProjectName = rootProject.name // Change if github project name is not the same as the root project's name
-apply plugin: 'project-report'
-apply from: "$rootDir/versionsOfDependencies.gradle"
-
-allprojects {
- apply plugin: 'idea'
- apply plugin: 'jacoco'
- apply plugin: 'eclipse'
-
- repositories {
- jcenter()
-
- // oss-candidate for -rc.* verions:
- maven {
- url "https://dl.bintray.com/netflixoss/oss-candidate"
- }
- }
+subprojects {
+ tasks.withType(Javadoc).all { enabled = false }
}
-def javaProjects = subprojects.findAll {
- it.name != "ui"
-}
+apply from: "$rootDir/dependencies.gradle"
-configure(javaProjects) {
+// change the ES version used by Spring Boot Dependency Management plugin
+ext['elasticsearch.version'] = revElasticSearch7
+
+allprojects {
apply plugin: 'nebula.netflixoss'
- apply plugin: 'java'
+ apply plugin: 'io.spring.dependency-management'
+ apply plugin: 'java-library'
apply plugin: 'project-report'
apply plugin: "io.spring.dependency-management"
@@ -56,8 +63,8 @@ configure(javaProjects) {
}
}
- sourceCompatibility = 1.8
- targetCompatibility = 1.8
+ sourceCompatibility = JavaVersion.VERSION_11
+ targetCompatibility = JavaVersion.VERSION_11
repositories {
jcenter()
@@ -66,88 +73,138 @@ configure(javaProjects) {
}
}
- dependencies {
+ dependencies {
testCompile "junit:junit:${revJUnit}"
testCompile("org.mockito:mockito-core:${revMockito}") {
exclude group: 'org.hamcrest', module: 'hamcrest-core'
}
}
- group = "com.netflix.${githubProjectName}"
+ group = 'com.netflix.conductor'
- tasks.withType(Test) {
- maxParallelForks = 100
+ configurations.all {
+ exclude group: 'ch.qos.logback', module: 'logback-classic'
+ exclude group: 'ch.qos.logback', module: 'logback-core'
+ exclude group: 'org.apache.logging.log4j', module: 'log4j-to-slf4j'
+ exclude group: 'org.slf4j', module: 'slf4j-log4j12'
}
- license {
- excludes(['**/*.txt', '**/*.conf', '**/*.properties', '**/*.json', '**/swagger-ui/*'])
- }
+ repositories {
+ mavenCentral()
- task licenseFormatTests (type:nl.javadude.gradle.plugins.license.License) {
- source = fileTree(dir: "src/test").include("**/*")
- }
- licenseFormat.dependsOn licenseFormatTests
+ // oss-candidate for -rc.* versions:
+ maven {
+ url "https://artifactory-oss.prod.netflix.net/artifactory/maven-oss-candidates"
+ }
+ /**
+ * This repository locates artifacts that don't exist in Maven Central but that we had to back up from jcenter.
+ * The exclusiveContent block below ensures the filtered groups are resolved only from this repository.
+ */
+ exclusiveContent {
+ forRepository {
+ maven {
+ url "https://artifactory-oss.prod.netflix.net/artifactory/required-jcenter-modules-backup"
+ }
+ }
+ filter {
+ includeGroupByRegex "com\\.github\\.vmg.*"
+ }
+ }
+ }
+ dependencyManagement {
+ imports {
+ mavenBom("org.springframework.boot:spring-boot-dependencies:2.3.12.RELEASE")
+ }
+ }
- tasks.withType(Test) {
- task ->
- // set heap size for the test JVM(s)
- minHeapSize = "256m"
- maxHeapSize = "2g"
+ dependencies {
+ implementation('org.apache.logging.log4j:log4j-core') {
+ version {
+ strictly '2.17.0'
+ }
+ }
+ implementation('org.apache.logging.log4j:log4j-api') {
+ version {
+ strictly '2.17.0'
+ }
+ }
+ implementation('org.apache.logging.log4j:log4j-slf4j-impl') {
+ version {
+ strictly '2.17.0'
+ }
+ }
+ implementation('org.apache.logging.log4j:log4j-jul') {
+ version {
+ strictly '2.17.0'
+ }
+ }
+ implementation('org.apache.logging.log4j:log4j-web') {
+ version {
+ strictly '2.17.0'
+ }
+ }
+ annotationProcessor 'org.springframework.boot:spring-boot-configuration-processor'
- jacocoTestReport.executionData += files("$buildDir/jacoco/${task.name}.exec")
+ testImplementation('org.springframework.boot:spring-boot-starter-test')
+ testImplementation('org.springframework.boot:spring-boot-starter-log4j2')
}
- jacocoTestReport {
- reports {
- html.enabled = true
- xml.enabled = true
- csv.enabled = false
+ // processes additional configuration metadata json file as described here
+ // https://docs.spring.io/spring-boot/docs/2.3.1.RELEASE/reference/html/appendix-configuration-metadata.html#configuration-metadata-additional-metadata
+ compileJava.inputs.files(processResources)
+
+ test {
+ useJUnitPlatform()
+ testLogging {
+ events = ["SKIPPED", "FAILED"]
+ exceptionFormat = "full"
+ showStandardStreams = false
}
}
-}
-
-/**********************************
- * Coverage Tasks
- **********************************/
-task codeCoverageReport(type: JacocoReport, group: "Coverage reports") {
- executionData fileTree(project.rootDir.absolutePath).include("**/build/jacoco/*.exec")
- dependsOn subprojects*.test
+}
- subprojects.each {
- sourceSets it.sourceSets.main
+// all client and client-related modules are published with Java 8 compatibility
+["annotations", "common", "client", "client-spring", "grpc", "grpc-client"].each {
+ project(":conductor-$it") {
+ compileJava {
+ options.release = 8
+ }
}
+}
+jacocoTestReport {
reports {
- xml.enabled = true
- xml.destination new File("${buildDir}/reports/jacoco/report.xml")
html.enabled = true
- html.destination new File("${buildDir}/reports/jacoco/html")
+ xml.enabled = true
csv.enabled = false
}
- afterEvaluate {
- // Exclude generated files from top-level coverage report
- classDirectories = files(
- classDirectories.files.collect {
- fileTree(
- dir: it
- )
- }
- )
- }
}
-coveralls {
- sourceDirs = subprojects.sourceSets.main.allSource.srcDirs.flatten()
- jacocoReportPath = "${project.buildDir}/reports/jacoco/report.xml"
+task server {
+ dependsOn ':conductor-server:bootRun'
}
-tasks.coveralls {
- group = "Coverage reports"
- description = "Uploads the aggregated coverage report to Coveralls"
- dependsOn codeCoverageReport
+sonarqube {
+ properties {
+ property "sonar.projectKey", "com.netflix.conductor:conductor"
+ property "sonar.organization", "netflix"
+ property "sonar.host.url", "https://sonarcloud.io"
+ }
}
+configure(allprojects - project(':conductor-grpc')) {
+ apply plugin: 'com.diffplug.spotless'
+
+ spotless {
+ java {
+ googleJavaFormat().aosp()
+ removeUnusedImports()
+ importOrder('java', 'javax', 'org', 'com.netflix', '', '\\#com.netflix', '\\#')
+ licenseHeaderFile("$rootDir/licenseheader.txt")
+ }
+ }
+}
diff --git a/buildViaTravis.sh b/buildViaTravis.sh
deleted file mode 100755
index 4f2c588be9..0000000000
--- a/buildViaTravis.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-# This script will build the project.
-if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
- echo -e "Build Pull Request #$TRAVIS_PULL_REQUEST => Branch [$TRAVIS_BRANCH]"
- ./gradlew build codeCoverageReport coveralls
-elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" == "" ]; then
- echo -e 'Build Branch with Snapshot => Branch ['$TRAVIS_BRANCH']'
- ./gradlew -Prelease.travisci=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -PsonatypeUsername="${sonatypeUsername}" -PsonatypePassword="${sonatypePassword}" build snapshot codeCoverageReport coveralls --info --stacktrace
-elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" != "" ]; then
- echo -e 'Build Branch for Release => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG']'
- case "$TRAVIS_TAG" in
- *-rc\.*)
- ./gradlew -Prelease.travisci=true -Prelease.useLastTag=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -PsonatypeUsername="${sonatypeUsername}" -PsonatypePassword="${sonatypePassword}" candidate codeCoverageReport coveralls --info --stacktrace
- ;;
- *)
- ./gradlew -Prelease.travisci=true -Prelease.useLastTag=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -PsonatypeUsername="${sonatypeUsername}" -PsonatypePassword="${sonatypePassword}" final codeCoverageReport coveralls --info --stacktrace
- ;;
- esac
-else
- echo -e 'WARN: Should not be here => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG'] Pull Request ['$TRAVIS_PULL_REQUEST']'
- ./gradlew build codeCoverageReport coveralls
-fi
-
diff --git a/cassandra-persistence/README.md b/cassandra-persistence/README.md
deleted file mode 100644
index ba6b82489c..0000000000
--- a/cassandra-persistence/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-### Note
-This provides a partial implementation of the ExecutionDAO using Cassandra as the datastore.
-The execution data is stored in Cassandra in the `workflows` table. A task to workflow mapping is also maintained in a separate `task_lookup` table.
-
-All datastore operations that are used during the critical execution path of a workflow are currently implemented. This includes CRUD operations for workflows and tasks.
-
-This does not provide implementations for the QueueDAO and MetadataDAO interfaces.
-
\ No newline at end of file
diff --git a/cassandra-persistence/build.gradle b/cassandra-persistence/build.gradle
index a4363adc63..6fee6f8ff0 100644
--- a/cassandra-persistence/build.gradle
+++ b/cassandra-persistence/build.gradle
@@ -1,11 +1,32 @@
-apply plugin: 'java'
+/*
+ * Copyright 2021 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+apply plugin: 'groovy'
dependencies {
- compile project(':conductor-core')
- compile "com.datastax.cassandra:cassandra-driver-core:${revCassandra}"
+ compileOnly 'org.springframework.boot:spring-boot-starter'
- testCompile("org.cassandraunit:cassandra-unit:${revCassandraUnit}") {
- exclude group: "com.datastax.cassandra", module: "cassandra-driver-core"
- }
- testCompile project(':conductor-core').sourceSets.test.output
+ implementation project(':conductor-common')
+ implementation project(':conductor-core')
+ implementation "com.datastax.cassandra:cassandra-driver-core:${revCassandra}"
+ implementation "org.apache.commons:commons-lang3"
+
+ testImplementation project(':conductor-core').sourceSets.test.output
+ testImplementation project(':conductor-common').sourceSets.test.output
+
+ testImplementation "org.codehaus.groovy:groovy-all:${revGroovy}"
+ testImplementation "org.spockframework:spock-core:${revSpock}"
+ testImplementation "org.spockframework:spock-spring:${revSpock}"
+ testImplementation "org.testcontainers:spock:${revTestContainer}"
+ testImplementation "org.testcontainers:cassandra:${revTestContainer}"
+ testImplementation "com.google.protobuf:protobuf-java:${revProtoBuf}"
}
diff --git a/cassandra-persistence/dependencies.lock b/cassandra-persistence/dependencies.lock
index 2964a9d53e..979779956f 100644
--- a/cassandra-persistence/dependencies.lock
+++ b/cassandra-persistence/dependencies.lock
@@ -1,1077 +1,2368 @@
{
- "compile": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
- },
- "com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
- },
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
- },
- "com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "project": true
- },
- "com.netflix.conductor:conductor-core": {
- "project": true
- },
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
- "com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
- },
- "com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
- },
- "io.reactivex:rxjava": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.0"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
+ "annotationProcessor": {
+ "org.springframework.boot:spring-boot-configuration-processor": {
+ "locked": "2.3.12.RELEASE"
}
},
"compileClasspath": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
- },
"com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
+ "locked": "3.10.2"
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "com.github.jnr:jffi": {
+ "locked": "1.2.16",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "com.github.jnr:jnr-constants": {
+ "locked": "0.9.9",
+ "transitive": [
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-ffi": {
+ "locked": "2.1.7",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-posix": {
+ "locked": "3.0.44",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "com.github.jnr:jnr-x86asm": {
+ "locked": "1.0.2",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "19.0",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
},
"com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
"project": true
},
"com.netflix.conductor:conductor-core": {
"project": true
},
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
- "com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
- },
- "com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
- },
- "io.reactivex:rxjava": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "io.dropwizard.metrics:metrics-core": {
+ "locked": "4.1.22",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
},
"org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.0"
+ "locked": "3.10"
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0"
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0"
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0"
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "org.ow2.asm:asm-tree"
+ ]
+ },
+ "org.ow2.asm:asm-analysis": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-commons": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-tree": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "org.ow2.asm:asm-analysis",
+ "org.ow2.asm:asm-commons",
+ "org.ow2.asm:asm-util"
+ ]
+ },
+ "org.ow2.asm:asm-util": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "io.dropwizard.metrics:metrics-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j"
+ ]
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
}
},
- "default": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
- },
+ "runtimeClasspath": {
"com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
+ "locked": "3.10.2"
},
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
- },
- "com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "project": true
- },
- "com.netflix.conductor:conductor-core": {
- "project": true
- },
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
- "com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
- },
- "com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
- },
- "io.reactivex:rxjava": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.0"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
- }
- },
- "jacocoAgent": {
- "org.jacoco:org.jacoco.agent": {
- "locked": "0.8.1"
- }
- },
- "jacocoAnt": {
- "org.jacoco:org.jacoco.ant": {
- "locked": "0.8.1"
- }
- },
- "runtime": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
- },
- "com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
+ "locked": "2.11.4",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ ]
+ },
+ "com.github.jnr:jffi": {
+ "locked": "1.2.16",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "com.github.jnr:jnr-constants": {
+ "locked": "0.9.9",
+ "transitive": [
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-ffi": {
+ "locked": "2.1.7",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-posix": {
+ "locked": "3.0.44",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "com.github.jnr:jnr-x86asm": {
+ "locked": "1.0.2",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
},
"com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
+ "locked": "2.0.0",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.rholder:guava-retrying",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
},
"com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
- },
- "com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "project": true
- },
- "com.netflix.conductor:conductor-core": {
- "project": true
- },
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
- "com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
- },
- "com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
- },
- "io.reactivex:rxjava": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.0"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
- }
- },
- "runtimeClasspath": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
- },
- "com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
- },
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
+ "locked": "3.13.0",
+ "transitive": [
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ ]
},
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
+ "com.jayway.jsonpath:json-path": {
+ "locked": "2.4.0",
+ "transitive": [
"com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
+ ]
},
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
+ "com.netflix.conductor:conductor-annotations": {
+ "project": true,
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
+ ]
},
"com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
+ "project": true,
+ "transitive": [
"com.netflix.conductor:conductor-core"
- ],
- "project": true
+ ]
},
"com.netflix.conductor:conductor-core": {
"project": true
},
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
"com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
+ "locked": "0.122.0",
+ "transitive": [
"com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
+ ]
},
"com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
+ "locked": "0.3.3",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "io.dropwizard.metrics:metrics-core": {
+ "locked": "4.1.22",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler"
+ ]
},
"io.reactivex:rxjava": {
- "firstLevelTransitive": [
+ "locked": "1.3.8",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.netflix.conductor:conductor-core",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.apache.bval:bval-jsr": {
+ "locked": "2.0.5",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ ]
},
"org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.0"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
- }
- },
- "testCompile": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
- },
- "com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
- },
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
+ "locked": "3.10",
+ "transitive": [
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ ]
},
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
- },
- "com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "project": true
- },
- "com.netflix.conductor:conductor-core": {
- "project": true
- },
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
- "com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
- },
- "com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
+ ]
},
- "io.reactivex:rxjava": {
- "firstLevelTransitive": [
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
+ ]
},
- "org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "3.4"
- },
- "org.cassandraunit:cassandra-unit": {
- "locked": "3.5.0.1",
- "requested": "3.5.0.1"
- },
- "org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
+ ]
+ },
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "net.minidev:accessors-smart",
+ "org.ow2.asm:asm-tree"
+ ]
+ },
+ "org.ow2.asm:asm-analysis": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-commons": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-tree": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "org.ow2.asm:asm-analysis",
+ "org.ow2.asm:asm-commons",
+ "org.ow2.asm:asm-util"
+ ]
+ },
+ "org.ow2.asm:asm-util": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.jayway.jsonpath:json-path",
+ "com.netflix.spectator:spectator-api",
+ "io.dropwizard.metrics:metrics-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl"
+ ]
}
},
"testCompileClasspath": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
- },
"com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
+ "locked": "3.10.2",
+ "transitive": [
+ "org.testcontainers:cassandra"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.github.docker-java:docker-java-api"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common",
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "com.github.docker-java:docker-java-api": {
+ "locked": "3.2.8",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "com.github.docker-java:docker-java-transport": {
+ "locked": "3.2.8",
+ "transitive": [
+ "com.github.docker-java:docker-java-transport-zerodep"
+ ]
+ },
+ "com.github.docker-java:docker-java-transport-zerodep": {
+ "locked": "3.2.8",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "com.github.jnr:jffi": {
+ "locked": "1.2.16",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "com.github.jnr:jnr-constants": {
+ "locked": "0.9.9",
+ "transitive": [
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-ffi": {
+ "locked": "2.1.7",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-posix": {
+ "locked": "3.0.44",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "com.github.jnr:jnr-x86asm": {
+ "locked": "1.0.2",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "19.0",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
},
"com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
+ "locked": "3.13.0"
},
"com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
+ "locked": "2.4.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
},
"com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
"project": true
},
"com.netflix.conductor:conductor-core": {
"project": true
},
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
- "com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
- },
- "com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
- },
- "io.reactivex:rxjava": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "com.thoughtworks.qdox:qdox": {
+ "locked": "1.12.1",
+ "transitive": [
+ "org.codehaus.groovy:groovy-docgenerator"
+ ]
+ },
+ "com.vaadin.external.google:android-json": {
+ "locked": "0.0.20131108.vaadin1",
+ "transitive": [
+ "org.skyscreamer:jsonassert"
+ ]
+ },
+ "commons-cli:commons-cli": {
+ "locked": "1.4",
+ "transitive": [
+ "org.codehaus.groovy:groovy-cli-commons"
+ ]
+ },
+ "info.picocli:picocli": {
+ "locked": "4.3.2",
+ "transitive": [
+ "org.codehaus.groovy:groovy-cli-picocli"
+ ]
+ },
+ "io.dropwizard.metrics:metrics-core": {
+ "locked": "4.1.22",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "jline:jline": {
+ "locked": "2.14.6",
+ "transitive": [
+ "org.codehaus.groovy:groovy-groovysh"
+ ]
},
"junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
+ "locked": "4.13.2",
+ "transitive": [
+ "org.codehaus.groovy:groovy-test",
+ "org.junit.vintage:junit-vintage-engine",
+ "org.spockframework:spock-core",
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "net.bytebuddy:byte-buddy": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.bytebuddy:byte-buddy-agent": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.java.dev.jna:jna": {
+ "locked": "5.8.0",
+ "transitive": [
+ "com.github.docker-java:docker-java-transport-zerodep",
+ "org.rnorth.visible-assertions:visible-assertions"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.apache.ant:ant": {
+ "locked": "1.9.15",
+ "transitive": [
+ "org.codehaus.groovy:groovy-ant"
+ ]
+ },
+ "org.apache.ant:ant-launcher": {
+ "locked": "1.9.15",
+ "transitive": [
+ "org.apache.ant:ant"
+ ]
+ },
+ "org.apache.commons:commons-compress": {
+ "locked": "1.20",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
},
"org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.4"
- },
- "org.cassandraunit:cassandra-unit": {
- "locked": "3.5.0.1",
- "requested": "3.5.0.1"
+ "locked": "3.10"
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-web",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0"
+ },
+ "org.apiguardian:apiguardian-api": {
+ "locked": "1.1.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.assertj:assertj-core": {
+ "locked": "3.16.1",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.codehaus.groovy:groovy": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-ant",
+ "org.codehaus.groovy:groovy-cli-commons",
+ "org.codehaus.groovy:groovy-cli-picocli",
+ "org.codehaus.groovy:groovy-console",
+ "org.codehaus.groovy:groovy-datetime",
+ "org.codehaus.groovy:groovy-docgenerator",
+ "org.codehaus.groovy:groovy-groovydoc",
+ "org.codehaus.groovy:groovy-groovysh",
+ "org.codehaus.groovy:groovy-jmx",
+ "org.codehaus.groovy:groovy-json",
+ "org.codehaus.groovy:groovy-jsr223",
+ "org.codehaus.groovy:groovy-macro",
+ "org.codehaus.groovy:groovy-nio",
+ "org.codehaus.groovy:groovy-servlet",
+ "org.codehaus.groovy:groovy-sql",
+ "org.codehaus.groovy:groovy-swing",
+ "org.codehaus.groovy:groovy-templates",
+ "org.codehaus.groovy:groovy-test",
+ "org.codehaus.groovy:groovy-test-junit5",
+ "org.codehaus.groovy:groovy-testng",
+ "org.codehaus.groovy:groovy-xml",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-all": {
+ "locked": "2.5.13"
+ },
+ "org.codehaus.groovy:groovy-ant": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-cli-commons": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-cli-picocli": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-console",
+ "org.codehaus.groovy:groovy-docgenerator",
+ "org.codehaus.groovy:groovy-groovydoc",
+ "org.codehaus.groovy:groovy-groovysh"
+ ]
+ },
+ "org.codehaus.groovy:groovy-console": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-groovysh"
+ ]
+ },
+ "org.codehaus.groovy:groovy-datetime": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-docgenerator": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-groovydoc": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-ant"
+ ]
+ },
+ "org.codehaus.groovy:groovy-groovysh": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-jmx": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-json": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-jsr223": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-macro": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-nio": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-servlet": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-sql": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-swing": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-console"
+ ]
+ },
+ "org.codehaus.groovy:groovy-templates": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-console",
+ "org.codehaus.groovy:groovy-docgenerator",
+ "org.codehaus.groovy:groovy-groovydoc",
+ "org.codehaus.groovy:groovy-servlet",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-test": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-test-junit5": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-testng": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-xml": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-servlet",
+ "org.codehaus.groovy:groovy-templates",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.hamcrest:hamcrest": {
+ "locked": "2.2",
+ "transitive": [
+ "org.hamcrest:hamcrest-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.hamcrest:hamcrest-core": {
+ "locked": "2.2",
+ "transitive": [
+ "junit:junit"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-api": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.codehaus.groovy:groovy-test-junit5",
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-params"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-params": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.platform:junit-platform-commons": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-engine": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.platform:junit-platform-launcher",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-launcher": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.codehaus.groovy:groovy-test-junit5"
+ ]
+ },
+ "org.junit.vintage:junit-vintage-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit:junit-bom": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.platform:junit-platform-launcher",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
},
"org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
+ "locked": "3.3.3",
+ "transitive": [
+ "org.mockito:mockito-junit-jupiter",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.mockito:mockito-junit-jupiter": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.objenesis:objenesis": {
+ "locked": "2.6",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "org.opentest4j:opentest4j": {
+ "locked": "1.2.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "net.minidev:accessors-smart",
+ "org.ow2.asm:asm-tree"
+ ]
+ },
+ "org.ow2.asm:asm-analysis": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-commons": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-tree": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "org.ow2.asm:asm-analysis",
+ "org.ow2.asm:asm-commons",
+ "org.ow2.asm:asm-util"
+ ]
+ },
+ "org.ow2.asm:asm-util": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.rnorth.duct-tape:duct-tape": {
+ "locked": "1.0.8",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "org.rnorth.visible-assertions:visible-assertions": {
+ "locked": "2.1.2",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "org.skyscreamer:jsonassert": {
+ "locked": "1.5.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2",
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.docker-java:docker-java-api",
+ "com.github.docker-java:docker-java-transport-zerodep",
+ "com.jayway.jsonpath:json-path",
+ "io.dropwizard.metrics:metrics-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j",
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "org.spockframework:spock-core": {
+ "locked": "1.3-groovy-2.5",
+ "transitive": [
+ "org.spockframework:spock-spring",
+ "org.testcontainers:spock"
+ ]
+ },
+ "org.spockframework:spock-spring": {
+ "locked": "1.3-groovy-2.5"
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-log4j2": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-test": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-test": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-test-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression",
+ "org.springframework:spring-test"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.springframework:spring-test": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.testcontainers:cassandra": {
+ "locked": "1.15.3"
+ },
+ "org.testcontainers:database-commons": {
+ "locked": "1.15.3",
+ "transitive": [
+ "org.testcontainers:cassandra"
+ ]
+ },
+ "org.testcontainers:spock": {
+ "locked": "1.15.3"
+ },
+ "org.testcontainers:testcontainers": {
+ "locked": "1.15.3",
+ "transitive": [
+ "org.testcontainers:database-commons",
+ "org.testcontainers:spock"
+ ]
+ },
+ "org.xmlunit:xmlunit-core": {
+ "locked": "2.7.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
}
},
- "testRuntime": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
+ "testRuntimeClasspath": {
+ "com.beust:jcommander": {
+ "locked": "1.72",
+ "transitive": [
+ "org.testng:testng"
+ ]
},
"com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
+ "locked": "3.10.2",
+ "transitive": [
+ "org.testcontainers:cassandra"
+ ]
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.github.docker-java:docker-java-api",
+ "com.netflix.conductor:conductor-core"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
+ "locked": "2.11.4",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ ]
+ },
+ "com.github.docker-java:docker-java-api": {
+ "locked": "3.2.8",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "com.github.docker-java:docker-java-transport": {
+ "locked": "3.2.8",
+ "transitive": [
+ "com.github.docker-java:docker-java-transport-zerodep"
+ ]
+ },
+ "com.github.docker-java:docker-java-transport-zerodep": {
+ "locked": "3.2.8",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "com.github.jnr:jffi": {
+ "locked": "1.2.16",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "com.github.jnr:jnr-constants": {
+ "locked": "0.9.9",
+ "transitive": [
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-ffi": {
+ "locked": "2.1.7",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.jnr:jnr-posix"
+ ]
+ },
+ "com.github.jnr:jnr-posix": {
+ "locked": "3.0.44",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "com.github.jnr:jnr-x86asm": {
+ "locked": "1.0.2",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
},
"com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
+ "locked": "2.0.0",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.rholder:guava-retrying",
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
},
"com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
+ "locked": "3.13.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core"
+ ]
},
"com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
+ "locked": "2.4.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "com.netflix.conductor:conductor-annotations": {
+ "project": true,
+ "transitive": [
+ "com.netflix.conductor:conductor-common"
+ ]
},
"com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
+ "project": true,
+ "transitive": [
"com.netflix.conductor:conductor-core"
- ],
- "project": true
+ ]
},
"com.netflix.conductor:conductor-core": {
"project": true
},
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
"com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
+ "locked": "0.122.0",
+ "transitive": [
"com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
+ ]
},
"com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
+ "locked": "0.3.3",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "com.thoughtworks.qdox:qdox": {
+ "locked": "1.12.1",
+ "transitive": [
+ "org.codehaus.groovy:groovy-docgenerator"
+ ]
+ },
+ "com.vaadin.external.google:android-json": {
+ "locked": "0.0.20131108.vaadin1",
+ "transitive": [
+ "org.skyscreamer:jsonassert"
+ ]
+ },
+ "commons-cli:commons-cli": {
+ "locked": "1.4",
+ "transitive": [
+ "org.codehaus.groovy:groovy-cli-commons"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "info.picocli:picocli": {
+ "locked": "4.3.2",
+ "transitive": [
+ "org.codehaus.groovy:groovy-cli-picocli"
+ ]
+ },
+ "io.dropwizard.metrics:metrics-core": {
+ "locked": "4.1.22",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-buffer": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-codec": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler"
+ ]
+ },
+ "io.netty:netty-common": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-buffer",
+ "io.netty:netty-codec",
+ "io.netty:netty-handler",
+ "io.netty:netty-resolver",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-handler": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core"
+ ]
+ },
+ "io.netty:netty-resolver": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-handler",
+ "io.netty:netty-transport"
+ ]
+ },
+ "io.netty:netty-transport": {
+ "locked": "4.1.65.Final",
+ "transitive": [
+ "io.netty:netty-codec",
+ "io.netty:netty-handler"
+ ]
},
"io.reactivex:rxjava": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "locked": "1.3.8",
+ "transitive": [
+ "com.netflix.conductor:conductor-core"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.netflix.conductor:conductor-core",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "jline:jline": {
+ "locked": "2.14.6",
+ "transitive": [
+ "org.codehaus.groovy:groovy-groovysh"
+ ]
},
"junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
- },
- "org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.4"
- },
- "org.cassandraunit:cassandra-unit": {
- "locked": "3.5.0.1",
- "requested": "3.5.0.1"
- },
- "org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
- }
- },
- "testRuntimeClasspath": {
- "com.amazonaws:aws-java-sdk-s3": {
- "firstLevelTransitive": [
+ "locked": "4.13.2",
+ "transitive": [
+ "org.codehaus.groovy:groovy-test",
+ "org.junit.vintage:junit-vintage-engine",
+ "org.spockframework:spock-core",
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "net.bytebuddy:byte-buddy": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.bytebuddy:byte-buddy-agent": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "net.java.dev.jna:jna": {
+ "locked": "5.8.0",
+ "transitive": [
+ "com.github.docker-java:docker-java-transport-zerodep",
+ "org.rnorth.visible-assertions:visible-assertions"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.apache.ant:ant": {
+ "locked": "1.9.15",
+ "transitive": [
+ "org.apache.ant:ant-junit",
+ "org.codehaus.groovy:groovy-ant"
+ ]
+ },
+ "org.apache.ant:ant-antlr": {
+ "locked": "1.9.15",
+ "transitive": [
+ "org.codehaus.groovy:groovy-ant"
+ ]
+ },
+ "org.apache.ant:ant-junit": {
+ "locked": "1.9.15",
+ "transitive": [
+ "org.codehaus.groovy:groovy-ant"
+ ]
+ },
+ "org.apache.ant:ant-launcher": {
+ "locked": "1.9.15",
+ "transitive": [
+ "org.apache.ant:ant",
+ "org.codehaus.groovy:groovy-ant"
+ ]
+ },
+ "org.apache.bval:bval-jsr": {
+ "locked": "2.0.5",
+ "transitive": [
+ "com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "1.11.86"
+ ]
},
- "com.datastax.cassandra:cassandra-driver-core": {
- "locked": "3.6.0",
- "requested": "3.6.0"
+ "org.apache.commons:commons-compress": {
+ "locked": "1.20",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
},
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10",
+ "transitive": [
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
+ ]
},
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.conductor:conductor-core",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
"com.netflix.conductor:conductor-common",
"com.netflix.conductor:conductor-core"
- ],
- "locked": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.inject.extensions:guice-multibindings": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.inject:guice": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "4.1.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.jayway.jsonpath:json-path": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "2.2.0"
- },
- "com.netflix.conductor:conductor-common": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "project": true
- },
- "com.netflix.conductor:conductor-core": {
- "project": true
- },
- "com.netflix.servo:servo-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.12.17"
- },
- "com.netflix.spectator:spectator-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.68.0"
- },
- "com.spotify:completable-futures": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "0.3.1"
- },
- "io.reactivex:rxjava": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "1.2.2"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
- },
- "org.apache.commons:commons-lang3": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-core"
- ],
- "locked": "3.4"
- },
- "org.cassandraunit:cassandra-unit": {
- "locked": "3.5.0.1",
- "requested": "3.5.0.1"
+ ]
+ },
+ "org.apiguardian:apiguardian-api": {
+ "locked": "1.1.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.assertj:assertj-core": {
+ "locked": "3.16.1",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "org.codehaus.groovy:groovy": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-ant",
+ "org.codehaus.groovy:groovy-cli-commons",
+ "org.codehaus.groovy:groovy-cli-picocli",
+ "org.codehaus.groovy:groovy-console",
+ "org.codehaus.groovy:groovy-datetime",
+ "org.codehaus.groovy:groovy-docgenerator",
+ "org.codehaus.groovy:groovy-groovydoc",
+ "org.codehaus.groovy:groovy-groovysh",
+ "org.codehaus.groovy:groovy-jmx",
+ "org.codehaus.groovy:groovy-json",
+ "org.codehaus.groovy:groovy-jsr223",
+ "org.codehaus.groovy:groovy-macro",
+ "org.codehaus.groovy:groovy-nio",
+ "org.codehaus.groovy:groovy-servlet",
+ "org.codehaus.groovy:groovy-sql",
+ "org.codehaus.groovy:groovy-swing",
+ "org.codehaus.groovy:groovy-templates",
+ "org.codehaus.groovy:groovy-test",
+ "org.codehaus.groovy:groovy-test-junit5",
+ "org.codehaus.groovy:groovy-testng",
+ "org.codehaus.groovy:groovy-xml",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-all": {
+ "locked": "2.5.13"
+ },
+ "org.codehaus.groovy:groovy-ant": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-cli-commons": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-cli-picocli": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-console",
+ "org.codehaus.groovy:groovy-docgenerator",
+ "org.codehaus.groovy:groovy-groovydoc",
+ "org.codehaus.groovy:groovy-groovysh"
+ ]
+ },
+ "org.codehaus.groovy:groovy-console": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-groovysh"
+ ]
+ },
+ "org.codehaus.groovy:groovy-datetime": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-docgenerator": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-groovydoc"
+ ]
+ },
+ "org.codehaus.groovy:groovy-groovydoc": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-ant"
+ ]
+ },
+ "org.codehaus.groovy:groovy-groovysh": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-jmx": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-json": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-jsr223": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-macro": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-nio": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-servlet": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-sql": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-swing": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-console"
+ ]
+ },
+ "org.codehaus.groovy:groovy-templates": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-console",
+ "org.codehaus.groovy:groovy-docgenerator",
+ "org.codehaus.groovy:groovy-groovydoc",
+ "org.codehaus.groovy:groovy-servlet",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-test": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.codehaus.groovy:groovy-test-junit5": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-testng": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all"
+ ]
+ },
+ "org.codehaus.groovy:groovy-xml": {
+ "locked": "2.5.14",
+ "transitive": [
+ "org.codehaus.groovy:groovy-all",
+ "org.codehaus.groovy:groovy-servlet",
+ "org.codehaus.groovy:groovy-templates",
+ "org.spockframework:spock-core",
+ "org.spockframework:spock-spring"
+ ]
+ },
+ "org.hamcrest:hamcrest": {
+ "locked": "2.2",
+ "transitive": [
+ "org.hamcrest:hamcrest-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.hamcrest:hamcrest-core": {
+ "locked": "2.2",
+ "transitive": [
+ "junit:junit"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-api": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.codehaus.groovy:groovy-test-junit5",
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.mockito:mockito-junit-jupiter"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.codehaus.groovy:groovy-test-junit5",
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-params": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.platform:junit-platform-commons": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-engine": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.platform:junit-platform-launcher",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-launcher": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.codehaus.groovy:groovy-test-junit5"
+ ]
+ },
+ "org.junit.vintage:junit-vintage-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit:junit-bom": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.platform:junit-platform-launcher",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
},
"org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
+ "locked": "3.3.3",
+ "transitive": [
+ "org.mockito:mockito-junit-jupiter",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.mockito:mockito-junit-jupiter": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.objenesis:objenesis": {
+ "locked": "2.6",
+ "transitive": [
+ "org.mockito:mockito-core"
+ ]
+ },
+ "org.opentest4j:opentest4j": {
+ "locked": "1.2.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "net.minidev:accessors-smart",
+ "org.ow2.asm:asm-tree"
+ ]
+ },
+ "org.ow2.asm:asm-analysis": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-commons": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.ow2.asm:asm-tree": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi",
+ "org.ow2.asm:asm-analysis",
+ "org.ow2.asm:asm-commons",
+ "org.ow2.asm:asm-util"
+ ]
+ },
+ "org.ow2.asm:asm-util": {
+ "locked": "5.0.3",
+ "transitive": [
+ "com.github.jnr:jnr-ffi"
+ ]
+ },
+ "org.rnorth.duct-tape:duct-tape": {
+ "locked": "1.0.8",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "org.rnorth.visible-assertions:visible-assertions": {
+ "locked": "2.1.2",
+ "transitive": [
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "org.skyscreamer:jsonassert": {
+ "locked": "1.5.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2",
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.datastax.cassandra:cassandra-driver-core",
+ "com.github.docker-java:docker-java-api",
+ "com.github.docker-java:docker-java-transport-zerodep",
+ "com.jayway.jsonpath:json-path",
+ "com.netflix.spectator:spectator-api",
+ "io.dropwizard.metrics:metrics-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j",
+ "org.testcontainers:testcontainers"
+ ]
+ },
+ "org.spockframework:spock-core": {
+ "locked": "1.3-groovy-2.5",
+ "transitive": [
+ "org.spockframework:spock-spring",
+ "org.testcontainers:spock"
+ ]
+ },
+ "org.spockframework:spock-spring": {
+ "locked": "1.3-groovy-2.5"
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-log4j2": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-test": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-test": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-test-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression",
+ "org.springframework:spring-test"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.springframework:spring-test": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.testcontainers:cassandra": {
+ "locked": "1.15.3"
+ },
+ "org.testcontainers:database-commons": {
+ "locked": "1.15.3",
+ "transitive": [
+ "org.testcontainers:cassandra"
+ ]
+ },
+ "org.testcontainers:spock": {
+ "locked": "1.15.3"
+ },
+ "org.testcontainers:testcontainers": {
+ "locked": "1.15.3",
+ "transitive": [
+ "org.testcontainers:database-commons",
+ "org.testcontainers:spock"
+ ]
+ },
+ "org.testng:testng": {
+ "locked": "6.13.1",
+ "transitive": [
+ "org.codehaus.groovy:groovy-testng"
+ ]
+ },
+ "org.xmlunit:xmlunit-core": {
+ "locked": "2.7.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
}
}
}
\ No newline at end of file
diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/CassandraClusterProvider.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/CassandraClusterProvider.java
deleted file mode 100644
index 7f1f9ecbee..0000000000
--- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/CassandraClusterProvider.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2016 Netflix, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
- * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations under the License.
- */
-package com.netflix.conductor.cassandra;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.Metadata;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.inject.Inject;
-import javax.inject.Provider;
-
-public class CassandraClusterProvider implements Provider<Cluster> {
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.config;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import com.netflix.conductor.cassandra.dao.CassandraEventHandlerDAO;
+import com.netflix.conductor.cassandra.dao.CassandraExecutionDAO;
+import com.netflix.conductor.cassandra.dao.CassandraMetadataDAO;
+import com.netflix.conductor.cassandra.dao.CassandraPollDataDAO;
+import com.netflix.conductor.cassandra.util.Statements;
+import com.netflix.conductor.dao.EventHandlerDAO;
+import com.netflix.conductor.dao.ExecutionDAO;
+import com.netflix.conductor.dao.MetadataDAO;
+
+import com.datastax.driver.core.Cluster;
+import com.datastax.driver.core.Metadata;
+import com.datastax.driver.core.Session;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+@Configuration(proxyBeanMethods = false)
+@EnableConfigurationProperties(CassandraProperties.class)
+@ConditionalOnProperty(name = "conductor.db.type", havingValue = "cassandra")
+public class CassandraConfiguration {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(CassandraConfiguration.class);
+
+ @Bean
+ public Cluster cluster(CassandraProperties properties) {
+ String host = properties.getHostAddress();
+ int port = properties.getPort();
+
+ LOGGER.info("Connecting to cassandra cluster with host:{}, port:{}", host, port);
+
+ Cluster cluster = Cluster.builder().addContactPoint(host).withPort(port).build();
+
+ Metadata metadata = cluster.getMetadata();
+ LOGGER.info("Connected to cluster: {}", metadata.getClusterName());
+ metadata.getAllHosts()
+ .forEach(
+ h ->
+ LOGGER.info(
+ "Datacenter:{}, host:{}, rack: {}",
+ h.getDatacenter(),
+ h.getEndPoint().resolve().getHostName(),
+ h.getRack()));
+ return cluster;
+ }
+
+ @Bean
+ public Session session(Cluster cluster) {
+ LOGGER.info("Initializing cassandra session");
+ return cluster.connect();
+ }
+
+ @Bean
+ public MetadataDAO cassandraMetadataDAO(
+ Session session,
+ ObjectMapper objectMapper,
+ CassandraProperties properties,
+ Statements statements) {
+ return new CassandraMetadataDAO(session, objectMapper, properties, statements);
+ }
+
+ @Bean
+ public ExecutionDAO cassandraExecutionDAO(
+ Session session,
+ ObjectMapper objectMapper,
+ CassandraProperties properties,
+ Statements statements) {
+ return new CassandraExecutionDAO(session, objectMapper, properties, statements);
+ }
+
+ @Bean
+ public EventHandlerDAO cassandraEventHandlerDAO(
+ Session session,
+ ObjectMapper objectMapper,
+ CassandraProperties properties,
+ Statements statements) {
+ return new CassandraEventHandlerDAO(session, objectMapper, properties, statements);
+ }
+
+ @Bean
+ public CassandraPollDataDAO cassandraPollDataDAO() {
+ return new CassandraPollDataDAO();
+ }
+
+ @Bean
+ public Statements statements(CassandraProperties cassandraProperties) {
+ return new Statements(cassandraProperties.getKeyspace());
+ }
+}
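The configuration class above is only activated when conductor.db.type is set to cassandra; Spring Boot then builds the Cluster, the Session, and the DAO beans in that order. The following is a minimal, illustrative sketch of the same wiring done by hand, for example in a test that bypasses auto-configuration. It assumes a Cassandra instance reachable at the default host and port; the class name, the main method, and the plain ObjectMapper are assumptions made for the sketch, not part of the patch.

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Session;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.conductor.cassandra.config.CassandraProperties;
import com.netflix.conductor.cassandra.dao.CassandraEventHandlerDAO;
import com.netflix.conductor.cassandra.dao.CassandraExecutionDAO;
import com.netflix.conductor.cassandra.dao.CassandraMetadataDAO;
import com.netflix.conductor.cassandra.util.Statements;

public class ManualCassandraWiringSketch {
    public static void main(String[] args) {
        // Defaults from CassandraProperties: 127.0.0.1:9142, keyspace "conductor".
        CassandraProperties properties = new CassandraProperties();
        Cluster cluster = Cluster.builder()
                .addContactPoint(properties.getHostAddress())
                .withPort(properties.getPort())
                .build();
        Session session = cluster.connect();
        // In the real application the ObjectMapper is a Spring-managed bean; a plain
        // instance is used here only to keep the sketch self-contained.
        ObjectMapper objectMapper = new ObjectMapper();
        Statements statements = new Statements(properties.getKeyspace());
        // Constructing the DAOs also creates the keyspace and tables if they do not
        // exist (see CassandraBaseDAO.init()).
        CassandraMetadataDAO metadataDAO =
                new CassandraMetadataDAO(session, objectMapper, properties, statements);
        CassandraExecutionDAO executionDAO =
                new CassandraExecutionDAO(session, objectMapper, properties, statements);
        CassandraEventHandlerDAO eventHandlerDAO =
                new CassandraEventHandlerDAO(session, objectMapper, properties, statements);
        // ... use the DAOs, then release the driver resources.
        session.close();
        cluster.close();
    }
}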
diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java
new file mode 100644
index 0000000000..19286cad45
--- /dev/null
+++ b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2021 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.config;
+
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.boot.convert.DurationUnit;
+
+import com.datastax.driver.core.ConsistencyLevel;
+
+@ConfigurationProperties("conductor.cassandra")
+public class CassandraProperties {
+
+ /** The address for the cassandra database host */
+ private String hostAddress = "127.0.0.1";
+
+ /** The port to be used to connect to the cassandra database instance */
+ private int port = 9142;
+
+ /** The name of the cassandra cluster */
+ private String cluster = "";
+
+ /** The keyspace to be used in the cassandra datastore */
+ private String keyspace = "conductor";
+
+ /**
+     * The number of tasks to be stored in a single partition, which is used for sharding
+ * workflows in the datastore
+ */
+ private int shardSize = 100;
+
+ /** The replication strategy with which to configure the keyspace */
+ private String replicationStrategy = "SimpleStrategy";
+
+ /** The key to be used while configuring the replication factor */
+ private String replicationFactorKey = "replication_factor";
+
+ /** The replication factor value with which the keyspace is configured */
+ private int replicationFactorValue = 3;
+
+ /** The consistency level to be used for read operations */
+ private ConsistencyLevel readConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
+
+ /** The consistency level to be used for write operations */
+ private ConsistencyLevel writeConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
+
+ /** The time in seconds after which the in-memory task definitions cache will be refreshed */
+ @DurationUnit(ChronoUnit.SECONDS)
+ private Duration taskDefCacheRefreshInterval = Duration.ofSeconds(60);
+
+ /** The time in seconds after which the in-memory event handler cache will be refreshed */
+ @DurationUnit(ChronoUnit.SECONDS)
+ private Duration eventHandlerCacheRefreshInterval = Duration.ofSeconds(60);
+
+ /** The time to live in seconds for which the event execution will be persisted */
+ @DurationUnit(ChronoUnit.SECONDS)
+ private Duration eventExecutionPersistenceTtl = Duration.ZERO;
+
+ public String getHostAddress() {
+ return hostAddress;
+ }
+
+ public void setHostAddress(String hostAddress) {
+ this.hostAddress = hostAddress;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+ public void setPort(int port) {
+ this.port = port;
+ }
+
+ public String getCluster() {
+ return cluster;
+ }
+
+ public void setCluster(String cluster) {
+ this.cluster = cluster;
+ }
+
+ public String getKeyspace() {
+ return keyspace;
+ }
+
+ public void setKeyspace(String keyspace) {
+ this.keyspace = keyspace;
+ }
+
+ public int getShardSize() {
+ return shardSize;
+ }
+
+ public void setShardSize(int shardSize) {
+ this.shardSize = shardSize;
+ }
+
+ public String getReplicationStrategy() {
+ return replicationStrategy;
+ }
+
+ public void setReplicationStrategy(String replicationStrategy) {
+ this.replicationStrategy = replicationStrategy;
+ }
+
+ public String getReplicationFactorKey() {
+ return replicationFactorKey;
+ }
+
+ public void setReplicationFactorKey(String replicationFactorKey) {
+ this.replicationFactorKey = replicationFactorKey;
+ }
+
+ public int getReplicationFactorValue() {
+ return replicationFactorValue;
+ }
+
+ public void setReplicationFactorValue(int replicationFactorValue) {
+ this.replicationFactorValue = replicationFactorValue;
+ }
+
+ public ConsistencyLevel getReadConsistencyLevel() {
+ return readConsistencyLevel;
+ }
+
+ public void setReadConsistencyLevel(ConsistencyLevel readConsistencyLevel) {
+ this.readConsistencyLevel = readConsistencyLevel;
+ }
+
+ public ConsistencyLevel getWriteConsistencyLevel() {
+ return writeConsistencyLevel;
+ }
+
+ public void setWriteConsistencyLevel(ConsistencyLevel writeConsistencyLevel) {
+ this.writeConsistencyLevel = writeConsistencyLevel;
+ }
+
+ public Duration getTaskDefCacheRefreshInterval() {
+ return taskDefCacheRefreshInterval;
+ }
+
+ public void setTaskDefCacheRefreshInterval(Duration taskDefCacheRefreshInterval) {
+ this.taskDefCacheRefreshInterval = taskDefCacheRefreshInterval;
+ }
+
+ public Duration getEventHandlerCacheRefreshInterval() {
+ return eventHandlerCacheRefreshInterval;
+ }
+
+ public void setEventHandlerCacheRefreshInterval(Duration eventHandlerCacheRefreshInterval) {
+ this.eventHandlerCacheRefreshInterval = eventHandlerCacheRefreshInterval;
+ }
+
+ public Duration getEventExecutionPersistenceTtl() {
+ return eventExecutionPersistenceTtl;
+ }
+
+ public void setEventExecutionPersistenceTtl(Duration eventExecutionPersistenceTtl) {
+ this.eventExecutionPersistenceTtl = eventExecutionPersistenceTtl;
+ }
+}
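Because the class is bound with @ConfigurationProperties("conductor.cassandra"), each field above is normally set through a conductor.cassandra.* property. As a rough sketch of the knobs involved, the same values can also be set programmatically through the setters defined above; the concrete values here are placeholders chosen for illustration, not recommendations from the patch.

import java.time.Duration;

import com.datastax.driver.core.ConsistencyLevel;
import com.netflix.conductor.cassandra.config.CassandraProperties;

public class CassandraPropertiesSketch {
    public static void main(String[] args) {
        CassandraProperties properties = new CassandraProperties();
        properties.setHostAddress("10.0.0.12");    // default: 127.0.0.1
        properties.setPort(9042);                  // default: 9142
        properties.setKeyspace("conductor");       // keyspace created on startup by CassandraBaseDAO
        properties.setShardSize(100);              // tasks per workflow partition
        properties.setReplicationFactorValue(3);   // paired with the default SimpleStrategy
        properties.setReadConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM);
        properties.setWriteConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM);
        // The Duration fields are annotated with @DurationUnit(ChronoUnit.SECONDS), so a bare
        // number in configuration is interpreted as seconds.
        properties.setTaskDefCacheRefreshInterval(Duration.ofSeconds(60));
        properties.setEventExecutionPersistenceTtl(Duration.ofDays(7));
        System.out.println(properties.getHostAddress() + ":" + properties.getPort());
    }
}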
diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraBaseDAO.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraBaseDAO.java
new file mode 100644
index 0000000000..70664c1694
--- /dev/null
+++ b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/dao/CassandraBaseDAO.java
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.dao;
+
+import java.io.IOException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.conductor.cassandra.config.CassandraProperties;
+import com.netflix.conductor.metrics.Monitors;
+
+import com.datastax.driver.core.DataType;
+import com.datastax.driver.core.Session;
+import com.datastax.driver.core.schemabuilder.SchemaBuilder;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableMap;
+
+import static com.netflix.conductor.cassandra.util.Constants.DAO_NAME;
+import static com.netflix.conductor.cassandra.util.Constants.ENTITY_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.EVENT_EXECUTION_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_NAME_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.HANDLERS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.MESSAGE_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.PAYLOAD_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.SHARD_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_EXECUTIONS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_HANDLERS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEFS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEF_LIMIT;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_LOOKUP;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOWS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS_INDEX;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFINITION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEF_NAME_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TOTAL_PARTITIONS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TOTAL_TASKS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEFINITION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_VALUE;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_VERSION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_VERSION_KEY;
+
+/**
+ * Creates the keyspace and tables.
+ *
+ * CREATE KEYSPACE IF NOT EXISTS conductor WITH replication = { 'class' :
+ * 'NetworkTopologyStrategy', 'us-east': '3'};
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.workflows ( workflow_id uuid, shard_id int, task_id text,
+ * entity text, payload text, total_tasks int STATIC, total_partitions int STATIC, PRIMARY
+ * KEY((workflow_id, shard_id), entity, task_id) );
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.task_lookup( task_id uuid, workflow_id uuid, PRIMARY KEY
+ * (task_id) );
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.task_def_limit( task_def_name text, task_id uuid,
+ * workflow_id uuid, PRIMARY KEY ((task_def_name), task_id) );
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.workflow_definitions( workflow_def_name text, version
+ * int, workflow_definition text, PRIMARY KEY ((workflow_def_name), version) );
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.workflow_defs_index( workflow_def_version_index text,
+ * workflow_def_name_version text, workflow_def_index_value text,PRIMARY KEY
+ * ((workflow_def_version_index), workflow_def_name_version) );
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.task_definitions( task_defs text, task_def_name text,
+ * task_definition text, PRIMARY KEY ((task_defs), task_def_name) );
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.event_handlers( handlers text, event_handler_name text,
+ * event_handler text, PRIMARY KEY ((handlers), event_handler_name) );
+ *
+ * CREATE TABLE IF NOT EXISTS conductor.event_executions( message_id text, event_handler_name
+ * text, event_execution_id text, payload text, PRIMARY KEY ((message_id, event_handler_name),
+ * event_execution_id) );
+ */
+public abstract class CassandraBaseDAO {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(CassandraBaseDAO.class);
+
+ private final ObjectMapper objectMapper;
+ protected final Session session;
+ protected final CassandraProperties properties;
+
+ private boolean initialized = false;
+
+ public CassandraBaseDAO(
+ Session session, ObjectMapper objectMapper, CassandraProperties properties) {
+ this.session = session;
+ this.objectMapper = objectMapper;
+ this.properties = properties;
+
+ init();
+ }
+
+ private void init() {
+ try {
+ if (!initialized) {
+ session.execute(getCreateKeyspaceStatement());
+ session.execute(getCreateWorkflowsTableStatement());
+ session.execute(getCreateTaskLookupTableStatement());
+ session.execute(getCreateTaskDefLimitTableStatement());
+ session.execute(getCreateWorkflowDefsTableStatement());
+ session.execute(getCreateWorkflowDefsIndexTableStatement());
+ session.execute(getCreateTaskDefsTableStatement());
+ session.execute(getCreateEventHandlersTableStatement());
+ session.execute(getCreateEventExecutionsTableStatement());
+ LOGGER.info(
+ "{} initialization complete! Tables created!", getClass().getSimpleName());
+ initialized = true;
+ }
+ } catch (Exception e) {
+ LOGGER.error("Error initializing and setting up keyspace and table in cassandra", e);
+ throw e;
+ }
+ }
+
+ private String getCreateKeyspaceStatement() {
+ return SchemaBuilder.createKeyspace(properties.getKeyspace())
+ .ifNotExists()
+ .with()
+ .replication(
+ ImmutableMap.of(
+ "class",
+ properties.getReplicationStrategy(),
+ properties.getReplicationFactorKey(),
+ properties.getReplicationFactorValue()))
+ .durableWrites(true)
+ .getQueryString();
+ }
+
+ private String getCreateWorkflowsTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_WORKFLOWS)
+ .ifNotExists()
+ .addPartitionKey(WORKFLOW_ID_KEY, DataType.uuid())
+ .addPartitionKey(SHARD_ID_KEY, DataType.cint())
+ .addClusteringColumn(ENTITY_KEY, DataType.text())
+ .addClusteringColumn(TASK_ID_KEY, DataType.text())
+ .addColumn(PAYLOAD_KEY, DataType.text())
+ .addStaticColumn(TOTAL_TASKS_KEY, DataType.cint())
+ .addStaticColumn(TOTAL_PARTITIONS_KEY, DataType.cint())
+ .getQueryString();
+ }
+
+ private String getCreateTaskLookupTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_TASK_LOOKUP)
+ .ifNotExists()
+ .addPartitionKey(TASK_ID_KEY, DataType.uuid())
+ .addColumn(WORKFLOW_ID_KEY, DataType.uuid())
+ .getQueryString();
+ }
+
+ private String getCreateTaskDefLimitTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_TASK_DEF_LIMIT)
+ .ifNotExists()
+ .addPartitionKey(TASK_DEF_NAME_KEY, DataType.text())
+ .addClusteringColumn(TASK_ID_KEY, DataType.uuid())
+ .addColumn(WORKFLOW_ID_KEY, DataType.uuid())
+ .getQueryString();
+ }
+
+ private String getCreateWorkflowDefsTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_WORKFLOW_DEFS)
+ .ifNotExists()
+ .addPartitionKey(WORKFLOW_DEF_NAME_KEY, DataType.text())
+ .addClusteringColumn(WORKFLOW_VERSION_KEY, DataType.cint())
+ .addColumn(WORKFLOW_DEFINITION_KEY, DataType.text())
+ .getQueryString();
+ }
+
+ private String getCreateWorkflowDefsIndexTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_WORKFLOW_DEFS_INDEX)
+ .ifNotExists()
+ .addPartitionKey(WORKFLOW_DEF_INDEX_KEY, DataType.text())
+ .addClusteringColumn(WORKFLOW_DEF_NAME_VERSION_KEY, DataType.text())
+ .addColumn(WORKFLOW_DEF_INDEX_VALUE, DataType.text())
+ .getQueryString();
+ }
+
+ private String getCreateTaskDefsTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_TASK_DEFS)
+ .ifNotExists()
+ .addPartitionKey(TASK_DEFS_KEY, DataType.text())
+ .addClusteringColumn(TASK_DEF_NAME_KEY, DataType.text())
+ .addColumn(TASK_DEFINITION_KEY, DataType.text())
+ .getQueryString();
+ }
+
+ private String getCreateEventHandlersTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_EVENT_HANDLERS)
+ .ifNotExists()
+ .addPartitionKey(HANDLERS_KEY, DataType.text())
+ .addClusteringColumn(EVENT_HANDLER_NAME_KEY, DataType.text())
+ .addColumn(EVENT_HANDLER_KEY, DataType.text())
+ .getQueryString();
+ }
+
+ private String getCreateEventExecutionsTableStatement() {
+ return SchemaBuilder.createTable(properties.getKeyspace(), TABLE_EVENT_EXECUTIONS)
+ .ifNotExists()
+ .addPartitionKey(MESSAGE_ID_KEY, DataType.text())
+ .addPartitionKey(EVENT_HANDLER_NAME_KEY, DataType.text())
+ .addClusteringColumn(EVENT_EXECUTION_ID_KEY, DataType.text())
+ .addColumn(PAYLOAD_KEY, DataType.text())
+ .getQueryString();
+ }
+
+ String toJson(Object value) {
+ try {
+ return objectMapper.writeValueAsString(value);
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.dao;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.conductor.annotations.Trace;
+import com.netflix.conductor.cassandra.config.CassandraProperties;
+import com.netflix.conductor.cassandra.util.Statements;
+import com.netflix.conductor.common.metadata.events.EventHandler;
+import com.netflix.conductor.core.exception.ApplicationException;
+import com.netflix.conductor.core.exception.ApplicationException.Code;
+import com.netflix.conductor.dao.EventHandlerDAO;
+import com.netflix.conductor.metrics.Monitors;
+
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.HANDLERS_KEY;
+
+@Trace
+public class CassandraEventHandlerDAO extends CassandraBaseDAO implements EventHandlerDAO {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(CassandraEventHandlerDAO.class);
+ private static final String CLASS_NAME = CassandraEventHandlerDAO.class.getSimpleName();
+
+ private volatile Map
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.dao;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.conductor.annotations.Trace;
+import com.netflix.conductor.cassandra.config.CassandraProperties;
+import com.netflix.conductor.cassandra.util.Statements;
+import com.netflix.conductor.common.metadata.events.EventExecution;
+import com.netflix.conductor.common.metadata.tasks.Task;
+import com.netflix.conductor.common.metadata.tasks.TaskDef;
+import com.netflix.conductor.common.run.Workflow;
+import com.netflix.conductor.common.utils.RetryUtil;
+import com.netflix.conductor.core.exception.ApplicationException;
+import com.netflix.conductor.core.exception.ApplicationException.Code;
+import com.netflix.conductor.dao.ConcurrentExecutionLimitDAO;
+import com.netflix.conductor.dao.ExecutionDAO;
+import com.netflix.conductor.metrics.Monitors;
+
+import com.datastax.driver.core.BatchStatement;
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+
+import static com.netflix.conductor.cassandra.util.Constants.DEFAULT_SHARD_ID;
+import static com.netflix.conductor.cassandra.util.Constants.DEFAULT_TOTAL_PARTITIONS;
+import static com.netflix.conductor.cassandra.util.Constants.ENTITY_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.ENTITY_TYPE_TASK;
+import static com.netflix.conductor.cassandra.util.Constants.ENTITY_TYPE_WORKFLOW;
+import static com.netflix.conductor.cassandra.util.Constants.PAYLOAD_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TOTAL_PARTITIONS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TOTAL_TASKS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_ID_KEY;
+import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS;
+
+@Trace
+public class CassandraExecutionDAO extends CassandraBaseDAO
+ implements ExecutionDAO, ConcurrentExecutionLimitDAO {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(CassandraExecutionDAO.class);
+ private static final String CLASS_NAME = CassandraExecutionDAO.class.getSimpleName();
+
+ private final PreparedStatement insertWorkflowStatement;
+ private final PreparedStatement insertTaskStatement;
+ private final PreparedStatement insertEventExecutionStatement;
+
+ private final PreparedStatement selectTotalStatement;
+ private final PreparedStatement selectTaskStatement;
+ private final PreparedStatement selectWorkflowStatement;
+ private final PreparedStatement selectWorkflowWithTasksStatement;
+ private final PreparedStatement selectTaskLookupStatement;
+ private final PreparedStatement selectTasksFromTaskDefLimitStatement;
+ private final PreparedStatement selectEventExecutionsStatement;
+
+ private final PreparedStatement updateWorkflowStatement;
+ private final PreparedStatement updateTotalTasksStatement;
+ private final PreparedStatement updateTotalPartitionsStatement;
+ private final PreparedStatement updateTaskLookupStatement;
+ private final PreparedStatement updateTaskDefLimitStatement;
+ private final PreparedStatement updateEventExecutionStatement;
+
+ private final PreparedStatement deleteWorkflowStatement;
+ private final PreparedStatement deleteTaskStatement;
+ private final PreparedStatement deleteTaskLookupStatement;
+ private final PreparedStatement deleteTaskDefLimitStatement;
+ private final PreparedStatement deleteEventExecutionStatement;
+
+ private final int eventExecutionsTTL;
+
+ public CassandraExecutionDAO(
+ Session session,
+ ObjectMapper objectMapper,
+ CassandraProperties properties,
+ Statements statements) {
+ super(session, objectMapper, properties);
+
+ eventExecutionsTTL = (int) properties.getEventExecutionPersistenceTtl().getSeconds();
+
+ this.insertWorkflowStatement =
+ session.prepare(statements.getInsertWorkflowStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.insertTaskStatement =
+ session.prepare(statements.getInsertTaskStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.insertEventExecutionStatement =
+ session.prepare(statements.getInsertEventExecutionStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+
+ this.selectTotalStatement =
+ session.prepare(statements.getSelectTotalStatement())
+ .setConsistencyLevel(properties.getReadConsistencyLevel());
+ this.selectTaskStatement =
+ session.prepare(statements.getSelectTaskStatement())
+ .setConsistencyLevel(properties.getReadConsistencyLevel());
+ this.selectWorkflowStatement =
+ session.prepare(statements.getSelectWorkflowStatement())
+ .setConsistencyLevel(properties.getReadConsistencyLevel());
+ this.selectWorkflowWithTasksStatement =
+ session.prepare(statements.getSelectWorkflowWithTasksStatement())
+ .setConsistencyLevel(properties.getReadConsistencyLevel());
+ this.selectTaskLookupStatement =
+ session.prepare(statements.getSelectTaskFromLookupTableStatement())
+ .setConsistencyLevel(properties.getReadConsistencyLevel());
+ this.selectTasksFromTaskDefLimitStatement =
+ session.prepare(statements.getSelectTasksFromTaskDefLimitStatement())
+ .setConsistencyLevel(properties.getReadConsistencyLevel());
+ this.selectEventExecutionsStatement =
+ session.prepare(
+ statements
+ .getSelectAllEventExecutionsForMessageFromEventExecutionsStatement())
+ .setConsistencyLevel(properties.getReadConsistencyLevel());
+
+ this.updateWorkflowStatement =
+ session.prepare(statements.getUpdateWorkflowStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.updateTotalTasksStatement =
+ session.prepare(statements.getUpdateTotalTasksStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.updateTotalPartitionsStatement =
+ session.prepare(statements.getUpdateTotalPartitionsStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.updateTaskLookupStatement =
+ session.prepare(statements.getUpdateTaskLookupStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.updateTaskDefLimitStatement =
+ session.prepare(statements.getUpdateTaskDefLimitStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.updateEventExecutionStatement =
+ session.prepare(statements.getUpdateEventExecutionStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+
+ this.deleteWorkflowStatement =
+ session.prepare(statements.getDeleteWorkflowStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.deleteTaskStatement =
+ session.prepare(statements.getDeleteTaskStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.deleteTaskLookupStatement =
+ session.prepare(statements.getDeleteTaskLookupStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.deleteTaskDefLimitStatement =
+ session.prepare(statements.getDeleteTaskDefLimitStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ this.deleteEventExecutionStatement =
+ session.prepare(statements.getDeleteEventExecutionsStatement())
+ .setConsistencyLevel(properties.getWriteConsistencyLevel());
+ }
+
+ @Override
+ public List
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.dao;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.conductor.annotations.Trace;
+import com.netflix.conductor.cassandra.config.CassandraProperties;
+import com.netflix.conductor.cassandra.util.Statements;
+import com.netflix.conductor.common.metadata.tasks.TaskDef;
+import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
+import com.netflix.conductor.core.exception.ApplicationException;
+import com.netflix.conductor.core.exception.ApplicationException.Code;
+import com.netflix.conductor.dao.MetadataDAO;
+import com.netflix.conductor.metrics.Monitors;
+
+import com.datastax.driver.core.PreparedStatement;
+import com.datastax.driver.core.ResultSet;
+import com.datastax.driver.core.Row;
+import com.datastax.driver.core.Session;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.annotations.VisibleForTesting;
+
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFINITION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEFINITION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_VERSION_KEY;
+
+@Trace
+public class CassandraMetadataDAO extends CassandraBaseDAO implements MetadataDAO {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(CassandraMetadataDAO.class);
+ private static final String CLASS_NAME = CassandraMetadataDAO.class.getSimpleName();
+ private static final String INDEX_DELIMITER = "/";
+
+ private Map
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.dao;
+
+import java.util.List;
+
+import com.netflix.conductor.common.metadata.tasks.PollData;
+import com.netflix.conductor.dao.PollDataDAO;
+
+/**
+ * This is a dummy implementation and this feature is not implemented for Cassandra-backed
+ * Conductor.
+ */
+public class CassandraPollDataDAO implements PollDataDAO {
+
+ @Override
+ public void updateLastPollData(String taskDefName, String domain, String workerId) {
+ throw new UnsupportedOperationException(
+ "This method is not implemented in CassandraPollDataDAO. Please use ExecutionDAOFacade instead.");
+ }
+
+ @Override
+ public PollData getPollData(String taskDefName, String domain) {
+ throw new UnsupportedOperationException(
+ "This method is not implemented in CassandraPollDataDAO. Please use ExecutionDAOFacade instead.");
+ }
+
+ @Override
+ public List
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.util;
+
+public interface Constants {
+
+ String DAO_NAME = "cassandra";
+
+ String TABLE_WORKFLOWS = "workflows";
+ String TABLE_TASK_LOOKUP = "task_lookup";
+ String TABLE_TASK_DEF_LIMIT = "task_def_limit";
+ String TABLE_WORKFLOW_DEFS = "workflow_definitions";
+ String TABLE_WORKFLOW_DEFS_INDEX = "workflow_defs_index";
+ String TABLE_TASK_DEFS = "task_definitions";
+ String TABLE_EVENT_HANDLERS = "event_handlers";
+ String TABLE_EVENT_EXECUTIONS = "event_executions";
+
+ String WORKFLOW_ID_KEY = "workflow_id";
+ String SHARD_ID_KEY = "shard_id";
+ String TASK_ID_KEY = "task_id";
+ String ENTITY_KEY = "entity";
+ String PAYLOAD_KEY = "payload";
+ String TOTAL_TASKS_KEY = "total_tasks";
+ String TOTAL_PARTITIONS_KEY = "total_partitions";
+ String TASK_DEF_NAME_KEY = "task_def_name";
+ String WORKFLOW_DEF_NAME_KEY = "workflow_def_name";
+ String WORKFLOW_VERSION_KEY = "version";
+ String WORKFLOW_DEFINITION_KEY = "workflow_definition";
+ String WORKFLOW_DEF_INDEX_KEY = "workflow_def_version_index";
+ String WORKFLOW_DEF_INDEX_VALUE = "workflow_def_index_value";
+ String WORKFLOW_DEF_NAME_VERSION_KEY = "workflow_def_name_version";
+ String TASK_DEFS_KEY = "task_defs";
+ String TASK_DEFINITION_KEY = "task_definition";
+ String HANDLERS_KEY = "handlers";
+ String EVENT_HANDLER_NAME_KEY = "event_handler_name";
+ String EVENT_HANDLER_KEY = "event_handler";
+ String MESSAGE_ID_KEY = "message_id";
+ String EVENT_EXECUTION_ID_KEY = "event_execution_id";
+
+ String ENTITY_TYPE_WORKFLOW = "workflow";
+ String ENTITY_TYPE_TASK = "task";
+
+ int DEFAULT_SHARD_ID = 1;
+ int DEFAULT_TOTAL_PARTITIONS = 1;
+}
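The Statements class added in the next hunk turns these table and column names into CQL strings using the driver's QueryBuilder (see its imports of bindMarker, eq, and set). As a rough sketch of that pattern, and not necessarily the exact statement the patch prepares, the task_lookup read described by the constants above could be built like this; the class name and hard-coded keyspace are assumptions for the sketch.

import com.datastax.driver.core.querybuilder.QueryBuilder;

import static com.datastax.driver.core.querybuilder.QueryBuilder.bindMarker;
import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;
import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_LOOKUP;
import static com.netflix.conductor.cassandra.util.Constants.TASK_ID_KEY;
import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_ID_KEY;

public class TaskLookupQuerySketch {
    public static void main(String[] args) {
        String keyspace = "conductor";
        // Roughly: SELECT workflow_id FROM conductor.task_lookup WHERE task_id = ?
        String cql = QueryBuilder.select(WORKFLOW_ID_KEY)
                .from(keyspace, TABLE_TASK_LOOKUP)
                .where(eq(TASK_ID_KEY, bindMarker()))
                .getQueryString();
        System.out.println(cql);
    }
}

CassandraExecutionDAO prepares such strings once in its constructor and binds the task id at call time (see selectTaskLookupStatement and the read consistency level applied to it above).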
diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Statements.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Statements.java
new file mode 100644
index 0000000000..5c538c41e7
--- /dev/null
+++ b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/util/Statements.java
@@ -0,0 +1,574 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.cassandra.util;
+
+import com.datastax.driver.core.querybuilder.QueryBuilder;
+
+import static com.netflix.conductor.cassandra.util.Constants.ENTITY_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.ENTITY_TYPE_TASK;
+import static com.netflix.conductor.cassandra.util.Constants.ENTITY_TYPE_WORKFLOW;
+import static com.netflix.conductor.cassandra.util.Constants.EVENT_EXECUTION_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.EVENT_HANDLER_NAME_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.HANDLERS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.MESSAGE_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.PAYLOAD_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.SHARD_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_EXECUTIONS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_EVENT_HANDLERS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEFS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_DEF_LIMIT;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_TASK_LOOKUP;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOWS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS;
+import static com.netflix.conductor.cassandra.util.Constants.TABLE_WORKFLOW_DEFS_INDEX;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFINITION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEFS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_DEF_NAME_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TASK_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TOTAL_PARTITIONS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.TOTAL_TASKS_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEFINITION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_INDEX_VALUE;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_DEF_NAME_VERSION_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_ID_KEY;
+import static com.netflix.conductor.cassandra.util.Constants.WORKFLOW_VERSION_KEY;
+
+import static com.datastax.driver.core.querybuilder.QueryBuilder.bindMarker;
+import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;
+import static com.datastax.driver.core.querybuilder.QueryBuilder.set;
+
+/**
+ * DML statements
+ *
+ * MetadataDAO
+ *
+ *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.netflix.conductor.dao.cassandra;
-
-import com.datastax.driver.core.DataType;
-import com.datastax.driver.core.Session;
-import com.datastax.driver.core.schemabuilder.SchemaBuilder;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableMap;
-import com.netflix.conductor.cassandra.CassandraConfiguration;
-import com.netflix.conductor.metrics.Monitors;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-
-import static com.netflix.conductor.util.Constants.DAO_NAME;
-import static com.netflix.conductor.util.Constants.ENTITY_KEY;
-import static com.netflix.conductor.util.Constants.PAYLOAD_KEY;
-import static com.netflix.conductor.util.Constants.SHARD_ID_KEY;
-import static com.netflix.conductor.util.Constants.TABLE_TASK_LOOKUP;
-import static com.netflix.conductor.util.Constants.TABLE_WORKFLOWS;
-import static com.netflix.conductor.util.Constants.TASK_ID_KEY;
-import static com.netflix.conductor.util.Constants.TOTAL_PARTITIONS_KEY;
-import static com.netflix.conductor.util.Constants.TOTAL_TASKS_KEY;
-import static com.netflix.conductor.util.Constants.WORKFLOW_ID_KEY;
-
-/**
- * Creates the keyspace and tables.
- *
- * CREATE KEYSPACE IF NOT EXISTS conductor
- * WITH replication = { 'class' : 'NetworkTopologyStrategy', 'us-east': '3'};
- *
- * CREATE TABLE IF NOT EXISTS conductor.workflows (
- * workflow_id uuid,
- * shard_id int,
- * task_id text,
- * entity text,
- * payload text,
- * total_tasks int STATIC,
- * total_partitions int STATIC,
- * PRIMARY KEY((workflow_id, shard_id), entity, task_id)
- * );
- *
- * CREATE TABLE IF NOT EXISTS conductor.task_lookup(
- * task_id uuid,
- * workflow_id uuid,
- * PRIMARY KEY (task_id)
- * );
- */
-public class CassandraBaseDAO {
- private static final Logger LOGGER = LoggerFactory.getLogger(CassandraBaseDAO.class);
-
- private final ObjectMapper objectMapper;
-
- protected final Session session;
- protected final CassandraConfiguration config;
-
- public CassandraBaseDAO(Session session, ObjectMapper objectMapper, CassandraConfiguration config) {
- this.session = session;
- this.objectMapper = objectMapper;
- this.config = config;
-
- init();
- }
-
- private void init() {
- try {
- session.execute(getCreateKeyspaceStatement());
- session.execute(getCreateWorkflowsTableStatement());
- session.execute(getCreateTaskLookupTableStatement());
- LOGGER.info("CassandraDAO initialization complete! Tables created!");
- } catch (Exception e) {
- LOGGER.error("Error initializing and setting up keyspace and table in cassandra", e);
- throw e;
- }
- }
-
- private String getCreateKeyspaceStatement() {
- return SchemaBuilder.createKeyspace(config.getCassandraKeyspace())
- .ifNotExists()
- .with()
- .replication(ImmutableMap.of("class", config.getReplicationStrategy(), config.getReplicationFactorKey(), config.getReplicationFactorValue()))
- .durableWrites(true)
- .getQueryString();
- }
-
- private String getCreateWorkflowsTableStatement() {
- return SchemaBuilder.createTable(config.getCassandraKeyspace(), TABLE_WORKFLOWS)
- .ifNotExists()
- .addPartitionKey(WORKFLOW_ID_KEY, DataType.uuid())
- .addPartitionKey(SHARD_ID_KEY, DataType.cint())
- .addClusteringColumn(ENTITY_KEY, DataType.text())
- .addClusteringColumn(TASK_ID_KEY, DataType.text())
- .addColumn(PAYLOAD_KEY, DataType.text())
- .addStaticColumn(TOTAL_TASKS_KEY, DataType.cint())
- .addStaticColumn(TOTAL_PARTITIONS_KEY, DataType.cint())
- .getQueryString();
- }
-
- private String getCreateTaskLookupTableStatement() {
- return SchemaBuilder.createTable(config.getCassandraKeyspace(), TABLE_TASK_LOOKUP)
- .ifNotExists()
- .addPartitionKey(TASK_ID_KEY, DataType.uuid())
- .addColumn(WORKFLOW_ID_KEY, DataType.uuid())
- .getQueryString();
- }
-
- String toJson(Object value) {
- try {
- return objectMapper.writeValueAsString(value);
- } catch (JsonProcessingException e) {
- throw new RuntimeException(e);
- }
- }
-
-
- * INSERT INTO conductor.workflows (workflow_id,shard_id,task_id,entity,payload,total_tasks,total_partitions) VALUES (?,?,?,'workflow',?,?,?);
- * INSERT INTO conductor.workflows (workflow_id,shard_id,task_id,entity,payload) VALUES (?,?,?,'task',?);
- *
- * SELECT total_tasks,total_partitions FROM conductor.workflows WHERE workflow_id=? AND shard_id=1;
- * SELECT payload FROM conductor.workflows WHERE workflow_id=? AND shard_id=? AND entity='task' AND task_id=?;
- * SELECT payload FROM conductor.workflows WHERE workflow_id=? AND shard_id=1 AND entity='workflow';
- * SELECT * FROM conductor.workflows WHERE workflow_id=? AND shard_id=?;
- * SELECT workflow_id FROM conductor.task_lookup WHERE task_id=?;
- *
- * UPDATE conductor.workflows SET payload=? WHERE workflow_id=? AND shard_id=1 AND entity='workflow' AND task_id='';
- * UPDATE conductor.workflows SET total_tasks=? WHERE workflow_id=? AND shard_id=?;
- * UPDATE conductor.workflows SET total_partitions=?,total_tasks=? WHERE workflow_id=? AND shard_id=1;
- * UPDATE conductor.task_lookup SET workflow_id=? WHERE task_id=?;
- *
- * DELETE FROM conductor.workflows WHERE workflow_id=? AND shard_id=?;
- * DELETE FROM conductor.workflows WHERE workflow_id=? AND shard_id=? AND entity='task' AND task_id=?;
- * DELETE FROM conductor.task_lookup WHERE task_id=?;
- */
-public class Statements {
- private final String keyspace;
-
- @Inject
- public Statements(CassandraConfiguration config) {
- this.keyspace = config.getCassandraKeyspace();
- }
-
- // Insert Statements
-
- /**
- * @return cql query statement to insert a new workflow into the "workflows" table
- */
- public String getInsertWorkflowStatement() {
- return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOWS)
- .value(WORKFLOW_ID_KEY, bindMarker())
- .value(SHARD_ID_KEY, bindMarker())
- .value(TASK_ID_KEY, bindMarker())
- .value(ENTITY_KEY, ENTITY_TYPE_WORKFLOW)
- .value(PAYLOAD_KEY, bindMarker())
- .value(TOTAL_TASKS_KEY, bindMarker())
- .value(TOTAL_PARTITIONS_KEY, bindMarker())
- .getQueryString();
- }
-
- /**
- * @return cql query statement to insert a new task into the "workflows" table
- */
- public String getInsertTaskStatement() {
- return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOWS)
- .value(WORKFLOW_ID_KEY, bindMarker())
- .value(SHARD_ID_KEY, bindMarker())
- .value(TASK_ID_KEY, bindMarker())
- .value(ENTITY_KEY, ENTITY_TYPE_TASK)
- .value(PAYLOAD_KEY, bindMarker())
- .getQueryString();
- }
-
- // Select Statements
-
- /**
- * @return cql query statement to retrieve the total_tasks and total_partitions for a workflow from the "workflows" table
- */
- public String getSelectTotalStatement() {
- return QueryBuilder.select(TOTAL_TASKS_KEY, TOTAL_PARTITIONS_KEY)
- .from(keyspace, TABLE_WORKFLOWS)
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, 1))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to retrieve a task from the "workflows" table
- */
- public String getSelectTaskStatement() {
- return QueryBuilder.select(PAYLOAD_KEY)
- .from(keyspace, TABLE_WORKFLOWS)
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, bindMarker()))
- .and(eq(ENTITY_KEY, ENTITY_TYPE_TASK))
- .and(eq(TASK_ID_KEY, bindMarker()))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to retrieve a workflow (without its tasks) from the "workflows" table
- */
- public String getSelectWorkflowStatement() {
- return QueryBuilder.select(PAYLOAD_KEY)
- .from(keyspace, TABLE_WORKFLOWS)
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, 1))
- .and(eq(ENTITY_KEY, ENTITY_TYPE_WORKFLOW))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to retrieve a workflow with its tasks from the "workflows" table
- */
- public String getSelectWorkflowWithTasksStatement() {
- return QueryBuilder.select()
- .all()
- .from(keyspace, TABLE_WORKFLOWS)
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, bindMarker()))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to retrieve the workflow_id for a particular task_id from the "task_lookup" table
- */
- public String getSelectTaskFromLookupTableStatement() {
- return QueryBuilder.select(WORKFLOW_ID_KEY)
- .from(keyspace, TABLE_TASK_LOOKUP)
- .where(eq(TASK_ID_KEY, bindMarker()))
- .getQueryString();
- }
-
- // Update Statements
-
- /**
- * @return cql query statement to update a workflow in the "workflows" table
- */
- public String getUpdateWorkflowStatement() {
- return QueryBuilder.update(keyspace, TABLE_WORKFLOWS)
- .with(set(PAYLOAD_KEY, bindMarker()))
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, 1))
- .and(eq(ENTITY_KEY, ENTITY_TYPE_WORKFLOW))
- .and(eq(TASK_ID_KEY, ""))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to update the total_tasks in a shard for a workflow in the "workflows" table
- */
- public String getUpdateTotalTasksStatement() {
- return QueryBuilder.update(keyspace, TABLE_WORKFLOWS)
- .with(set(TOTAL_TASKS_KEY, bindMarker()))
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, bindMarker()))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to update the total_partitions for a workflow in the "workflows" table
- */
- public String getUpdateTotalPartitionsStatement() {
- return QueryBuilder.update(keyspace, TABLE_WORKFLOWS)
- .with(set(TOTAL_PARTITIONS_KEY, bindMarker()))
- .and(set(TOTAL_TASKS_KEY, bindMarker()))
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, 1))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to add a new task_id to workflow_id mapping to the "task_lookup" table
- */
- public String getUpdateTaskLookupStatement() {
- return QueryBuilder.update(keyspace, TABLE_TASK_LOOKUP)
- .with(set(WORKFLOW_ID_KEY, bindMarker()))
- .where(eq(TASK_ID_KEY, bindMarker()))
- .getQueryString();
- }
-
- // Delete statements
-
- /**
- * @return cql query statement to delete a workflow from the "workflows" table
- */
- public String getDeleteWorkflowStatement() {
- return QueryBuilder.delete()
- .from(keyspace, TABLE_WORKFLOWS)
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, bindMarker()))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to delete a task_id to workflow_id mapping from the "task_lookup" table
- */
- public String getDeleteTaskLookupStatement() {
- return QueryBuilder.delete()
- .from(keyspace, TABLE_TASK_LOOKUP)
- .where(eq(TASK_ID_KEY, bindMarker()))
- .getQueryString();
- }
-
- /**
- * @return cql query statement to delete a task from the "workflows" table
- */
- public String getDeleteTaskStatement() {
- return QueryBuilder.delete()
- .from(keyspace, TABLE_WORKFLOWS)
- .where(eq(WORKFLOW_ID_KEY, bindMarker()))
- .and(eq(SHARD_ID_KEY, bindMarker()))
- .and(eq(ENTITY_KEY, ENTITY_TYPE_TASK))
- .and(eq(TASK_ID_KEY, bindMarker()))
- .getQueryString();
- }
-}
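The raw CQL listed in the class javadoc above is exactly what these QueryBuilder calls emit. For illustration only, a minimal sketch (assuming an injected DataStax 3.x Session and the Statements bean from this change; the TaskLookupWriter class and UUID arguments are hypothetical) of how a DAO might prepare one of the generated strings once and bind values per call:

import java.util.UUID;

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.Session;

// Hypothetical helper, not part of Conductor: prepares the generated
// "UPDATE <keyspace>.task_lookup SET workflow_id=? WHERE task_id=?;" string
// once and reuses it for every task-to-workflow mapping write.
class TaskLookupWriter {

    private final Session session;
    private final PreparedStatement updateTaskLookup;

    TaskLookupWriter(Session session, Statements statements) {
        this.session = session;
        this.updateTaskLookup = session.prepare(statements.getUpdateTaskLookupStatement());
    }

    void mapTaskToWorkflow(UUID taskId, UUID workflowId) {
        // Bind markers are positional: workflow_id (SET clause) first, then task_id (WHERE clause).
        BoundStatement bound = updateTaskLookup.bind(workflowId, taskId);
        session.execute(bound);
    }
}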
diff --git a/cassandra-persistence/src/main/resources/META-INF/additional-spring-configuration-metadata.json b/cassandra-persistence/src/main/resources/META-INF/additional-spring-configuration-metadata.json
new file mode 100644
index 0000000000..8c1d52fe40
--- /dev/null
+++ b/cassandra-persistence/src/main/resources/META-INF/additional-spring-configuration-metadata.json
@@ -0,0 +1,36 @@
+{
+ "properties": [
+ {
+ "name": "conductor.cassandra.write-consistency-level",
+ "defaultValue": "LOCAL_QUORUM"
+ },
+ {
+ "name": "conductor.cassandra.read-consistency-level",
+ "defaultValue": "LOCAL_QUORUM"
+ }
+ ],
+ "hints": [
+ {
+ "name": "conductor.cassandra.write-consistency-level",
+ "providers": [
+ {
+ "name": "handle-as",
+ "parameters": {
+ "target": "java.lang.Enum"
+ }
+ }
+ ]
+ },
+ {
+ "name": "conductor.cassandra.read-consistency-level",
+ "providers": [
+ {
+ "name": "handle-as",
+ "parameters": {
+ "target": "java.lang.Enum"
+ }
+ }
+ ]
+ }
+ ]
+}
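The defaults declared above can be overridden like any other Spring Boot property. A minimal, assumed application.properties snippet (the property names come from the metadata above; the values are just example ConsistencyLevel names, not recommendations):

conductor.cassandra.read-consistency-level=LOCAL_ONE
conductor.cassandra.write-consistency-level=LOCAL_QUORUM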
diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAOSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAOSpec.groovy
new file mode 100644
index 0000000000..912d36c65c
--- /dev/null
+++ b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraEventHandlerDAOSpec.groovy
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2021 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
+package com.netflix.conductor.cassandra.dao
+
+import com.netflix.conductor.common.metadata.events.EventExecution
+import com.netflix.conductor.common.metadata.events.EventHandler
+import spock.lang.Subject
+
+class CassandraEventHandlerDAOSpec extends CassandraSpec {
+
+ @Subject
+ CassandraEventHandlerDAO eventHandlerDAO
+
+ CassandraExecutionDAO executionDAO
+
+ def setup() {
+ eventHandlerDAO = new CassandraEventHandlerDAO(session, objectMapper, cassandraProperties, statements)
+ executionDAO = new CassandraExecutionDAO(session, objectMapper, cassandraProperties, statements)
+ }
+
+ def testEventHandlerCRUD() {
+ given:
+ String event = "event"
+ String eventHandlerName1 = "event_handler1"
+ String eventHandlerName2 = "event_handler2"
+
+ EventHandler eventHandler = new EventHandler()
+ eventHandler.setName(eventHandlerName1)
+ eventHandler.setEvent(event)
+
+ when: // create event handler
+ eventHandlerDAO.addEventHandler(eventHandler)
+ List
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
+package com.netflix.conductor.cassandra.dao
+
+import com.netflix.conductor.common.metadata.events.EventExecution
+import com.netflix.conductor.common.metadata.tasks.Task
+import com.netflix.conductor.common.metadata.tasks.TaskDef
+import com.netflix.conductor.common.metadata.workflow.WorkflowDef
+import com.netflix.conductor.common.metadata.workflow.WorkflowTask
+import com.netflix.conductor.common.run.Workflow
+import com.netflix.conductor.core.exception.ApplicationException
+import com.netflix.conductor.core.utils.IDGenerator
+import spock.lang.Subject
+
+import static com.netflix.conductor.common.metadata.events.EventExecution.Status.COMPLETED
+import static com.netflix.conductor.core.exception.ApplicationException.Code.INVALID_INPUT
+
+class CassandraExecutionDAOSpec extends CassandraSpec {
+
+ @Subject
+ CassandraExecutionDAO executionDAO
+
+ def setup() {
+ executionDAO = new CassandraExecutionDAO(session, objectMapper, cassandraProperties, statements)
+ }
+
+ def "verify if tasks are validated"() {
+ given:
+ def tasks = []
+
+ // create tasks for a workflow and add to list
+ Task task1 = new Task(workflowInstanceId: 'uuid', taskId: 'task1id', referenceTaskName: 'task1')
+ Task task2 = new Task(workflowInstanceId: 'uuid', taskId: 'task2id', referenceTaskName: 'task2')
+ tasks << task1 << task2
+
+ when:
+ executionDAO.validateTasks(tasks)
+
+ then:
+ noExceptionThrown()
+
+ and:
+ // add a task from a different workflow to the list
+ Task task3 = new Task(workflowInstanceId: 'other-uuid', taskId: 'task3id', referenceTaskName: 'task3')
+ tasks << task3
+
+ when:
+ executionDAO.validateTasks(tasks)
+
+ then:
+ def ex = thrown(ApplicationException.class)
+ ex.message == "Tasks of multiple workflows cannot be created/updated simultaneously"
+ }
+
+ def "workflow CRUD"() {
+ given:
+ String workflowId = IDGenerator.generate()
+ WorkflowDef workflowDef = new WorkflowDef()
+ workflowDef.name = "def1"
+ workflowDef.setVersion(1)
+ Workflow workflow = new Workflow()
+ workflow.setWorkflowDefinition(workflowDef)
+ workflow.setWorkflowId(workflowId)
+ workflow.setInput(new HashMap<>())
+ workflow.setStatus(Workflow.WorkflowStatus.RUNNING)
+ workflow.setCreateTime(System.currentTimeMillis())
+
+ when:
+ // create a new workflow in the datastore
+ String id = executionDAO.createWorkflow(workflow)
+
+ then:
+ workflowId == id
+
+ when:
+ // read the workflow from the datastore
+ Workflow found = executionDAO.getWorkflow(workflowId)
+
+ then:
+ workflow == found
+
+ and:
+ // update the workflow
+ workflow.setStatus(Workflow.WorkflowStatus.COMPLETED)
+ executionDAO.updateWorkflow(workflow)
+
+ when:
+ found = executionDAO.getWorkflow(workflowId)
+
+ then:
+ workflow == found
+
+ when:
+ // remove the workflow from datastore
+ boolean removed = executionDAO.removeWorkflow(workflowId)
+
+ then:
+ removed
+
+ when:
+ // read workflow again
+ workflow = executionDAO.getWorkflow(workflowId, true)
+
+ then:
+ workflow == null
+ }
+
+ def "create tasks and verify methods that read tasks and workflow"() {
+ given: 'we create a workflow'
+ String workflowId = IDGenerator.generate()
+ WorkflowDef workflowDef = new WorkflowDef(name: 'def1', version: 1)
+ Workflow workflow = new Workflow(workflowDefinition: workflowDef, workflowId: workflowId, input: new HashMap(), status: Workflow.WorkflowStatus.RUNNING, createTime: System.currentTimeMillis())
+ executionDAO.createWorkflow(workflow)
+
+ and: 'create tasks for this workflow'
+ Task task1 = new Task(workflowInstanceId: workflowId, taskType: 'task1', referenceTaskName: 'task1', status: Task.Status.SCHEDULED, taskId: IDGenerator.generate())
+ Task task2 = new Task(workflowInstanceId: workflowId, taskType: 'task2', referenceTaskName: 'task2', status: Task.Status.SCHEDULED, taskId: IDGenerator.generate())
+ Task task3 = new Task(workflowInstanceId: workflowId, taskType: 'task3', referenceTaskName: 'task3', status: Task.Status.SCHEDULED, taskId: IDGenerator.generate())
+
+ def taskList = [task1, task2, task3]
+
+ when: 'add the tasks to the datastore'
+ List
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
+package com.netflix.conductor.cassandra.dao
+
+import com.netflix.conductor.common.metadata.tasks.TaskDef
+import com.netflix.conductor.common.metadata.workflow.WorkflowDef
+import spock.lang.Subject
+
+class CassandraMetadataDAOSpec extends CassandraSpec {
+
+ @Subject
+ CassandraMetadataDAO metadataDAO
+
+ def setup() {
+ metadataDAO = new CassandraMetadataDAO(session, objectMapper, cassandraProperties, statements)
+ }
+
+ def cleanup() {
+
+ }
+
+ def "CRUD on WorkflowDef"() throws Exception {
+ given:
+ String name = "workflow_def_1"
+ int version = 1
+
+ WorkflowDef workflowDef = new WorkflowDef()
+ workflowDef.setName(name)
+ workflowDef.setVersion(version)
+ workflowDef.setOwnerEmail("test@junit.com")
+
+ when: 'create workflow definition'
+ metadataDAO.createWorkflowDef(workflowDef)
+
+ then: // fetch the workflow definition
+ def defOptional = metadataDAO.getWorkflowDef(name, version)
+ defOptional.present
+ defOptional.get() == workflowDef
+
+ and: // register a higher version
+ int higherVersion = 2
+ workflowDef.setVersion(higherVersion)
+ workflowDef.setDescription("higher version")
+
+ when: // register the higher version definition
+ metadataDAO.createWorkflowDef(workflowDef)
+ defOptional = metadataDAO.getWorkflowDef(name, higherVersion)
+
+ then: // fetch the higher version
+ defOptional.present
+ defOptional.get() == workflowDef
+
+ when: // fetch latest version
+ defOptional = metadataDAO.getLatestWorkflowDef(name)
+
+ then:
+ defOptional && defOptional.present
+ defOptional.get() == workflowDef
+
+ when: // modify the definition
+ workflowDef.setOwnerEmail("junit@test.com")
+ metadataDAO.updateWorkflowDef(workflowDef)
+ defOptional = metadataDAO.getWorkflowDef(name, higherVersion)
+
+ then: // fetch the workflow definition
+ defOptional.present
+ defOptional.get() == workflowDef
+
+ when: // delete workflow def
+ metadataDAO.removeWorkflowDef(name, higherVersion)
+ defOptional = metadataDAO.getWorkflowDef(name, higherVersion)
+
+ then:
+ defOptional.empty
+ }
+
+ def "CRUD on TaskDef"() {
+ given:
+ String task1Name = "task1"
+ String task2Name = "task2"
+
+ when: // fetch all task defs
+ def taskDefList = metadataDAO.getAllTaskDefs()
+
+ then:
+ taskDefList.empty
+
+ when: // register a task definition
+ TaskDef taskDef = new TaskDef()
+ taskDef.setName(task1Name)
+ metadataDAO.createTaskDef(taskDef)
+ taskDefList = metadataDAO.getAllTaskDefs()
+
+ then: // fetch all task defs
+ taskDefList && taskDefList.size() == 1
+
+ when: // fetch the task def
+ def returnTaskDef = metadataDAO.getTaskDef(task1Name)
+
+ then:
+ returnTaskDef == taskDef
+
+ when: // register another task definition
+ TaskDef taskDef1 = new TaskDef()
+ taskDef1.setName(task2Name)
+ metadataDAO.createTaskDef(taskDef1)
+ // fetch all task defs
+ taskDefList = metadataDAO.getAllTaskDefs()
+
+ then:
+ taskDefList && taskDefList.size() == 2
+
+ when: // update task def
+ taskDef.setOwnerEmail("juni@test.com")
+ metadataDAO.updateTaskDef(taskDef)
+ returnTaskDef = metadataDAO.getTaskDef(task1Name)
+
+ then:
+ returnTaskDef == taskDef
+
+ when: // delete task def
+ metadataDAO.removeTaskDef(task2Name)
+ taskDefList = metadataDAO.getAllTaskDefs()
+
+ then:
+ taskDefList && taskDefList.size() == 1
+ // fetch deleted task def
+ metadataDAO.getTaskDef(task2Name) == null
+ }
+}
diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraSpec.groovy
new file mode 100644
index 0000000000..d9531f2b36
--- /dev/null
+++ b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/dao/CassandraSpec.groovy
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2021 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
+package com.netflix.conductor.cassandra.dao
+
+import com.datastax.driver.core.ConsistencyLevel
+import com.datastax.driver.core.Session
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.netflix.conductor.cassandra.config.CassandraProperties
+import com.netflix.conductor.cassandra.util.Statements
+import com.netflix.conductor.common.config.TestObjectMapperConfiguration
+import groovy.transform.PackageScope
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.test.context.ContextConfiguration
+import org.testcontainers.containers.CassandraContainer
+import org.testcontainers.spock.Testcontainers
+import spock.lang.Shared
+import spock.lang.Specification
+
+import java.time.Duration
+
+@ContextConfiguration(classes = [TestObjectMapperConfiguration.class])
+@Testcontainers
+@PackageScope
+abstract class CassandraSpec extends Specification {
+
+ @Shared
+ CassandraContainer cassandra = new CassandraContainer()
+
+ @Shared
+ Session session
+
+ @Autowired
+ ObjectMapper objectMapper
+
+ CassandraProperties cassandraProperties
+ Statements statements
+
+ def setupSpec() {
+ session = cassandra.cluster.newSession()
+ }
+
+ def setup() {
+ String keyspaceName = "junit"
+ cassandraProperties = Mock(CassandraProperties.class) {
+ getKeyspace() >> keyspaceName
+ getReplicationStrategy() >> "SimpleStrategy"
+ getReplicationFactorKey() >> "replication_factor"
+ getReplicationFactorValue() >> 1
+ getReadConsistencyLevel() >> ConsistencyLevel.LOCAL_ONE
+ getWriteConsistencyLevel() >> ConsistencyLevel.LOCAL_ONE
+ getTaskDefCacheRefreshInterval() >> Duration.ofSeconds(60)
+ getEventHandlerCacheRefreshInterval() >> Duration.ofSeconds(60)
+ getEventExecutionPersistenceTtl() >> Duration.ofSeconds(5)
+ }
+
+ statements = new Statements(keyspaceName)
+ }
+}
diff --git a/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/util/StatementsSpec.groovy b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/util/StatementsSpec.groovy
new file mode 100644
index 0000000000..f674688b9e
--- /dev/null
+++ b/cassandra-persistence/src/test/groovy/com/netflix/conductor/cassandra/util/StatementsSpec.groovy
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2021 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+
+package com.netflix.conductor.cassandra.util
+
+import spock.lang.Specification
+import spock.lang.Subject
+
+class StatementsSpec extends Specification {
+
+ @Subject
+ Statements subject
+
+ def setup() {
+ subject = new Statements('test')
+ }
+
+ def "verify statements"() {
+ when:
+ subject
+
+ then:
+ with(subject) {
+ insertWorkflowDefStatement == "INSERT INTO test.workflow_definitions (workflow_def_name,version,workflow_definition) VALUES (?,?,?) IF NOT EXISTS;"
+ insertTaskDefStatement == "INSERT INTO test.task_definitions (task_defs,task_def_name,task_definition) VALUES ('task_defs',?,?);"
+ selectWorkflowDefStatement == "SELECT workflow_definition FROM test.workflow_definitions WHERE workflow_def_name=? AND version=?;"
+ selectAllWorkflowDefVersionsByNameStatement == "SELECT * FROM test.workflow_definitions WHERE workflow_def_name=?;"
+ selectAllWorkflowDefsStatement == "SELECT * FROM test.workflow_defs_index WHERE workflow_def_version_index=?;"
+ selectTaskDefStatement == "SELECT task_definition FROM test.task_definitions WHERE task_defs='task_defs' AND task_def_name=?;"
+ selectAllTaskDefsStatement == "SELECT * FROM test.task_definitions WHERE task_defs=?;"
+ updateWorkflowDefStatement == "UPDATE test.workflow_definitions SET workflow_definition=? WHERE workflow_def_name=? AND version=?;"
+ deleteWorkflowDefStatement == "DELETE FROM test.workflow_definitions WHERE workflow_def_name=? AND version=?;"
+ deleteWorkflowDefIndexStatement == "DELETE FROM test.workflow_defs_index WHERE workflow_def_version_index=? AND workflow_def_name_version=?;"
+ deleteTaskDefStatement == "DELETE FROM test.task_definitions WHERE task_defs='task_defs' AND task_def_name=?;"
+ insertWorkflowStatement == "INSERT INTO test.workflows (workflow_id,shard_id,task_id,entity,payload,total_tasks,total_partitions) VALUES (?,?,?,'workflow',?,?,?);"
+ insertTaskStatement == "INSERT INTO test.workflows (workflow_id,shard_id,task_id,entity,payload) VALUES (?,?,?,'task',?);"
+ insertEventExecutionStatement == "INSERT INTO test.event_executions (message_id,event_handler_name,event_execution_id,payload) VALUES (?,?,?,?) IF NOT EXISTS;"
+ selectTotalStatement == "SELECT total_tasks,total_partitions FROM test.workflows WHERE workflow_id=? AND shard_id=1;"
+ selectTaskStatement == "SELECT payload FROM test.workflows WHERE workflow_id=? AND shard_id=? AND entity='task' AND task_id=?;"
+ selectWorkflowStatement == "SELECT payload FROM test.workflows WHERE workflow_id=? AND shard_id=1 AND entity='workflow';"
+ selectWorkflowWithTasksStatement == "SELECT * FROM test.workflows WHERE workflow_id=? AND shard_id=?;"
+ selectTaskFromLookupTableStatement == "SELECT workflow_id FROM test.task_lookup WHERE task_id=?;"
+ selectTasksFromTaskDefLimitStatement == "SELECT * FROM test.task_def_limit WHERE task_def_name=?;"
+ selectAllEventExecutionsForMessageFromEventExecutionsStatement == "SELECT * FROM test.event_executions WHERE message_id=? AND event_handler_name=?;"
+ updateWorkflowStatement == "UPDATE test.workflows SET payload=? WHERE workflow_id=? AND shard_id=1 AND entity='workflow' AND task_id='';"
+ updateTotalTasksStatement == "UPDATE test.workflows SET total_tasks=? WHERE workflow_id=? AND shard_id=?;"
+ updateTotalPartitionsStatement == "UPDATE test.workflows SET total_partitions=?,total_tasks=? WHERE workflow_id=? AND shard_id=1;"
+ updateTaskLookupStatement == "UPDATE test.task_lookup SET workflow_id=? WHERE task_id=?;"
+ updateTaskDefLimitStatement == "UPDATE test.task_def_limit SET workflow_id=? WHERE task_def_name=? AND task_id=?;"
+ updateEventExecutionStatement == "UPDATE test.event_executions USING TTL ? SET payload=? WHERE message_id=? AND event_handler_name=? AND event_execution_id=?;"
+ deleteWorkflowStatement == "DELETE FROM test.workflows WHERE workflow_id=? AND shard_id=?;"
+ deleteTaskLookupStatement == "DELETE FROM test.task_lookup WHERE task_id=?;"
+ deleteTaskStatement == "DELETE FROM test.workflows WHERE workflow_id=? AND shard_id=? AND entity='task' AND task_id=?;"
+ deleteTaskDefLimitStatement == "DELETE FROM test.task_def_limit WHERE task_def_name=? AND task_id=?;"
+ deleteEventExecutionsStatement == "DELETE FROM test.event_executions WHERE message_id=? AND event_handler_name=? AND event_execution_id=?;"
+ insertEventHandlerStatement == "INSERT INTO test.event_handlers (handlers,event_handler_name,event_handler) VALUES ('handlers',?,?);"
+ selectAllEventHandlersStatement == "SELECT * FROM test.event_handlers WHERE handlers=?;"
+ deleteEventHandlerStatement == "DELETE FROM test.event_handlers WHERE handlers='handlers' AND event_handler_name=?;"
+ }
+ }
+}
diff --git a/cassandra-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java b/cassandra-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java
deleted file mode 100644
index a6535ca8ee..0000000000
--- a/cassandra-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright 2016 Netflix, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.netflix.conductor.config;
-
-import com.datastax.driver.core.ConsistencyLevel;
-import com.netflix.conductor.cassandra.CassandraConfiguration;
-
-import java.util.Map;
-
-public class TestConfiguration implements CassandraConfiguration {
-
- @Override
- public int getSweepFrequency() {
- return 1;
- }
-
- @Override
- public boolean disableSweep() {
- return false;
- }
-
- @Override
- public boolean disableAsyncWorkers() {
- return false;
- }
-
- @Override
- public String getServerId() {
- return "server_id";
- }
-
- @Override
- public String getEnvironment() {
- return "test";
- }
-
- @Override
- public String getStack() {
- return "junit";
- }
-
- @Override
- public String getAppId() {
- return "conductor";
- }
-
- @Override
- public String getRegion() {
- return "us-east-1";
- }
-
- @Override
- public String getAvailabilityZone() {
- return "us-east-1c";
- }
-
- @Override
- public String getProperty(String name, String defaultValue) {
- return "test";
- }
-
- @Override
- public int getIntProperty(String name, int defaultValue) {
- return 0;
- }
-
- @Override
- public boolean getBooleanProperty(String name, boolean defaultValue) {
- return false;
- }
-
- @Override
- public Map
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.spring;
+
+import java.time.Duration;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+@ConfigurationProperties("conductor.client")
+public class ClientProperties {
+
+ private String rootUri;
+
+ private String workerNamePrefix = "workflow-worker-%d";
+
+ private int threadCount = 1;
+
+ private Duration sleepWhenRetryDuration = Duration.ofMillis(500);
+
+ private int updateRetryCount = 3;
+
+ private Map
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.spring;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import com.netflix.conductor.client.automator.TaskRunnerConfigurer;
+import com.netflix.conductor.client.http.TaskClient;
+import com.netflix.conductor.client.worker.Worker;
+import com.netflix.discovery.EurekaClient;
+
+@Configuration(proxyBeanMethods = false)
+@EnableConfigurationProperties(ClientProperties.class)
+public class ConductorClientAutoConfiguration {
+
+ @Autowired(required = false)
+ private EurekaClient eurekaClient;
+
+ @Autowired(required = false)
+ private List
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.spring;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.context.annotation.Bean;
+
+import com.netflix.conductor.client.worker.Worker;
+import com.netflix.conductor.common.metadata.tasks.Task;
+import com.netflix.conductor.common.metadata.tasks.TaskResult;
+
+@SpringBootApplication
+public class ExampleClient {
+
+ public static void main(String[] args) {
+
+ SpringApplication.run(ExampleClient.class, args);
+ }
+
+ @Bean
+ public Worker worker() {
+ return new Worker() {
+ @Override
+ public String getTaskDefName() {
+ return "taskDef";
+ }
+
+ @Override
+ public TaskResult execute(Task task) {
+ return new TaskResult(task);
+ }
+ };
+ }
+}
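To point this example worker at a Conductor server, the ClientProperties fields shown earlier bind to conductor.client.* keys. A minimal, assumed application.properties sketch (the URL is a placeholder; the kebab-case names rely on Spring's relaxed binding of the rootUri and threadCount fields above):

conductor.client.root-uri=http://localhost:8080/api/
conductor.client.thread-count=1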
diff --git a/client/build.gradle b/client/build.gradle
index bd2e04ce9c..b121b65c1d 100644
--- a/client/build.gradle
+++ b/client/build.gradle
@@ -1,4 +1,15 @@
-apply plugin: 'findbugs'
+buildscript {
+ repositories {
+ maven {
+ url "https://plugins.gradle.org/m2/"
+ }
+ }
+ dependencies {
+ classpath "gradle.plugin.com.github.spotbugs.snom:spotbugs-gradle-plugin:4.6.2"
+ }
+}
+
+apply plugin: 'com.github.spotbugs'
apply plugin: 'pmd'
configurations.all {
@@ -6,21 +17,39 @@ configurations.all {
}
dependencies {
+ implementation project(':conductor-common')
+ // SBMTODO: remove guava dep
+ implementation "com.google.guava:guava:${revGuava}"
+
+ implementation "com.sun.jersey:jersey-client:${revJersey}"
- compile project(':conductor-common')
- compile "com.sun.jersey:jersey-client:${revJerseyClient}"
- compile "com.netflix.spectator:spectator-api:${revSpectator}"
- compile "com.netflix.eureka:eureka-client:${revEurekaClient}"
- compile "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${revJaxrsJackson}"
- compile "com.netflix.archaius:archaius-core:${revArchaius}"
- compile "com.amazonaws:aws-java-sdk-core:${revAwsSdk}"
+ implementation "com.netflix.spectator:spectator-api:${revSpectator}"
+ implementation "com.netflix.eureka:eureka-client:${revEurekaClient}"
+ implementation "com.amazonaws:aws-java-sdk-core:${revAwsSdk}"
- testCompile "org.slf4j:slf4j-log4j12:${revSlf4jlog4j}"
+ implementation "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ implementation "com.fasterxml.jackson.datatype:jackson-datatype-jsr310"
+
+ implementation "org.apache.commons:commons-lang3"
+ implementation "commons-io:commons-io:${revCommonsIo}"
+
+ implementation "org.slf4j:slf4j-api"
+
+ testImplementation "org.powermock:powermock-module-junit4:${revPowerMock}"
+ testImplementation "org.powermock:powermock-api-mockito2:${revPowerMock}"
}
-tasks.withType(FindBugs) {
+spotbugsMain {
reports {
- xml.enabled false
- html.enabled true
+ xml {
+ enabled = false
+ }
+ html {
+ enabled = true
+ }
}
}
+
+pmd {
+ ignoreFailures = true
+}
\ No newline at end of file
diff --git a/client/dependencies.lock b/client/dependencies.lock
index 87507c7efc..e91f2b9ae5 100644
--- a/client/dependencies.lock
+++ b/client/dependencies.lock
@@ -1,709 +1,2343 @@
{
- "compile": {
- "com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
- },
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
- },
- "com.netflix.conductor:conductor-common": {
- "project": true
- },
- "com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
- },
- "com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
- },
- "com.sun.jersey:jersey-client": {
- "locked": "1.19.4",
- "requested": "1.19.4"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
+ "annotationProcessor": {
+ "org.springframework.boot:spring-boot-configuration-processor": {
+ "locked": "2.3.12.RELEASE"
}
},
"compileClasspath": {
+ "aopalliance:aopalliance": {
+ "locked": "1.0",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
"com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
+ "locked": "1.11.86"
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4"
+ },
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
},
"com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
+ "locked": "2.11.4"
+ },
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
+ },
+ "com.github.andrewoma.dexx:dexx-collections": {
+ "locked": "0.2",
+ "transitive": [
+ "com.github.vlsi.compactmap:compactmap"
+ ]
+ },
+ "com.github.vlsi.compactmap:compactmap": {
+ "locked": "2.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.google.guava:guava",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.google.inject:guice",
+ "com.netflix.servo:servo-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.inject:guice": {
+ "locked": "4.1.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
},
"com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
+ "locked": "0.7.6",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.netflix.conductor:conductor-common": {
"project": true
},
"com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
- },
- "com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
- },
- "com.sun.jersey:jersey-client": {
- "locked": "1.19.4",
- "requested": "1.19.4"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
- }
- },
- "default": {
- "com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
- },
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
+ "locked": "1.10.10"
},
- "com.netflix.conductor:conductor-common": {
- "project": true
+ "com.netflix.netflix-commons:netflix-eventbus": {
+ "locked": "0.3.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
- "com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
+ "com.netflix.servo:servo-core": {
+ "locked": "0.12.21",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
+ "locked": "0.122.0"
+ },
+ "com.sun.jersey.contribs:jersey-apache-client4": {
+ "locked": "1.19.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.sun.jersey:jersey-client": {
"locked": "1.19.4",
- "requested": "1.19.4"
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
+ },
+ "com.sun.jersey:jersey-core": {
+ "locked": "1.19.4",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-client"
+ ]
+ },
+ "com.thoughtworks.xstream:xstream": {
+ "locked": "1.4.13",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "commons-codec:commons-codec": {
+ "locked": "1.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "commons-configuration:commons-configuration": {
+ "locked": "1.10",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7"
+ },
+ "commons-lang:commons-lang": {
+ "locked": "2.6",
+ "transitive": [
+ "commons-configuration:commons-configuration"
+ ]
+ },
+ "commons-logging:commons-logging": {
+ "locked": "1.2",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "commons-configuration:commons-configuration",
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
},
"javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "locked": "1",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
+ "javax.ws.rs:jsr311-api": {
+ "locked": "1.1.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-core"
+ ]
+ },
+ "joda-time:joda-time": {
+ "locked": "2.8.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10"
+ },
+ "org.apache.httpcomponents:httpclient": {
+ "locked": "4.5.13",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
+ },
+ "org.apache.httpcomponents:httpcore": {
+ "locked": "4.4.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0"
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0"
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0"
+ },
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
- }
- },
- "findbugs": {
- "com.google.code.findbugs:findbugs": {
- "locked": "3.0.1"
- }
- },
- "jacocoAgent": {
- "org.jacoco:org.jacoco.agent": {
- "locked": "0.8.1"
- }
- },
- "jacocoAnt": {
- "org.jacoco:org.jacoco.ant": {
- "locked": "0.8.1"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.netflix.servo:servo-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl"
+ ]
+ },
+ "software.amazon.ion:ion-java": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "xmlpull:xmlpull": {
+ "locked": "1.1.3.1",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
+ },
+ "xpp3:xpp3_min": {
+ "locked": "1.1.4c",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
}
},
"pmd": {
+ "com.beust:jcommander": {
+ "locked": "1.72",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core"
+ ]
+ },
+ "com.google.code.gson:gson": {
+ "locked": "2.8.7",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.6",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core",
+ "net.sourceforge.pmd:pmd-java"
+ ]
+ },
+ "net.sourceforge.pmd:pmd-core": {
+ "locked": "6.26.0",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-java"
+ ]
+ },
"net.sourceforge.pmd:pmd-java": {
- "locked": "5.6.1"
+ "locked": "6.26.0"
+ },
+ "net.sourceforge.saxon:saxon": {
+ "locked": "9.1.0.8",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core",
+ "net.sourceforge.pmd:pmd-java"
+ ]
+ },
+ "org.antlr:antlr4-runtime": {
+ "locked": "4.7",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core",
+ "net.sourceforge.pmd:pmd-java",
+ "org.apache.commons:commons-text"
+ ]
+ },
+ "org.apache.commons:commons-text": {
+ "locked": "1.6",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "7.3.1",
+ "transitive": [
+ "net.sourceforge.pmd:pmd-core",
+ "net.sourceforge.pmd:pmd-java"
+ ]
}
},
- "runtime": {
+ "runtimeClasspath": {
+ "antlr:antlr": {
+ "locked": "2.7.7",
+ "transitive": [
+ "org.antlr:antlr-runtime",
+ "org.antlr:stringtemplate"
+ ]
+ },
+ "aopalliance:aopalliance": {
+ "locked": "1.0",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
"com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
+ "locked": "1.11.86"
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4"
+ },
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
},
"com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
+ "locked": "2.11.4"
},
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
+ },
+ "com.github.andrewoma.dexx:dexx-collections": {
+ "locked": "0.2",
+ "transitive": [
+ "com.github.vlsi.compactmap:compactmap"
+ ]
},
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
+ "com.github.rholder:guava-retrying": {
+ "locked": "2.0.0",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
+ ]
+ },
+ "com.github.vlsi.compactmap:compactmap": {
+ "locked": "2.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.guava:guava",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.eureka:eureka-client",
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "com.google.code.gson:gson": {
+ "locked": "2.8.7",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.inject:guice",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.netflix-commons:netflix-infix",
+ "com.netflix.servo:servo-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.inject:guice": {
+ "locked": "4.1.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
},
"com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
+ "locked": "3.13.0",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
+ ]
},
"com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
+ "locked": "0.7.6",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
+ },
+ "com.netflix.conductor:conductor-annotations": {
+ "project": true,
+ "transitive": [
+ "com.netflix.conductor:conductor-common"
+ ]
},
"com.netflix.conductor:conductor-common": {
"project": true
},
"com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
+ "locked": "1.10.10"
+ },
+ "com.netflix.netflix-commons:netflix-eventbus": {
+ "locked": "0.3.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.netflix.netflix-commons:netflix-infix": {
+ "locked": "0.3.0",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
+ },
+ "com.netflix.servo:servo-core": {
+ "locked": "0.12.21",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
},
"com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
+ "locked": "0.122.0"
+ },
+ "com.sun.jersey.contribs:jersey-apache-client4": {
+ "locked": "1.19.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.sun.jersey:jersey-client": {
"locked": "1.19.4",
- "requested": "1.19.4"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
},
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
- }
- },
- "runtimeClasspath": {
- "com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
+ "com.sun.jersey:jersey-core": {
+ "locked": "1.19.4",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-client"
+ ]
+ },
+ "com.thoughtworks.xstream:xstream": {
+ "locked": "1.4.13",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "commons-codec:commons-codec": {
+ "locked": "1.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "commons-configuration:commons-configuration": {
+ "locked": "1.10",
+ "transitive": [
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7"
+ },
+ "commons-jxpath:commons-jxpath": {
+ "locked": "1.3",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "commons-lang:commons-lang": {
+ "locked": "2.6",
+ "transitive": [
+ "commons-configuration:commons-configuration"
+ ]
+ },
+ "commons-logging:commons-logging": {
+ "locked": "1.2",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "commons-configuration:commons-configuration",
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations"
+ ]
},
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
+ "javax.inject:javax.inject": {
+ "locked": "1",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
+ "javax.servlet:servlet-api": {
+ "locked": "2.5",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "javax.ws.rs:jsr311-api": {
+ "locked": "1.1.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-core"
+ ]
+ },
+ "joda-time:joda-time": {
+ "locked": "2.8.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "org.antlr:antlr-runtime": {
+ "locked": "3.4",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "org.antlr:stringtemplate": {
+ "locked": "3.2.1",
+ "transitive": [
+ "org.antlr:antlr-runtime"
+ ]
+ },
+ "org.apache.bval:bval-jsr": {
+ "locked": "2.0.5",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ ]
},
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
+ ]
+ },
+ "org.apache.commons:commons-math": {
+ "locked": "2.2",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
+ },
+ "org.apache.httpcomponents:httpclient": {
+ "locked": "4.5.13",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
+ },
+ "org.apache.httpcomponents:httpcore": {
+ "locked": "4.4.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
"com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
+ ]
},
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
"com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
+ ]
},
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
"com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
+ ]
},
- "com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
},
- "com.netflix.conductor:conductor-common": {
- "project": true
- },
- "com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
- },
- "com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
- },
- "com.sun.jersey:jersey-client": {
- "locked": "1.19.4",
- "requested": "1.19.4"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "org.codehaus.jettison:jettison": {
+ "locked": "1.4.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.7.25"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.netflix-commons:netflix-eventbus",
+ "com.netflix.netflix-commons:netflix-infix",
+ "com.netflix.servo:servo-core",
+ "com.netflix.spectator:spectator-api",
+ "org.apache.logging.log4j:log4j-slf4j-impl"
+ ]
+ },
+ "software.amazon.ion:ion-java": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "xmlpull:xmlpull": {
+ "locked": "1.1.3.1",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
+ },
+ "xpp3:xpp3_min": {
+ "locked": "1.1.4c",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
}
},
- "testCompile": {
- "com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
- },
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
- },
- "com.netflix.conductor:conductor-common": {
- "project": true
- },
- "com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
- },
- "com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
- },
- "com.sun.jersey:jersey-client": {
- "locked": "1.19.4",
- "requested": "1.19.4"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
- },
- "org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
+ "spotbugs": {
+ "com.github.spotbugs:spotbugs": {
+ "locked": "4.2.1"
+ },
+ "com.github.spotbugs:spotbugs-annotations": {
+ "locked": "4.2.1",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.github.spotbugs:spotbugs-annotations"
+ ]
+ },
+ "jaxen:jaxen": {
+ "locked": "1.2.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "net.jcip:jcip-annotations": {
+ "locked": "1.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "net.sf.saxon:Saxon-HE": {
+ "locked": "10.3",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "org.apache.bcel:bcel": {
+ "locked": "6.5.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10",
+ "transitive": [
+ "com.github.spotbugs:spotbugs",
+ "org.apache.commons:commons-text"
+ ]
+ },
+ "org.apache.commons:commons-text": {
+ "locked": "1.9",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "org.dom4j:dom4j": {
+ "locked": "2.1.3",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "org.json:json": {
+ "locked": "20201115",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "9.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs",
+ "org.ow2.asm:asm-commons",
+ "org.ow2.asm:asm-tree",
+ "org.ow2.asm:asm-util"
+ ]
+ },
+ "org.ow2.asm:asm-analysis": {
+ "locked": "9.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs",
+ "org.ow2.asm:asm-commons",
+ "org.ow2.asm:asm-util"
+ ]
+ },
+ "org.ow2.asm:asm-commons": {
+ "locked": "9.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ },
+ "org.ow2.asm:asm-tree": {
+ "locked": "9.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs",
+ "org.ow2.asm:asm-analysis",
+ "org.ow2.asm:asm-commons",
+ "org.ow2.asm:asm-util"
+ ]
+ },
+ "org.ow2.asm:asm-util": {
+ "locked": "9.0",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.8.0-alpha1"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.github.spotbugs:spotbugs"
+ ]
+ }
+ },
+ "spotbugsSlf4j": {
+ "org.slf4j:slf4j-api": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.slf4j:slf4j-simple"
+ ]
},
- "org.slf4j:slf4j-log4j12": {
- "locked": "1.8.0-alpha1",
- "requested": "1.8.0-alpha1"
+ "org.slf4j:slf4j-simple": {
+ "locked": "1.8.0-beta4"
}
},
"testCompileClasspath": {
+ "aopalliance:aopalliance": {
+ "locked": "1.0",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
"com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
+ "locked": "1.11.86"
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4"
+ },
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
},
"com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
- },
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
+ "locked": "2.11.4"
+ },
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
+ },
+ "com.github.andrewoma.dexx:dexx-collections": {
+ "locked": "0.2",
+ "transitive": [
+ "com.github.vlsi.compactmap:compactmap"
+ ]
+ },
+ "com.github.vlsi.compactmap:compactmap": {
+ "locked": "2.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.google.guava:guava",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.google.inject:guice",
+ "com.netflix.servo:servo-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.inject:guice": {
+ "locked": "4.1.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.jayway.jsonpath:json-path": {
+ "locked": "2.4.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
},
"com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
+ "locked": "0.7.6",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.netflix.conductor:conductor-common": {
"project": true
},
"com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
+ "locked": "1.10.10"
+ },
+ "com.netflix.netflix-commons:netflix-eventbus": {
+ "locked": "0.3.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.netflix.servo:servo-core": {
+ "locked": "0.12.21",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
+ "locked": "0.122.0"
+ },
+ "com.sun.jersey.contribs:jersey-apache-client4": {
+ "locked": "1.19.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.sun.jersey:jersey-client": {
"locked": "1.19.4",
- "requested": "1.19.4"
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
+ },
+ "com.sun.jersey:jersey-core": {
+ "locked": "1.19.4",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-client"
+ ]
+ },
+ "com.thoughtworks.xstream:xstream": {
+ "locked": "1.4.13",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.vaadin.external.google:android-json": {
+ "locked": "0.0.20131108.vaadin1",
+ "transitive": [
+ "org.skyscreamer:jsonassert"
+ ]
+ },
+ "commons-codec:commons-codec": {
+ "locked": "1.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "commons-configuration:commons-configuration": {
+ "locked": "1.10",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7"
+ },
+ "commons-lang:commons-lang": {
+ "locked": "2.6",
+ "transitive": [
+ "commons-configuration:commons-configuration"
+ ]
+ },
+ "commons-logging:commons-logging": {
+ "locked": "1.2",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "commons-configuration:commons-configuration",
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
},
"javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "locked": "1",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
+ "javax.ws.rs:jsr311-api": {
+ "locked": "1.1.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-core"
+ ]
+ },
+ "joda-time:joda-time": {
+ "locked": "2.8.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
},
"junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
+ "locked": "4.13.2",
+ "transitive": [
+ "org.junit.vintage:junit-vintage-engine",
+ "org.powermock:powermock-module-junit4",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "net.bytebuddy:byte-buddy": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core",
+ "org.powermock:powermock-core",
+ "org.powermock:powermock-reflect"
+ ]
+ },
+ "net.bytebuddy:byte-buddy-agent": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core",
+ "org.powermock:powermock-core",
+ "org.powermock:powermock-reflect"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10"
+ },
+ "org.apache.httpcomponents:httpclient": {
+ "locked": "4.5.13",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
+ },
+ "org.apache.httpcomponents:httpcore": {
+ "locked": "4.4.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.apache.logging.log4j:log4j-web",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0"
+ },
+ "org.apiguardian:apiguardian-api": {
+ "locked": "1.1.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.assertj:assertj-core": {
+ "locked": "3.16.1",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "org.hamcrest:hamcrest": {
+ "locked": "2.2",
+ "transitive": [
+ "org.hamcrest:hamcrest-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.hamcrest:hamcrest-core": {
+ "locked": "2.2",
+ "transitive": [
+ "org.powermock:powermock-module-junit4",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "org.javassist:javassist": {
+ "locked": "3.27.0-GA",
+ "transitive": [
+ "org.powermock:powermock-core"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-api": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-params"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-params": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.platform:junit-platform-commons": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-engine": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.junit.vintage:junit-vintage-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit:junit-bom": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
},
"org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
+ "locked": "3.3.3",
+ "transitive": [
+ "org.mockito:mockito-junit-jupiter",
+ "org.powermock:powermock-api-mockito2",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.mockito:mockito-junit-jupiter": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.objenesis:objenesis": {
+ "locked": "3.0.1",
+ "transitive": [
+ "org.mockito:mockito-core",
+ "org.powermock:powermock-reflect"
+ ]
+ },
+ "org.opentest4j:opentest4j": {
+ "locked": "1.2.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "net.minidev:accessors-smart"
+ ]
+ },
+ "org.powermock:powermock-api-mockito2": {
+ "locked": "2.0.9"
+ },
+ "org.powermock:powermock-api-support": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-api-mockito2"
+ ]
+ },
+ "org.powermock:powermock-core": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-api-support",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "org.powermock:powermock-module-junit4": {
+ "locked": "2.0.9"
+ },
+ "org.powermock:powermock-module-junit4-common": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-module-junit4"
+ ]
+ },
+ "org.powermock:powermock-reflect": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-api-support",
+ "org.powermock:powermock-core",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "org.skyscreamer:jsonassert": {
+ "locked": "1.5.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2",
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.8.0-alpha1"
- },
- "org.slf4j:slf4j-log4j12": {
- "locked": "1.8.0-alpha1",
- "requested": "1.8.0-alpha1"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.jayway.jsonpath:json-path",
+ "com.netflix.servo:servo-core",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j"
+ ]
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-log4j2": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-test": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-test": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-test-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression",
+ "org.springframework:spring-test"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.springframework:spring-test": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.xmlunit:xmlunit-core": {
+ "locked": "2.7.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "software.amazon.ion:ion-java": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "xmlpull:xmlpull": {
+ "locked": "1.1.3.1",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
+ },
+ "xpp3:xpp3_min": {
+ "locked": "1.1.4c",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
}
},
- "testRuntime": {
+ "testRuntimeClasspath": {
+ "antlr:antlr": {
+ "locked": "2.7.7",
+ "transitive": [
+ "org.antlr:antlr-runtime",
+ "org.antlr:stringtemplate"
+ ]
+ },
+ "aopalliance:aopalliance": {
+ "locked": "1.0",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
"com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
+ "locked": "1.11.86"
+ },
+ "com.fasterxml.jackson.core:jackson-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.core:jackson-databind",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor",
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310",
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base",
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.conductor:conductor-common",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "com.fasterxml.jackson.datatype:jackson-datatype-jsr310": {
+ "locked": "2.11.4"
+ },
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-base": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
},
"com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
+ "locked": "2.11.4"
},
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations": {
+ "locked": "2.11.4",
+ "transitive": [
+ "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider"
+ ]
+ },
+ "com.github.andrewoma.dexx:dexx-collections": {
+ "locked": "0.2",
+ "transitive": [
+ "com.github.vlsi.compactmap:compactmap"
+ ]
},
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
+ "com.github.rholder:guava-retrying": {
+ "locked": "2.0.0",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
+ ]
+ },
+ "com.github.vlsi.compactmap:compactmap": {
+ "locked": "2.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.code.findbugs:jsr305": {
+ "locked": "3.0.2",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.guava:guava",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.eureka:eureka-client",
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "com.google.code.gson:gson": {
+ "locked": "2.8.7",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "com.google.errorprone:error_prone_annotations": {
+ "locked": "2.3.4",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:failureaccess": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.guava:guava": {
+ "locked": "30.0-jre",
+ "transitive": [
+ "com.github.rholder:guava-retrying",
+ "com.google.inject:guice",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.netflix-commons:netflix-infix",
+ "com.netflix.servo:servo-core"
+ ]
+ },
+ "com.google.guava:listenablefuture": {
+ "locked": "9999.0-empty-to-avoid-conflict-with-guava",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "com.google.inject:guice": {
+ "locked": "4.1.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.google.j2objc:j2objc-annotations": {
+ "locked": "1.3",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
},
"com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
+ "locked": "3.13.0",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
+ ]
+ },
+ "com.jayway.jsonpath:json-path": {
+ "locked": "2.4.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
},
"com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
+ "locked": "0.7.6",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
+ },
+ "com.netflix.conductor:conductor-annotations": {
+ "project": true,
+ "transitive": [
+ "com.netflix.conductor:conductor-common"
+ ]
},
"com.netflix.conductor:conductor-common": {
"project": true
},
"com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
+ "locked": "1.10.10"
+ },
+ "com.netflix.netflix-commons:netflix-eventbus": {
+ "locked": "0.3.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.netflix.netflix-commons:netflix-infix": {
+ "locked": "0.3.0",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
+ },
+ "com.netflix.servo:servo-core": {
+ "locked": "0.12.21",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
},
"com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
+ "locked": "0.122.0"
+ },
+ "com.sun.jersey.contribs:jersey-apache-client4": {
+ "locked": "1.19.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
},
"com.sun.jersey:jersey-client": {
"locked": "1.19.4",
- "requested": "1.19.4"
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
+ },
+ "com.sun.jersey:jersey-core": {
+ "locked": "1.19.4",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-client"
+ ]
+ },
+ "com.thoughtworks.xstream:xstream": {
+ "locked": "1.4.13",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "com.vaadin.external.google:android-json": {
+ "locked": "0.0.20131108.vaadin1",
+ "transitive": [
+ "org.skyscreamer:jsonassert"
+ ]
+ },
+ "commons-codec:commons-codec": {
+ "locked": "1.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "commons-configuration:commons-configuration": {
+ "locked": "1.10",
+ "transitive": [
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "commons-io:commons-io": {
+ "locked": "2.7"
+ },
+ "commons-jxpath:commons-jxpath": {
+ "locked": "1.3",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "commons-lang:commons-lang": {
+ "locked": "2.6",
+ "transitive": [
+ "commons-configuration:commons-configuration"
+ ]
+ },
+ "commons-logging:commons-logging": {
+ "locked": "1.2",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "commons-configuration:commons-configuration",
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "jakarta.activation:jakarta.activation-api": {
+ "locked": "1.2.2",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "jakarta.xml.bind:jakarta.xml.bind-api"
+ ]
+ },
+ "jakarta.annotation:jakarta.annotation-api": {
+ "locked": "1.3.5",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "jakarta.xml.bind:jakarta.xml.bind-api": {
+ "locked": "2.3.3",
+ "transitive": [
+ "com.fasterxml.jackson.module:jackson-module-jaxb-annotations",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
},
"javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
+ "locked": "1",
+ "transitive": [
+ "com.google.inject:guice"
+ ]
+ },
+ "javax.servlet:servlet-api": {
+ "locked": "2.5",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "javax.ws.rs:jsr311-api": {
+ "locked": "1.1.1",
+ "transitive": [
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey:jersey-core"
+ ]
+ },
+ "joda-time:joda-time": {
+ "locked": "2.8.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
},
"junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
- },
- "org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
- },
- "org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.8.0-alpha1"
- },
- "org.slf4j:slf4j-log4j12": {
- "locked": "1.8.0-alpha1",
- "requested": "1.8.0-alpha1"
- }
- },
- "testRuntimeClasspath": {
- "com.amazonaws:aws-java-sdk-core": {
- "locked": "1.11.86",
- "requested": "1.11.86"
- },
- "com.fasterxml.jackson.core:jackson-core": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.core:jackson-databind": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "2.8.7"
- },
- "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider": {
- "locked": "2.7.5",
- "requested": "2.7.5"
- },
- "com.github.rholder:guava-retrying": {
- "firstLevelTransitive": [
+ "locked": "4.13.2",
+ "transitive": [
+ "org.junit.vintage:junit-vintage-engine",
+ "org.powermock:powermock-module-junit4",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "net.bytebuddy:byte-buddy": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core",
+ "org.powermock:powermock-core",
+ "org.powermock:powermock-reflect"
+ ]
+ },
+ "net.bytebuddy:byte-buddy-agent": {
+ "locked": "1.10.22",
+ "transitive": [
+ "org.mockito:mockito-core",
+ "org.powermock:powermock-core",
+ "org.powermock:powermock-reflect"
+ ]
+ },
+ "net.minidev:accessors-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "net.minidev:json-smart"
+ ]
+ },
+ "net.minidev:json-smart": {
+ "locked": "2.3.1",
+ "transitive": [
+ "com.jayway.jsonpath:json-path"
+ ]
+ },
+ "org.antlr:antlr-runtime": {
+ "locked": "3.4",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-infix"
+ ]
+ },
+ "org.antlr:stringtemplate": {
+ "locked": "3.2.1",
+ "transitive": [
+ "org.antlr:antlr-runtime"
+ ]
+ },
+ "org.apache.bval:bval-jsr": {
+ "locked": "2.0.5",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "2.0.0"
+ ]
},
- "com.github.vmg.protogen:protogen-annotations": {
- "firstLevelTransitive": [
+ "org.apache.commons:commons-lang3": {
+ "locked": "3.10",
+ "transitive": [
"com.netflix.conductor:conductor-common"
- ],
- "locked": "1.0.0"
- },
- "com.google.protobuf:protobuf-java": {
- "firstLevelTransitive": [
+ ]
+ },
+ "org.apache.commons:commons-math": {
+ "locked": "2.2",
+ "transitive": [
+ "com.netflix.netflix-commons:netflix-eventbus"
+ ]
+ },
+ "org.apache.httpcomponents:httpclient": {
+ "locked": "4.5.13",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core",
+ "com.netflix.eureka:eureka-client",
+ "com.sun.jersey.contribs:jersey-apache-client4"
+ ]
+ },
+ "org.apache.httpcomponents:httpcore": {
+ "locked": "4.4.14",
+ "transitive": [
+ "org.apache.httpcomponents:httpclient"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-api": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "org.apache.logging.log4j:log4j-core",
+ "org.apache.logging.log4j:log4j-jul",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-core": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.apache.logging.log4j:log4j-web",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-jul": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-slf4j-impl": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
+ "com.netflix.conductor:conductor-common",
+ "org.springframework.boot:spring-boot-starter-log4j2"
+ ]
+ },
+ "org.apache.logging.log4j:log4j-web": {
+ "locked": "2.17.0",
+ "transitive": [
+ "com.netflix.conductor:conductor-annotations",
"com.netflix.conductor:conductor-common"
- ],
- "locked": "3.5.1"
- },
- "com.netflix.archaius:archaius-core": {
- "locked": "0.7.5",
- "requested": "0.7.5"
- },
- "com.netflix.conductor:conductor-common": {
- "project": true
- },
- "com.netflix.eureka:eureka-client": {
- "locked": "1.8.7",
- "requested": "1.8.7"
- },
- "com.netflix.spectator:spectator-api": {
- "locked": "0.68.0",
- "requested": "0.68.0"
- },
- "com.sun.jersey:jersey-client": {
- "locked": "1.19.4",
- "requested": "1.19.4"
- },
- "javax.inject:javax.inject": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1"
- },
- "junit:junit": {
- "locked": "4.12",
- "requested": "4.12"
+ ]
+ },
+ "org.apiguardian:apiguardian-api": {
+ "locked": "1.1.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.assertj:assertj-core": {
+ "locked": "3.16.1",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.checkerframework:checker-qual": {
+ "locked": "3.5.0",
+ "transitive": [
+ "com.google.guava:guava"
+ ]
+ },
+ "org.codehaus.jettison:jettison": {
+ "locked": "1.4.0",
+ "transitive": [
+ "com.netflix.eureka:eureka-client"
+ ]
+ },
+ "org.hamcrest:hamcrest": {
+ "locked": "2.2",
+ "transitive": [
+ "org.hamcrest:hamcrest-core",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.hamcrest:hamcrest-core": {
+ "locked": "2.2",
+ "transitive": [
+ "org.powermock:powermock-module-junit4",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "org.javassist:javassist": {
+ "locked": "3.27.0-GA",
+ "transitive": [
+ "org.powermock:powermock-core"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-api": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.mockito:mockito-junit-jupiter"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.jupiter:junit-jupiter-params": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter"
+ ]
+ },
+ "org.junit.platform:junit-platform-commons": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.junit.platform:junit-platform-engine": {
+ "locked": "1.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
+ },
+ "org.junit.vintage:junit-vintage-engine": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.junit:junit-bom": {
+ "locked": "5.6.3",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter",
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.jupiter:junit-jupiter-engine",
+ "org.junit.jupiter:junit-jupiter-params",
+ "org.junit.platform:junit-platform-commons",
+ "org.junit.platform:junit-platform-engine",
+ "org.junit.vintage:junit-vintage-engine"
+ ]
},
"org.mockito:mockito-core": {
- "locked": "1.10.19",
- "requested": "1.10.19"
+ "locked": "3.3.3",
+ "transitive": [
+ "org.mockito:mockito-junit-jupiter",
+ "org.powermock:powermock-api-mockito2",
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.mockito:mockito-junit-jupiter": {
+ "locked": "3.3.3",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.objenesis:objenesis": {
+ "locked": "3.0.1",
+ "transitive": [
+ "org.mockito:mockito-core",
+ "org.powermock:powermock-reflect"
+ ]
+ },
+ "org.opentest4j:opentest4j": {
+ "locked": "1.2.0",
+ "transitive": [
+ "org.junit.jupiter:junit-jupiter-api",
+ "org.junit.platform:junit-platform-engine"
+ ]
+ },
+ "org.ow2.asm:asm": {
+ "locked": "5.0.4",
+ "transitive": [
+ "net.minidev:accessors-smart"
+ ]
+ },
+ "org.powermock:powermock-api-mockito2": {
+ "locked": "2.0.9"
+ },
+ "org.powermock:powermock-api-support": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-api-mockito2"
+ ]
+ },
+ "org.powermock:powermock-core": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-api-support",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "org.powermock:powermock-module-junit4": {
+ "locked": "2.0.9"
+ },
+ "org.powermock:powermock-module-junit4-common": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-module-junit4"
+ ]
+ },
+ "org.powermock:powermock-reflect": {
+ "locked": "2.0.9",
+ "transitive": [
+ "org.powermock:powermock-api-support",
+ "org.powermock:powermock-core",
+ "org.powermock:powermock-module-junit4-common"
+ ]
+ },
+ "org.skyscreamer:jsonassert": {
+ "locked": "1.5.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.slf4j:jul-to-slf4j": {
+ "locked": "1.7.30",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-log4j2",
+ "org.springframework.boot:spring-boot-starter-logging"
+ ]
},
"org.slf4j:slf4j-api": {
- "firstLevelTransitive": [
- "com.netflix.conductor:conductor-common"
- ],
- "locked": "1.8.0-alpha1"
- },
- "org.slf4j:slf4j-log4j12": {
- "locked": "1.8.0-alpha1",
- "requested": "1.8.0-alpha1"
+ "locked": "1.7.30",
+ "transitive": [
+ "com.jayway.jsonpath:json-path",
+ "com.netflix.archaius:archaius-core",
+ "com.netflix.netflix-commons:netflix-eventbus",
+ "com.netflix.netflix-commons:netflix-infix",
+ "com.netflix.servo:servo-core",
+ "com.netflix.spectator:spectator-api",
+ "org.apache.logging.log4j:log4j-slf4j-impl",
+ "org.slf4j:jul-to-slf4j"
+ ]
+ },
+ "org.springframework.boot:spring-boot": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-autoconfigure",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-log4j2": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-starter-logging": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "org.springframework.boot:spring-boot-starter-test": {
+ "locked": "2.3.12.RELEASE"
+ },
+ "org.springframework.boot:spring-boot-test": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework.boot:spring-boot-test-autoconfigure"
+ ]
+ },
+ "org.springframework.boot:spring-boot-test-autoconfigure": {
+ "locked": "2.3.12.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.springframework:spring-aop": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-beans": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-aop",
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-context": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot"
+ ]
+ },
+ "org.springframework:spring-core": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot",
+ "org.springframework.boot:spring-boot-starter",
+ "org.springframework.boot:spring-boot-starter-test",
+ "org.springframework:spring-aop",
+ "org.springframework:spring-beans",
+ "org.springframework:spring-context",
+ "org.springframework:spring-expression",
+ "org.springframework:spring-test"
+ ]
+ },
+ "org.springframework:spring-expression": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-context"
+ ]
+ },
+ "org.springframework:spring-jcl": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework:spring-core"
+ ]
+ },
+ "org.springframework:spring-test": {
+ "locked": "5.2.15.RELEASE",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.xmlunit:xmlunit-core": {
+ "locked": "2.7.0",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter-test"
+ ]
+ },
+ "org.yaml:snakeyaml": {
+ "locked": "1.26",
+ "transitive": [
+ "org.springframework.boot:spring-boot-starter"
+ ]
+ },
+ "software.amazon.ion:ion-java": {
+ "locked": "1.0.1",
+ "transitive": [
+ "com.amazonaws:aws-java-sdk-core"
+ ]
+ },
+ "xmlpull:xmlpull": {
+ "locked": "1.1.3.1",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
+ },
+ "xpp3:xpp3_min": {
+ "locked": "1.1.4c",
+ "transitive": [
+ "com.thoughtworks.xstream:xstream"
+ ]
}
}
}
\ No newline at end of file
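The lock entries above follow the Nebula gradle-dependency-lock plugin format: each configuration records the resolved `locked` version and, where applicable, the `transitive` paths that pull a dependency in (replacing the older `requested`/`firstLevelTransitive` fields). A minimal sketch of how such a lock file is typically refreshed after changing dependencies, assuming the plugin's standard `generateLock` and `saveLock` tasks are wired into this build:

```shell
# Recompute the lock entries from the current dependency graph,
# then persist them back into dependencies.lock.
./gradlew generateLock saveLock

# Verify that the refreshed locks still resolve and build cleanly.
./gradlew build
```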
diff --git a/client/go/README.md b/client/go/README.md
deleted file mode 100644
index e0d48dead9..0000000000
--- a/client/go/README.md
+++ /dev/null
@@ -1,121 +0,0 @@
-# Go client for Conductor
-Go client for Conductor provides two sets of functions:
-
-1. Workflow Management APIs (start, terminate, get workflow status etc.)
-2. Worker execution framework
-
-## Prerequisites
-Go must be installed and the GOPATH environment variable set. The directory $GOPATH/src/conductor must not be in use.
-
-## Install
-
-```shell
-./install.sh
-```
-This will create a Go project under $GOPATH/src/conductor and download any dependencies.
-It can then be run:
-```shell
-go run $GOPATH/src/conductor/startclient/startclient.go
-```
-
-## Install and Run
-
-```shell
-./install_and_run.sh
-```
-This will create a Go project under $GOPATH/src/conductor and download any dependencies. In addition, it will run the Go application listed under startclient/startclient.go
-
-## Uninstall
-WARNING: This simply removes the $GOPATH/src/conductor directory where the client was installed, so any other work stored there will be deleted as well. Use with caution.
-
-```shell
-./uninstall.sh
-```
-
-## Using Workflow Management API
-Go struct ```ConductorHttpClient``` provides client API calls to the conductor server to start and manage workflows and tasks.
-
-### Example
-```go
-package main
-
-import (
- "conductor"
-)
-
-func main() {
- conductorClient := conductor.NewConductorHttpClient("http://localhost:8080")
-
- // Example API that will print out workflow definition meta
- conductorClient.GetAllWorkflowDefs()
-}
-
-```
-
-## Task Worker Execution
-The Task Worker execution API facilitates running a task worker in Go. It provides the tools needed to poll for tasks at a specified interval and to execute the Go worker in a separate goroutine.
-
-### Example
-The following Go code demonstrates workers for the tasks "task_1" and "task_2".
-
-```go
-package sample
-
-import (
-    "log"
-
-    "conductor/task"
-)
-
-// Implementation for "task_1"
-func Task_1_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) {
- log.Println("Executing Task_1_Execution_Function for", t.TaskType)
-
- //Do some logic
- taskResult = task.NewTaskResult(t)
-
- output := map[string]interface{}{"task":"task_1", "key2":"value2", "key3":3, "key4":false}
- taskResult.OutputData = output
- taskResult.Status = "COMPLETED"
- err = nil
-
- return taskResult, err
-}
-
-// Implementation for "task_2"
-func Task_2_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) {
- log.Println("Executing Task_2_Execution_Function for", t.TaskType)
-
- //Do some logic
- taskResult = task.NewTaskResult(t)
-
- output := map[string]interface{}{"task":"task_2", "key2":"value2", "key3":3, "key4":false}
- taskResult.OutputData = output
- taskResult.Status = "COMPLETED"
- err = nil
-
- return taskResult, err
-}
-
-```
-
-
-The main application then uses these workers:
-
-```go
-package main
-
-import (
- "conductor"
- "conductor/task/sample"
-)
-
-func main() {
- c := conductor.NewConductorWorker("http://localhost:8080", 1, 10000)
-
- c.Start("task_1", sample.Task_1_Execution_Function, false)
- c.Start("task_2", sample.Task_2_Execution_Function, true)
-}
-
-```
-
-Note: In the example above, the sample task implementations live in the conductor/task/sample package. Real task implementations can be placed in the conductor/task directory or in a new subdirectory.
-
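The removed README examples above only ever report a `COMPLETED` status. As a complement, here is a minimal sketch of the failure path, assuming the same `conductor/task` package from this client and that the server accepts a `FAILED` task status; `doSomeWork` is a hypothetical stand-in for real task logic:

```go
package sample

import (
	"errors"
	"log"

	"conductor/task"
)

// doSomeWork is a hypothetical placeholder for real task logic; it always
// fails here so the failure path below is exercised.
func doSomeWork() error {
	return errors.New("downstream call failed")
}

// Failing_Task_Execution_Function marks the task FAILED and records the
// reason, instead of always returning COMPLETED as the examples above do.
func Failing_Task_Execution_Function(t *task.Task) (*task.TaskResult, error) {
	log.Println("Executing Failing_Task_Execution_Function for", t.TaskType)

	taskResult := task.NewTaskResult(t)
	if err := doSomeWork(); err != nil {
		taskResult.Status = "FAILED"
		taskResult.ReasonForIncompletion = err.Error()
		return taskResult, err
	}

	taskResult.Status = "COMPLETED"
	return taskResult, nil
}
```

Such a worker would be registered like the others, e.g. `c.Start("task_3", sample.Failing_Task_Execution_Function, false)` for a hypothetical "task_3" task type.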
diff --git a/client/go/install.sh b/client/go/install.sh
deleted file mode 100755
index 4a023eba7b..0000000000
--- a/client/go/install.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-CURR_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-GO_CONDUCTOR_DIR=$GOPATH/src/conductor
-
-mkdir -p $GO_CONDUCTOR_DIR
-cp -r $CURR_DIR/* $GO_CONDUCTOR_DIR
-
-# Install dependencies
-cd $GO_CONDUCTOR_DIR
-go get
diff --git a/client/go/install_and_run.sh b/client/go/install_and_run.sh
deleted file mode 100755
index d47c8e3aff..0000000000
--- a/client/go/install_and_run.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-CURR_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-GO_CONDUCTOR_DIR=$GOPATH/src/conductor
-
-$CURR_DIR/install.sh
-
-go run $GO_CONDUCTOR_DIR/startclient/startclient.go
diff --git a/client/go/startclient/startclient.go b/client/go/startclient/startclient.go
deleted file mode 100644
index 3da7b292da..0000000000
--- a/client/go/startclient/startclient.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2017 Netflix, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package main
-
-import (
- "conductor"
- "conductor/task/sample"
-)
-
-func main() {
- c := conductor.NewConductorWorker("http://localhost:8080/api", 1, 10000)
-
- c.Start("task_1", sample.Task_1_Execution_Function, false)
- c.Start("task_2", sample.Task_2_Execution_Function, true)
-}
diff --git a/client/go/task/taskresult.go b/client/go/task/taskresult.go
deleted file mode 100644
index 5029a4f90e..0000000000
--- a/client/go/task/taskresult.go
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2017 Netflix, Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package task
-
-import (
- "encoding/json"
-)
-
-type TaskResultStatus string
-
-type TaskResult struct {
- Status TaskResultStatus `json:"status"`
- WorkflowInstanceId string `json:"workflowInstanceId"`
- TaskId string `json:"taskId"`
- ReasonForIncompletion string `json:"reasonForIncompletion"`
- CallbackAfterSeconds int64 `json:"callbackAfterSeconds"`
- WorkerId string `json:"workerId"`
- OutputData map[string]interface{} `json:"outputData"`
-}
-
-// "Constructor" to initialize non-zero value defaults
-func NewEmptyTaskResult() *TaskResult {
- taskResult := new(TaskResult)
- taskResult.OutputData = make(map[string]interface{})
- return taskResult
-}
-
-func NewTaskResult(t *Task) *TaskResult {
- taskResult := new(TaskResult)
- taskResult.CallbackAfterSeconds = t.CallbackAfterSeconds
- taskResult.WorkflowInstanceId = t.WorkflowInstanceId
- taskResult.TaskId = t.TaskId
- taskResult.ReasonForIncompletion = t.ReasonForIncompletion
- taskResult.Status = TaskResultStatus(t.Status)
- taskResult.WorkerId = t.WorkerId
- taskResult.OutputData = t.OutputData
- return taskResult
-}
-
-func (t *TaskResult) ToJSONString() (string, error) {
- var jsonString string
- b, err := json.Marshal(t)
- if err == nil {
- jsonString = string(b)
- }
- return jsonString, err
-}
-
-func ParseTaskResult(inputJSON string) (*TaskResult, error) {
- t := NewEmptyTaskResult()
- err := json.Unmarshal([]byte(inputJSON), t)
- return t, err
-}
diff --git a/client/go/uninstall.sh b/client/go/uninstall.sh
deleted file mode 100755
index 8bbe9c0aa5..0000000000
--- a/client/go/uninstall.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-CURR_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-GO_CONDUCTOR_DIR=$GOPATH/src/conductor
-
-rm -rf $GO_CONDUCTOR_DIR
diff --git a/client/python/README.md b/client/python/README.md
deleted file mode 100644
index 64f0fd7338..0000000000
--- a/client/python/README.md
+++ /dev/null
@@ -1,64 +0,0 @@
-# Python client for Conductor
-The Python client for Conductor provides two sets of functions:
-
-1. Workflow management APIs (start, terminate, get workflow status etc.)
-2. Worker execution framework
-
-## Install
-
-```shell
-pip install conductor
-```
-
-## Using Workflow Management API
-The Python class ```WorkflowClient``` provides client API calls to the Conductor server to start and manage workflows.
-
-### Example
-
-```python
-import sys
-from conductor import conductor
-import json
-
-def getStatus(workflowId):
-
- workflowClient = conductor.WorkflowClient('http://localhost:8080/api')
-
- workflow_json = workflowClient.getWorkflow(workflowId)
- print(json.dumps(workflow_json, indent=2, separators=(',', ': ')))
-
- return workflow_json
-
-```
-
-## Task Worker Execution
-Task worker execution APIs facilitate the execution of a task worker using the Python client.
-The API provides the necessary mechanism to poll for task work at a regular interval and execute the Python worker in separate threads.
-
-### Example
-The following Python script demonstrates workers for the kitchensink workflow.
-
-```python
-from conductor.ConductorWorker import ConductorWorker
-
-def execute(task):
- return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0}}
-
-def execute4(task):
- forkTasks = [{"name": "task_1", "taskReferenceName": "task_1_1", "type": "SIMPLE"},{"name": "sub_workflow_4", "taskReferenceName": "wf_dyn", "type": "SUB_WORKFLOW", "subWorkflowParam": {"name": "sub_flow_1"}}];
- input = {'task_1_1': {}, 'wf_dyn': {}}
- return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0, 'dynamicTasks': forkTasks, 'inputs': input}}
-
-def main():
- print('Hello World')
- cc = ConductorWorker('http://localhost:8080/api', 1, 0.1)
- for x in range(1, 30):
- if(x == 4):
- cc.start('task_{0}'.format(x), execute4, False)
- else:
- cc.start('task_{0}'.format(x), execute, False)
- cc.start('task_30', execute, True)
-
-if __name__ == '__main__':
- main()
-```
\ No newline at end of file
diff --git a/client/python/conductor/ConductorWorker.py b/client/python/conductor/ConductorWorker.py
deleted file mode 100644
index 603c5d868a..0000000000
--- a/client/python/conductor/ConductorWorker.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#
-# Copyright 2017 Netflix, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from __future__ import print_function, absolute_import
-import sys
-import time
-from conductor.conductor import WFClientMgr
-from threading import Thread
-import socket
-
-hostname = socket.gethostname()
-
-
-class ConductorWorker:
- def __init__(self, server_url, thread_count, polling_interval, worker_id=None):
- wfcMgr = WFClientMgr(server_url)
- self.workflowClient = wfcMgr.workflowClient
- self.taskClient = wfcMgr.taskClient
- self.thread_count = thread_count
- self.polling_interval = polling_interval
- self.worker_id = worker_id or hostname
-
- def execute(self, task, exec_function):
- try:
- resp = exec_function(task)
- if resp is None:
- raise Exception('Task execution function MUST return a response as a dict with status and output fields')
- task['status'] = resp['status']
- task['outputData'] = resp['output']
- task['logs'] = resp['logs']
- self.taskClient.updateTask(task)
- except Exception as err:
- print('Error executing task: ' + str(err))
- task['status'] = 'FAILED'
- self.taskClient.updateTask(task)
-
- def poll_and_execute(self, taskType, exec_function, domain=None):
- while True:
- time.sleep(float(self.polling_interval))
- polled = self.taskClient.pollForTask(taskType, self.worker_id, domain)
- if polled is not None:
- if self.taskClient.ackTask(polled['taskId'], self.worker_id):
- self.execute(polled, exec_function)
-
- def start(self, taskType, exec_function, wait, domain=None):
- print('Polling for task %s at a %f ms interval with %d threads for task execution, with worker id as %s' % (taskType, self.polling_interval * 1000, self.thread_count, self.worker_id))
- for x in range(0, int(self.thread_count)):
- thread = Thread(target=self.poll_and_execute, args=(taskType, exec_function, domain,))
- thread.daemon = True
- thread.start()
- if wait:
- while 1:
- time.sleep(1)
-
-
-def exc(taskType, inputData, startTime, retryCount, status, callbackAfterSeconds, pollCount):
- print('Executing the function')
- return {'status': 'COMPLETED', 'output': {}}
-
-
-def main():
- cc = ConductorWorker('http://localhost:8080/api', 5, 0.1)
- cc.start(sys.argv[1], exc, False)
- cc.start(sys.argv[2], exc, True)
-
-
-if __name__ == '__main__':
- main()
diff --git a/client/src/main/java/com/netflix/conductor/client/automator/PollingSemaphore.java b/client/src/main/java/com/netflix/conductor/client/automator/PollingSemaphore.java
new file mode 100644
index 0000000000..c57e3ec5ae
--- /dev/null
+++ b/client/src/main/java/com/netflix/conductor/client/automator/PollingSemaphore.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.automator;
+
+import java.util.concurrent.Semaphore;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A class wrapping a semaphore which holds the number of permits available for polling and
+ * executing tasks.
+ */
+class PollingSemaphore {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PollingSemaphore.class);
+ private final Semaphore semaphore;
+
+ PollingSemaphore(int numSlots) {
+ LOGGER.debug("Polling semaphore initialized with {} permits", numSlots);
+ semaphore = new Semaphore(numSlots);
+ }
+
+ /**
+ * Signals if polling is allowed based on whether a permit can be acquired.
+ *
+ * @return {@code true} if a permit was acquired, {@code false} if a permit could not be acquired
+ */
+ boolean canPoll() {
+ boolean acquired = semaphore.tryAcquire();
+ LOGGER.debug("Trying to acquire permit: {}", acquired);
+ return acquired;
+ }
+
+ /** Signals that processing is complete and the permit can be released. */
+ void complete() {
+ LOGGER.debug("Completed execution; releasing permit");
+ semaphore.release();
+ }
+
+ /**
+ * Gets the number of threads available for processing.
+ *
+ * @return number of available permits
+ */
+ int availableThreads() {
+ int available = semaphore.availablePermits();
+ LOGGER.debug("Number of available permits: {}", available);
+ return available;
+ }
+}
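The class above is just a gate around a `Semaphore`: every successful `canPoll()` must be paired with a `complete()`, even when execution fails. A minimal sketch of that pairing, assuming a hypothetical caller in the same package (`pollAndExecute` and `processTask` are placeholder names, not Conductor APIs):

```java
package com.netflix.conductor.client.automator;

// Illustrative sketch only: shows the intended canPoll()/complete() pairing for the
// package-private PollingSemaphore above.
class PollingSemaphoreUsageSketch {

    private final PollingSemaphore pollingSemaphore = new PollingSemaphore(5);

    void pollAndExecute() {
        // Only poll when a permit is available; otherwise skip this cycle.
        if (!pollingSemaphore.canPoll()) {
            return;
        }
        try {
            processTask(); // hypothetical work done while the permit is held
        } finally {
            // Always release the permit, even if execution throws.
            pollingSemaphore.complete();
        }
    }

    private void processTask() {
        // polling the server and running the worker would happen here
    }
}
```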
diff --git a/client/src/main/java/com/netflix/conductor/client/automator/TaskPollExecutor.java b/client/src/main/java/com/netflix/conductor/client/automator/TaskPollExecutor.java
new file mode 100644
index 0000000000..38424b2560
--- /dev/null
+++ b/client/src/main/java/com/netflix/conductor/client/automator/TaskPollExecutor.java
@@ -0,0 +1,344 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.automator;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.appinfo.InstanceInfo.InstanceStatus;
+import com.netflix.conductor.client.config.PropertyFactory;
+import com.netflix.conductor.client.http.TaskClient;
+import com.netflix.conductor.client.telemetry.MetricsContainer;
+import com.netflix.conductor.client.worker.Worker;
+import com.netflix.conductor.common.metadata.tasks.Task;
+import com.netflix.conductor.common.metadata.tasks.TaskResult;
+import com.netflix.conductor.common.utils.RetryUtil;
+import com.netflix.discovery.EurekaClient;
+
+import com.google.common.base.Stopwatch;
+
+/**
+ * Manages the threadpool used by the workers for execution and server communication (polling and
+ * task update).
+ */
+class TaskPollExecutor {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(TaskPollExecutor.class);
+
+ private final EurekaClient eurekaClient;
+ private final TaskClient taskClient;
+ private final int updateRetryCount;
+ private final ExecutorService executorService;
+ private final Map
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.automator;
+
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.conductor.client.exception.ConductorClientException;
+import com.netflix.conductor.client.http.TaskClient;
+import com.netflix.conductor.client.worker.Worker;
+import com.netflix.discovery.EurekaClient;
+
+import com.google.common.base.Preconditions;
+
+/** Configures automated polling of tasks and execution via the registered {@link Worker}s. */
+public class TaskRunnerConfigurer {
+ private static final Logger LOGGER = LoggerFactory.getLogger(TaskRunnerConfigurer.class);
+ private static final String INVALID_THREAD_COUNT =
+ "Invalid worker thread count specified, use either shared thread pool or config thread count per task";
+ private static final String MISSING_TASK_THREAD_COUNT =
+ "Missing task thread count config for %s";
+
+ private ScheduledExecutorService scheduledExecutorService;
+
+ private final EurekaClient eurekaClient;
+ private final TaskClient taskClient;
+ private final List Please see {@link TaskRunnerConfigurer#init()} method. The method must be called after
+ * this constructor for the polling to start.
+ */
+ public TaskRunnerConfigurer build() {
+ return new TaskRunnerConfigurer(this);
+ }
+ }
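Per the javadoc above, `build()` only constructs the configurer; polling does not start until `init()` is called on the result. A hedged bootstrap sketch of that flow: the `Builder(taskClient, workers)` constructor shape and the `withThreadCount` setter are assumptions inferred from the fields in this class rather than confirmed by this hunk, and the anonymous worker assumes the `Worker` interface's `getTaskDefName()`/`execute(Task)` contract.

```java
import java.util.Collections;
import java.util.List;

import com.netflix.conductor.client.automator.TaskRunnerConfigurer;
import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.client.worker.Worker;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;

public class WorkerBootstrapSketch {

    public static void main(String[] args) {
        TaskClient taskClient = new TaskClient(); // default config, mirroring the other clients
        taskClient.setRootURI("http://localhost:8080/api/");

        Worker worker = new Worker() {
            @Override
            public String getTaskDefName() {
                return "task_1";
            }

            @Override
            public TaskResult execute(Task task) {
                TaskResult result = new TaskResult(task);
                result.setStatus(TaskResult.Status.COMPLETED);
                return result;
            }
        };
        List<Worker> workers = Collections.singletonList(worker);

        // build() only wires the configurer together; polling starts once init() is called.
        TaskRunnerConfigurer configurer =
                new TaskRunnerConfigurer.Builder(taskClient, workers) // assumed constructor shape
                        .withThreadCount(2) // assumed setter for the shared pool size
                        .build();
        configurer.init();
    }
}
```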
+
+ /** @return Thread Count for the shared executor pool */
+ public int getThreadCount() {
+ return threadCount;
+ }
+
+ /** @return Thread Count for individual task type */
+ public Map
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
*/
-
package com.netflix.conductor.client.config;
public interface ConductorClientConfiguration {
/**
- * @return the workflow input payload size threshold in KB,
- * beyond which the payload will be processed based on {@link ConductorClientConfiguration#isExternalPayloadStorageEnabled()}.
+ * @return the workflow input payload size threshold in KB, beyond which the payload will be
+ * processed based on {@link
+ * ConductorClientConfiguration#isExternalPayloadStorageEnabled()}.
*/
int getWorkflowInputPayloadThresholdKB();
/**
- * @return the max value of workflow input payload size threshold in KB,
- * beyond which the payload will be rejected regardless external payload storage is enabled.
+ * @return the max value of the workflow input payload size threshold in KB, beyond which the
+ * payload will be rejected regardless of whether external payload storage is enabled.
*/
int getWorkflowInputMaxPayloadThresholdKB();
/**
- * @return the task output payload size threshold in KB,
- * beyond which the payload will be processed based on {@link ConductorClientConfiguration#isExternalPayloadStorageEnabled()}.
+ * @return the task output payload size threshold in KB, beyond which the payload will be
+ * processed based on {@link
+ * ConductorClientConfiguration#isExternalPayloadStorageEnabled()}.
*/
int getTaskOutputPayloadThresholdKB();
/**
- * @return the max value of task output payload size threshold in KB,
- * beyond which the payload will be rejected regardless external payload storage is enabled.
+ * @return the max value of the task output payload size threshold in KB, beyond which the payload
+ * will be rejected regardless of whether external payload storage is enabled.
*/
int getTaskOutputMaxPayloadThresholdKB();
/**
- * @return the flag which controls the use of external storage for storing workflow/task
- * input and output JSON payloads with size greater than threshold.
- * If it is set to true, the payload is stored in external location.
- * If it is set to false, the payload is rejected and the task/workflow execution fails.
+ * @return the flag which controls the use of external storage for storing workflow/task input
+ * and output JSON payloads with size greater than the threshold. If set to true, the
+ * payload is stored in an external location. If set to false, the payload is rejected
+ * and the task/workflow execution fails.
*/
boolean isExternalPayloadStorageEnabled();
}
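For reference, a client that wants external payload storage only needs to supply its own implementation of this interface; the threshold values below are illustrative, not defaults defined by this change:

```java
import com.netflix.conductor.client.config.ConductorClientConfiguration;

/** Illustrative configuration enabling external payload storage with example thresholds. */
public class ExternalStorageClientConfiguration implements ConductorClientConfiguration {

    @Override
    public int getWorkflowInputPayloadThresholdKB() {
        return 10; // workflow inputs above 10 KB go to external storage
    }

    @Override
    public int getWorkflowInputMaxPayloadThresholdKB() {
        return 10240; // workflow inputs above 10 MB are rejected outright
    }

    @Override
    public int getTaskOutputPayloadThresholdKB() {
        return 10; // task outputs above 10 KB go to external storage
    }

    @Override
    public int getTaskOutputMaxPayloadThresholdKB() {
        return 10240; // task outputs above 10 MB are rejected outright
    }

    @Override
    public boolean isExternalPayloadStorageEnabled() {
        return true;
    }
}
```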
diff --git a/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java b/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java
index 5ea2c12435..f15cf3bab0 100644
--- a/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java
+++ b/client/src/main/java/com/netflix/conductor/client/config/DefaultConductorClientConfiguration.java
@@ -1,24 +1,20 @@
/*
- * Copyright 2018 Netflix, Inc.
+ * Copyright 2020 Netflix, Inc.
*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
*/
-
package com.netflix.conductor.client.config;
/**
- * A default implementation of {@link ConductorClientConfiguration}
- * where external payload storage is disabled.
+ * A default implementation of {@link ConductorClientConfiguration} where external payload storage
+ * is disabled.
*/
public class DefaultConductorClientConfiguration implements ConductorClientConfiguration {
diff --git a/client/src/main/java/com/netflix/conductor/client/config/PropertyFactory.java b/client/src/main/java/com/netflix/conductor/client/config/PropertyFactory.java
new file mode 100644
index 0000000000..443b854817
--- /dev/null
+++ b/client/src/main/java/com/netflix/conductor/client/config/PropertyFactory.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2020 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.config;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+import com.netflix.config.DynamicProperty;
+
+/** Used to configure the Conductor workers using properties. */
+public class PropertyFactory {
+
+ private final DynamicProperty global;
+ private final DynamicProperty local;
+
+ private static final String PROPERTY_PREFIX = "conductor.worker";
+
+ private static final ConcurrentHashMap
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.exception;
import java.util.List;
-/**
- * Client exception thrown from Conductor api clients.
- */
+import com.netflix.conductor.common.validation.ErrorResponse;
+import com.netflix.conductor.common.validation.ValidationError;
+
+/** Client exception thrown from Conductor api clients. */
public class ConductorClientException extends RuntimeException {
private int status;
@@ -48,6 +59,7 @@ public ConductorClientException(int status, String message) {
public ConductorClientException(int status, ErrorResponse errorResponse) {
super(errorResponse.getMessage());
this.status = status;
+ this.retryable = errorResponse.isRetryable();
this.message = errorResponse.getMessage();
this.code = errorResponse.getCode();
this.instance = errorResponse.getInstance();
diff --git a/client/src/main/java/com/netflix/conductor/client/exceptions/ErrorResponse.java b/client/src/main/java/com/netflix/conductor/client/exceptions/ErrorResponse.java
deleted file mode 100644
index 092202b0a1..0000000000
--- a/client/src/main/java/com/netflix/conductor/client/exceptions/ErrorResponse.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package com.netflix.conductor.client.exceptions;
-
-import java.util.List;
-import com.netflix.conductor.common.validation.ValidationError;
-import java.util.StringJoiner;
-
-
-//TODO: Use one from common
-public class ErrorResponse {
-
- private String code;
- private String message;
- private String instance;
- private boolean retryable;
-
- public List
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
*/
-
package com.netflix.conductor.client.http;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
-import com.google.common.base.Preconditions;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.util.Collection;
+import java.util.Map;
+import java.util.function.Function;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.UriBuilder;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.netflix.conductor.client.config.ConductorClientConfiguration;
import com.netflix.conductor.client.config.DefaultConductorClientConfiguration;
-import com.netflix.conductor.client.exceptions.ConductorClientException;
-import com.netflix.conductor.client.exceptions.ErrorResponse;
+import com.netflix.conductor.client.exception.ConductorClientException;
+import com.netflix.conductor.common.config.ObjectMapperProvider;
+import com.netflix.conductor.common.model.BulkResponse;
import com.netflix.conductor.common.run.ExternalStorageLocation;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
-import com.netflix.conductor.common.utils.JsonMapperProvider;
+import com.netflix.conductor.common.validation.ErrorResponse;
+
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
+import com.google.common.base.Preconditions;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientHandler;
import com.sun.jersey.api.client.ClientHandlerException;
@@ -35,26 +50,11 @@
import com.sun.jersey.api.client.WebResource.Builder;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.UriBuilder;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.util.Collection;
-import java.util.Map;
-import java.util.function.Function;
-
-/**
- * Abstract client for the REST template
- */
+/** Abstract client for the REST template */
public abstract class ClientBase {
- private static Logger logger = LoggerFactory.getLogger(ClientBase.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(ClientBase.class);
protected final Client client;
@@ -78,8 +78,16 @@ protected ClientBase(ClientConfig config, ClientHandler handler) {
this(config, new DefaultConductorClientConfiguration(), handler);
}
- protected ClientBase(ClientConfig config, ConductorClientConfiguration clientConfiguration, ClientHandler handler) {
- objectMapper = new JsonMapperProvider().get();
+ protected ClientBase(
+ ClientConfig config,
+ ConductorClientConfiguration clientConfiguration,
+ ClientHandler handler) {
+ objectMapper = new ObjectMapperProvider().getObjectMapper();
+
+ // https://github.com/FasterXML/jackson-databind/issues/2683
+ if (isNewerJacksonVersion()) {
+ objectMapper.registerModule(new JavaTimeModule());
+ }
JacksonJsonProvider provider = new JacksonJsonProvider(objectMapper);
config.getSingletons().add(provider);
@@ -94,24 +102,47 @@ protected ClientBase(ClientConfig config, ConductorClientConfiguration clientCon
payloadStorage = new PayloadStorage(this);
}
+ private boolean isNewerJacksonVersion() {
+ Version version = com.fasterxml.jackson.databind.cfg.PackageVersion.VERSION;
+ return version.getMajorVersion() == 2 && version.getMinorVersion() >= 12;
+ }
+
public void setRootURI(String root) {
this.root = root;
}
protected void delete(String url, Object... uriVariables) {
- delete(null, url, uriVariables);
+ deleteWithUriVariables(null, url, uriVariables);
+ }
+
+ protected void deleteWithUriVariables(
+ Object[] queryParams, String url, Object... uriVariables) {
+ delete(queryParams, url, uriVariables, null);
+ }
+
+ protected BulkResponse deleteWithRequestBody(Object[] queryParams, String url, Object body) {
+ return delete(queryParams, url, null, body);
}
- protected void delete(Object[] queryParams, String url, Object... uriVariables) {
+ private BulkResponse delete(
+ Object[] queryParams, String url, Object[] uriVariables, Object body) {
URI uri = null;
try {
uri = getURIBuilder(root + url, queryParams).build(uriVariables);
- client.resource(uri).delete();
+ if (body != null) {
+ return client.resource(uri)
+ .type(MediaType.APPLICATION_JSON_TYPE)
+ .delete(BulkResponse.class, body);
+ } else {
+ client.resource(uri).delete();
+ }
} catch (UniformInterfaceException e) {
handleUniformInterfaceException(e, uri);
} catch (RuntimeException e) {
handleRuntimeException(e, uri);
}
+
+ return null;
}
protected void put(String url, Object[] queryParams, Object request, Object... uriVariables) {
@@ -134,16 +165,43 @@ protected void postForEntityWithUriVariablesOnly(String url, Object... uriVariab
postForEntity(url, null, null, type, uriVariables);
}
-
- protected
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.client.http;
+
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+
+import com.netflix.conductor.client.config.ConductorClientConfiguration;
+import com.netflix.conductor.client.config.DefaultConductorClientConfiguration;
+import com.netflix.conductor.common.metadata.events.EventHandler;
+
+import com.google.common.base.Preconditions;
+import com.sun.jersey.api.client.ClientHandler;
+import com.sun.jersey.api.client.GenericType;
+import com.sun.jersey.api.client.config.ClientConfig;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.api.client.filter.ClientFilter;
+
+// Client class for all Event Handler operations
+public class EventClient extends ClientBase {
+ private static final GenericType
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
*/
-
package com.netflix.conductor.client.http;
-import com.google.common.base.Preconditions;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+
import com.netflix.conductor.client.config.ConductorClientConfiguration;
import com.netflix.conductor.client.config.DefaultConductorClientConfiguration;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
+
+import com.google.common.base.Preconditions;
import com.sun.jersey.api.client.ClientHandler;
-import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.api.client.filter.ClientFilter;
-import org.apache.commons.lang.StringUtils;
-
-import java.util.List;
public class MetadataClient extends ClientBase {
- private static GenericType
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
*/
-
package com.netflix.conductor.client.http;
-import com.amazonaws.util.IOUtils;
-import com.netflix.conductor.client.exceptions.ConductorClientException;
-import com.netflix.conductor.common.run.ExternalStorageLocation;
-import com.netflix.conductor.common.utils.ExternalPayloadStorage;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -32,11 +21,21 @@
import java.net.URISyntaxException;
import java.net.URL;
-/**
- * An implementation of {@link ExternalPayloadStorage} for storing large JSON payload data.
- */
+import javax.ws.rs.core.Response;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.netflix.conductor.client.exception.ConductorClientException;
+import com.netflix.conductor.common.run.ExternalStorageLocation;
+import com.netflix.conductor.common.utils.ExternalPayloadStorage;
+
+import com.amazonaws.util.IOUtils;
+
+/** An implementation of {@link ExternalPayloadStorage} for storing large JSON payload data. */
class PayloadStorage implements ExternalPayloadStorage {
- private static final Logger logger = LoggerFactory.getLogger(PayloadStorage.class);
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PayloadStorage.class);
private final ClientBase clientBase;
@@ -45,11 +44,12 @@ class PayloadStorage implements ExternalPayloadStorage {
}
/**
- * This method is not intended to be used in the client.
- * The client makes a request to the server to get the {@link ExternalStorageLocation}
+ * This method is not intended to be used in the client. The client makes a request to the
+ * server to get the {@link ExternalStorageLocation}
*/
@Override
- public ExternalStorageLocation getLocation(Operation operation, PayloadType payloadType, String path) {
+ public ExternalStorageLocation getLocation(
+ Operation operation, PayloadType payloadType, String path) {
String uri;
switch (payloadType) {
case WORKFLOW_INPUT:
@@ -61,18 +61,32 @@ public ExternalStorageLocation getLocation(Operation operation, PayloadType payl
uri = "tasks";
break;
default:
- throw new ConductorClientException(String.format("Invalid payload type: %s for operation: %s", payloadType.toString(), operation.toString()));
+ throw new ConductorClientException(
+ String.format(
+ "Invalid payload type: %s for operation: %s",
+ payloadType.toString(), operation.toString()));
}
- return clientBase.getForEntity(String.format("%s/externalstoragelocation", uri), new Object[]{"path", path}, ExternalStorageLocation.class);
+ return clientBase.getForEntity(
+ String.format("%s/externalstoragelocation", uri),
+ new Object[] {
+ "path",
+ path,
+ "operation",
+ operation.toString(),
+ "payloadType",
+ payloadType.toString()
+ },
+ ExternalStorageLocation.class);
}
/**
* Uploads the payload to the uri specified.
*
- * @param uri the location to which the object is to be uploaded
- * @param payload an {@link InputStream} containing the json payload which is to be uploaded
+ * @param uri the location to which the object is to be uploaded
+ * @param payload an {@link InputStream} containing the json payload which is to be uploaded
* @param payloadSize the size of the json payload in bytes
- * @throws ConductorClientException if the upload fails due to an invalid path or an error from external storage
+ * @throws ConductorClientException if the upload fails due to an invalid path or an error from
+ * external storage
*/
@Override
public void upload(String uri, InputStream payload, long payloadSize) {
@@ -84,22 +98,32 @@ public void upload(String uri, InputStream payload, long payloadSize) {
connection.setDoOutput(true);
connection.setRequestMethod("PUT");
- try (BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(connection.getOutputStream())) {
+ try (BufferedOutputStream bufferedOutputStream =
+ new BufferedOutputStream(connection.getOutputStream())) {
long count = IOUtils.copy(payload, bufferedOutputStream);
bufferedOutputStream.flush();
- logger.debug("Uploaded {} bytes to uri: {}", count, uri);
-
// Check the HTTP response code
int responseCode = connection.getResponseCode();
- logger.debug("Upload completed with HTTP response code: {}", responseCode);
+ if (Response.Status.fromStatusCode(responseCode).getFamily()
+ != Response.Status.Family.SUCCESSFUL) {
+ String errorMsg =
+ String.format("Unable to upload. Response code: %d", responseCode);
+ LOGGER.error(errorMsg);
+ throw new ConductorClientException(errorMsg);
+ }
+ LOGGER.debug(
+ "Uploaded {} bytes to uri: {}, with HTTP response code: {}",
+ count,
+ uri,
+ responseCode);
}
} catch (URISyntaxException | MalformedURLException e) {
String errorMsg = String.format("Invalid path specified: %s", uri);
- logger.error(errorMsg, e);
+ LOGGER.error(errorMsg, e);
throw new ConductorClientException(errorMsg, e);
} catch (IOException e) {
String errorMsg = String.format("Error uploading to path: %s", uri);
- logger.error(errorMsg, e);
+ LOGGER.error(errorMsg, e);
throw new ConductorClientException(errorMsg, e);
} finally {
if (connection != null) {
@@ -110,7 +134,7 @@ public void upload(String uri, InputStream payload, long payloadSize) {
payload.close();
}
} catch (IOException e) {
- logger.warn("Unable to close inputstream when uploading to uri: {}", uri);
+ LOGGER.warn("Unable to close inputstream when uploading to uri: {}", uri);
}
}
}
@@ -120,7 +144,8 @@ public void upload(String uri, InputStream payload, long payloadSize) {
*
* @param uri the location from where the object is to be downloaded
* @return an inputstream of the payload in the external storage
- * @throws ConductorClientException if the download fails due to an invalid path or an error from external storage
+ * @throws ConductorClientException if the download fails due to an invalid path or an error
+ * from external storage
*/
@Override
public InputStream download(String uri) {
@@ -134,19 +159,22 @@ public InputStream download(String uri) {
// Check the HTTP response code
int responseCode = connection.getResponseCode();
if (responseCode == HttpURLConnection.HTTP_OK) {
- logger.debug("Download completed with HTTP response code: {}", connection.getResponseCode());
- return connection.getInputStream();
+ LOGGER.debug(
+ "Download completed with HTTP response code: {}",
+ connection.getResponseCode());
+ return org.apache.commons.io.IOUtils.toBufferedInputStream(
+ connection.getInputStream());
}
errorMsg = String.format("Unable to download. Response code: %d", responseCode);
- logger.error(errorMsg);
+ LOGGER.error(errorMsg);
throw new ConductorClientException(errorMsg);
} catch (URISyntaxException | MalformedURLException e) {
errorMsg = String.format("Invalid uri specified: %s", uri);
- logger.error(errorMsg, e);
+ LOGGER.error(errorMsg, e);
throw new ConductorClientException(errorMsg, e);
} catch (IOException e) {
errorMsg = String.format("Error downloading from uri: %s", uri);
- logger.error(errorMsg, e);
+ LOGGER.error(errorMsg, e);
throw new ConductorClientException(errorMsg, e);
} finally {
if (connection != null) {
diff --git a/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java b/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java
index 923119bc79..3e7ca3699b 100644
--- a/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java
+++ b/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java
@@ -1,25 +1,31 @@
/*
- * Copyright 2016 Netflix, Inc.
+ * Copyright 2020 Netflix, Inc.
*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
*/
package com.netflix.conductor.client.http;
-import com.google.common.base.Preconditions;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import com.netflix.conductor.client.config.ConductorClientConfiguration;
import com.netflix.conductor.client.config.DefaultConductorClientConfiguration;
-import com.netflix.conductor.client.exceptions.ConductorClientException;
-import com.netflix.conductor.client.task.WorkflowTaskMetrics;
+import com.netflix.conductor.client.exception.ConductorClientException;
+import com.netflix.conductor.client.telemetry.MetricsContainer;
import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
@@ -27,70 +33,60 @@
import com.netflix.conductor.common.run.SearchResult;
import com.netflix.conductor.common.run.TaskSummary;
import com.netflix.conductor.common.utils.ExternalPayloadStorage;
+import com.netflix.conductor.common.utils.ExternalPayloadStorage.PayloadType;
+
+import com.google.common.base.Preconditions;
import com.sun.jersey.api.client.ClientHandler;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.api.client.filter.ClientFilter;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author visingh
- * @author Viren
- * Client for conductor task management including polling for task, updating task status etc.
- */
-@SuppressWarnings("unchecked")
+/** Client for conductor task management including polling for task, updating task status etc. */
public class TaskClient extends ClientBase {
- private static GenericType
+ * Statements used by the Cassandra persistence DAOs: MetadataDAO, ExecutionDAO, and
+ * EventHandlerDAO.
+ */
+public class Statements {
+
+ private final String keyspace;
+
+ public Statements(String keyspace) {
+ this.keyspace = keyspace;
+ }
+
+ // MetadataDAO
+ // Insert Statements
+
+ /**
+ * @return cql query statement to insert a new workflow definition into the
+ * "workflow_definitions" table
+ */
+ public String getInsertWorkflowDefStatement() {
+ return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOW_DEFS)
+ .value(WORKFLOW_DEF_NAME_KEY, bindMarker())
+ .value(WORKFLOW_VERSION_KEY, bindMarker())
+ .value(WORKFLOW_DEFINITION_KEY, bindMarker())
+ .ifNotExists()
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to insert a workflow def name version index into the
+ * "workflow_defs_index" table
+ */
+ public String getInsertWorkflowDefVersionIndexStatement() {
+ return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOW_DEFS_INDEX)
+ .value(WORKFLOW_DEF_INDEX_KEY, WORKFLOW_DEF_INDEX_KEY)
+ .value(WORKFLOW_DEF_NAME_VERSION_KEY, bindMarker())
+ .value(WORKFLOW_DEF_INDEX_VALUE, bindMarker())
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to insert a new task definition into the "task_definitions" table
+ */
+ public String getInsertTaskDefStatement() {
+ return QueryBuilder.insertInto(keyspace, TABLE_TASK_DEFS)
+ .value(TASK_DEFS_KEY, TASK_DEFS_KEY)
+ .value(TASK_DEF_NAME_KEY, bindMarker())
+ .value(TASK_DEFINITION_KEY, bindMarker())
+ .getQueryString();
+ }
+
+ // Select Statements
+
+ /**
+ * @return cql query statement to fetch a workflow definition by name and version from the
+ * "workflow_definitions" table
+ */
+ public String getSelectWorkflowDefStatement() {
+ return QueryBuilder.select(WORKFLOW_DEFINITION_KEY)
+ .from(keyspace, TABLE_WORKFLOW_DEFS)
+ .where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker()))
+ .and(eq(WORKFLOW_VERSION_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to retrieve all versions of a workflow definition by name from
+ * the "workflow_definitions" table
+ */
+ public String getSelectAllWorkflowDefVersionsByNameStatement() {
+ return QueryBuilder.select()
+ .all()
+ .from(keyspace, TABLE_WORKFLOW_DEFS)
+ .where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to fetch all workflow def names and version from the
+ * "workflow_defs_index" table
+ */
+ public String getSelectAllWorkflowDefsStatement() {
+ return QueryBuilder.select()
+ .all()
+ .from(keyspace, TABLE_WORKFLOW_DEFS_INDEX)
+ .where(eq(WORKFLOW_DEF_INDEX_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to fetch a task definition by name from the "task_definitions"
+ * table
+ */
+ public String getSelectTaskDefStatement() {
+ return QueryBuilder.select(TASK_DEFINITION_KEY)
+ .from(keyspace, TABLE_TASK_DEFS)
+ .where(eq(TASK_DEFS_KEY, TASK_DEFS_KEY))
+ .and(eq(TASK_DEF_NAME_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to retrieve all task definitions from the "task_definitions"
+ * table
+ */
+ public String getSelectAllTaskDefsStatement() {
+ return QueryBuilder.select()
+ .all()
+ .from(keyspace, TABLE_TASK_DEFS)
+ .where(eq(TASK_DEFS_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ // Update Statement
+
+ /**
+ * @return cql query statement to update a workflow definition in the "workflow_definitions"
+ * table
+ */
+ public String getUpdateWorkflowDefStatement() {
+ return QueryBuilder.update(keyspace, TABLE_WORKFLOW_DEFS)
+ .with(set(WORKFLOW_DEFINITION_KEY, bindMarker()))
+ .where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker()))
+ .and(eq(WORKFLOW_VERSION_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ // Delete Statements
+
+ /**
+ * @return cql query statement to delete a workflow definition by name and version from the
+ * "workflow_definitions" table
+ */
+ public String getDeleteWorkflowDefStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_WORKFLOW_DEFS)
+ .where(eq(WORKFLOW_DEF_NAME_KEY, bindMarker()))
+ .and(eq(WORKFLOW_VERSION_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to delete a workflow def name/version from the
+ * "workflow_defs_index" table
+ */
+ public String getDeleteWorkflowDefIndexStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_WORKFLOW_DEFS_INDEX)
+ .where(eq(WORKFLOW_DEF_INDEX_KEY, bindMarker()))
+ .and(eq(WORKFLOW_DEF_NAME_VERSION_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to delete a task definition by name from the "task_definitions"
+ * table
+ */
+ public String getDeleteTaskDefStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_TASK_DEFS)
+ .where(eq(TASK_DEFS_KEY, TASK_DEFS_KEY))
+ .and(eq(TASK_DEF_NAME_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ // ExecutionDAO
+ // Insert Statements
+
+ /** @return cql query statement to insert a new workflow into the "workflows" table */
+ public String getInsertWorkflowStatement() {
+ return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOWS)
+ .value(WORKFLOW_ID_KEY, bindMarker())
+ .value(SHARD_ID_KEY, bindMarker())
+ .value(TASK_ID_KEY, bindMarker())
+ .value(ENTITY_KEY, ENTITY_TYPE_WORKFLOW)
+ .value(PAYLOAD_KEY, bindMarker())
+ .value(TOTAL_TASKS_KEY, bindMarker())
+ .value(TOTAL_PARTITIONS_KEY, bindMarker())
+ .getQueryString();
+ }
+
+ /** @return cql query statement to insert a new task into the "workflows" table */
+ public String getInsertTaskStatement() {
+ return QueryBuilder.insertInto(keyspace, TABLE_WORKFLOWS)
+ .value(WORKFLOW_ID_KEY, bindMarker())
+ .value(SHARD_ID_KEY, bindMarker())
+ .value(TASK_ID_KEY, bindMarker())
+ .value(ENTITY_KEY, ENTITY_TYPE_TASK)
+ .value(PAYLOAD_KEY, bindMarker())
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to insert a new event execution into the "event_executions" table
+ */
+ public String getInsertEventExecutionStatement() {
+ return QueryBuilder.insertInto(keyspace, TABLE_EVENT_EXECUTIONS)
+ .value(MESSAGE_ID_KEY, bindMarker())
+ .value(EVENT_HANDLER_NAME_KEY, bindMarker())
+ .value(EVENT_EXECUTION_ID_KEY, bindMarker())
+ .value(PAYLOAD_KEY, bindMarker())
+ .ifNotExists()
+ .getQueryString();
+ }
+
+ // Select Statements
+
+ /**
+ * @return cql query statement to retrieve the total_tasks and total_partitions for a workflow
+ * from the "workflows" table
+ */
+ public String getSelectTotalStatement() {
+ return QueryBuilder.select(TOTAL_TASKS_KEY, TOTAL_PARTITIONS_KEY)
+ .from(keyspace, TABLE_WORKFLOWS)
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, 1))
+ .getQueryString();
+ }
+
+ /** @return cql query statement to retrieve a task from the "workflows" table */
+ public String getSelectTaskStatement() {
+ return QueryBuilder.select(PAYLOAD_KEY)
+ .from(keyspace, TABLE_WORKFLOWS)
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, bindMarker()))
+ .and(eq(ENTITY_KEY, ENTITY_TYPE_TASK))
+ .and(eq(TASK_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to retrieve a workflow (without its tasks) from the "workflows"
+ * table
+ */
+ public String getSelectWorkflowStatement() {
+ return QueryBuilder.select(PAYLOAD_KEY)
+ .from(keyspace, TABLE_WORKFLOWS)
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, 1))
+ .and(eq(ENTITY_KEY, ENTITY_TYPE_WORKFLOW))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to retrieve a workflow with its tasks from the "workflows" table
+ */
+ public String getSelectWorkflowWithTasksStatement() {
+ return QueryBuilder.select()
+ .all()
+ .from(keyspace, TABLE_WORKFLOWS)
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to retrieve the workflow_id for a particular task_id from the
+ * "task_lookup" table
+ */
+ public String getSelectTaskFromLookupTableStatement() {
+ return QueryBuilder.select(WORKFLOW_ID_KEY)
+ .from(keyspace, TABLE_TASK_LOOKUP)
+ .where(eq(TASK_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to retrieve all task ids for a given taskDefName with concurrent
+ * execution limit configured from the "task_def_limit" table
+ */
+ public String getSelectTasksFromTaskDefLimitStatement() {
+ return QueryBuilder.select()
+ .all()
+ .from(keyspace, TABLE_TASK_DEF_LIMIT)
+ .where(eq(TASK_DEF_NAME_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to retrieve all event executions for a given message and event
+ * handler from the "event_executions" table
+ */
+ public String getSelectAllEventExecutionsForMessageFromEventExecutionsStatement() {
+ return QueryBuilder.select()
+ .all()
+ .from(keyspace, TABLE_EVENT_EXECUTIONS)
+ .where(eq(MESSAGE_ID_KEY, bindMarker()))
+ .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ // Update Statements
+
+ /** @return cql query statement to update a workflow in the "workflows" table */
+ public String getUpdateWorkflowStatement() {
+ return QueryBuilder.update(keyspace, TABLE_WORKFLOWS)
+ .with(set(PAYLOAD_KEY, bindMarker()))
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, 1))
+ .and(eq(ENTITY_KEY, ENTITY_TYPE_WORKFLOW))
+ .and(eq(TASK_ID_KEY, ""))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to update the total_tasks in a shard for a workflow in the
+ * "workflows" table
+ */
+ public String getUpdateTotalTasksStatement() {
+ return QueryBuilder.update(keyspace, TABLE_WORKFLOWS)
+ .with(set(TOTAL_TASKS_KEY, bindMarker()))
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to update the total_partitions for a workflow in the "workflows"
+ * table
+ */
+ public String getUpdateTotalPartitionsStatement() {
+ return QueryBuilder.update(keyspace, TABLE_WORKFLOWS)
+ .with(set(TOTAL_PARTITIONS_KEY, bindMarker()))
+ .and(set(TOTAL_TASKS_KEY, bindMarker()))
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, 1))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to add a new task_id to workflow_id mapping to the "task_lookup"
+ * table
+ */
+ public String getUpdateTaskLookupStatement() {
+ return QueryBuilder.update(keyspace, TABLE_TASK_LOOKUP)
+ .with(set(WORKFLOW_ID_KEY, bindMarker()))
+ .where(eq(TASK_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /** @return cql query statement to add a new task_id to the "task_def_limit" table */
+ public String getUpdateTaskDefLimitStatement() {
+ return QueryBuilder.update(keyspace, TABLE_TASK_DEF_LIMIT)
+ .with(set(WORKFLOW_ID_KEY, bindMarker()))
+ .where(eq(TASK_DEF_NAME_KEY, bindMarker()))
+ .and(eq(TASK_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /** @return cql query statement to update an event execution in the "event_executions" table */
+ public String getUpdateEventExecutionStatement() {
+ return QueryBuilder.update(keyspace, TABLE_EVENT_EXECUTIONS)
+ .using(QueryBuilder.ttl(bindMarker()))
+ .with(set(PAYLOAD_KEY, bindMarker()))
+ .where(eq(MESSAGE_ID_KEY, bindMarker()))
+ .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker()))
+ .and(eq(EVENT_EXECUTION_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ // Delete statements
+
+ /** @return cql query statement to delete a workflow from the "workflows" table */
+ public String getDeleteWorkflowStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_WORKFLOWS)
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /**
+ * @return cql query statement to delete a task_id to workflow_id mapping from the "task_lookup"
+ * table
+ */
+ public String getDeleteTaskLookupStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_TASK_LOOKUP)
+ .where(eq(TASK_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /** @return cql query statement to delete a task from the "workflows" table */
+ public String getDeleteTaskStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_WORKFLOWS)
+ .where(eq(WORKFLOW_ID_KEY, bindMarker()))
+ .and(eq(SHARD_ID_KEY, bindMarker()))
+ .and(eq(ENTITY_KEY, ENTITY_TYPE_TASK))
+ .and(eq(TASK_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /** @return cql query statement to delete a task_id from the "task_def_limit" table */
+ public String getDeleteTaskDefLimitStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_TASK_DEF_LIMIT)
+ .where(eq(TASK_DEF_NAME_KEY, bindMarker()))
+ .and(eq(TASK_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ /** @return cql query statement to delete an event execution from the "event_executions" table */
+ public String getDeleteEventExecutionsStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_EVENT_EXECUTIONS)
+ .where(eq(MESSAGE_ID_KEY, bindMarker()))
+ .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker()))
+ .and(eq(EVENT_EXECUTION_ID_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ // EventHandlerDAO
+ // Insert Statements
+
+ /** @return cql query statement to insert an event handler into the "event_handlers" table */
+ public String getInsertEventHandlerStatement() {
+ return QueryBuilder.insertInto(keyspace, TABLE_EVENT_HANDLERS)
+ .value(HANDLERS_KEY, HANDLERS_KEY)
+ .value(EVENT_HANDLER_NAME_KEY, bindMarker())
+ .value(EVENT_HANDLER_KEY, bindMarker())
+ .getQueryString();
+ }
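Worth calling out: the insert above passes HANDLERS_KEY as both the column name and the bound value, so every event handler lands in a single partition keyed by that constant, and the select and delete statements further down read and delete within that same partition. A rough, self-contained sketch of the generated CQL, with the table and column constants inlined to their obvious values and a made-up "conductor" keyspace (both are assumptions, not taken from this patch):

    import com.datastax.driver.core.querybuilder.QueryBuilder;

    public class EventHandlerInsertPreview {
        public static void main(String[] args) {
            // Mirrors getInsertEventHandlerStatement() with the constants spelled out;
            // note that "handlers" is both the partition-key column and its literal value.
            String cql = QueryBuilder.insertInto("conductor", "event_handlers")
                    .value("handlers", "handlers")
                    .value("event_handler_name", QueryBuilder.bindMarker())
                    .value("event_handler", QueryBuilder.bindMarker())
                    .getQueryString();

            // Prints roughly:
            // INSERT INTO conductor.event_handlers (handlers,event_handler_name,event_handler)
            //   VALUES ('handlers',?,?);
            System.out.println(cql);
        }
    }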
+
+ // Select Statements
+
+ /**
+ * @return cql query statement to retrieve all event handlers from the "event_handlers" table
+ */
+ public String getSelectAllEventHandlersStatement() {
+ return QueryBuilder.select()
+ .all()
+ .from(keyspace, TABLE_EVENT_HANDLERS)
+ .where(eq(HANDLERS_KEY, bindMarker()))
+ .getQueryString();
+ }
+
+ // Delete Statements
+
+ /**
+ * @return cql query statement to delete an event handler by name from the "event_handlers"
+ * table
+ */
+ public String getDeleteEventHandlerStatement() {
+ return QueryBuilder.delete()
+ .from(keyspace, TABLE_EVENT_HANDLERS)
+ .where(eq(HANDLERS_KEY, HANDLERS_KEY))
+ .and(eq(EVENT_HANDLER_NAME_KEY, bindMarker()))
+ .getQueryString();
+ }
+}
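Every method in this class returns a CQL string whose bind markers are positional, so a consumer would typically prepare the string once and bind values in the order the markers appear: USING/SET clauses first, then the WHERE columns. A minimal sketch against the DataStax 3.x driver, using the event-execution update as the example; the contact point, keyspace name, Statements constructor, and bound values are illustrative assumptions rather than anything taken from this patch, and the Statements import is omitted because its package is not visible in this hunk.

    import com.datastax.driver.core.BoundStatement;
    import com.datastax.driver.core.Cluster;
    import com.datastax.driver.core.PreparedStatement;
    import com.datastax.driver.core.Session;

    public class UpdateEventExecutionSketch {
        public static void main(String[] args) {
            try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
                    Session session = cluster.connect()) {

                // Assumes Statements keeps the keyspace it is constructed with in the
                // "keyspace" field referenced by the query builders above.
                Statements statements = new Statements("conductor");

                // Generated CQL is roughly:
                // UPDATE conductor.event_executions USING TTL ? SET payload=?
                //   WHERE message_id=? AND event_handler_name=? AND event_execution_id=?;
                PreparedStatement ps = session.prepare(statements.getUpdateEventExecutionStatement());

                // Bind order follows the markers: TTL seconds, payload, then the three key columns.
                BoundStatement bound =
                        ps.bind(60, "{\"status\":\"COMPLETED\"}", "message-1", "my_handler", "execution-1");
                session.execute(bound);
            }
        }
    }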
diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/dao/cassandra/CassandraBaseDAO.java b/cassandra-persistence/src/main/java/com/netflix/conductor/dao/cassandra/CassandraBaseDAO.java
deleted file mode 100644
index 07ca92807c..0000000000
--- a/cassandra-persistence/src/main/java/com/netflix/conductor/dao/cassandra/CassandraBaseDAO.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Copyright 2016 Netflix, Inc.
- * <p>
+ private static final GenericType<List<EventHandler>> eventHandlerList =
+ new GenericType<List<EventHandler>>() {};
+ /** Creates a default event client */
+ public EventClient() {
+ this(new DefaultClientConfig(), new DefaultConductorClientConfiguration(), null);
+ }
+
+ /** @param clientConfig REST Client configuration */
+ public EventClient(ClientConfig clientConfig) {
+ this(clientConfig, new DefaultConductorClientConfiguration(), null);
+ }
+
+ /**
+ * @param clientConfig REST Client configuration
+ * @param clientHandler Jersey client handler. Useful when plugging in various http client
+ * interaction modules (e.g. ribbon)
+ */
+ public EventClient(ClientConfig clientConfig, ClientHandler clientHandler) {
+ this(clientConfig, new DefaultConductorClientConfiguration(), clientHandler);
+ }
+
+ /**
+ * @param config REST Client configuration
+ * @param handler Jersey client handler. Useful when plugging in various http client
+ * interaction modules (e.g. ribbon)
+ * @param filters Chain of client side filters to be applied per request
+ */
+ public EventClient(ClientConfig config, ClientHandler handler, ClientFilter... filters) {
+ this(config, new DefaultConductorClientConfiguration(), handler, filters);
+ }
+
+ /**
+ * @param config REST Client configuration
+ * @param clientConfiguration Specific properties configured for the client, see {@link
+ * ConductorClientConfiguration}
+ * @param handler Jersey client handler. Useful when plugging in various http client interaction
+ * modules (e.g. ribbon)
+ * @param filters Chain of client side filters to be applied per request
+ */
+ public EventClient(
+ ClientConfig config,
+ ConductorClientConfiguration clientConfiguration,
+ ClientHandler handler,
+ ClientFilter... filters) {
+ super(config, clientConfiguration, handler);
+ for (ClientFilter filter : filters) {
+ super.client.addFilter(filter);
+ }
+ }
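Because the filter varargs are simply added to the underlying Jersey client, callers can slot in cross-cutting concerns (request logging, auth headers, and so on) without touching the client itself. A small sketch with a pass-through logging filter; passing a null ClientHandler mirrors what the simpler constructors above do, the URL is a placeholder, setRootURI is assumed to come from the shared client base class, and the EventClient import is omitted because its package is not visible in this hunk.

    import com.sun.jersey.api.client.ClientHandler;
    import com.sun.jersey.api.client.ClientRequest;
    import com.sun.jersey.api.client.ClientResponse;
    import com.sun.jersey.api.client.config.DefaultClientConfig;
    import com.sun.jersey.api.client.filter.ClientFilter;

    public class FilteredEventClientSketch {
        public static void main(String[] args) {
            // Logs each outgoing request, then hands it to the next handler in the chain.
            ClientFilter loggingFilter = new ClientFilter() {
                @Override
                public ClientResponse handle(ClientRequest request) {
                    System.out.println(request.getMethod() + " " + request.getURI());
                    return getNext().handle(request);
                }
            };

            // The cast picks the (ClientConfig, ClientHandler, ClientFilter...) constructor.
            EventClient eventClient =
                    new EventClient(new DefaultClientConfig(), (ClientHandler) null, loggingFilter);
            eventClient.setRootURI("http://localhost:8080/api/"); // placeholder server address
        }
    }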
+
+ /**
+ * Register an event handler with the server
+ *
+ * @param eventHandler the eventHandler definition
+ */
+ public void registerEventHandler(EventHandler eventHandler) {
+ Preconditions.checkNotNull(eventHandler, "Event Handler definition cannot be null");
+ postForEntityWithRequestOnly("event", eventHandler);
+ }
+
+ /**
+ * Updates an existing event handler on the server
+ *
+ * @param eventHandler the eventHandler definition
+ */
+ public void updateEventHandler(EventHandler eventHandler) {
+ Preconditions.checkNotNull(eventHandler, "Event Handler definition cannot be null");
+ put("event", null, eventHandler);
+ }
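For completeness, a sketch of the register/update round trip from the caller's side. The server URL, handler name, and event name are placeholders, setRootURI is assumed to come from the shared client base class, EventHandler is assumed to be the usual conductor-common type with bean-style setters, a real handler would also need at least one action before the server accepts it, and the EventClient import is again omitted because its package is not visible in this hunk.

    import com.netflix.conductor.common.metadata.events.EventHandler;

    public class EventHandlerRegistrationSketch {
        public static void main(String[] args) {
            EventClient eventClient = new EventClient();
            eventClient.setRootURI("http://localhost:8080/api/"); // placeholder server address

            EventHandler handler = new EventHandler();
            handler.setName("welcome_email_handler");                 // placeholder name
            handler.setEvent("conductor:email_workflow:send_email");  // placeholder event
            handler.setActive(true);
            // handler.setActions(...) would carry the real actions here.

            eventClient.registerEventHandler(handler); // POST /event

            handler.setActive(false);
            eventClient.updateEventHandler(handler);   // PUT /event
        }
    }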
+
+ /**
+ * @param event name of the event
+ * @param activeOnly if true, returns only the active handlers
+ * @return the list of all the event handlers for the given event
+ */
+ public List<EventHandler> getEventHandlers(String event, boolean activeOnly) {
- private static GenericType<List<WorkflowDef>> workflowDefList = new GenericType<List<WorkflowDef>>() {
- };
-
- private static GenericType<List<TaskDef>> taskDefList = new GenericType<List<TaskDef>>() {
- };
-
- /**
- * Creates a default metadata client
- */
+ /** Creates a default metadata client */
public MetadataClient() {
this(new DefaultClientConfig(), new DefaultConductorClientConfiguration(), null);
}
- /**
- * @param clientConfig REST Client configuration
- */
+ /** @param clientConfig REST Client configuration */
public MetadataClient(ClientConfig clientConfig) {
this(clientConfig, new DefaultConductorClientConfiguration(), null);
}
/**
- * @param clientConfig REST Client configuration
- * @param clientHandler Jersey client handler. Useful when plugging in various http client interaction modules (e.g. ribbon)
+ * @param clientConfig REST Client configuration
+ * @param clientHandler Jersey client handler. Useful when plugging in various http client
+ * interaction modules (e.g. ribbon)
*/
public MetadataClient(ClientConfig clientConfig, ClientHandler clientHandler) {
this(clientConfig, new DefaultConductorClientConfiguration(), clientHandler);
}
/**
- * @param config config REST Client configuration
- * @param handler handler Jersey client handler. Useful when plugging in various http client interaction modules (e.g. ribbon)
+ * @param config REST Client configuration
+ * @param handler Jersey client handler. Useful when plugging in various http client
+ * interaction modules (e.g. ribbon)
* @param filters Chain of client side filters to be applied per request
*/
public MetadataClient(ClientConfig config, ClientHandler handler, ClientFilter... filters) {
@@ -70,19 +59,24 @@ public MetadataClient(ClientConfig config, ClientHandler handler, ClientFilter..
}
/**
- * @param config REST Client configuration
- * @param clientConfiguration Specific properties configured for the client, see {@link ConductorClientConfiguration}
- * @param handler Jersey client handler. Useful when plugging in various http client interaction modules (e.g. ribbon)
- * @param filters Chain of client side filters to be applied per request
+ * @param config REST Client configuration
+ * @param clientConfiguration Specific properties configured for the client, see {@link
+ * ConductorClientConfiguration}
+ * @param handler Jersey client handler. Useful when plugging in various http client interaction
+ * modules (e.g. ribbon)
+ * @param filters Chain of client side filters to be applied per request
*/
- public MetadataClient(ClientConfig config, ConductorClientConfiguration clientConfiguration, ClientHandler handler, ClientFilter... filters) {
+ public MetadataClient(
+ ClientConfig config,
+ ConductorClientConfiguration clientConfiguration,
+ ClientHandler handler,
+ ClientFilter... filters) {
super(config, clientConfiguration, handler);
for (ClientFilter filter : filters) {
super.client.addFilter(filter);
}
}
-
// Workflow Metadata Operations
/**
@@ -108,20 +102,24 @@ public void updateWorkflowDefs(List<WorkflowDef> workflowDefs) {
- private static GenericType<List<Task>> taskList = new GenericType<List<Task>>() {
- };
+ private static final GenericType<List<Task>> taskList = new GenericType<List<Task>>() {};
- private static GenericType<List<TaskExecLog>> taskExecLogList = new GenericType<List<TaskExecLog>>() {
- };
+ private static final GenericType<List<TaskExecLog>> taskExecLogList =
+ new GenericType<List<TaskExecLog>>() {};
- private static GenericType<List<PollData>> pollDataList = new GenericType<List<PollData>>() {
- };
+ private static final GenericType<List<PollData>> pollDataList =
+ new GenericType<List<PollData>>() {};
- private static GenericType