diff --git a/.github/docker/Dockerfile b/.github/docker/Dockerfile index 0966ec3fb9..3b363f384a 100644 --- a/.github/docker/Dockerfile +++ b/.github/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:22.04 +FROM openjdk:22-jdk-slim RUN apt-get update && \ apt-get install --no-install-recommends --yes \ @@ -8,10 +8,10 @@ RUN apt-get update && \ unzip=6.* \ zip=3.* \ jq=1.* \ - python3=3.10.* \ - python3-pip=22.* \ - python3-setuptools=59.6.* \ - python3-wheel=0.37.* \ + python3=3.11.* \ + python3-pip=23.* \ + python3-setuptools=66.* \ + python3-wheel=0.38.* \ gnupg=2.2.* \ default-jre=2:1.* \ build-essential=12.* && \ @@ -23,3 +23,5 @@ RUN apt-get update && \ apt-get install --no-install-recommends --yes \ sbt=1.4.* \ && rm -rf /var/lib/apt/lists/* + +CMD [ "/bin/bash" ] diff --git a/.github/labeler.yml b/.github/labeler.yml index e5b2b0b4cd..847b789f24 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -6,8 +6,6 @@ connect: - connect/**/* pollux: - pollux/**/* -iris: - - iris/**/* prism-agent: - prism-agent/**/* prism-node: diff --git a/.github/workflows/build-test-docker.yml b/.github/workflows/build-test-docker.yml index 3d9432b2ab..3fe693f62d 100644 --- a/.github/workflows/build-test-docker.yml +++ b/.github/workflows/build-test-docker.yml @@ -13,7 +13,7 @@ on: env: REGISTRY: ghcr.io - IMAGE_NAME: agent-ci-ubuntu-22-jdk-11 + IMAGE_NAME: ci-debian-jdk-22 jobs: build-and-push-image: @@ -55,6 +55,6 @@ jobs: context: .github/docker push: true tags: | - "ghcr.io/input-output-hk/${{ env.IMAGE_NAME }}:latest" - "ghcr.io/input-output-hk/${{ env.IMAGE_NAME }}:main" + "ghcr.io/hyperledger-labs/${{ env.IMAGE_NAME }}:latest" + "ghcr.io/hyperledger-labs/${{ env.IMAGE_NAME }}:main" labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/deployment.yml b/.github/workflows/deployment.yml index 3980caf026..ec53127b4c 100644 --- a/.github/workflows/deployment.yml +++ b/.github/workflows/deployment.yml @@ -15,7 +15,7 @@ on: workflow_dispatch: inputs: component-tag: - description: "Tag of a component to trigger the update, e.g. 
iris-service-v0.5.0" + description: "Tag of a component to trigger the update" required: true env: description: "Environment to trigger update on" diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/integration-tests.yml similarity index 92% rename from .github/workflows/e2e-tests.yml rename to .github/workflows/integration-tests.yml index ff4f0829ea..ce047ef1a8 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -1,7 +1,7 @@ name: Integration tests concurrency: - group: ${{ github.head_ref }}${{ github.ref }}-e2e-tests + group: ${{ github.head_ref }}${{ github.ref }}-integration-tests cancel-in-progress: true on: @@ -16,14 +16,14 @@ on: defaults: run: shell: bash - working-directory: "tests/e2e-tests" + working-directory: "tests/integration-tests" jobs: - run-e2e-tests: + run-integration-tests: name: "Run e2e tests" runs-on: ubuntu-latest env: - REPORTS_DIR: "tests/e2e-tests/target/site/serenity" + REPORTS_DIR: "tests/integration-tests/target/site/serenity" steps: - name: Checkout uses: actions/checkout@v3 @@ -99,10 +99,13 @@ jobs: distribution: 'zulu' java-version: '19' - - name: Run e2e tests + - name: Run integration tests + env: + ATALA_GITHUB_ACTOR: ${{ secrets.ATALA_GITHUB_ACTOR }} + ATALA_GITHUB_TOKEN: ${{ secrets.ATALA_GITHUB_TOKEN }} continue-on-error: true run: | - ./gradlew test --tests "E2eTestsRunner" || true + ./gradlew test --tests "IntegrationTestsRunner" || true ./gradlew reports - name: Extract test results @@ -148,7 +151,7 @@ jobs: if: github.ref_name == 'main' || steps.analyze_test_results.outputs.conclusion == 'failure' uses: actions/upload-artifact@v2 with: - name: e2e-tests-result + name: integration-tests-result path: ${{ env.REPORTS_DIR }} - name: Slack Notification diff --git a/.github/workflows/release-clients.yml b/.github/workflows/release-clients.yml index a1c49fd0b5..ea7a8a3f7f 100644 --- a/.github/workflows/release-clients.yml +++ b/.github/workflows/release-clients.yml @@ -41,6 +41,9 @@ jobs: username: ${{ secrets.ATALA_GITHUB_ACTOR }} password: ${{ secrets.ATALA_GITHUB_TOKEN }} + - name: Setup yq - portable yaml processor + uses: mikefarah/yq@v4.34.2 + - name: Install generator dependencies working-directory: prism-agent/client/generator run: yarn install diff --git a/.github/workflows/unit-tests-common.yml b/.github/workflows/unit-tests-common.yml index 8f02e5b22d..26c0ec134d 100644 --- a/.github/workflows/unit-tests-common.yml +++ b/.github/workflows/unit-tests-common.yml @@ -19,7 +19,7 @@ jobs: name: "Build and unit tests for ${{ inputs.component-name }}" runs-on: self-hosted container: - image: ghcr.io/input-output-hk/agent-ci-ubuntu-22-jdk-11:0.1.0 + image: ghcr.io/hyperledger-labs/ci-debian-jdk-22:0.1.0 volumes: - /nix:/nix credentials: diff --git a/CHANGELOG.md b/CHANGELOG.md index 85d6a8a44a..b2855d4fca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,19 @@ +# [1.18.0](https://github.com/hyperledger-labs/open-enterprise-agent/compare/prism-agent-v1.17.0...prism-agent-v1.18.0) (2023-10-24) + + +### Bug Fixes + +* all performance tests run succesfully, add group thresholds ([#750](https://github.com/hyperledger-labs/open-enterprise-agent/issues/750)) ([5204838](https://github.com/hyperledger-labs/open-enterprise-agent/commit/520483836e5b572e8aeeecd28f4bbe7cc668c3d9)) + + +### Features + +* presentation API refactor ([#765](https://github.com/hyperledger-labs/open-enterprise-agent/issues/765)) 
([045d829](https://github.com/hyperledger-labs/open-enterprise-agent/commit/045d8298f8865baeb13e243ed058e8e440b3f496)) +* add new auth params ([#762](https://github.com/hyperledger-labs/open-enterprise-agent/issues/762)) ([b8bfb86](https://github.com/hyperledger-labs/open-enterprise-agent/commit/b8bfb867061c58fc12987b5405f561e8f10cb718)) +* disable cors by default ([#747](https://github.com/hyperledger-labs/open-enterprise-agent/issues/747)) ([1dd8c8b](https://github.com/hyperledger-labs/open-enterprise-agent/commit/1dd8c8b0e9b0d2593bd1c17a95bf013192a64532)) +* migrate docker image of the agent to Java 21 ([#758](https://github.com/hyperledger-labs/open-enterprise-agent/issues/758)) ([d36dbf0](https://github.com/hyperledger-labs/open-enterprise-agent/commit/d36dbf0dfbf45b64185e5b54aba0444d6e1ada88)) +* **prism-agent:** add keycloak authorization support to endpoints ([#753](https://github.com/hyperledger-labs/open-enterprise-agent/issues/753)) ([3e7534f](https://github.com/hyperledger-labs/open-enterprise-agent/commit/3e7534ff1a75e9ecaa0c2b670c1c158890021f8d)) + # [1.17.0](https://github.com/hyperledger-labs/open-enterprise-agent/compare/prism-agent-v1.16.4...prism-agent-v1.17.0) (2023-10-14) diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index ca5673f8ab..98a7c20683 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -72,11 +72,13 @@ Apache | [Apache License 2.0](http://www.apache.org/licenses/LICENSE-2.0) | [io. Apache | [Apache License 2.0](http://www.apache.org/licenses/LICENSE-2.0) | [io.getquill # quill-sql_3 # 4.7.3](https://zio.dev/zio-protoquill) | Apache | [Apache License 2.0](http://www.apache.org/licenses/LICENSE-2.0) | [io.getquill # quill-util_3 # 4.7.3](https://zio.dev/zio-quill) | Apache | [Apache License 2.0](http://www.apache.org/licenses/LICENSE-2.0) | [io.getquill # quill-zio_3 # 4.7.3](https://zio.dev/zio-protoquill) | +Apache | [Apache License 2.0](https://repository.jboss.org/licenses/apache-2.0.txt) | [org.jboss.logging # jboss-logging # 3.5.1.Final](http://www.jboss.org) | Apache | [Apache License Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) | [com.networknt # json-schema-validator # 1.0.86](https://github.com/networknt/json-schema-validator) | Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [com.ethlo.time # itu # 1.7.0](https://github.com/ethlo/itu) | Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [com.github.stephenc.jcip # jcip-annotations # 1.0-1](http://stephenc.github.com/jcip-annotations) | Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [com.google.crypto.tink # tink # 1.6.1](http://github.com/google/tink) | Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [com.google.guava # guava # 31.0.1-android](https://github.com/google/guava) | +Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [commons-codec # commons-codec # 1.11](http://commons.apache.org/proper/commons-codec/) | Apache | [Apache License, Version 2.0](https://opensource.org/licenses/Apache-2.0) | [io.iohk.atala # prism-common-jvm # 1.4.1](https://github.com/input-output-hk/atala-prism-sdk.git) | Apache | [Apache License, Version 2.0](https://opensource.org/licenses/Apache-2.0) | [io.iohk.atala # prism-crypto-jvm # 1.4.1](https://github.com/input-output-hk/atala-prism-sdk.git) | Apache | [Apache License, Version 2.0](https://opensource.org/licenses/Apache-2.0) | [io.iohk.atala # 
prism-identity-jvm # 1.4.1](https://github.com/input-output-hk/atala-prism-sdk.git) | @@ -99,7 +101,12 @@ Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2 Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) | io.netty.incubator # netty-incubator-transport-native-io_uring # 0.0.20.Final | Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | net.bytebuddy # byte-buddy # 1.12.19 | Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | net.bytebuddy # byte-buddy-agent # 1.12.19 | +Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [org.apache.httpcomponents # httpclient # 4.5.14](http://hc.apache.org/httpcomponents-client-ga) | +Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [org.apache.httpcomponents # httpcore # 4.4.16](http://hc.apache.org/httpcomponents-core-ga) | Apache | [Apache License, Version 2.0](https://flywaydb.org/licenses/flyway-community) | org.flywaydb # flyway-core # 9.8.3 | +Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) | org.keycloak # keycloak-authz-client # 22.0.4 | +Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) | org.keycloak # keycloak-common # 22.0.4 | +Apache | [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) | org.keycloak # keycloak-core # 22.0.4 | Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [org.log4s # log4s_3 # 1.10.0](http://log4s.org/) | Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | org.objenesis # objenesis # 3.3 | Apache | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [org.yaml # snakeyaml # 2.0](https://bitbucket.org/snakeyaml/snakeyaml) | @@ -249,7 +256,7 @@ Apache | [The Apache Software License, Version 2.0](https://www.apache.org/licen Apache | [The Apache Software License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains # annotations # 13.0](http://www.jetbrains.org) | Apache | [The Apache Software License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains # annotations # 17.0.0](https://github.com/JetBrains/java-annotations) | Apache | [The Apache Software License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains.kotlinx # kotlinx-coroutines-core # 1.5.1-new-mm-dev2](https://github.com/Kotlin/kotlinx.coroutines) | -Apache | [The Apache Software License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains.kotlinx # kotlinx-coroutines-core-jvm # 1.5.1-new-mm-dev2](https://github.com/Kotlin/kotlinx.coroutines) | +Apache | [The Apache Software License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains.kotlinx # kotlinx-coroutines-core-jvm # 1.5.2](https://github.com/Kotlin/kotlinx.coroutines) | Apache | [The Apache Software License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains.kotlinx # kotlinx-coroutines-jdk8 # 1.5.2](https://github.com/Kotlin/kotlinx.coroutines) | Apache | [The Apache Software License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains.kotlinx # kotlinx-serialization-core-jvm # 1.3.0](https://github.com/Kotlin/kotlinx.serialization) | Apache | [The Apache Software License, Version 
2.0](https://www.apache.org/licenses/LICENSE-2.0.txt) | [org.jetbrains.kotlinx # kotlinx-serialization-json-jvm # 1.3.0](https://github.com/Kotlin/kotlinx.serialization) | @@ -299,7 +306,9 @@ BSD | [BSD-3-Clause](https://github.com/scodec/scodec-bits/blob/main/LICENSE) | BSD | [New BSD License](http://www.opensource.org/licenses/bsd-license.php) | org.hamcrest # hamcrest-core # 1.3 | BSD | [The BSD License](https://opensource.org/licenses/BSD-3-Clause) | org.jline # jline # 3.22.0 | Bouncy Castle License | [Bouncy Castle Licence](https://www.bouncycastle.org/licence.html) | [org.bouncycastle # bcpkix-jdk15on # 1.70](https://www.bouncycastle.org/java.html) | +Bouncy Castle License | [Bouncy Castle Licence](http://www.bouncycastle.org/licence.html) | [org.bouncycastle # bcprov-jdk15on # 1.68](http://www.bouncycastle.org/java.html) | Bouncy Castle License | [Bouncy Castle Licence](https://www.bouncycastle.org/licence.html) | [org.bouncycastle # bcprov-jdk15on # 1.70](https://www.bouncycastle.org/java.html) | +Bouncy Castle License | [Bouncy Castle Licence](http://www.bouncycastle.org/licence.html) | [org.bouncycastle # bcprov-jdk15to18 # 1.68](http://www.bouncycastle.org/java.html) | Bouncy Castle License | [Bouncy Castle Licence](https://www.bouncycastle.org/licence.html) | [org.bouncycastle # bcutil-jdk15on # 1.70](https://www.bouncycastle.org/java.html) | EPL | [Eclipse Public License 1.0](http://www.eclipse.org/legal/epl-v10.html) | [junit # junit # 4.13.1](http://junit.org) | EPL | [Eclipse Public License 1.0](http://www.eclipse.org/legal/epl-v10.html) | [junit # junit # 4.13.2](http://junit.org) | @@ -318,7 +327,7 @@ MIT | [MIT](https://spdx.org/licenses/MIT.html) | [com.lihaoyi # pprint_3 # 0.6. MIT | [MIT](https://spdx.org/licenses/MIT.html) | [com.lihaoyi # pprint_3 # 0.8.1](https://github.com/com-lihaoyi/PPrint) | MIT | [MIT](https://spdx.org/licenses/MIT.html) | [com.lihaoyi # sourcecode_3 # 0.3.0](https://github.com/com-lihaoyi/sourcecode) | MIT | [MIT](https://github.com/jopenlibs/vault-java-driver/blob/master/README.md#license) | [io.github.jopenlibs # vault-java-driver # 6.1.0](https://github.com/jopenlibs/vault-java-driver) | -MIT | [MIT](http://opensource.org/licenses/MIT) | [org.rnorth.duct-tape # duct-tape # 1.0.8](https://github.com/rnorth/${project.artifactId}) | +MIT | [MIT](http://opensource.org/licenses/MIT) | org.rnorth.duct-tape # duct-tape # 1.0.8 | MIT | [MIT](http://opensource.org/licenses/MIT) | [org.testcontainers # database-commons # 1.19.0](https://java.testcontainers.org) | MIT | [MIT](http://opensource.org/licenses/MIT) | [org.testcontainers # jdbc # 1.19.0](https://java.testcontainers.org) | MIT | [MIT](http://opensource.org/licenses/MIT) | [org.testcontainers # postgresql # 1.19.0](https://java.testcontainers.org) | @@ -355,32 +364,33 @@ MIT | [The MIT License (MIT)](https://opensource.org/licenses/MIT) | [com.dimafe Public Domain | [Public Domain, per Creative Commons CC0](http://creativecommons.org/publicdomain/zero/1.0/) | [org.hdrhistogram # HdrHistogram # 2.1.12](http://hdrhistogram.github.io/HdrHistogram/) | Public Domain | [Public Domain, per Creative Commons CC0](http://creativecommons.org/publicdomain/zero/1.0/) | [org.latencyutils # LatencyUtils # 2.0.3](http://latencyutils.github.io/LatencyUtils/) | unrecognized | [APL2](http://www.apache.org/licenses/LICENSE-2.0.txt) | [net.reactivecore # circe-json-schema_2.13 # 0.3.0](https://github.com/reactivecore/rc-circe-json-schema) | -unrecognized | [none specified](none specified) | [io.iohk.atala # 
castor-core_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # connect-core_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # connect-sql-doobie_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # event-notification_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # iris-client_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-agent-core_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-agent-didcommx_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-data-models_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-connection_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-coordinate-mediation_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-invitation_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-issue-credential_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-mailbox_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-outofband-login_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-present-proof_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-report-problem_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-routing-2-0_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-trust-ping_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-resolver_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-verifiable-credentials_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none 
specified](none specified) | [io.iohk.atala # pollux-anoncreds_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # pollux-core_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # pollux-sql-doobie_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # pollux-vc-jwt_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # prism-agent-wallet-api_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # prism-node-client_3 # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | -unrecognized | [none specified](none specified) | [io.iohk.atala # shared # 1.16.4-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [EDL 1.0](http://www.eclipse.org/org/documents/edl-v10.php) | [jakarta.activation # jakarta.activation-api # 2.1.2](https://github.com/jakartaee/jaf-api) | +unrecognized | [none specified](none specified) | [io.iohk.atala # castor-core_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # connect-core_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # connect-sql-doobie_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # event-notification_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # iris-client_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-agent-core_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-agent-didcommx_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-data-models_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-connection_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-coordinate-mediation_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-invitation_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-issue-credential_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # 
mercury-protocol-mailbox_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-outofband-login_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-present-proof_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-report-problem_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-routing-2-0_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-protocol-trust-ping_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-resolver_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # mercury-verifiable-credentials_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # pollux-anoncreds_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # pollux-core_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # pollux-sql-doobie_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # pollux-vc-jwt_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # prism-agent-wallet-api_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # prism-node-client_3 # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | +unrecognized | [none specified](none specified) | [io.iohk.atala # shared # 1.17.0-SNAPSHOT](https://github.com/input-output-hk/atala-prism-building-blocks) | unrecognized | [none specified](none specified) | [net.jcip # jcip-annotations # 1.0](http://jcip.net/) | diff --git a/README.md b/README.md index 1867a7c59e..19a45729d3 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@
[README badge markup elided in extraction: this hunk touches the README's status badges — Coverage Status, Unit tests, End-to-end tests, and Performance tests. The "End-to-end tests" badge line is removed and re-added with the same label; the linked workflow file is renamed from e2e-tests.yml to integration-tests.yml elsewhere in this diff. The remaining badge lines are unchanged context.]
diff --git a/build.sbt b/build.sbt index dca138b4c0..69304ca23d 100644 --- a/build.sbt +++ b/build.sbt @@ -651,20 +651,6 @@ val prismNodeClient = project ) ) -// ############## -// ### iris #### -// ############## -val irisClient = project - .in(file("iris/client/scala-client")) - .settings( - name := "iris-client", - libraryDependencies ++= Seq(D.scalaPbGrpc, D.scalaPbRuntime), - coverageEnabled := false, - // gRPC settings - Compile / PB.targets := Seq(scalapb.gen() -> (Compile / sourceManaged).value / "scalapb"), - Compile / PB.protoSources := Seq(baseDirectory.value / ".." / ".." / "api" / "grpc") - ) - // ##################### // ##### castor ###### // ##################### @@ -712,7 +698,6 @@ lazy val polluxCore = project libraryDependencies ++= D_Pollux.coreDependencies ) .dependsOn(shared) - .dependsOn(irisClient) .dependsOn(prismAgentWalletAPI) .dependsOn(polluxVcJWT) .dependsOn(vc, resolver, agentDidcommx, eventNotification, polluxAnoncreds) @@ -826,12 +811,13 @@ lazy val prismAgentServer = project Docker / dockerUsername := Some("input-output-hk"), Docker / dockerRepository := Some("ghcr.io"), dockerExposedPorts := Seq(8080, 8085, 8090), - dockerBaseImage := "amazoncorretto:21.0.0-alpine3.18", + // Official docker image for openjdk 21 with curl and bash + dockerBaseImage := "openjdk:21-jdk", buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion), buildInfoPackage := "io.iohk.atala.agent.server.buildinfo", Compile / packageDoc / publishArtifact := false ) - .enablePlugins(JavaAppPackaging, DockerPlugin, AshScriptPlugin) + .enablePlugins(JavaAppPackaging, DockerPlugin) .enablePlugins(BuildInfoPlugin) .dependsOn(prismAgentWalletAPI % "compile->compile;test->test") .dependsOn( diff --git a/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala b/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala index 5eef1f5974..d65f963227 100644 --- a/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala +++ b/connect/lib/core/src/test/scala/io/iohk/atala/connect/core/repository/ConnectionRepositorySpecSuite.scala @@ -11,6 +11,7 @@ import zio.test.* import zio.{Cause, Exit, ZIO, ZLayer} import java.time.Instant +import java.time.temporal.ChronoUnit import java.util.UUID object ConnectionRepositorySpecSuite { @@ -19,7 +20,7 @@ object ConnectionRepositorySpecSuite { private def connectionRecord = ConnectionRecord( UUID.randomUUID, - Instant.now, + Instant.now.truncatedTo(ChronoUnit.MICROS), None, UUID.randomUUID().toString, None, @@ -34,7 +35,7 @@ object ConnectionRepositorySpecSuite { None, None, maxRetries, - Some(Instant.now), + Some(Instant.now.truncatedTo(ChronoUnit.MICROS)), None ).withTruncatedTimestamp() diff --git a/infrastructure/charts/agent/Chart.yaml b/infrastructure/charts/agent/Chart.yaml index 286b903c44..1f8628ced1 100644 --- a/infrastructure/charts/agent/Chart.yaml +++ b/infrastructure/charts/agent/Chart.yaml @@ -13,12 +13,12 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 1.17.0 +version: 1.18.0 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: 1.17.0 +appVersion: 1.18.0 dependencies: - name: vault version: 0.24.1 diff --git a/infrastructure/charts/index.yaml b/infrastructure/charts/index.yaml index b69452236f..f0f89a23e8 100644 --- a/infrastructure/charts/index.yaml +++ b/infrastructure/charts/index.yaml @@ -1,9 +1,23 @@ apiVersion: v1 entries: prism-agent: + - apiVersion: v2 + appVersion: 1.18.0 + created: "2023-10-24T11:07:35.004549218Z" + dependencies: + - name: vault + repository: https://helm.releases.hashicorp.com + version: 0.24.1 + description: A Helm chart for deploying prism-agent + digest: 5a803d916bc19c824aec9d39efb0206ff9a2a3b1932b276acaaf97c580f62387 + name: prism-agent + type: application + urls: + - https://raw.githubusercontent.com/hyperledger-labs/open-enterprise-agent/main/infrastructure/charts/prism-agent-1.18.0.tgz + version: 1.18.0 - apiVersion: v2 appVersion: 1.17.0 - created: "2023-10-14T17:17:05.1408815Z" + created: "2023-10-24T11:07:35.000795657Z" dependencies: - name: vault repository: https://helm.releases.hashicorp.com @@ -17,7 +31,7 @@ entries: version: 1.17.0 - apiVersion: v2 appVersion: 1.16.4 - created: "2023-10-14T17:17:05.135639936Z" + created: "2023-10-24T11:07:34.99668489Z" dependencies: - name: vault repository: https://helm.releases.hashicorp.com @@ -31,7 +45,7 @@ entries: version: 1.16.4 - apiVersion: v2 appVersion: 1.16.3 - created: "2023-10-14T17:17:05.131265084Z" + created: "2023-10-24T11:07:34.991583906Z" dependencies: - name: vault repository: https://helm.releases.hashicorp.com @@ -45,7 +59,7 @@ entries: version: 1.16.3 - apiVersion: v2 appVersion: 1.16.2 - created: "2023-10-14T17:17:05.126243223Z" + created: "2023-10-24T11:07:34.987422338Z" dependencies: - name: vault repository: https://helm.releases.hashicorp.com @@ -57,4 +71,4 @@ entries: urls: - https://raw.githubusercontent.com/hyperledger-labs/open-enterprise-agent/main/infrastructure/charts/prism-agent-1.16.2.tgz version: 1.16.2 -generated: "2023-10-14T17:17:05.120712157Z" +generated: "2023-10-24T11:07:34.982721561Z" diff --git a/infrastructure/charts/prism-agent-1.18.0.tgz b/infrastructure/charts/prism-agent-1.18.0.tgz new file mode 100644 index 0000000000..e504e23ecf Binary files /dev/null and b/infrastructure/charts/prism-agent-1.18.0.tgz differ diff --git a/infrastructure/local/.env b/infrastructure/local/.env index bf0f27606c..a40f9fe1cb 100644 --- a/infrastructure/local/.env +++ b/infrastructure/local/.env @@ -1,3 +1,3 @@ -PRISM_AGENT_VERSION=1.16.4-SNAPSHOT +PRISM_AGENT_VERSION=1.17.0 PRISM_NODE_VERSION=2.2.1 VAULT_DEV_ROOT_TOKEN_ID=root diff --git a/infrastructure/local/update_env.sh b/infrastructure/local/update_env.sh index 50a308f4b1..a8831c9e51 100755 --- a/infrastructure/local/update_env.sh +++ b/infrastructure/local/update_env.sh @@ -5,8 +5,6 @@ ENV_FILE="${SCRIPT_DIR}/.env" pip install ${SCRIPT_DIR}/../utils/python/github-helpers > /dev/null 2>&1 -IRIS_SERVICE_VERSION=$(github get-latest-package-version --package iris-service --package-type container) PRISM_AGENT_VERSION=$(github get-latest-package-version --package prism-agent --package-type container) -sed -i.bak "s/IRIS_SERVICE_VERSION=.*/IRIS_SERVICE_VERSION=${IRIS_SERVICE_VERSION}/" ${ENV_FILE} && rm -f ${ENV_FILE}.bak sed -i.bak "s/PRISM_AGENT_VERSION=.*/PRISM_AGENT_VERSION=${PRISM_AGENT_VERSION}/" ${ENV_FILE} && rm -f ${ENV_FILE}.bak diff --git a/infrastructure/multi/.env 
b/infrastructure/multi/.env index f4acdb3a67..a6c57a3d91 100644 --- a/infrastructure/multi/.env +++ b/infrastructure/multi/.env @@ -1,3 +1,2 @@ MERCURY_MEDIATOR_VERSION=0.2.0 -IRIS_SERVICE_VERSION=0.1.0 PRISM_AGENT_VERSION=0.6.0 diff --git a/infrastructure/shared/docker-compose-demo.yml b/infrastructure/shared/docker-compose-demo.yml index cb1bdef9c8..9bd6b00fa8 100644 --- a/infrastructure/shared/docker-compose-demo.yml +++ b/infrastructure/shared/docker-compose-demo.yml @@ -5,7 +5,7 @@ services: db: image: postgres:13 environment: - POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,iris,agent,node_db" + POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,agent,node_db" POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres volumes: @@ -13,7 +13,7 @@ services: - ./postgres/init-script.sh:/docker-entrypoint-initdb.d/init-script.sh - ./postgres/max_conns.sql:/docker-entrypoint-initdb.d/max_conns.sql healthcheck: - test: ["CMD", "pg_isready", "-U", "postgres", "-d", "iris"] + test: ["CMD", "pg_isready", "-U", "postgres", "-d", "agent"] interval: 10s timeout: 5s retries: 5 @@ -22,6 +22,8 @@ services: image: ghcr.io/input-output-hk/prism-node:${PRISM_NODE_VERSION} environment: NODE_PSQL_HOST: db:5432 + NODE_REFRESH_AND_SUBMIT_PERIOD: 1s + NODE_MOVE_SCHEDULED_TO_PENDING_PERIOD: 1s depends_on: db: condition: service_healthy @@ -30,6 +32,7 @@ services: image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION} environment: DIDCOMM_SERVICE_URL: http://${DOCKERHOST}:${PORT}/didcomm + REST_SERVICE_URL: http://${DOCKERHOST}:${PORT}/prism-agent PRISM_NODE_HOST: prism-node PRISM_NODE_PORT: 50053 SECRET_STORAGE_BACKEND: postgres diff --git a/infrastructure/shared/docker-compose-mt-keycloak.yml b/infrastructure/shared/docker-compose-mt-keycloak.yml index da6033d048..c6af5eb386 100644 --- a/infrastructure/shared/docker-compose-mt-keycloak.yml +++ b/infrastructure/shared/docker-compose-mt-keycloak.yml @@ -10,7 +10,7 @@ services: db: image: postgres:13 environment: - POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,iris,agent,node_db" + POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,agent,node_db" POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres volumes: @@ -20,7 +20,7 @@ services: # ports: # - "5432:5432" healthcheck: - test: ["CMD", "pg_isready", "-U", "postgres", "-d", "iris"] + test: ["CMD", "pg_isready", "-U", "postgres", "-d", "agent"] interval: 10s timeout: 5s retries: 5 @@ -72,8 +72,6 @@ services: prism-agent: image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION} environment: - IRIS_HOST: iris - IRIS_PORT: 8081 CASTOR_DB_HOST: db CASTOR_DB_PORT: 5432 CASTOR_DB_NAME: castor diff --git a/infrastructure/shared/docker-compose.yml b/infrastructure/shared/docker-compose.yml index 9681d3e0bb..64d2a7f004 100644 --- a/infrastructure/shared/docker-compose.yml +++ b/infrastructure/shared/docker-compose.yml @@ -8,7 +8,7 @@ services: db: image: postgres:13 environment: - POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,iris,agent,node_db" + POSTGRES_MULTIPLE_DATABASES: "castor,pollux,connect,agent,node_db" POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres volumes: @@ -18,7 +18,7 @@ services: ports: - "${PG_PORT:-5432}:5432" healthcheck: - test: ["CMD", "pg_isready", "-U", "postgres", "-d", "iris"] + test: ["CMD", "pg_isready", "-U", "postgres", "-d", "agent"] interval: 10s timeout: 5s retries: 5 @@ -70,8 +70,6 @@ services: prism-agent: image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION} environment: - IRIS_HOST: iris - IRIS_PORT: 8081 CASTOR_DB_HOST: db 
CASTOR_DB_PORT: 5432 CASTOR_DB_NAME: castor diff --git a/infrastructure/single-tenant-testing-stack/docker-compose.yml b/infrastructure/single-tenant-testing-stack/docker-compose.yml index b5ca092a70..410c68076f 100644 --- a/infrastructure/single-tenant-testing-stack/docker-compose.yml +++ b/infrastructure/single-tenant-testing-stack/docker-compose.yml @@ -87,8 +87,6 @@ services: issuer-oea: image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION} environment: - IRIS_HOST: iris - IRIS_PORT: 8081 CASTOR_DB_HOST: issuer-db CASTOR_DB_PORT: 5432 CASTOR_DB_NAME: castor @@ -152,8 +150,6 @@ services: verifier-oea: image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION} environment: - IRIS_HOST: iris - IRIS_PORT: 8081 CASTOR_DB_HOST: verifier-db CASTOR_DB_PORT: 5432 CASTOR_DB_NAME: castor @@ -217,8 +213,6 @@ services: holder-oea: image: ghcr.io/input-output-hk/prism-agent:${PRISM_AGENT_VERSION} environment: - IRIS_HOST: iris - IRIS_PORT: 8081 CASTOR_DB_HOST: holder-db CASTOR_DB_PORT: 5432 CASTOR_DB_NAME: castor diff --git a/iris/api/grpc/README.md b/iris/api/grpc/README.md deleted file mode 100644 index 47abfff63c..0000000000 --- a/iris/api/grpc/README.md +++ /dev/null @@ -1,9 +0,0 @@ -## gRPC API ## - -We use gRPC messages for both: interactions with Iris, and also to serialise messages which reside in the DLT. - -`protocol` folder contains only definitions which are posted to the DLT and -basically define low-level protocol operations. - -Files outside the `protocol` folder describe messages and services which are used in gRPC interface of Iris, -using protocol messages in their definitions. diff --git a/iris/api/grpc/protocol/did_operations.proto b/iris/api/grpc/protocol/did_operations.proto deleted file mode 100644 index 4a6dd89301..0000000000 --- a/iris/api/grpc/protocol/did_operations.proto +++ /dev/null @@ -1,96 +0,0 @@ -syntax = "proto3"; - -import "scalapb/scalapb.proto"; - -option (scalapb.options) = { - no_default_values_in_constructor: true - package_name: "io.iohk.atala.iris.proto" -}; - -message PublicKeyJwk { - enum Curve { - SECP256K1 = 0; - } - message ECKeyData { - Curve curve = 1; // The curve name, like secp256k1. - bytes x = 2; // The x coordinate, represented as bytes. - bytes y = 3; // The y coordinate, represented as bytes. 
- } - - oneof key { - ECKeyData ec_key = 1; - } -} - -message DocumentDefinition { - message PublicKey { - string id = 1; - - PublicKeyJwk jwk = 2; - - enum Purpose { - AUTHENTICATION = 0; - KEY_AGREEMENT = 1; - ASSERTION_METHOD = 2; - CAPABILITY_INVOCATION = 3; - } - repeated Purpose purposes = 3; - } - - message Service { - string id = 1; - enum Type { - MEDIATOR_SERVICE = 0; - } - Type type = 2; - string service_endpoint = 3; - } - - repeated PublicKey public_keys = 1; - repeated Service services = 2; -} - -message CreateDid { - bytes initial_update_commitment = 1; - bytes initial_recovery_commitment = 2; - string ledger = 3; - DocumentDefinition document = 4; -} - -message UpdateDid { - message Patch { - oneof patch { - DocumentDefinition.PublicKey add_public_key = 1; - string remove_public_key = 2; - DocumentDefinition.Service add_service = 3; - string remove_service = 4; - } - } - - string did = 1; - string ledger = 2; - bytes revealed_update_key = 3; - bytes previous_version = 4; - bytes forward_update_commitment = 5; - repeated Patch patches = 6; - bytes signature = 7; -} - -message RecoverDid { - string did = 1; - string ledger = 2; - bytes revealed_recovery_key = 3; - bytes previous_version = 4; - bytes forward_update_commitment = 5; - bytes forward_recovery_commitment = 6; - repeated DocumentDefinition document = 7; - bytes signature = 8; -} - -message DeactivateDid { - string did = 1; - string ledger = 2; - bytes revealed_recovery_key = 3; - bytes previous_version = 4; - bytes signature = 5; -} diff --git a/iris/api/grpc/protocol/dlt.proto b/iris/api/grpc/protocol/dlt.proto deleted file mode 100644 index a366a7b540..0000000000 --- a/iris/api/grpc/protocol/dlt.proto +++ /dev/null @@ -1,37 +0,0 @@ -syntax = "proto3"; - -import "scalapb/scalapb.proto"; -import "protocol/vc_operations.proto"; -import "protocol/did_operations.proto"; - -option (scalapb.options) = { - no_default_values_in_constructor: true - package_name: "io.iohk.atala.iris.proto" -}; - -// The possible operations affecting the blockchain. -message IrisOperation { - // The actual operation. - oneof operation { - // Used to create a public DID. - CreateDid create_did = 1; - - // Used to update an existing public DID. - UpdateDid update_did = 2; - - // Used to recover an existing public DID. 
- RecoverDid recover_did = 3; - - // Used to deactivate DID - DeactivateDid deactivate_did = 4; - - IssueCredentialsBatch issue_credentials_batch = 5; - - RevokeCredentials revoke_credentials = 6; - }; -} - -// List of operations which will be stored in the blockchain transaction metadata -message IrisBatch { - repeated IrisOperation operations = 1; -} diff --git a/iris/api/grpc/protocol/vc_operations.proto b/iris/api/grpc/protocol/vc_operations.proto deleted file mode 100644 index 6bf3efc658..0000000000 --- a/iris/api/grpc/protocol/vc_operations.proto +++ /dev/null @@ -1,19 +0,0 @@ -syntax = "proto3"; - -import "scalapb/scalapb.proto"; - -option (scalapb.options) = { - no_default_values_in_constructor: true - package_name: "io.iohk.atala.iris.proto" -}; - -message IssueCredentialsBatch { - string issuer_did = 1; - bytes merkle_root = 2; -} - -message RevokeCredentials { - string revoker_did = 1; - bytes issuance_batch_digest = 2; - repeated bytes credentials_to_revoke = 3; -} diff --git a/iris/api/grpc/service.proto b/iris/api/grpc/service.proto deleted file mode 100644 index b51a0937ba..0000000000 --- a/iris/api/grpc/service.proto +++ /dev/null @@ -1,59 +0,0 @@ -syntax = "proto3"; - -import "scalapb/scalapb.proto"; -import "google/protobuf/timestamp.proto"; - -import "protocol/did_operations.proto"; -import "protocol/vc_operations.proto"; -import "protocol/dlt.proto"; - -option (scalapb.options) = { - no_default_values_in_constructor: true - package_name: "io.iohk.atala.iris.proto" -}; - -message IrisOperationId { - bytes id = 1; -} - -message IrisOperationOutcome { - bytes operation_id = 1; -} - -enum IrisOperationStatus { - PENDING = 0; - SUBMITTED = 1; - ROLLED_BACK = 2; - CONFIRMED = 3; -}; - -message IrisOperationInfo { - bytes operationId = 1; - oneof operation { - CreateDid create_did = 2; - UpdateDid update_did = 3; - RecoverDid recovery_did = 4; - DeactivateDid deactivate_did = 5; - IssueCredentialsBatch issue_credentials_batch = 6; - RevokeCredentials revoke_credentials = 7; - } -} - -message IrisBatchRequest { - // Hex representation of transaction id - // There is one to one correspondence between transaction and batch in it. - string last_seen_transaction_id = 1; -} - -message ConfirmedIrisBatch { - int32 block_level = 1; - google.protobuf.Timestamp blockTimestamp = 2; - string transactionId = 3; - IrisBatch batch = 4; -} - -service IrisService { - rpc ScheduleOperation(IrisOperation) returns (IrisOperationOutcome) {} - rpc GetOperation(IrisOperationId) returns (IrisOperationInfo) {} - rpc GetIrisBatchStream(IrisBatchRequest) returns (stream ConfirmedIrisBatch) {} -} diff --git a/iris/client/scala-client/CHANGELOG.md b/iris/client/scala-client/CHANGELOG.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/iris/client/scala-client/README.md b/iris/client/scala-client/README.md deleted file mode 100644 index 102c5cad5f..0000000000 --- a/iris/client/scala-client/README.md +++ /dev/null @@ -1,8 +0,0 @@ -## sbt project compiled with Scala 3 - -### Usage - -This is a normal sbt project. You can compile code with `sbt compile`, run it with `sbt run`, and `sbt console` will start a Scala 3 REPL. - -For more information on the sbt-dotty plugin, see the -[scala3-example-project](https://github.com/scala/scala3-example-project/blob/main/README.md). 
diff --git a/iris/service/CHANGELOG.md b/iris/service/CHANGELOG.md deleted file mode 100644 index 68a0abd94e..0000000000 --- a/iris/service/CHANGELOG.md +++ /dev/null @@ -1,30 +0,0 @@ -# [iris-service-v0.2.0](https://github.com/input-output-hk/atala-prism-building-blocks/compare/iris-service-v0.1.0...iris-service-v0.2.0) (2022-11-30) - - -### Features - -* **pollux:** implement Issue Credential v2 Protocol ([#144](https://github.com/input-output-hk/atala-prism-building-blocks/issues/144)) ([a80702f](https://github.com/input-output-hk/atala-prism-building-blocks/commit/a80702f5b255d8079085a6ec27c87baa6a23ac59)), closes [#92](https://github.com/input-output-hk/atala-prism-building-blocks/issues/92) - -# [iris-service-v0.1.0](https://github.com/input-output-hk/atala-prism-building-blocks/compare/iris-service-v0.0.1...iris-service-v0.1.0) (2022-11-11) - - -### Bug Fixes - -* **iris:** align type signature ([#72](https://github.com/input-output-hk/atala-prism-building-blocks/issues/72)) ([a19a781](https://github.com/input-output-hk/atala-prism-building-blocks/commit/a19a7814c3fc1e1cc89a861ae3942bf4a5fbad0a)) - - -### Features - -* **iris:** ATL-1791 Implement blockchain syncer functionality ([#49](https://github.com/input-output-hk/atala-prism-building-blocks/issues/49)) ([431b657](https://github.com/input-output-hk/atala-prism-building-blocks/commit/431b6575b8df2f4744285b1c5e2dd56072fa874c)) -* **shared:** Add environmnet configuration for Iris DB and bump scala version in other components to enable build ([#96](https://github.com/input-output-hk/atala-prism-building-blocks/issues/96)) ([a5b583f](https://github.com/input-output-hk/atala-prism-building-blocks/commit/a5b583f445b7efd31987cf9ca017bc544a877986)) - -# iris-service-v0.1.0 (2022-11-09) - -### Bug Fixes - - * iris: align type signature (#72 (https://github.com/input-output-hk/atala-prism-building-blocks/issues/72)) (a19a781 (https://github.com/input-output-hk/atala-prism-building-blocks/commit/a19a7814c3fc1e1cc89a861ae3942bf4a5fbad0a)) - -### Features - - * iris: ATL-1791 Implement blockchain syncer functionality (#49 (https://github.com/input-output-hk/atala-prism-building-blocks/issues/49)) (431b657 (https://github.com/input-output-hk/atala-prism-building-blocks/commit/431b6575b8df2f4744285b1c5e2dd56072fa874c)) - * shared: Add environmnet configuration for Iris DB and bump scala version in other components to enable build (#96 (https://github.com/input-output-hk/atala-prism-building-blocks/issues/96)) (a5b583f (https://github.com/input-output-hk/atala-prism-building-blocks/commit/a5b583f445b7efd31987cf9ca017bc544a877986)) diff --git a/iris/service/README.md b/iris/service/README.md deleted file mode 100644 index a22e626962..0000000000 --- a/iris/service/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Castor BB service - -## Quickstart - -__Running Iris service locally for development__ - -```bash -docker-compose -f docker/docker-compose-local.yaml up -d -sbt api-server/run -``` diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedBlock.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedBlock.scala deleted file mode 100644 index 2aa7507400..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedBlock.scala +++ /dev/null @@ -1,12 +0,0 @@ -package io.iohk.atala.iris.core.model - -import io.iohk.atala.iris.proto.dlt as proto -import io.iohk.atala.iris.core.model.ledger.TransactionId - -import java.time.Instant - -case class ConfirmedBlock( - 
blockLevel: Int, - blockTimestamp: Instant, - transactions: Seq[(TransactionId, proto.IrisBatch)] -) diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedIrisBatch.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedIrisBatch.scala deleted file mode 100644 index ccdb4d8ee7..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ConfirmedIrisBatch.scala +++ /dev/null @@ -1,14 +0,0 @@ -package io.iohk.atala.iris.core.model - -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.proto.dlt as proto - -import java.time.Instant - -case class ConfirmedIrisBatch( - blockLevel: Int, - blockTimestamp: Instant, - transactionSeqId: Int, - transactionId: TransactionId, - batch: proto.IrisBatch -) diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/Models.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/Models.scala deleted file mode 100644 index 7fc22339cd..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/Models.scala +++ /dev/null @@ -1,7 +0,0 @@ -package io.iohk.atala.iris.core.model - -// TODO: replace with actual implementation -final case class IrisNotification(foo: String) - -final case class IrisOperationId(id: String) -final case class IrisOperation(foo: String) diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Block.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Block.scala deleted file mode 100644 index accb1400f5..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Block.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -sealed trait Block extends Product with Serializable { - def header: BlockHeader -} - -object Block { - final case class Canonical(override val header: BlockHeader) extends Block - - final case class Full( - override val header: BlockHeader, - transactions: List[Transaction] - ) extends Block { - def toCanonical: Canonical = Canonical(header) - } -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockError.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockError.scala deleted file mode 100644 index 9b5df63a9f..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockError.scala +++ /dev/null @@ -1,10 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -sealed trait BlockError extends Product with Serializable - -object BlockError { - - case class NotFound(blockNo: Int) extends BlockError - - case object NoneAvailable extends BlockError -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHash.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHash.scala deleted file mode 100644 index 85a4374840..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHash.scala +++ /dev/null @@ -1,18 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -import com.typesafe.config.ConfigMemorySize -import io.iohk.atala.shared.{HashValue, HashValueConfig, HashValueFrom} - -import scala.collection.compat.immutable.ArraySeq - -class BlockHash private (val value: ArraySeq[Byte]) extends AnyVal with HashValue {} - -object BlockHash extends HashValueFrom[BlockHash] { - - override val config: HashValueConfig = HashValueConfig( - ConfigMemorySize.ofBytes(32) - ) - - 
override protected def constructor(value: ArraySeq[Byte]): BlockHash = - new BlockHash(value) -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHeader.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHeader.scala deleted file mode 100644 index f20b1f4d98..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/BlockHeader.scala +++ /dev/null @@ -1,10 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -import java.time.Instant - -case class BlockHeader( - hash: BlockHash, - blockNo: Int, - time: Instant, - previousBlockHash: Option[BlockHash] -) diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Funds.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Funds.scala deleted file mode 100644 index 197ba8fd6f..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Funds.scala +++ /dev/null @@ -1,3 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -case class Funds(lovelaces: Int) diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Ledger.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Ledger.scala deleted file mode 100644 index 1533fb532a..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Ledger.scala +++ /dev/null @@ -1,12 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -import enumeratum.{Enum, EnumEntry} - -import scala.collection.immutable.ArraySeq - -case class Ledger(name: String) - -object Ledger { - val InMemory: Ledger = Ledger("in-memory") - val Mainnet: Ledger = Ledger("mainnet") -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Transaction.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Transaction.scala deleted file mode 100644 index c9c45334dd..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/Transaction.scala +++ /dev/null @@ -1,8 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -case class Transaction( - id: TransactionId, - blockHash: BlockHash, - blockIndex: Int, - metadata: Option[TransactionMetadata] -) diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionDetails.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionDetails.scala deleted file mode 100644 index a979b38dc3..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionDetails.scala +++ /dev/null @@ -1,3 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -case class TransactionDetails(id: TransactionId, status: TransactionStatus) diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionId.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionId.scala deleted file mode 100644 index 3662437bc3..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionId.scala +++ /dev/null @@ -1,20 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -import com.typesafe.config.ConfigMemorySize -import io.iohk.atala.shared.{HashValue, HashValueConfig, HashValueFrom} - -import scala.collection.immutable.ArraySeq - -class TransactionId private (bytes: ArraySeq[Byte]) extends HashValue { - override def value: ArraySeq[Byte] = bytes -} - -object TransactionId extends 
HashValueFrom[TransactionId] { - - override val config: HashValueConfig = HashValueConfig( - ConfigMemorySize.ofBytes(32) - ) - - override protected def constructor(value: ArraySeq[Byte]): TransactionId = - new TransactionId(value) -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionMetadata.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionMetadata.scala deleted file mode 100644 index 30611d7f23..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionMetadata.scala +++ /dev/null @@ -1,148 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -import io.circe.Json -import io.circe.{ACursor, Json} -import io.iohk.atala.iris.proto.dlt as proto -import io.iohk.atala.shared.utils.BytesOps - -import scala.util.Try - -case class TransactionMetadata(json: Json) - -object TransactionMetadata { - // Last 16 bits of 344977920845, which is the decimal representation of the concatenation of the hexadecimal values - // (50 52 49 53 4d) of the word PRISM in ASCII. - val METADATA_PRISM_INDEX = 21325 - - private val VERSION_KEY = "v" - private val CONTENT_KEY = "c" - private val LEDGER_KEY = "l" - // Prefix to denote that the following characters represent a string of bytes in hexadecimal format - // (needed by Cardano Wallet) - private val BYTE_STRING_PREFIX = "0x" - // Maximum number of bytes that can be represented by a byte string (enforced by Cardano Node) - private val BYTE_STRING_LIMIT = 64 - - private val MAP_KEY = "k" - private val MAP_VALUE = "v" - private val MAP_TYPE = "map" - private val LIST_TYPE = "list" - private val INT_TYPE = "int" - private val STRING_TYPE = "string" - private val BYTES_TYPE = "bytes" - - // TODO add ledger here - def fromTransactionMetadata( - expectedLedger: Ledger, - metadata: TransactionMetadata - ): Option[proto.IrisBatch] = { - val prismMetadata = metadata.json.hcursor - .downField(METADATA_PRISM_INDEX.toString) - - for { - _ <- prismMetadata - .downField(VERSION_KEY) - .focus - .flatMap(_.asNumber) - .flatMap(_.toInt) - .find(_ == 2) - - _ <- prismMetadata - .downField(LEDGER_KEY) - .focus - .flatMap(_.asString) - .find(_ == expectedLedger.name) - - result <- fromTransactionMetadataV2(prismMetadata) - } yield result - } - - private def fromTransactionMetadataV2( - prismMetadata: ACursor - ): Option[proto.IrisBatch] = { - val bytes = prismMetadata - .downField(CONTENT_KEY) - .focus - .flatMap(_.asArray) - .getOrElse(Vector[Json]()) - .flatMap(parseByteString) - .toArray - if (bytes.isEmpty) { - // Either the content does not exist, is not the right type, or is truly empty - None - } else { - proto.IrisBatch.validate(bytes).toOption - } - } - - private def parseByteString(byteString: Json): Array[Byte] = { - byteString.asString - .map(_.stripPrefix(BYTE_STRING_PREFIX)) - .map(hex => Try(BytesOps.hexToBytes(hex)).getOrElse(Array[Byte]())) - .getOrElse(Array()) - } - - def toCardanoTransactionMetadata( - ledger: Ledger, - irisBatch: proto.IrisBatch - ): TransactionMetadata = { - // This definition aligns with the rules described here https://developers.cardano.org/docs/transaction-metadata/ - // After posting that data to the Cardano blockchain, it gets transformed to JSON - TransactionMetadata( - Json.obj( - METADATA_PRISM_INDEX.toString -> Json.obj( - MAP_TYPE -> Json.arr( - Json.obj( - MAP_KEY -> Json.obj(STRING_TYPE -> Json.fromString(VERSION_KEY)), - MAP_VALUE -> Json.obj(INT_TYPE -> Json.fromInt(2)) - ), - Json.obj( - MAP_KEY -> 
Json.obj(STRING_TYPE -> Json.fromString(LEDGER_KEY)), - MAP_VALUE -> Json.obj(STRING_TYPE -> Json.fromString(ledger.name)) - ), - Json.obj( - MAP_KEY -> Json.obj(STRING_TYPE -> Json.fromString(CONTENT_KEY)), - MAP_VALUE -> Json.obj( - LIST_TYPE -> Json.arr( - irisBatch.toByteArray - .grouped(BYTE_STRING_LIMIT) - .map(bytes => - Json.obj( - BYTES_TYPE -> Json.fromString( - BytesOps.bytesToHex(bytes) - ) - ) - ) - .toSeq: _* - ) - ) - ) - ) - ) - ) - ) - } - - def toInmemoryTransactionMetadata( - ledger: Ledger, - irisBatch: proto.IrisBatch - ): TransactionMetadata = - TransactionMetadata( - Json.obj( - METADATA_PRISM_INDEX.toString -> Json.obj( - VERSION_KEY -> Json.fromInt(2), - LEDGER_KEY -> Json.fromString(ledger.name), - CONTENT_KEY -> Json.arr( - irisBatch.toByteArray - .grouped(BYTE_STRING_LIMIT) - .map(bytes => Json.fromString(BytesOps.bytesToHex(bytes))) - .toSeq: _* - ) - ) - ) - ) - - def estimateTxMetadataSize(ledger: Ledger, irisBatch: proto.IrisBatch): Int = { - toCardanoTransactionMetadata(ledger, irisBatch).json.noSpaces.length - } -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionStatus.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionStatus.scala deleted file mode 100644 index 9cc38a9225..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/model/ledger/TransactionStatus.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.iohk.atala.iris.core.model.ledger - -import enumeratum.{Enum, EnumEntry} -import enumeratum.EnumEntry.Snakecase -import scala.collection.immutable.ArraySeq - -sealed trait TransactionStatus extends EnumEntry with Snakecase - -object TransactionStatus extends Enum[TransactionStatus] { - val values = ArraySeq(InMempool, Submitted, Expired, InLedger) - - case object InMempool extends TransactionStatus - case object Submitted extends TransactionStatus - case object Expired extends TransactionStatus - case object InLedger extends TransactionStatus -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/BlocksRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/BlocksRepository.scala deleted file mode 100644 index 156e36ca15..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/BlocksRepository.scala +++ /dev/null @@ -1,9 +0,0 @@ -package io.iohk.atala.iris.core.repository - -import io.iohk.atala.iris.core.model.ledger.BlockError -import io.iohk.atala.iris.core.model.ledger.Block - -trait ROBlocksRepository[F[_]] { - def getFullBlock(blockNo: Int): F[Either[BlockError.NotFound, Block.Full]] - def getLatestBlock: F[Either[BlockError.NoneAvailable.type, Block.Canonical]] -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/DbRepositoryTransactor.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/DbRepositoryTransactor.scala deleted file mode 100644 index 3df7e9e1e8..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/DbRepositoryTransactor.scala +++ /dev/null @@ -1,11 +0,0 @@ -package io.iohk.atala.iris.core.repository - -import zio.* - -/** This component intended to run several combined repository operations in one database transaction. The idea to have - * repositories traits instantiated with IOConnection and ZIO monads. Former to make possible to combine several - * operations in one DB transaction, latter to run repository operations without additional hustle. 
- */ -trait DbRepositoryTransactor[F[_]] { - def runAtomically[A](action: F[A]): Task[A] -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/IrisBatchesRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/IrisBatchesRepository.scala deleted file mode 100644 index bf4796bffa..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/IrisBatchesRepository.scala +++ /dev/null @@ -1,24 +0,0 @@ -package io.iohk.atala.iris.core.repository - -import io.iohk.atala.iris.core.model.ConfirmedIrisBatch -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.proto.dlt as proto - -import java.time.Instant - -trait ROIrisBatchesRepository[S[_]] { - - // Streams batches which are already on the database - // Every transaction contains a IrisBatch in its metadata, hence, - // there is one to one correspondence between TransactionId and IrisBatch - def getIrisBatchesStream(lastSeen: Option[TransactionId]): S[ConfirmedIrisBatch] -} - -/** @tparam F - * represents a monad where CRUD requests are executed - * @tparam S - * represents a monad for streaming of data - */ -trait IrisBatchesRepository[F[_], S[_]] extends ROIrisBatchesRepository[S] { - def saveIrisBatch(irisBatch: ConfirmedIrisBatch): F[Unit] -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/KeyValueRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/KeyValueRepository.scala deleted file mode 100644 index 97c43b7b64..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/KeyValueRepository.scala +++ /dev/null @@ -1,10 +0,0 @@ -package io.iohk.atala.iris.core.repository - -trait ROKeyValueRepository[F[_]] { - def get(key: String): F[Option[String]] - def getInt(key: String): F[Option[Int]] -} - -trait KeyValueRepository[F[_]] extends ROKeyValueRepository[F] { - def set(key: String, value: Option[Int | String]): F[Unit] -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/OperationsRepository.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/OperationsRepository.scala deleted file mode 100644 index 5775e6954e..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/repository/OperationsRepository.scala +++ /dev/null @@ -1,10 +0,0 @@ -package io.iohk.atala.iris.core.repository - -import io.iohk.atala.iris.core.model as model -import zio.* - -// TODO: replace with actual implementation -trait OperationsRepository[F[_]] { - def getOperation(id: model.IrisOperationId): F[model.IrisOperation] - def saveOperations(ops: Seq[model.IrisOperation]): F[Unit] -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksSaveSinker.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksSaveSinker.scala deleted file mode 100644 index 1e6e7036d0..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksSaveSinker.scala +++ /dev/null @@ -1,61 +0,0 @@ -package io.iohk.atala.iris.core.service - -import cats.Monad -import cats.syntax.applicative.* -import cats.syntax.flatMap.* -import cats.syntax.functor.* -import cats.syntax.traverse.* -import io.iohk.atala.iris.core.model.{ConfirmedBlock, ConfirmedIrisBatch} -import io.iohk.atala.iris.core.repository.{DbRepositoryTransactor, IrisBatchesRepository, KeyValueRepository} -import zio.* -import zio.stream.* - -trait BlocksSaveSinker { - val sink: 
ZSink[Any, Throwable, ConfirmedBlock, Nothing, Unit] -} - -object BlocksSaveSinker { - def layer[F[_]: TagK: Monad, S[_]: TagK]: URLayer[ - KeyValueRepository[F] & IrisBatchesRepository[F, S] & DbRepositoryTransactor[F], - BlocksSaveSinker - ] = - ZLayer.fromFunction((x: KeyValueRepository[F], y: IrisBatchesRepository[F, S], z: DbRepositoryTransactor[F]) => - BlocksSaveSinkerImpl(x, y, z) - ) - -} - -/** @param keyValueRepo - * @param batchesRepo - * @param transactor - * @tparam F - * \- a monad which support combining operations which might be performed within one database transaction, like - * doobie.ConnectionIO - * @tparam S - * \- type representing a streaming type - */ -class BlocksSaveSinkerImpl[F[_]: Monad, S[_]]( - keyValueRepo: KeyValueRepository[F], - batchesRepo: IrisBatchesRepository[F, S], - transactor: DbRepositoryTransactor[F] -) extends BlocksSaveSinker { - - private val LAST_SYNCED_BLOCK_NO = "last_synced_block_no" - private val LAST_SYNCED_BLOCK_TIMESTAMP = "last_synced_block_timestamp" - - override val sink: ZSink[Any, Throwable, ConfirmedBlock, Nothing, Unit] = - ZSink.foreach[Any, Throwable, ConfirmedBlock](updateLastSyncedBlock) - - private def updateLastSyncedBlock(block: ConfirmedBlock): Task[Unit] = { - val timestampEpochMilli = block.blockTimestamp.toEpochMilli - transactor.runAtomically { - for { - _ <- keyValueRepo.set(LAST_SYNCED_BLOCK_NO, Some(block.blockLevel)) - _ <- keyValueRepo.set(LAST_SYNCED_BLOCK_TIMESTAMP, Some(timestampEpochMilli.toString)) - _ <- block.transactions.zipWithIndex.traverse { case ((txId, batch), i) => - batchesRepo.saveIrisBatch(ConfirmedIrisBatch(block.blockLevel, block.blockTimestamp, i, txId, batch)) - } - } yield () - } - } -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksStreamer.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksStreamer.scala deleted file mode 100644 index fb06ff66db..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/BlocksStreamer.scala +++ /dev/null @@ -1,155 +0,0 @@ -package io.iohk.atala.iris.core.service - -import io.iohk.atala.iris.core.model.ConfirmedBlock -import io.iohk.atala.iris.core.model.ledger.{Block, Ledger, TransactionId, TransactionMetadata} -import io.iohk.atala.iris.core.repository.{ROBlocksRepository, ROKeyValueRepository} -import io.iohk.atala.iris.proto.dlt as proto -import zio.* -import zio.stream.* - -trait BlocksStreamer { - val blocksStream: UStream[ConfirmedBlock] -} - -object BlocksStreamer { - case class Config(targetLedger: Ledger, genesisBlockNumber: Int, blockConfirmationsToWait: Int, blockEvery: Duration) - - def layer( - config: Config - ): URLayer[ROBlocksRepository[Task] & ROKeyValueRepository[Task], BlocksStreamer] = - ZLayer.fromFunction(BlocksStreamerImpl(_, _, config)) -} - -/** The goal of this streaming service is to emit batches of operations which are confirmed in the blockchain. It - * stateful and reply on block read only database and key value read only database. 
- * @param blocksRep - * \- read only storage of blocks from the blockchain - * @param keyValueRep - * \- read only key-value storage - * @param config - * \- protocol specific constants - */ -class BlocksStreamerImpl( - val blocksRep: ROBlocksRepository[Task], - val keyValueRep: ROKeyValueRepository[Task], - val config: BlocksStreamer.Config -) extends BlocksStreamer { - private val LAST_SYNCED_BLOCK_NO = "last_synced_block_no" - private val MAX_SYNC_BLOCKS = 100 - - private sealed trait BlocksSyncOutcome - - private object BlocksSyncOutcome { - case object MoreBlocksToSyncExist extends BlocksSyncOutcome - case object NoMoreBlocks extends BlocksSyncOutcome - } - - private type ConfirmedBlockCallback = ZStream.Emit[Any, Nothing, ConfirmedBlock, Unit] - - override val blocksStream: UStream[ConfirmedBlock] = ZStream.asyncZIO[Any, Nothing, ConfirmedBlock] { cb => - startSyncing().provideLayer(ZLayer.succeed(cb)).fork - } - - private def startSyncing(): RIO[ConfirmedBlockCallback, Unit] = { - for { - outcome <- syncMissingBlocks() - _ <- - if (outcome == BlocksSyncOutcome.NoMoreBlocks) { - ZIO.sleep(config.blockEvery).flatMap(_ => startSyncing()) - } else startSyncing() - } yield () - } - - /** Sync up on blocks from the blockchain and returns whether there are remaining blocks to sync. - */ - private def syncMissingBlocks(): RIO[ConfirmedBlockCallback, BlocksSyncOutcome] = { - for { - // Gets the number of the latest block processed by PRISM Node. - maybeLastSyncedBlockNo <- keyValueRep.getInt(LAST_SYNCED_BLOCK_NO) - // Calculates the next block based on the initial `blockNumberSyncStart` and the latest synced block. - lastSyncedBlockNo = calculateLastSyncedBlockNo( - maybeLastSyncedBlockNo, - config.genesisBlockNumber - ) - // Gets the latest block from the blocks database. - latestBlock <- blocksRep.getLatestBlock - // Calculates the latest confirmed block based on amount of required confirmations. - lastConfirmedBlockNo = latestBlock.map( - _.header.blockNo - config.blockConfirmationsToWait - ) - syncStart = lastSyncedBlockNo + 1 - // Sync no more than `MAX_SYNC_BLOCKS` during one `syncMissingBlocks` iteration. - syncEnd = lastConfirmedBlockNo.map( - math.min(_, lastSyncedBlockNo + MAX_SYNC_BLOCKS) - ) - // Sync all blocks with numbers from `syncStart` to `syncEnd` - _ <- syncEnd.fold(_ => ZIO.unit, end => syncBlocksInRange(syncStart to end)) - } yield lastConfirmedBlockNo - .flatMap(last => - syncEnd.map(end => if (last > end) BlocksSyncOutcome.MoreBlocksToSyncExist else BlocksSyncOutcome.NoMoreBlocks) - ) - .getOrElse(BlocksSyncOutcome.NoMoreBlocks) - } - - // Sync blocks in the given range. - private def syncBlocksInRange(blockNos: Range): RIO[ConfirmedBlockCallback, Unit] = { - if (blockNos.isEmpty) ZIO.unit - else { - // Sequentially sync blocks from the given range one by one. - ZIO.foreachDiscard(blockNos)(blockNo => syncBlock(blockNo)) - } - } - - // Sync block `blockNo` with internal state. - private def syncBlock(blockNo: Int): RIO[ConfirmedBlockCallback, Unit] = { - for { - // Retrieve block header and the list of transactions in the block. - block <- blocksRep.getFullBlock(blockNo) - // Maybe in future we will add block handler here - // Look over transactions in the block. 
- _ <- block.fold(_ => ZIO.unit, filterNPushBlock) - } yield () - } - - /** Filter out transactions in the `block` and push the block to the stream */ - private def filterNPushBlock(block: Block.Full): RIO[ConfirmedBlockCallback, Unit] = { - val transactions: List[(TransactionId, proto.IrisBatch)] = for { - // Iterate over transactions in the block. - transaction <- block.transactions - // Retrieve metadata from the transaction if it exists. - metadata <- transaction.metadata - // Parse metadata in accordance with the PRISM protocol if it's possible. - irisBatch <- TransactionMetadata.fromTransactionMetadata(config.targetLedger, metadata) - // Verify that operations related to the ledger protocol works on - ops = irisBatch.operations.filter(op => - op.operation.createDid.forall(_.ledger == config.targetLedger.name) && - op.operation.updateDid.forall(_.ledger == config.targetLedger.name) && - op.operation.recoverDid.forall(_.ledger == config.targetLedger.name) && - op.operation.deactivateDid.forall(_.ledger == config.targetLedger.name) - ) - nonEmptyBatch <- - if (ops.nonEmpty) { Some(proto.IrisBatch(ops)) } - else None - } yield (transaction.id, nonEmptyBatch) - - val confirmedBlock = - ConfirmedBlock( - blockLevel = block.header.blockNo, - blockTimestamp = block.header.time, - transactions = transactions - ) - - for { - cb <- ZIO.service[ConfirmedBlockCallback] - // Trigger callback attached to ZStream on every block - _ <- ZIO.succeed(cb(ZIO.succeed(Chunk(confirmedBlock)))) - } yield () - } - - private def calculateLastSyncedBlockNo( - maybeLastSyncedBlockNo: Option[Int], - blockNumberSyncStart: Int - ): Int = - math.max(maybeLastSyncedBlockNo.getOrElse(0), blockNumberSyncStart - 1) - -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerService.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerService.scala deleted file mode 100644 index 050ccdffb2..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerService.scala +++ /dev/null @@ -1,166 +0,0 @@ -package io.iohk.atala.iris.core.service - -import io.iohk.atala.iris.core.model.ledger.TransactionStatus.{InLedger, InMempool} -import io.iohk.atala.iris.core.model.ledger.* -import io.iohk.atala.iris.core.repository.ROBlocksRepository -import io.iohk.atala.iris.core.service.InmemoryUnderlyingLedgerService.{CardanoBlock, CardanoTransaction, Config} -import io.iohk.atala.iris.proto.dlt as proto -import io.iohk.atala.prism.crypto.Sha256 -import io.circe.{Json, parser} -import zio.* -import zio.stm.* - -import java.time.Instant -import java.util.concurrent.TimeUnit - -object InmemoryUnderlyingLedgerService { - case class Config(blockEvery: Duration, initialFunds: Funds, txFee: Funds, ledger: Ledger) - - case class CardanoTransaction(operations: Seq[proto.IrisOperation]) { - lazy val transactionId: TransactionId = { - val objectBytes = proto.IrisBatch(operations).toByteArray - val hash = Sha256.compute(objectBytes) - TransactionId - .from(hash.getValue) - .getOrElse(throw new RuntimeException("Unexpected invalid hash")) - } - } - - case class CardanoBlock(header: BlockHeader, txs: Seq[CardanoTransaction]) { - def toBlockFull(ledger: Ledger): Block.Full = { - Block.Full( - header, - txs.toList.map(tx => - Transaction( - id = tx.transactionId, - blockHash = header.hash, - blockIndex = header.blockNo, - metadata = Some(TransactionMetadata.toInmemoryTransactionMetadata(ledger, 
proto.IrisBatch(tx.operations))) - ) - ) - ) - } - } - - object CardanoBlock { - def evalBlockHash(txs: Seq[CardanoTransaction], prevHash: Option[BlockHash]): BlockHash = { - val bytes = prevHash.fold(Array.empty[Byte])(bh => bh.value.toArray) - val hash = Sha256.compute( - Array.concat(txs.map(_.transactionId.value.toArray).appended(bytes): _*) - ) - BlockHash.from(hash.getValue).getOrElse(throw new RuntimeException("Unexpected invalid hash")) - } - } - - def layer(config: Config): ULayer[InmemoryUnderlyingLedgerService] = ZLayer.fromZIO { - for { - mempoolRef <- TRef.make(Vector[CardanoTransaction]()).commit - blocksRef <- TRef.make(Vector[CardanoBlock]()).commit - initialBalance <- TRef.make(config.initialFunds).commit - srv = InmemoryUnderlyingLedgerService(config, mempoolRef, blocksRef, initialBalance) - _ <- srv.startBackgroundProcess() - } yield srv - } -} - -class InmemoryUnderlyingLedgerService( - config: Config, - mempoolRef: TRef[Vector[CardanoTransaction]], - blocksRef: TRef[Vector[CardanoBlock]], - balanceRef: TRef[Funds] -) extends UnderlyingLedgerService - with ROBlocksRepository[Task] { - - override def publish(operations: Seq[proto.IrisOperation]): IO[LedgerError, Unit] = - STM.atomically { - for { - curFunds <- balanceRef.get - newFunds <- STM.cond( - curFunds.lovelaces >= config.txFee.lovelaces, - Funds(curFunds.lovelaces - config.txFee.lovelaces), - LedgerError("Insufficient wallet balance") - ) - _ <- balanceRef.set(newFunds) - _ <- mempoolRef.update(_.appended(CardanoTransaction(operations))) - } yield () - } - - override def getTransactionDetails(transactionId: TransactionId): IO[LedgerError, TransactionDetails] = - STM.atomically { - for { - mempool <- mempoolRef.get - blockchain <- blocksRef.get - tdetails <- STM - .fromOption { - mempool - .find(_.transactionId == transactionId) - .map(_ => TransactionDetails(transactionId, InMempool)) - } - .orElse { - STM.fromOption { - blockchain - .find(block => block.txs.exists(t => t.transactionId == transactionId)) - .map(_ => TransactionDetails(transactionId, InLedger)) - } - } - .orElseFail(LedgerError(s"Couldn't find tx $transactionId")) - } yield tdetails - } - - override def deleteTransaction(transactionId: TransactionId): IO[LedgerError, Unit] = STM.atomically { - for { - mempool <- mempoolRef.get - _ <- STM.cond( - mempool.exists(_.transactionId == transactionId), - (), - LedgerError(s"Transaction $transactionId not found in the mempool") - ) - _ <- mempoolRef.update(m => m.filter(_.transactionId != transactionId)) - _ <- balanceRef.update(b => Funds(b.lovelaces + config.txFee.lovelaces)) - } yield () - } - - override def getWalletBalance: IO[LedgerError, Funds] = balanceRef.get.commit - - def getMempool: UIO[List[CardanoTransaction]] = mempoolRef.get.commit.map(_.toList) - - def getBlocks: UIO[List[CardanoBlock]] = blocksRef.get.commit.map(_.toList) - - private[service] def startBackgroundProcess(): UIO[Unit] = (for { - curTime <- Clock.currentTime(TimeUnit.MILLISECONDS).map(Instant.ofEpochMilli) - _ <- STM - .atomically { - for { - // Craft a new block from mempool transactions - txs <- mempoolRef.modify(old => (old, Vector.empty)) - prevHash <- blocksRef.get.map(_.lastOption.map(_.header.hash)) - blockIdx <- blocksRef.get.map(_.size) - blockHash = CardanoBlock.evalBlockHash(txs, prevHash) - blockHeader = BlockHeader(blockHash, blockIdx, curTime, prevHash) - _ <- blocksRef.update(_.appended(CardanoBlock(blockHeader, txs))) - } yield () - } - } yield ()) - .repeat(Schedule.spaced(config.blockEvery)) - .fork - .map(_ 
=> ()) - - override def getFullBlock(blockNo: Int): Task[Either[BlockError.NotFound, Block.Full]] = STM.atomically { - for { - blocks <- blocksRef.get - res = - if (blockNo < blocks.size) { - Right(blocks.drop(blockNo).head.toBlockFull(config.ledger)) - } else { - Left(BlockError.NotFound(blockNo)) - } - } yield res - } - - override def getLatestBlock: Task[Either[BlockError.NoneAvailable.type, Block.Canonical]] = for { - blocks <- blocksRef.get.commit - res <- - if (blocks.isEmpty) { ZIO.succeed(Left(BlockError.NoneAvailable)) } - else ZIO.succeed(Right(Block.Canonical(blocks.last.header))) - } yield res -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/PublishingService.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/PublishingService.scala deleted file mode 100644 index 4f63fd8782..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/PublishingService.scala +++ /dev/null @@ -1,17 +0,0 @@ -package io.iohk.atala.iris.core.service - -import io.iohk.atala.iris.proto.dlt as proto -import zio.* - -// TODO: replace with actual implementation -trait PublishingService { - def publishOperation(op: proto.IrisOperation): UIO[Unit] -} - -object MockPublishingService { - val layer: ULayer[PublishingService] = ZLayer.succeed { - new PublishingService { - override def publishOperation(op: proto.IrisOperation): UIO[Unit] = ZIO.unit - } - } -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/UnderlyingLedgerService.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/UnderlyingLedgerService.scala deleted file mode 100644 index ff23525b09..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/service/UnderlyingLedgerService.scala +++ /dev/null @@ -1,20 +0,0 @@ -package io.iohk.atala.iris.core.service - -import io.iohk.atala.iris.proto.dlt as proto -import io.iohk.atala.iris.core.model.IrisOperation -import io.iohk.atala.iris.core.model.ledger.{Funds, TransactionDetails, TransactionId} -import zio.{IO, UIO} - -case class LedgerError(msg: String) extends RuntimeException(msg) - -trait UnderlyingLedgerService { -// def getType: Ledger - - def publish(operations: Seq[proto.IrisOperation]): IO[LedgerError, Unit] - - def getTransactionDetails(transactionId: TransactionId): IO[LedgerError, TransactionDetails] - - def deleteTransaction(transactionId: TransactionId): IO[LedgerError, Unit] - - def getWalletBalance: IO[LedgerError, Funds] -} diff --git a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/worker/PublishingScheduler.scala b/iris/service/core/src/main/scala/io/iohk/atala/iris/core/worker/PublishingScheduler.scala deleted file mode 100644 index 9db35963ef..0000000000 --- a/iris/service/core/src/main/scala/io/iohk/atala/iris/core/worker/PublishingScheduler.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.iohk.atala.iris.core.worker - -import io.iohk.atala.iris.proto.dlt as proto -import zio.{UIO, ULayer, ZIO, ZLayer} - -trait PublishingScheduler { - def scheduleOperations(op: proto.IrisOperation): UIO[Unit] -} - -object MockPublishingScheduler { - val layer: ULayer[PublishingScheduler] = ZLayer.succeed { - new PublishingScheduler { - def scheduleOperations(op: proto.IrisOperation): UIO[Unit] = ZIO.unit - } - } -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/DummyDbRepositoryTransactor.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/DummyDbRepositoryTransactor.scala deleted file mode 100644 index 
afa68afa0f..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/DummyDbRepositoryTransactor.scala +++ /dev/null @@ -1,12 +0,0 @@ -package io.iohk.atala.iris.core.mock - -import io.iohk.atala.iris.core.repository.DbRepositoryTransactor -import zio.* - -object DummyDbRepositoryTransactor { - val layer: ULayer[DbRepositoryTransactor[Task]] = ZLayer.succeed(DummyDbRepositoryTransactor()) -} - -class DummyDbRepositoryTransactor extends DbRepositoryTransactor[Task] { - override def runAtomically[A](action: Task[A]): Task[A] = action -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryIrisBatchesRepository.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryIrisBatchesRepository.scala deleted file mode 100644 index ef790ffce5..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryIrisBatchesRepository.scala +++ /dev/null @@ -1,28 +0,0 @@ -package io.iohk.atala.iris.core.mock - -import io.iohk.atala.iris.core.model.ConfirmedIrisBatch -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.core.repository.IrisBatchesRepository -import zio.* -import zio.stream.* - -type StreamZIO[A] = Stream[Throwable, A] - -object InMemoryIrisBatchesRepository { - val layer: ULayer[InMemoryIrisBatchesRepository] = ZLayer.fromZIO { - for { - ref <- Ref.make(Vector[ConfirmedIrisBatch]()) - srv = InMemoryIrisBatchesRepository(ref) - } yield srv - } -} - -class InMemoryIrisBatchesRepository(list: Ref[Vector[ConfirmedIrisBatch]]) - extends IrisBatchesRepository[Task, StreamZIO] { - override def saveIrisBatch(irisBatch: ConfirmedIrisBatch): Task[Unit] = list.update(_.appended(irisBatch)) - - override def getIrisBatchesStream(lastSeen: Option[TransactionId]): StreamZIO[ConfirmedIrisBatch] = - ZStream.fromIterableZIO(list.get) - - def getConfirmedBatches: Task[Vector[ConfirmedIrisBatch]] = list.get -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryKeyValueRepository.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryKeyValueRepository.scala deleted file mode 100644 index 935924b817..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/mock/InMemoryKeyValueRepository.scala +++ /dev/null @@ -1,21 +0,0 @@ -package io.iohk.atala.iris.core.mock - -import io.iohk.atala.iris.core.repository.KeyValueRepository -import zio.* - -object InMemoryKeyValueRepository { - val layer: ULayer[KeyValueRepository[Task]] = ZLayer.fromZIO { - for { - ref <- Ref.make(Map[String, Any]()) - srv = InMemoryKeyValueRepository(ref) - } yield srv - } -} - -class InMemoryKeyValueRepository(kv: Ref[Map[String, Any]]) extends KeyValueRepository[Task] { - override def get(key: String): Task[Option[String]] = kv.get.map(_.get(key).map(_.asInstanceOf[String])) - - override def getInt(key: String): Task[Option[Int]] = kv.get.map(_.get(key).map(_.asInstanceOf[Int])) - - override def set(key: String, value: Option[Int | String]): Task[Unit] = kv.update(_.updatedWith(key)(_ => value)) -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/BlockchainSyncSpec.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/BlockchainSyncSpec.scala deleted file mode 100644 index a33d80c917..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/BlockchainSyncSpec.scala +++ /dev/null @@ -1,118 +0,0 @@ -package io.iohk.atala.iris.core.service - -import 
io.iohk.atala.iris.core.mock.{ - InMemoryIrisBatchesRepository, - InMemoryKeyValueRepository, - DummyDbRepositoryTransactor -} -import io.iohk.atala.iris.core.model.ledger.{Funds, Ledger, TransactionId} -import io.iohk.atala.iris.core.testutils.PublishThenAdjust -import io.iohk.atala.iris.core.testutils.PublishThenAdjust.* -import io.iohk.atala.iris.core.testutils.RandomUtils.* -import zio.* -import zio.stream.* -import zio.test.* -import zio.interop.catz.* - -object BlockchainSyncSpec extends ZIOSpecDefault { - val blockEvery = 10.seconds - val inmemoryDefaultConfig = InmemoryUnderlyingLedgerService.Config(blockEvery, Funds(1000), Funds(1), Ledger.Mainnet) - val inmemoryLedgerLayer = InmemoryUnderlyingLedgerService.layer(inmemoryDefaultConfig) - - val keyValueRepoLayer = InMemoryKeyValueRepository.layer - - val blockchainStreamerConfig1BlockConfirm = BlocksStreamer.Config(Ledger.Mainnet, 0, 1, blockEvery) - val blockchainStreamer1Layer: TaskLayer[BlocksStreamer] = - (inmemoryLedgerLayer ++ keyValueRepoLayer) >>> BlocksStreamer.layer(blockchainStreamerConfig1BlockConfirm) - - val blockchainStreamerConfig3BlocksConfirm: BlocksStreamer.Config = - BlocksStreamer.Config(Ledger.Mainnet, 0, 3, blockEvery) - val blockchainStreamer3Layer: TaskLayer[BlocksStreamer] = - (inmemoryLedgerLayer ++ keyValueRepoLayer) >>> BlocksStreamer.layer(blockchainStreamerConfig3BlocksConfirm) - - type StreamZIO[A] = Stream[Throwable, A] - val irisBatchesRepoLayer = InMemoryIrisBatchesRepository.layer - val blockchainSaver: TaskLayer[BlocksSaveSinker] = - (keyValueRepoLayer ++ irisBatchesRepoLayer ++ DummyDbRepositoryTransactor.layer) >>> BlocksSaveSinker - .layer[Task, StreamZIO] - - override def spec = suite("BlockchainSyncSpec")( - test("Sync up 1 block with 4 transactions") { - val testCase = - for { - blocksSource <- ZIO.service[BlocksStreamer] - blocksSink <- ZIO.service[BlocksSaveSinker] - _ <- blocksSource.blocksStream.run(blocksSink.sink).fork - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 1.seconds, - Seq(op(2)) >> 0.seconds, - Seq(op(3)) >> 20.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - irisBatchesRepo <- ZIO.service[InMemoryIrisBatchesRepository] - irisBatches <- irisBatchesRepo.getConfirmedBatches - expected = Vector( - "c4556a3d133b0a184a01baa9f3ea76a8fef2a06e66dec0907038997b2d7588de", - "8a89c3c1bbc39b5e5eb0db0ed8b12d876ec89f45a7dfeaaa7c24e39ed974aab1", - "0872cced55cab747ae0a3d463e5713e5cb9225617af04d7243b21d9d82751986", - "29798b9678930bc07c097adffaf3e13ae044af64d2b950af0414c231e3a06b8a" - ).map(TransactionId.from(_).get) - } yield assertTrue(irisBatches.map(_.transactionId) == expected) - testCase.provideLayer(inmemoryLedgerLayer ++ blockchainStreamer1Layer ++ blockchainSaver ++ irisBatchesRepoLayer) - }, - test("Sync up 1 block with 2 transaction") { - val testCase = - for { - blocksSource <- ZIO.service[BlocksStreamer] - blocksSink <- ZIO.service[BlocksSaveSinker] - _ <- blocksSource.blocksStream.run(blocksSink.sink).fork - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0), op(1)) >> 1.seconds, - Seq(op(2), op(3)) >> 20.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - irisBatchesRepo <- ZIO.service[InMemoryIrisBatchesRepository] - irisBatches <- irisBatchesRepo.getConfirmedBatches - expected = Vector( - 
"1c6fd29ae378a773b2e3957be37ead077aa98d7041a3d4bb6533eb0a95e4058c", - "919e893246ffd0543d9005d9fdea6e2b26b85b150eb953fc4ba368097546d347" - ).map(TransactionId.from(_).get) - } yield assertTrue(irisBatches.map(_.transactionId) == expected) - testCase.provideLayer( - inmemoryLedgerLayer ++ blockchainStreamer1Layer ++ blockchainSaver ++ irisBatchesRepoLayer - ) - }, - test("Block confirmation is 3") { - val testCase = - for { - blocksSource <- ZIO.service[BlocksStreamer] - blocksSink <- ZIO.service[BlocksSaveSinker] - _ <- blocksSource.blocksStream.run(blocksSink.sink).fork - op <- ZIO.replicateZIO(6)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1), op(2)) >> blockEvery, - Seq(op(3)) >> blockEvery, - Seq(op(4)) >> blockEvery, - Seq(op(5)) >> blockEvery, - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - irisBatchesRepo <- ZIO.service[InMemoryIrisBatchesRepository] - expected = Vector( - "c4556a3d133b0a184a01baa9f3ea76a8fef2a06e66dec0907038997b2d7588de", - "de69eda103be2676872937a0622cb9d831939d595fb9f80fba5da36cfe28d174" - ).map(TransactionId.from(_).get) - irisBatches <- irisBatchesRepo.getConfirmedBatches - } yield assertTrue(irisBatches.map(_.transactionId) == expected) - testCase.provideLayer( - inmemoryLedgerLayer ++ blockchainStreamer3Layer ++ blockchainSaver ++ irisBatchesRepoLayer - ) - }, - ) -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerServiceSpec.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerServiceSpec.scala deleted file mode 100644 index fb2054301e..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/service/InmemoryUnderlyingLedgerServiceSpec.scala +++ /dev/null @@ -1,179 +0,0 @@ -package io.iohk.atala.iris.core.service - -import com.google.protobuf.ByteString -import io.iohk.atala.iris.core.model.ledger.TransactionStatus.{InLedger, InMempool} -import io.iohk.atala.iris.core.model.ledger.{Funds, Ledger, TransactionDetails} -import io.iohk.atala.iris.core.service.InmemoryUnderlyingLedgerService.{CardanoBlock, CardanoTransaction} -import io.iohk.atala.iris.core.testutils.RandomUtils.* -import io.iohk.atala.iris.core.testutils.PublishThenAdjust.* -import io.iohk.atala.iris.core.testutils.PublishThenAdjust -import io.iohk.atala.iris.proto.did_operations.{CreateDid, DocumentDefinition, UpdateDid} -import io.iohk.atala.iris.proto.dlt as proto -import zio.* -import zio.test.* -import zio.test.TestAspect.ignore -import zio.test.Assertion.* - -object InmemoryUnderlyingLedgerServiceSpec extends ZIOSpecDefault { - val defaultConfig = InmemoryUnderlyingLedgerService.Config(10.seconds, Funds(1000), Funds(1), Ledger.Mainnet) - val inmemoryLedger = InmemoryUnderlyingLedgerService.layer(defaultConfig) - - def spec = suite("InmemoryUnderlyingLedgerServiceSpec")( - suite("Background worker")( - test("All the operations in the one block within 4 different transactions") { - val testCase = - for { - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 1.seconds, - Seq(op(2)) >> 0.seconds, - Seq(op(3)) >> 20.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - mempool <- srvc.getMempool - blocks <- srvc.getBlocks - } yield assertTrue(mempool == List.empty) && - assertTrue( - blocks.map(_.txs) == List( - List(), - List( - 
CardanoTransaction(Seq(op(0))), - CardanoTransaction(Seq(op(1))), - CardanoTransaction(Seq(op(2))), - CardanoTransaction(Seq(op(3))) - ), - List() - ) - ) - testCase.provideLayer(inmemoryLedger) - } @@ TestAspect.ignore, - test("Operations distributed between 2 blocks") { - val testCase = - for { - op <- ZIO.replicateZIO(4)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2)) >> 0.seconds, - Seq(op(3)) >> 10.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - mempool <- srvc.getMempool - blocks <- srvc.getBlocks - } yield assertTrue(mempool == List.empty) && - assertTrue( - blocks.map(_.txs) == List( - List(), - List( - CardanoTransaction(Seq(op(0))), - CardanoTransaction(Seq(op(1))), - ), - List( - CardanoTransaction(Seq(op(2))), - CardanoTransaction(Seq(op(3))), - ) - ) - ) - testCase.provideLayer(inmemoryLedger) - } @@ TestAspect.ignore - ), - suite("getTransactionDetails")( - test("Find unconfirmed transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 2.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(2), op(3))) - txDetails <- srvc.getTransactionDetails(targetTx.transactionId) - } yield assertTrue(txDetails == TransactionDetails(targetTx.transactionId, InMempool)) - testCase.provideLayer(inmemoryLedger) - }, - test("Find confirmed transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 11.seconds, - Seq(op(1)) >> 11.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 12.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(2), op(3))) - txDetails <- srvc.getTransactionDetails(targetTx.transactionId) - } yield assertTrue(txDetails == TransactionDetails(targetTx.transactionId, InLedger)) - testCase.provideLayer(inmemoryLedger) - }, - test("Find unknown transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 11.seconds, - Seq(op(1)) >> 11.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 12.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(1), op(2))) - testResult <- assertZIO(srvc.getTransactionDetails(targetTx.transactionId).exit) { - fails(equalTo(LedgerError(s"Couldn't find tx ${targetTx.transactionId}"))) - } - } yield testResult - testCase.provideLayer(inmemoryLedger) - } - ), - suite("deleteTransaction")( - test("Delete transaction from mempool") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 2.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(2), op(3))) - _ <- srvc.deleteTransaction(targetTx.transactionId) - mempool <- srvc.getMempool - } yield assertTrue(mempool == List(CardanoTransaction(Seq(op(4))))) - 
testCase.provideLayer(inmemoryLedger) - }, - test("Delete confirmed transaction") { - val testCase = - for { - op <- ZIO.replicateZIO(5)(genOperation()).map(_.toList) - srvc <- ZIO.service[InmemoryUnderlyingLedgerService] - scenario = List( - Seq(op(0)) >> 1.seconds, - Seq(op(1)) >> 10.seconds, - Seq(op(2), op(3)) >> 0.seconds, - Seq(op(4)) >> 2.seconds - ) - _ <- PublishThenAdjust.foreachZIO(srvc)(scenario) - targetTx = CardanoTransaction(Seq(op(1))) - testResult <- - assertZIO(srvc.deleteTransaction(targetTx.transactionId).exit) { - fails(equalTo(LedgerError(s"Transaction ${targetTx.transactionId} not found in the mempool"))) - } - } yield testResult - testCase.provideLayer(inmemoryLedger) - } - ) - ) -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/PublishThenAdjust.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/PublishThenAdjust.scala deleted file mode 100644 index fdf185ee1d..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/PublishThenAdjust.scala +++ /dev/null @@ -1,19 +0,0 @@ -package io.iohk.atala.iris.core.testutils - -import io.iohk.atala.iris.core.service.InmemoryUnderlyingLedgerService -import zio.* -import zio.test.* -import io.iohk.atala.iris.proto.dlt as proto - -case class PublishThenAdjust(operations: Seq[proto.IrisOperation], adjust: Duration) - -object PublishThenAdjust { - implicit class Then(operations: Seq[proto.IrisOperation]) { - def >>(adj: Duration): PublishThenAdjust = PublishThenAdjust(operations, adj) - } - - def foreachZIO[R](srv: InmemoryUnderlyingLedgerService)(xs: Iterable[PublishThenAdjust]): ZIO[R, Any, Unit] = - ZIO.foreachDiscard[R, Any, PublishThenAdjust](xs) { case PublishThenAdjust(ops, adj) => - srv.publish(ops).flatMap(_ => TestClock.adjust(adj)) - } -} diff --git a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/RandomUtils.scala b/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/RandomUtils.scala deleted file mode 100644 index 9f5ddddaf8..0000000000 --- a/iris/service/core/src/test/scala/io/iohk/atala/iris/core/testutils/RandomUtils.scala +++ /dev/null @@ -1,56 +0,0 @@ -package io.iohk.atala.iris.core.testutils - -import com.google.protobuf.ByteString -import io.iohk.atala.iris.proto.did_operations.{CreateDid, DocumentDefinition, UpdateDid} -import io.iohk.atala.iris.proto.dlt as proto -import zio.{Random, UIO} - -object RandomUtils { - - private def nextBytes(length: Int): UIO[ByteString] = - Random.nextBytes(length).map(x => ByteString.copyFrom(x.toArray)) - - def genCreateOperation(): UIO[proto.IrisOperation] = - for { - updComm <- nextBytes(20) - recComm <- nextBytes(20) - } yield proto.IrisOperation( - proto.IrisOperation.Operation.CreateDid( - CreateDid( - initialUpdateCommitment = updComm, - initialRecoveryCommitment = recComm, - ledger = "mainnet", - document = Some(DocumentDefinition(publicKeys = Seq(), services = Seq())) - ) - ) - ) - - def genUpdateOperation(): UIO[proto.IrisOperation] = - for { - didSuff <- Random.nextString(10) - updKey <- nextBytes(20) - prevVers <- nextBytes(20) - forwUpdComm <- nextBytes(20) - sig <- nextBytes(20) - } yield proto.IrisOperation( - proto.IrisOperation.Operation.UpdateDid( - UpdateDid( - did = "did:prism:" + didSuff, - revealedUpdateKey = updKey, - previousVersion = prevVers, - forwardUpdateCommitment = forwUpdComm, - patches = Seq(), - ledger = "mainnet", - signature = sig - ) - ) - ) - - def genOperation(): UIO[proto.IrisOperation] = - for { - op <- 
Random.nextBoolean - res <- - if (op) genCreateOperation() - else genUpdateOperation() - } yield res -} diff --git a/iris/service/docker/docker-compose-local.yaml b/iris/service/docker/docker-compose-local.yaml deleted file mode 100644 index 2ef4e9f0c6..0000000000 --- a/iris/service/docker/docker-compose-local.yaml +++ /dev/null @@ -1,34 +0,0 @@ -version: "3.9" - -services: - db: - image: postgres:13 - restart: always - environment: - POSTGRES_DB: iris - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - ports: - - 5432:5432 - volumes: - - pg_data_iris_db:/var/lib/postgresql/data - - # delay to ensure DB is up before applying migrations - db_init_delay: - image: alpine:3 - command: sleep 5 - depends_on: - db: - condition: service_started - - db_init: - image: flyway/flyway:9.3.0-alpine - volumes: - - $PWD/migrations/sql:/flyway/sql - command: -url=jdbc:postgresql://db:5432/iris?user=postgres&password=postgres migrate - depends_on: - db_init_delay: - condition: service_completed_successfully - -volumes: - pg_data_iris_db: diff --git a/iris/service/migrations/sql/V1__init_tables.sql b/iris/service/migrations/sql/V1__init_tables.sql deleted file mode 100644 index 487fbbd7f7..0000000000 --- a/iris/service/migrations/sql/V1__init_tables.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE public.iris_operations( - "foo" VARCHAR(100) NOT NULL -); diff --git a/iris/service/server/src/main/resources/application.conf b/iris/service/server/src/main/resources/application.conf deleted file mode 100644 index 0ee97184d8..0000000000 --- a/iris/service/server/src/main/resources/application.conf +++ /dev/null @@ -1,14 +0,0 @@ -iris { - database { - host = "localhost" - host = ${?IRIS_DB_HOST} - port = 5432 - port = ${?IRIS_DB_PORT} - databaseName = "iris" - databaseName = ${?IRIS_DB_NAME} - username = "postgres" - username = ${?IRIS_DB_USER} - password = "postgres" - password = ${?IRIS_DB_PASSWORD} - } -} \ No newline at end of file diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Main.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Main.scala deleted file mode 100644 index 024d19f7f9..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Main.scala +++ /dev/null @@ -1,8 +0,0 @@ -package io.iohk.atala.iris.server - -import zio.* -import zio.stream.* - -object Main extends ZIOAppDefault { - override def run = Modules.app -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Modules.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Modules.scala deleted file mode 100644 index 8fbc64cea2..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/Modules.scala +++ /dev/null @@ -1,109 +0,0 @@ -package io.iohk.atala.iris.server - -import cats.effect.std.Dispatcher -import doobie.util.transactor.Transactor -import io.iohk.atala.iris.core.repository.* -import io.iohk.atala.iris.core.service.* -import io.iohk.atala.iris.core.worker.{MockPublishingScheduler, PublishingScheduler} -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import io.iohk.atala.iris.server.grpc.service.IrisServiceGrpcImpl -import io.iohk.atala.iris.server.grpc.{GrpcServer, GrpcServices} -import io.iohk.atala.iris.sql.repository -import io.iohk.atala.iris.sql.repository.* -import zio.* -import zio.interop.catz.* -import zio.stream.ZStream -import com.typesafe.config.ConfigFactory -import io.iohk.atala.iris.server.config.AppConfig -import zio.config.typesafe.TypesafeConfigSource -import zio.config.{ReadError, 
read} - -object Modules { - val app: Task[Unit] = { - val grpcServerApp = GrpcServices.services.flatMap(GrpcServer.start(8081, _)) - - grpcServerApp - .provideLayer(GrpcModule.layers) - .unit - } - -} - -// TODO: replace with actual implementation -object AppModule { - val publishingServiceLayer: ULayer[PublishingService] = MockPublishingService.layer - val publishingSchedulerLayer: ULayer[PublishingScheduler] = MockPublishingScheduler.layer - - val configLayer: Layer[ReadError[String], AppConfig] = ZLayer.fromZIO { - read( - AppConfig.descriptor.from( - TypesafeConfigSource.fromTypesafeConfig( - ZIO.attempt(ConfigFactory.load()) - ) - ) - ) - } -} - -object GrpcModule { - val irisServiceGrpcLayer: TaskLayer[IrisServiceGrpc.IrisService] = { - val schedulerLayer = AppModule.publishingSchedulerLayer - val irisBatchesLayer = RepoModule.irisBatchesRepoLayer - (schedulerLayer ++ irisBatchesLayer) >>> IrisServiceGrpcImpl.layer - } - - val layers = irisServiceGrpcLayer -} - -object BlockchainModule { - def blocksStreamerLayer(config: BlocksStreamer.Config): TaskLayer[BlocksStreamer] = { - val blocksRepoLayer = RepoModule.blocksRepoLayer - val keyValueRepoLayer = RepoModule.keyValueRepoLayer - (blocksRepoLayer ++ keyValueRepoLayer) >>> BlocksStreamer.layer(config) - } - - val blocksSaverLayer: TaskLayer[BlocksSaveSinker] = { - val keyValueIO = JdbcKeyValueRepositoryIO.layer - val irisBatchesIO = JdbcIrisBatchRepositoryIO.layer - val dbRepositoryTransactorIO = RepoModule.dbRepositoryTransactor - (keyValueIO ++ irisBatchesIO ++ dbRepositoryTransactorIO) >>> BlocksSaveSinker - .layer[repository.IO, repository.StreamIO] - } -} - -object RepoModule { - val transactorLayer: TaskLayer[Transactor[Task]] = { - val layerWithConfig = ZLayer.fromZIO { - ZIO.service[AppConfig].map(_.iris.database).flatMap { config => - Dispatcher[Task].allocated.map { case (dispatcher, _) => - given Dispatcher[Task] = dispatcher - TransactorLayer.hikari[Task]( - TransactorLayer.DbConfig( - username = config.username, - password = config.password, - jdbcUrl = s"jdbc:postgresql://${config.host}:${config.port}/${config.databaseName}" - ) - ) - } - } - }.flatten - AppModule.configLayer >>> layerWithConfig - } - - val dbRepositoryTransactor: TaskLayer[JdbcDbRepositoryTransactorIO] = - transactorLayer >>> JdbcDbRepositoryTransactorIO.layer - - val operationsRepoLayer: TaskLayer[OperationsRepository[Task]] = - transactorLayer >>> JdbcOperationsRepository.layer - - val irisBatchesRepoLayer: TaskLayer[IrisBatchesRepository[Task, StreamZIO]] = - (transactorLayer ++ JdbcIrisBatchRepositoryIO.layer) >>> JdbcIrisBatchRepository.layer - - val blocksRepoLayer: TaskLayer[ROBlocksRepository[Task]] = - transactorLayer >>> JdbcBlocksRepository.layer - - val keyValueRepoLayer: TaskLayer[KeyValueRepository[Task]] = - (transactorLayer ++ JdbcKeyValueRepositoryIO.layer) >>> JdbcKeyValueRepository.layer - - val layers = operationsRepoLayer ++ irisBatchesRepoLayer ++ blocksRepoLayer ++ keyValueRepoLayer -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/config/AppConfig.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/config/AppConfig.scala deleted file mode 100644 index 555a221c97..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/config/AppConfig.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.iohk.atala.iris.server.config - -import zio.config.* -import zio.config.magnolia.Descriptor - -final case class AppConfig( - iris: IrisConfig -) - -object AppConfig { - val descriptor: 
ConfigDescriptor[AppConfig] = Descriptor[AppConfig] -} - -final case class IrisConfig(database: DatabaseConfig) - -final case class DatabaseConfig(host: String, port: Int, databaseName: String, username: String, password: String) diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServer.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServer.scala deleted file mode 100644 index b3e9203464..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServer.scala +++ /dev/null @@ -1,38 +0,0 @@ -package io.iohk.atala.iris.server.grpc - -import io.grpc.{ServerBuilder, ServerServiceDefinition} -import io.grpc.protobuf.services.ProtoReflectionService -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import zio.* - -object GrpcServer { - - def start(port: Int, services: Seq[ServerServiceDefinition]): Task[Unit] = { - val managedServer = ZIO.acquireRelease( - for { - _ <- ZIO.logInfo(s"starting grpc server on port $port") - server <- ZIO.attempt { - val builder = ServerBuilder.forPort(port) - services.foreach(s => builder.addService(s)) - builder.addService(ProtoReflectionService.newInstance()) - builder.build().start() - } - _ <- ZIO.logInfo(s"grpc server listening on port $port") - } yield server - )(server => - for { - _ <- ZIO.logInfo("stopping grpc server") - _ <- ZIO.attempt(server.shutdown()).orDie - _ <- ZIO.logInfo("grpc server stopped successfully") - } yield () - ) - - ZIO.scoped { - for { - _ <- managedServer - _ <- ZIO.never - } yield () - } - } - -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServices.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServices.scala deleted file mode 100644 index f6275acfd8..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/GrpcServices.scala +++ /dev/null @@ -1,15 +0,0 @@ -package io.iohk.atala.iris.server.grpc - -import io.grpc.ServerServiceDefinition -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import zio.* - -object GrpcServices { - - def services: URIO[IrisServiceGrpc.IrisService, Seq[ServerServiceDefinition]] = - for { - ec <- ZIO.executor.map(_.asExecutionContext) - irisService <- ZIO.serviceWith[IrisServiceGrpc.IrisService](IrisServiceGrpc.bindService(_, ec)) - } yield Seq(irisService) - -} diff --git a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/service/IrisServiceGrpcImpl.scala b/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/service/IrisServiceGrpcImpl.scala deleted file mode 100644 index 72a494bd0f..0000000000 --- a/iris/service/server/src/main/scala/io/iohk/atala/iris/server/grpc/service/IrisServiceGrpcImpl.scala +++ /dev/null @@ -1,101 +0,0 @@ -package io.iohk.atala.iris.server.grpc.service - -import com.google.protobuf.ByteString -import io.grpc.stub.StreamObserver -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.core.repository.ROIrisBatchesRepository -import io.iohk.atala.iris.core.service.PublishingService -import io.iohk.atala.iris.core.worker.PublishingScheduler -import io.iohk.atala.iris.proto.did_operations.{CreateDid, DocumentDefinition} -import io.iohk.atala.iris.proto.{dlt as proto, service as proto_service} -import com.google.protobuf.timestamp as proto_google -import io.iohk.atala.iris.proto.service.* -import zio.* -import zio.stream.* - -import scala.concurrent.Future - -type Stream[A] = ZStream[Any, Throwable, A] - -class 
IrisServiceGrpcImpl(service: PublishingScheduler, batchRepo: ROIrisBatchesRepository[Stream])(using - runtime: Runtime[Any] -) extends IrisServiceGrpc.IrisService { - - private val mockOperationId = ByteString.copyFrom("aaafff111".getBytes()) - private val mockOperation = IrisOperationInfo.Operation.CreateDid( - CreateDid( - initialUpdateCommitment = ByteString.copyFrom("a".getBytes()), - initialRecoveryCommitment = ByteString.copyFrom("b".getBytes()), - ledger = "https://atalaprism.io", - document = Some(DocumentDefinition(publicKeys = Seq(), services = Seq())) - ) - ) - - override def scheduleOperation(request: proto.IrisOperation): Future[IrisOperationOutcome] = Unsafe.unsafe { - implicit unsafe => - runtime.unsafe.runToFuture(ZIO.succeed(IrisOperationOutcome(mockOperationId))) - } - - override def getOperation(request: IrisOperationId): Future[IrisOperationInfo] = Unsafe.unsafe { implicit unsafe => - runtime.unsafe.runToFuture( - ZIO.succeed( - IrisOperationInfo( - operationId = mockOperationId, - operation = mockOperation - ) - ) - ) - } - - override def getIrisBatchStream( - request: IrisBatchRequest, - responseObserver: StreamObserver[ConfirmedIrisBatch] - ): Unit = { - Unsafe.unsafe { implicit unsafe => - val txIdHex = request.lastSeenTransactionId - runtime.unsafe - .run { - for { - txId <- - if (txIdHex.isEmpty) ZIO.succeed(None) - else { ZIO.fromOption(TransactionId.from(txIdHex)).map(Some(_)) } - _ <- batchRepo - .getIrisBatchesStream(txId) - .foreach { b => - ZIO.succeedBlocking { - responseObserver.onNext( - proto_service - .ConfirmedIrisBatch( - blockLevel = b.blockLevel, - blockTimestamp = - Some(proto_google.Timestamp(b.blockTimestamp.getEpochSecond, b.blockTimestamp.getNano)), - transactionId = b.transactionId.toString, - batch = Some(b.batch) - ) - ) - } - } - .onError { cause => - cause.failureOption.fold(ZIO.unit) { e => - ZIO.succeedBlocking { - responseObserver.onError(e) - } - } - } - } yield () - } - .getOrThrowFiberFailure() - } - } -} - -object IrisServiceGrpcImpl { - val layer: URLayer[PublishingScheduler & ROIrisBatchesRepository[Stream], IrisServiceGrpc.IrisService] = - ZLayer.fromZIO { - for { - rt <- ZIO.runtime[Any] - svc <- ZIO.service[PublishingScheduler] - repo <- ZIO.service[ROIrisBatchesRepository[Stream]] - } yield IrisServiceGrpcImpl(svc, repo)(using rt) - } -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcBlocksRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcBlocksRepository.scala deleted file mode 100644 index c49cd9c4c8..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcBlocksRepository.scala +++ /dev/null @@ -1,17 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.Transactor -import io.iohk.atala.iris.core.model.ledger.{Block, BlockError} -import zio.* -import io.iohk.atala.iris.core.repository.ROBlocksRepository - -class JdbcBlocksRepository(xa: Transactor[Task]) extends ROBlocksRepository[Task] { - override def getFullBlock(blockNo: RuntimeFlags): Task[Either[BlockError.NotFound, Block.Full]] = ??? - - override def getLatestBlock: Task[Either[BlockError.NoneAvailable.type, Block.Canonical]] = ??? 
-} - -object JdbcBlocksRepository { - val layer: URLayer[Transactor[Task], ROBlocksRepository[Task]] = - ZLayer.fromFunction(new JdbcBlocksRepository(_)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcDbRepositoryTransactorIO.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcDbRepositoryTransactorIO.scala deleted file mode 100644 index 7d635dba6a..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcDbRepositoryTransactorIO.scala +++ /dev/null @@ -1,16 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import doobie.implicits.* -import io.iohk.atala.iris.core.repository.DbRepositoryTransactor -import zio.* -import zio.interop.catz.* - -class JdbcDbRepositoryTransactorIO(xa: Transactor[Task]) extends DbRepositoryTransactor[ConnectionIO] { - override def runAtomically[A](action: ConnectionIO[A]): Task[A] = action.transact(xa) -} - -object JdbcDbRepositoryTransactorIO { - val layer: URLayer[Transactor[Task], JdbcDbRepositoryTransactorIO] = - ZLayer.fromFunction(new JdbcDbRepositoryTransactorIO(_)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcIrisBatchRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcIrisBatchRepository.scala deleted file mode 100644 index de7a74ad6a..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcIrisBatchRepository.scala +++ /dev/null @@ -1,35 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import fs2.Stream -import io.iohk.atala.iris.core.model.ConfirmedIrisBatch -import io.iohk.atala.iris.core.model.ledger.TransactionId -import io.iohk.atala.iris.core.repository.IrisBatchesRepository -import zio.* - -class JdbcIrisBatchRepositoryIO extends IrisBatchesRepository[ConnectionIO, StreamIO] { - override def saveIrisBatch(irisBatch: ConfirmedIrisBatch): ConnectionIO[Unit] = ??? - - override def getIrisBatchesStream(lastSeen: Option[TransactionId]): StreamIO[ConfirmedIrisBatch] = ??? -} - -object JdbcIrisBatchRepositoryIO { - val layer: ULayer[IrisBatchesRepository[ConnectionIO, StreamIO]] = - ZLayer.succeed(new JdbcIrisBatchRepositoryIO) -} - -class JdbcIrisBatchRepository(xa: Transactor[Task], ioImpl: IrisBatchesRepository[ConnectionIO, StreamIO]) - extends IrisBatchesRepository[Task, StreamZIO] { - - override def saveIrisBatch(irisBatch: ConfirmedIrisBatch): Task[Unit] = ??? - - override def getIrisBatchesStream(lastSeen: Option[TransactionId]): StreamZIO[ConfirmedIrisBatch] = ??? -} - -object JdbcIrisBatchRepository { - val layer: URLayer[Transactor[Task] & IrisBatchesRepository[ConnectionIO, StreamIO], IrisBatchesRepository[ - Task, - StreamZIO - ]] = - ZLayer.fromFunction(new JdbcIrisBatchRepository(_, _)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcKeyValueRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcKeyValueRepository.scala deleted file mode 100644 index d6ccdc9bc6..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcKeyValueRepository.scala +++ /dev/null @@ -1,33 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import io.iohk.atala.iris.core.repository.KeyValueRepository -import zio.* - -class JdbcKeyValueRepositoryIO extends KeyValueRepository[ConnectionIO] { - override def get(key: String): ConnectionIO[Option[String]] = ??? 
- - override def getInt(key: String): ConnectionIO[Option[Int]] = ??? - - override def set(key: String, value: Option[Int | String]): ConnectionIO[Unit] = ??? -} - -object JdbcKeyValueRepositoryIO { - val layer: ULayer[KeyValueRepository[ConnectionIO]] = - ZLayer.succeed(new JdbcKeyValueRepositoryIO) -} - -class JdbcKeyValueRepository(xa: Transactor[Task], ioImpl: KeyValueRepository[ConnectionIO]) - extends KeyValueRepository[Task] { - - override def get(key: String): Task[Option[String]] = ??? - - override def getInt(key: String): Task[Option[RuntimeFlags]] = ??? - - override def set(key: String, value: Option[Int | String]): Task[Unit] = ??? -} - -object JdbcKeyValueRepository { - val layer: URLayer[Transactor[Task] & KeyValueRepository[ConnectionIO], KeyValueRepository[Task]] = - ZLayer.fromFunction(new JdbcKeyValueRepository(_, _)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcOperationsRepository.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcOperationsRepository.scala deleted file mode 100644 index 58434c8cfd..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/JdbcOperationsRepository.scala +++ /dev/null @@ -1,30 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import doobie.* -import doobie.implicits.* -import io.iohk.atala.iris.core.model -import io.iohk.atala.iris.core.repository.OperationsRepository -import io.iohk.atala.iris.sql.repository.JdbcOperationsRepository -import zio.* -import zio.interop.catz.* - -// TODO: replace with actual implementation -class JdbcOperationsRepository(xa: Transactor[Task]) extends OperationsRepository[Task] { - - override def getOperation(id: model.IrisOperationId): Task[model.IrisOperation] = { - val cxnIO = sql""" - |SELECT foo FROM public.iris_operations - |""".stripMargin.query[String].unique - - cxnIO - .transact(xa) - .map(model.IrisOperation.apply) - } - - override def saveOperations(ops: Seq[model.IrisOperation]): Task[Unit] = ZIO.unit -} - -object JdbcOperationsRepository { - val layer: URLayer[Transactor[Task], OperationsRepository[Task]] = - ZLayer.fromFunction(new JdbcOperationsRepository(_)) -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/TransactorLayer.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/TransactorLayer.scala deleted file mode 100644 index 5798536d07..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/TransactorLayer.scala +++ /dev/null @@ -1,63 +0,0 @@ -package io.iohk.atala.iris.sql.repository - -import cats.effect.{Async, Resource} -import doobie.util.transactor.Transactor -import com.zaxxer.hikari.HikariConfig -import doobie.util.ExecutionContexts -import doobie.hikari.HikariTransactor -import zio.interop.catz.* -import zio.* -import cats.effect.std.Dispatcher - -object TransactorLayer { - - case class DbConfig( - username: String, - password: String, - jdbcUrl: String, - awaitConnectionThreads: Int = 8 - ) - - def hikari[A[_]: Async: Dispatcher](config: DbConfig)(using tag: Tag[Transactor[A]]): TaskLayer[Transactor[A]] = { - val transactorLayerZio = ZIO - .attempt { - // https://github.com/brettwooldridge/HikariCP/wiki/About-Pool-Sizing - val poolSize = (config.awaitConnectionThreads * 2) + 1 - val hikariConfig = makeHikariConfig(config) - hikariConfig.setPoolName("DBPool") - hikariConfig.setLeakDetectionThreshold(300000) // 5 mins - hikariConfig.setMinimumIdle(poolSize) - hikariConfig.setMaximumPoolSize(poolSize) // 
Both Pool size amd Minimum Idle should same and is recommended - hikariConfig - } - .map { hikariConfig => - val pool: Resource[A, Transactor[A]] = for { - // Resource yielding a transactor configured with a bounded connect EC and an unbounded - // transaction EC. Everything will be closed and shut down cleanly after use. - ec <- ExecutionContexts.fixedThreadPool[A](config.awaitConnectionThreads) // our connect EC - xa <- HikariTransactor.fromHikariConfig[A](hikariConfig, ec) - } yield xa - - pool.toManaged.toLayer[Transactor[A]] - } - - ZLayer.fromZIO(transactorLayerZio).flatten - } - - private def makeHikariConfig(config: DbConfig): HikariConfig = { - val hikariConfig = HikariConfig() - - hikariConfig.setJdbcUrl(config.jdbcUrl) - hikariConfig.setUsername(config.username) - hikariConfig.setPassword(config.password) - hikariConfig.setAutoCommit(false) - - hikariConfig.setDriverClassName("org.postgresql.Driver") - hikariConfig.addDataSourceProperty("cachePrepStmts", "true") - hikariConfig.addDataSourceProperty("prepStmtCacheSize", "250") - hikariConfig.addDataSourceProperty("prepStmtCacheSqlLimit", "2048") - - hikariConfig - } - -} diff --git a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/package.scala b/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/package.scala deleted file mode 100644 index d9e914bec9..0000000000 --- a/iris/service/sql/src/main/scala/io/iohk/atala/iris/sql/repository/package.scala +++ /dev/null @@ -1,12 +0,0 @@ -package io.iohk.atala.iris.sql - -import doobie.* -import fs2.Stream -import zio.stream as zstream - -package object repository { - type IO[A] = ConnectionIO[A] - type StreamIO[A] = Stream[ConnectionIO, A] - - type StreamZIO[A] = zstream.Stream[Throwable, A] -} diff --git a/package-lock.json b/package-lock.json index d733632be3..adf9c14b3a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "atala-prism-building-blocks", - "version": "1.17.0", + "version": "1.18.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "atala-prism-building-blocks", - "version": "1.17.0", + "version": "1.18.0", "devDependencies": { "@commitlint/cli": "^17.0.3", "@commitlint/config-conventional": "^17.0.3", diff --git a/package.json b/package.json index ce39844cab..69524e2af8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "atala-prism-building-blocks", - "version": "1.17.0", + "version": "1.18.0", "engines": { "node": ">=16.13.0" }, @@ -38,6 +38,12 @@ "prepareCmd": "npm version ${nextRelease.version} --git-tag-version false" } ], + [ + "@semantic-release/exec", + { + "prepareCmd": "sed -i.bak \"s/PRISM_AGENT_VERSION=.*/PRISM_AGENT_VERSION=${nextRelease.version}/\" ./infrastructure/local/.env && rm -f ./infrastructure/local/.env.bak" + } + ], [ "@semantic-release/exec", { @@ -104,9 +110,10 @@ "prism-agent/service/api/http/prism-agent-openapi-spec.yaml", "infrastructure/charts/agent/Chart.yaml", "infrastructure/charts/index.yaml", - "infrastructure/charts/*.tgz" + "infrastructure/charts/*.tgz", + "infrastructure/local/.env" ], - "message": "chore(release): cut atala prism ${nextRelease.version} release\n\n${nextRelease.notes}\n\nSigned-off-by: Anton Baliasnikov " + "message": "chore(release): cut open enterprise agent ${nextRelease.version} release\n\n${nextRelease.notes}\n\nSigned-off-by: Anton Baliasnikov " } ], [ @@ -116,7 +123,7 @@ "notifyOnFail": true, "markdownReleaseNotes": true, "onSuccessTemplate": { - "text": "A new version of Atala PRISM successfully 
released!\nVersion: `$npm_package_version`\nTag: $repo_url/releases/tag/$npm_package_version\n\nRelease notes:\n$release_notes" + "text": "A new version of Open Enterprise Agent successfully released!\nVersion: `$npm_package_version`\nTag: $repo_url/releases/tag/$npm_package_version\n\nRelease notes:\n$release_notes" } } ] diff --git a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/PublishedBatchData.scala b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/PublishedBatchData.scala deleted file mode 100644 index 2866d4dfd1..0000000000 --- a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/PublishedBatchData.scala +++ /dev/null @@ -1,10 +0,0 @@ -package io.iohk.atala.pollux.core.model - -import io.iohk.atala.pollux.vc.jwt.W3cCredentialPayload -import io.iohk.atala.prism.crypto.MerkleInclusionProof -import io.iohk.atala.iris.proto.service.IrisOperationId - -final case class PublishedBatchData( - operationId: IrisOperationId, - credentialsAnsProofs: Seq[(W3cCredentialPayload, MerkleInclusionProof)] -) diff --git a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala index f1c27236ce..140013be7c 100644 --- a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala +++ b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/model/error/CredentialServiceError.scala @@ -22,7 +22,6 @@ object CredentialServiceError { final case class CreateCredentialPayloadFromRecordError(cause: Throwable) extends CredentialServiceError final case class CredentialRequestValidationError(error: String) extends CredentialServiceError final case class CredentialIdNotDefined(credential: W3cCredentialPayload) extends CredentialServiceError - final case class IrisError(cause: Throwable) extends CredentialServiceError final case class CredentialSchemaError(cause: io.iohk.atala.pollux.core.model.error.CredentialSchemaError) extends CredentialServiceError final case class UnsupportedVCClaimsValue(error: String) extends CredentialServiceError diff --git a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala index 36b5fcec3a..c68cf44984 100644 --- a/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala +++ b/pollux/lib/core/src/main/scala/io/iohk/atala/pollux/core/service/MockCredentialService.scala @@ -5,7 +5,7 @@ import io.iohk.atala.castor.core.model.did.CanonicalPrismDID import io.iohk.atala.mercury.model.DidId import io.iohk.atala.mercury.protocol.issuecredential.{IssueCredential, OfferCredential, RequestCredential} import io.iohk.atala.pollux.core.model.error.CredentialServiceError -import io.iohk.atala.pollux.core.model.{DidCommID, IssueCredentialRecord, PublishedBatchData} +import io.iohk.atala.pollux.core.model.{DidCommID, IssueCredentialRecord} import io.iohk.atala.pollux.vc.jwt.{Issuer, W3cCredentialPayload} import io.iohk.atala.prism.crypto.MerkleInclusionProof import io.iohk.atala.shared.models.WalletAccessContext @@ -57,8 +57,6 @@ object MockCredentialService extends Mock[CredentialService] { object AcceptCredentialRequest extends Effect[DidCommID, CredentialServiceError, IssueCredentialRecord] object GenerateJWTCredential extends Effect[DidCommID, CredentialServiceError, IssueCredentialRecord] object 
GenerateAnonCredsCredential extends Effect[DidCommID, CredentialServiceError, IssueCredentialRecord] - object PublishCredentialBatch - extends Effect[(Seq[W3cCredentialPayload], Issuer), CredentialServiceError, PublishedBatchData] object MarkCredentialRecordsAsPublishQueued extends Effect[Seq[(W3cCredentialPayload, MerkleInclusionProof)], CredentialServiceError, Int] object ReceiveCredentialIssue extends Effect[IssueCredential, CredentialServiceError, IssueCredentialRecord] diff --git a/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala b/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala index 6e92687262..10b986d98e 100644 --- a/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala +++ b/pollux/lib/core/src/test/scala/io/iohk/atala/pollux/core/service/CredentialServiceSpecHelper.scala @@ -1,13 +1,11 @@ package io.iohk.atala.pollux.core.service import io.circe.Json -import io.grpc.ManagedChannelBuilder import io.iohk.atala.agent.walletapi.memory.GenericSecretStorageInMemory import io.iohk.atala.agent.walletapi.service.ManagedDIDService import io.iohk.atala.agent.walletapi.storage.GenericSecretStorage import io.iohk.atala.castor.core.model.did.PrismDID import io.iohk.atala.castor.core.service.DIDService -import io.iohk.atala.iris.proto.service.IrisServiceGrpc import io.iohk.atala.mercury.model.{AttachmentDescriptor, DidId} import io.iohk.atala.mercury.protocol.issuecredential.* import io.iohk.atala.pollux.core.model.* @@ -21,9 +19,6 @@ import java.util.UUID trait CredentialServiceSpecHelper { - protected val irisStubLayer = ZLayer.fromZIO( - ZIO.succeed(IrisServiceGrpc.stub(ManagedChannelBuilder.forAddress("localhost", 9999).usePlaintext.build)) - ) protected val didResolverLayer = ZLayer.fromZIO(ZIO.succeed(makeResolver(Map.empty))) protected val defaultWalletLayer = ZLayer.succeed(WalletAccessContext(WalletId.default)) diff --git a/pollux/lib/sql-doobie/src/main/scala/io/iohk/atala/pollux/sql/model/db/VerificationPolicy.scala b/pollux/lib/sql-doobie/src/main/scala/io/iohk/atala/pollux/sql/model/db/VerificationPolicy.scala index b64bd515b1..fbc8120a2c 100644 --- a/pollux/lib/sql-doobie/src/main/scala/io/iohk/atala/pollux/sql/model/db/VerificationPolicy.scala +++ b/pollux/lib/sql-doobie/src/main/scala/io/iohk/atala/pollux/sql/model/db/VerificationPolicy.scala @@ -75,7 +75,9 @@ object VerificationPolicySql extends DoobieContext.Postgres(SnakeCase) { def getVerificationPolicyConstrains(fk_ids: Seq[UUID]) = run( quote( - query[VerificationPolicyConstraint].filter(vpc => liftQuery(fk_ids).contains(vpc.fk_id)) + query[VerificationPolicyConstraint] + .filter(vpc => liftQuery(fk_ids).contains(vpc.fk_id)) + .sortBy(_.index)(Ord.asc) ) ) diff --git a/pollux/lib/sql-doobie/src/test/scala/io/iohk/atala/pollux/sql/VerificationPolicySqlIntegrationSpec.scala b/pollux/lib/sql-doobie/src/test/scala/io/iohk/atala/pollux/sql/VerificationPolicySqlIntegrationSpec.scala index af073f3919..ac24d4851b 100644 --- a/pollux/lib/sql-doobie/src/test/scala/io/iohk/atala/pollux/sql/VerificationPolicySqlIntegrationSpec.scala +++ b/pollux/lib/sql-doobie/src/test/scala/io/iohk/atala/pollux/sql/VerificationPolicySqlIntegrationSpec.scala @@ -286,7 +286,7 @@ object VerificationPolicySqlIntegrationSpec extends ZIOSpecDefault, PostgresTest name <- name description <- description constraints <- Gen - .setOfBounded(min = 1, max = 10)(verificationPolicyConstraint) + .setOfBounded(min = 2, max 
= 10)(verificationPolicyConstraint) .map(_.toVector) } yield VerificationPolicy.make( name = name, diff --git a/prism-agent/client/generator/generate-clients.sh b/prism-agent/client/generator/generate-clients.sh index 3c0aea1c1d..8c4795f265 100755 --- a/prism-agent/client/generator/generate-clients.sh +++ b/prism-agent/client/generator/generate-clients.sh @@ -1,13 +1,16 @@ #!/bin/bash set -e +# open api cli generator is not compatible with 3.1.0 +yq e -i '.openapi = "3.0.3"' ../../service/api/http/prism-agent-openapi-spec.yaml + # generate kotlin models yarn openapi-generator-cli generate \ -g kotlin \ -i ../../service/api/http/prism-agent-openapi-spec.yaml \ -o ../kotlin \ --ignore-file-override ../kotlin/.openapi-generator-ignore \ - --additional-properties=packageName=io.iohk.atala.prism,serializationLibrary=gson + --additional-properties=packageName=io.iohk.atala.prism,serializationLibrary=gson,enumPropertyNaming=UPPERCASE # generate typescript models yarn openapi-generator-cli generate \ diff --git a/prism-agent/service/api/http/prism-agent-openapi-spec.yaml b/prism-agent/service/api/http/prism-agent-openapi-spec.yaml index 068d98b065..42a2ceff59 100644 --- a/prism-agent/service/api/http/prism-agent-openapi-spec.yaml +++ b/prism-agent/service/api/http/prism-agent-openapi-spec.yaml @@ -1,7 +1,7 @@ openapi: 3.1.0 info: title: Prism Agent - version: 1.17.0 + version: 1.18.0 paths: /credential-definition-registry/definitions: get: @@ -12,12 +12,6 @@ paths: and control the pagination by `offset` and `limit` parameters ' operationId: lookupCredentialDefinitionsByQuery parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: author in: query required: false @@ -78,6 +72,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] post: tags: - Credential Definition Registry @@ -86,13 +83,6 @@ paths: JSON Schema on behalf of Cloud Agent. The credential definition will be signed by the keys of Cloud Agent and issued by the DID that corresponds to it. operationId: createCredentialDefinition - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: description: JSON object required for the credential definition creation content: @@ -131,6 +121,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /credential-definition-registry/definitions/{guid}: get: tags: @@ -220,12 +213,6 @@ paths: the pagination by `offset` and `limit` parameters ' operationId: lookupSchemasByQuery parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: author in: query required: false @@ -292,6 +279,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] post: tags: - Schema Registry @@ -300,13 +290,6 @@ paths: JSON Schema on behalf of Cloud Agent. The credential schema will be signed by the keys of Cloud Agent and issued by the DID that corresponds to it. 
operationId: createSchema - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: description: JSON object required for the credential schema creation content: @@ -345,6 +328,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /schema-registry/{author}/{id}: put: tags: @@ -356,12 +342,6 @@ paths: to it. operationId: updateSchema parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: author in: path description: DID of the identity which authored the credential schema. A piece @@ -415,6 +395,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /schema-registry/schemas/{guid}: get: tags: @@ -462,13 +445,6 @@ paths: summary: Trace the request input from the point of view of the server description: Trace the request input from the point of view of the server operationId: test - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string responses: '200': description: '' @@ -494,6 +470,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /verification/policies: get: tags: @@ -503,12 +482,6 @@ paths: by `offset` and `limit` parameters operationId: lookupVerificationPoliciesByQuery parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: name in: query required: false @@ -556,19 +529,15 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] post: tags: - Verification summary: Create the new verification policy description: Create the new verification policy operationId: createVerificationPolicy - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: description: Create verification policy object content: @@ -601,6 +570,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /verification/policies/{id}: get: tags: @@ -609,12 +581,6 @@ paths: description: Get the verification policy by id operationId: getVerificationPolicyById parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: id in: path description: Get the verification policy by id @@ -653,6 +619,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] put: tags: - Verification @@ -660,12 +629,6 @@ paths: description: Update the verification policy entry operationId: updateVerificationPolicy parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: id in: path required: true @@ -717,6 +680,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] delete: tags: - Verification @@ -724,12 +690,6 @@ paths: description: Delete the verification policy by id operationId: deleteVerificationPolicyById parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: id in: path description: Delete the verification policy by id @@ -764,6 +724,9 @@ paths: application/json: schema: $ref: 
'#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /connections: get: tags: @@ -772,12 +735,6 @@ paths: description: Get the list of connection records paginated operationId: getConnections parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: offset in: query required: false @@ -821,6 +778,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] post: tags: - Connections Management @@ -831,13 +791,6 @@ paths: It returns a new connection record in `InvitationGenerated` state. The request body may contain a `label` that can be used as a human readable alias for the connection, for example `{'label': "Bob"}` operationId: createConnection - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: description: JSON object required for the connection creation content: @@ -871,6 +824,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /connections/{connectionId}: get: tags: @@ -879,12 +835,6 @@ paths: description: Gets an existing connection record by its unique identifier operationId: getConnection parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: connectionId in: path description: The unique identifier of the connection record. @@ -923,6 +873,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /connection-invitations: post: tags: @@ -934,13 +887,6 @@ paths: and submits a Connection Request to the inviter. It returns a connection object in `ConnectionRequestPending` state, until the Connection Request is eventually sent to the inviter by the prism-agent's background process. The connection object state will then automatically move to `ConnectionRequestSent`. operationId: acceptConnectionInvitation - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: description: The request used by an invitee to accept a connection invitation received from an inviter, using out-of-band mechanism. @@ -974,6 +920,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /dids/{didRef}: get: tags: @@ -1086,12 +1035,6 @@ paths: If the `limit` parameter is not set, it defaults to 100 items per page. operationId: getDid-registrarDids parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: offset in: query required: false @@ -1129,6 +1072,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] post: tags: - DID Registrar @@ -1137,13 +1083,6 @@ paths: Create unpublished DID and store it inside Prism Agent's wallet. The private keys of the DID is managed by Prism Agent. The DID can later be published to the VDR using publications endpoint. 
operationId: postDid-registrarDids - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: content: application/json: @@ -1187,6 +1126,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /did-registrar/dids/{didRef}: get: tags: @@ -1195,12 +1137,6 @@ paths: description: Get DID stored in Prism Agent's wallet operationId: getDid-registrarDidsDidref parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: didRef in: path description: Prism DID according to [the Prism DID method syntax](https://github.com/input-output-hk/prism-did-method-spec/blob/main/w3c-spec/PRISM-method.md#prism-did-method-syntax) @@ -1239,6 +1175,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /did-registrar/dids/{didRef}/publications: post: tags: @@ -1247,12 +1186,6 @@ paths: description: Publish the DID stored in Prism Agent's wallet to the VDR. operationId: postDid-registrarDidsDidrefPublications parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: didRef in: path description: Prism DID according to [the Prism DID method syntax](https://github.com/input-output-hk/prism-did-method-spec/blob/main/w3c-spec/PRISM-method.md#prism-did-method-syntax) @@ -1291,6 +1224,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /did-registrar/dids/{didRef}/updates: post: tags: @@ -1304,12 +1240,6 @@ paths: some operations being rejected as only one operation is allowed to be appended to the last confirmed operation. operationId: postDid-registrarDidsDidrefUpdates parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: didRef in: path description: Prism DID according to [the Prism DID method syntax](https://github.com/input-output-hk/prism-did-method-spec/blob/main/w3c-spec/PRISM-method.md#prism-did-method-syntax) @@ -1366,6 +1296,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /did-registrar/dids/{didRef}/deactivations: post: tags: @@ -1376,12 +1309,6 @@ paths: to the VDR. operationId: postDid-registrarDidsDidrefDeactivations parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: didRef in: path description: Prism DID according to [the Prism DID method syntax](https://github.com/input-output-hk/prism-did-method-spec/blob/main/w3c-spec/PRISM-method.md#prism-did-method-syntax) @@ -1426,6 +1353,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /issue-credentials/credential-offers: post: tags: @@ -1434,13 +1364,6 @@ paths: a holder. description: Creates a new credential offer in the database operationId: createCredentialOffer - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: description: The credential offer object. 
content: @@ -1479,6 +1402,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /issue-credentials/records: get: tags: @@ -1487,12 +1413,6 @@ paths: description: Get the list of issue credential records paginated operationId: getCredentialRecords parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: offset in: query required: false @@ -1536,6 +1456,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /issue-credentials/records/{recordId}: get: tags: @@ -1544,12 +1467,6 @@ paths: description: Gets issue credential records by record id operationId: getCredentialRecord parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: recordId in: path description: The unique identifier of the issue credential record. @@ -1587,6 +1504,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /issue-credentials/records/{recordId}/accept-offer: post: tags: @@ -1596,12 +1516,6 @@ paths: back a credential request. operationId: acceptCredentialOffer parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: recordId in: path description: The unique identifier of the issue credential record. @@ -1646,6 +1560,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /issue-credentials/records/{recordId}/issue-credential: post: tags: @@ -1657,12 +1574,6 @@ paths: by credential id. operationId: issueCredential parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: recordId in: path description: The unique identifier of the issue credential record. @@ -1701,6 +1612,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /present-proof/presentations: get: tags: @@ -1709,12 +1623,6 @@ paths: description: list of presentation statuses operationId: getAllPresentation parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: offset in: query required: false @@ -1757,6 +1665,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] post: tags: - Present Proof @@ -1765,13 +1676,6 @@ paths: description: Holder presents proof derived from the verifiable credential to verifier. operationId: requestPresentation - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: description: The present proof creation request. content: @@ -1811,6 +1715,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /present-proof/presentations/{presentationId}: get: tags: @@ -1820,12 +1727,6 @@ paths: description: Returns an existing presentation record by id. operationId: getPresentation parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: presentationId in: path description: The unique identifier of the presentation record. 
@@ -1864,6 +1765,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] patch: tags: - Present Proof @@ -1872,12 +1776,6 @@ paths: description: Accept or reject presentation of proof request. operationId: updatePresentation parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: presentationId in: path description: The unique identifier of the presentation record. @@ -1923,6 +1821,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /_system/health: get: tags: @@ -2540,13 +2441,6 @@ paths: - Events summary: List wallet webhook notifications operationId: getEventsWebhooks - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string responses: '200': description: List wallet webhook notifications @@ -2572,18 +2466,14 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] post: tags: - Events summary: Create wallet webhook notifications operationId: postEventsWebhooks - parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string requestBody: content: application/json: @@ -2621,6 +2511,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] /events/webhooks/{id}: delete: tags: @@ -2628,12 +2521,6 @@ paths: summary: Delete the wallet webhook notification by `id` operationId: deleteEventsWebhooksId parameters: - - name: apikey - in: header - description: API key - required: false - schema: - type: string - name: id in: path description: ID of the webhook notification to delete. @@ -2668,6 +2555,9 @@ paths: application/json: schema: $ref: '#/components/schemas/ErrorResponse' + security: + - apiKeyAuth: [] + - jwtAuth: [] components: schemas: AcceptConnectionInvitationRequest: @@ -3775,7 +3665,7 @@ components: type: string description: The date and time when the issue credential record was created. 
format: date-time - example: '2023-10-14T17:00:01.188137Z' + example: '2023-10-24T10:50:45.871298Z' updatedAt: type: string description: The date and time when the issue credential record was last @@ -4587,3 +4477,11 @@ components: type: array items: $ref: '#/components/schemas/WebhookNotification' + securitySchemes: + apiKeyAuth: + type: apiKey + name: apikey + in: header + jwtAuth: + type: http + scheme: bearer diff --git a/prism-agent/service/server/src/main/resources/application.conf b/prism-agent/service/server/src/main/resources/application.conf index 90a67bfb52..cfdf89342c 100644 --- a/prism-agent/service/server/src/main/resources/application.conf +++ b/prism-agent/service/server/src/main/resources/application.conf @@ -1,15 +1,6 @@ devMode = false devMode = ${?DEV_MODE} -iris { - service { - host = "localhost" - host = ${?IRIS_HOST} - port = 8081 - port = ${?IRIS_PORT} - } -} - prismNode { service = { host = "localhost" diff --git a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala index a8854e0a8b..38d4836efc 100644 --- a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala +++ b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/Modules.scala @@ -34,8 +34,6 @@ import io.iohk.atala.iam.authentication.apikey.AuthenticationRepository import io.iohk.atala.iam.authentication.oidc.KeycloakAuthenticatorImpl import io.iohk.atala.iam.authentication.oidc.KeycloakClientImpl import io.iohk.atala.iam.authentication.oidc.KeycloakConfig -import io.iohk.atala.iris.proto.service.IrisServiceGrpc -import io.iohk.atala.iris.proto.service.IrisServiceGrpc.IrisServiceStub import io.iohk.atala.pollux.vc.jwt.{PrismDidResolver, DidResolver as JwtDidResolver} import io.iohk.atala.prism.protos.node_api.NodeServiceGrpc import io.iohk.atala.shared.db.{ContextAwareTask, DbConfig, TransactorLayer} @@ -94,21 +92,6 @@ object AppModule { } object GrpcModule { - // TODO: once Castor + Pollux has migrated to use Node 2.0 stubs, this should be removed. 
- val irisStubLayer: TaskLayer[IrisServiceStub] = { - val stubLayer = ZLayer.fromZIO( - ZIO - .service[AppConfig] - .map(_.iris.service) - .flatMap(config => - ZIO.attempt( - IrisServiceGrpc.stub(ManagedChannelBuilder.forAddress(config.host, config.port).usePlaintext.build) - ) - ) - ) - SystemModule.configLayer >>> stubLayer - } - val prismNodeStubLayer: TaskLayer[NodeServiceGrpc.NodeServiceStub] = { val stubLayer = ZLayer.fromZIO( ZIO diff --git a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala index 38c179ee38..7f461f72ff 100644 --- a/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala +++ b/prism-agent/service/server/src/main/scala/io/iohk/atala/agent/server/config/AppConfig.scala @@ -14,7 +14,6 @@ import scala.util.Try final case class AppConfig( devMode: Boolean, - iris: IrisConfig, pollux: PolluxConfig, agent: AgentConfig, connect: ConnectConfig, @@ -33,8 +32,6 @@ object AppConfig { final case class VaultConfig(address: String, token: String) -final case class IrisConfig(service: GrpcServiceConfig) - final case class PolluxConfig( database: DatabaseConfig, issueBgJobRecordsLimit: Int, diff --git a/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala b/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala index af6bcd083e..6fec924fc2 100644 --- a/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala +++ b/prism-agent/service/server/src/main/scala/io/iohk/atala/issue/controller/IssueController.scala @@ -60,8 +60,6 @@ object IssueController { ErrorResponse.badRequest(title = "Create Request Validation Error", detail = Some(msg)) case CredentialServiceError.CredentialIdNotDefined(msg) => ErrorResponse.badRequest(title = "Credential ID not defined one request", detail = Some(msg.toString)) - case CredentialServiceError.IrisError(msg) => - ErrorResponse.internalServerError(title = "VDR Error", detail = Some(msg.toString)) case CredentialServiceError.CredentialSchemaError(e) => ErrorResponse.badRequest(title = "Credential Schema Error", detail = Some(e.message)) case CredentialServiceError.UnsupportedVCClaimsValue(error) => diff --git a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala index d60e8a6178..880e697c89 100644 --- a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala +++ b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerSpec.scala @@ -116,12 +116,6 @@ object IssueControllerSpec extends ZIOSpecDefault { ) assert(httpError)(equalTo(errorResponse)) }, - test("return internal server error if iris error") { - val cse = CredentialServiceError.IrisError(new Throwable("message")) - val httpError = IssueController.toHttpError(cse) - val errorResponse = ErrorResponse.internalServerError(title = "VDR Error", detail = Some(cse.cause.toString)) - assert(httpError)(equalTo(errorResponse)) - } ) } diff --git a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala index b45b74358f..a643ed17a0 100644 --- 
a/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala +++ b/prism-agent/service/server/src/test/scala/io/iohk/atala/issue/controller/IssueControllerTestTools.scala @@ -1,7 +1,6 @@ package io.iohk.atala.issue.controller import com.typesafe.config.ConfigFactory -import io.grpc.ManagedChannelBuilder import io.iohk.atala.agent.server.config.AppConfig import io.iohk.atala.agent.walletapi.memory.GenericSecretStorageInMemory import io.iohk.atala.agent.walletapi.model.BaseEntity @@ -12,7 +11,6 @@ import io.iohk.atala.connect.core.repository.ConnectionRepositoryInMemory import io.iohk.atala.connect.core.service.ConnectionServiceImpl import io.iohk.atala.iam.authentication.AuthenticatorWithAuthZ import io.iohk.atala.iam.authentication.DefaultEntityAuthenticator -import io.iohk.atala.iris.proto.service.IrisServiceGrpc import io.iohk.atala.issue.controller.http.{ CreateIssueCredentialRecordRequest, IssueCredentialRecord, @@ -51,10 +49,6 @@ trait IssueControllerTestTools extends PostgresTestContainerSupport { Response[ Either[DeserializationException[String], IssueCredentialRecordPage] ] - - val irisStubLayer = ZLayer.fromZIO( - ZIO.succeed(IrisServiceGrpc.stub(ManagedChannelBuilder.forAddress("localhost", 9999).usePlaintext.build)) - ) val didResolverLayer = ZLayer.fromZIO(ZIO.succeed(makeResolver(Map.empty))) val configLayer: Layer[ReadError[String], AppConfig] = ZLayer.fromZIO { @@ -84,7 +78,6 @@ trait IssueControllerTestTools extends PostgresTestContainerSupport { private val controllerLayer = contextAwareTransactorLayer >+> configLayer >+> - irisStubLayer >+> didResolverLayer >+> ResourceURIDereferencerImpl.layer >+> CredentialRepositoryInMemory.layer >+> diff --git a/project/build.properties b/project/build.properties index 8b9a0b0ab0..e8a1e246e8 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.8.0 +sbt.version=1.9.7 diff --git a/tests/e2e-tests/build.gradle.kts b/tests/e2e-tests/build.gradle.kts deleted file mode 100644 index 4e9d088ca1..0000000000 --- a/tests/e2e-tests/build.gradle.kts +++ /dev/null @@ -1,55 +0,0 @@ -plugins { - id("org.jetbrains.kotlin.jvm") version "1.9.0" - idea - jacoco - id("net.serenity-bdd.serenity-gradle-plugin") version "4.0.1" - kotlin("plugin.serialization") version "1.9.0" -} - -repositories { - mavenCentral() -} - -dependencies { - // Logging - implementation("org.slf4j:slf4j-log4j12:2.0.5") - // Beautify async waits - implementation("org.awaitility:awaitility-kotlin:4.2.0") - // Test engines and reports - testImplementation("junit:junit:4.13.2") - implementation("net.serenity-bdd:serenity-core:4.0.1") - implementation("net.serenity-bdd:serenity-cucumber:4.0.1") - implementation("net.serenity-bdd:serenity-screenplay-rest:4.0.1") - testImplementation("net.serenity-bdd:serenity-ensure:4.0.1") - // Beautify exceptions handling assertions - testImplementation("org.assertj:assertj-core:3.23.1") - // Navigate through Json with xpath - testImplementation("com.jayway.jsonpath:json-path:2.7.0") - // HTTP listener - implementation("io.ktor:ktor-server-netty:2.3.0") - implementation("io.ktor:ktor-client-apache:2.3.0") - implementation("org.jetbrains.kotlinx:kotlinx-serialization-json:1.5.0") -} - -buildscript { - dependencies { - classpath("net.serenity-bdd:serenity-single-page-report:4.0.1") - classpath("net.serenity-bdd:serenity-json-summary-report:4.0.1") - } -} - -/** - * Add HTML one-pager and JSON summary report to be produced - */ -serenity { - reports = 
listOf("single-page-html", "json-summary") -} - -tasks.test { - testLogging.showStandardStreams = true - systemProperty("cucumber.filter.tags", System.getProperty("cucumber.filter.tags")) -} - -kotlin { - jvmToolchain(19) -} diff --git a/tests/e2e-tests/settings.gradle.kts b/tests/e2e-tests/settings.gradle.kts deleted file mode 100644 index 40b97a0ff6..0000000000 --- a/tests/e2e-tests/settings.gradle.kts +++ /dev/null @@ -1,2 +0,0 @@ - -rootProject.name = "e2e-tests" diff --git a/tests/e2e-tests/src/main/kotlin/api_models/Connection.kt b/tests/e2e-tests/src/main/kotlin/api_models/Connection.kt deleted file mode 100644 index 37096a7609..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/Connection.kt +++ /dev/null @@ -1,27 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class Connection( - var connectionId: String = "", - var thid: String = "", - var createdAt: String = "", - var updatedAt: String = "", - var invitation: Invitation = Invitation(), - var kind: String = "", - var self: String = "", - var state: String = "", - var label: String = "", - var myDid: String = "", - var theirDid: String = "", - var role: String = "", - var metaRetries: Int = 0, -): JsonEncoded - -object ConnectionState { - const val INVITATION_GENERATED = "InvitationGenerated" - const val CONNECTION_REQUEST_PENDING = "ConnectionRequestPending" - const val CONNECTION_RESPONSE_SENT = "ConnectionResponseSent" - const val CONNECTION_RESPONSE_RECEIVED = "ConnectionResponseReceived" -} diff --git a/tests/e2e-tests/src/main/kotlin/api_models/CreatePrismDidRequest.kt b/tests/e2e-tests/src/main/kotlin/api_models/CreatePrismDidRequest.kt deleted file mode 100644 index 4c90ac84c4..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/CreatePrismDidRequest.kt +++ /dev/null @@ -1,5 +0,0 @@ -package api_models - -data class CreatePrismDidRequest( - val documentTemplate: DocumentTemplate, -) diff --git a/tests/e2e-tests/src/main/kotlin/api_models/Credential.kt b/tests/e2e-tests/src/main/kotlin/api_models/Credential.kt deleted file mode 100644 index eea423b762..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/Credential.kt +++ /dev/null @@ -1,31 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class Credential( - var automaticIssuance: Boolean = false, - var awaitConfirmation: Boolean = false, - var createdAt: String = "", - var protocolState: String = "", - var recordId: String = "", - var thid: String = "", - var role: String = "", - var schemaId: String? = "", - var subjectId: String = "", - var updatedAt: String = "", - var validityPeriod: Double = 0.0, - var claims: LinkedHashMap = LinkedHashMap(), - var credential: String = "", - var issuingDID: String = "", - var connectionId: String = "", - var credentialFormat: String = "JWT", - var metaRetries: Int = 0, -): JsonEncoded - -object CredentialState { - const val OFFER_RECEIVED = "OfferReceived" - const val REQUEST_RECEIVED = "RequestReceived" - const val CREDENTIAL_SENT = "CredentialSent" - const val CREDENTIAL_RECEIVED = "CredentialReceived" -} diff --git a/tests/e2e-tests/src/main/kotlin/api_models/CredentialSchema.kt b/tests/e2e-tests/src/main/kotlin/api_models/CredentialSchema.kt deleted file mode 100644 index be0c860cc1..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/CredentialSchema.kt +++ /dev/null @@ -1,19 +0,0 @@ -package api_models - -import com.fasterxml.jackson.databind.JsonNode - -data class CredentialSchema( - var name: String? 
= null, - var version: String? = null, - var tags: List? = listOf(""), - var description: String? = null, - var type: String? = null, - var author: String? = null, - var authored: String? = null, - var schema: JsonNode? = null, - var guid: String? = null, - var longId: String? = null, - var id: String? = null, - var kind: String? = null, - var self: String? = null, -) diff --git a/tests/e2e-tests/src/main/kotlin/api_models/DidResolutionResult.kt b/tests/e2e-tests/src/main/kotlin/api_models/DidResolutionResult.kt deleted file mode 100644 index cedf10631c..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/DidResolutionResult.kt +++ /dev/null @@ -1,64 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class DidResolutionResult( - var `@context`: String? = null, - var didDocument: DidDocument? = null, - var didDocumentMetadata: DidDocumentMetadata? = null, - var didResolutionMetadata: DidResolutionMetadata? = null, -): JsonEncoded - -@Serializable -data class DidDocument( - var `@context`: List? = null, - var assertionMethod: List? = null, - var authentication: List? = null, - var capabilityInvocation: List? = null, - var capabilityDelegation: List? = null, - var controller: String? = null, - var id: String? = null, - var keyAgreement: List? = null, - var service: List? = null, - var verificationMethod: List? = null, -): JsonEncoded - -@Serializable -data class VerificationMethod( - var controller: String? = null, - var id: String? = null, - var publicKeyJwk: PublicKeyJwk? = null, - var type: String? = null, -): JsonEncoded - -typealias VerificationMethodRef = String - -@Serializable -data class PublicKeyJwk( - var crv: String? = null, - var kty: String? = null, - var x: String? = null, - var y: String? = null, -): JsonEncoded - -@Serializable -data class DidDocumentMetadata( - var canonicalId: String? = null, - var versionId: String? = null, - var deactivated: Boolean? = null, - var created: String? = null, - var updated: String? = null, -): JsonEncoded - -@Serializable -data class DidDocumentService( - var id: String? = null, - var serviceEndpoint: List? = null, - var type: String? = null, -): JsonEncoded - -@Serializable -data class DidResolutionMetadata( - var contentType: String? 
= null, -): JsonEncoded diff --git a/tests/e2e-tests/src/main/kotlin/api_models/DocumentTemplate.kt b/tests/e2e-tests/src/main/kotlin/api_models/DocumentTemplate.kt deleted file mode 100644 index fb52838955..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/DocumentTemplate.kt +++ /dev/null @@ -1,6 +0,0 @@ -package api_models - -data class DocumentTemplate( - val publicKeys: List, - val services: List, -) diff --git a/tests/e2e-tests/src/main/kotlin/api_models/Entity.kt b/tests/e2e-tests/src/main/kotlin/api_models/Entity.kt deleted file mode 100644 index 6035d98f50..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/Entity.kt +++ /dev/null @@ -1,16 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class CreateEntityRequest( - val walletId: String, - val name: String, - val id: String, -): JsonEncoded - -@Serializable -data class AddApiKeyRequest( - val entityId: String, - val apiKey: String, -): JsonEncoded diff --git a/tests/e2e-tests/src/main/kotlin/api_models/EventRegistration.kt b/tests/e2e-tests/src/main/kotlin/api_models/EventRegistration.kt deleted file mode 100644 index 6c152b45bd..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/EventRegistration.kt +++ /dev/null @@ -1,8 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class RegisterWebhookRequest( - val url: String, -) :JsonEncoded diff --git a/tests/e2e-tests/src/main/kotlin/api_models/Events.kt b/tests/e2e-tests/src/main/kotlin/api_models/Events.kt deleted file mode 100644 index 817274ffe4..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/Events.kt +++ /dev/null @@ -1,49 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable -import kotlinx.serialization.json.JsonElement - -@Serializable -data class Event( - var type: String, - var id: String, - var ts: String, - var data: JsonElement, - var walletId: String, -) : JsonEncoded - -@Serializable -data class ConnectionEvent( - var type: String, - var id: String, - var ts: String, - var data: Connection, - var walletId: String, -) : JsonEncoded - -@Serializable -data class CredentialEvent( - var type: String, - var id: String, - var ts: String, - var data: Credential, - var walletId: String, -) : JsonEncoded - -@Serializable -data class PresentationEvent( - var type: String, - var id: String, - var ts: String, - var data: PresentationProof, - var walletId: String, -) : JsonEncoded - -@Serializable -data class DidEvent( - var type: String, - var id: String, - var ts: String, - var data: ManagedDid, - var walletId: String, -) : JsonEncoded diff --git a/tests/e2e-tests/src/main/kotlin/api_models/HealthInfo.kt b/tests/e2e-tests/src/main/kotlin/api_models/HealthInfo.kt deleted file mode 100644 index 896c228f42..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/HealthInfo.kt +++ /dev/null @@ -1,4 +0,0 @@ -package api_models -data class HealthInfo( - var version: String = "", -) diff --git a/tests/e2e-tests/src/main/kotlin/api_models/Invitation.kt b/tests/e2e-tests/src/main/kotlin/api_models/Invitation.kt deleted file mode 100644 index 93cb6f4fe8..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/Invitation.kt +++ /dev/null @@ -1,11 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class Invitation( - var id: String = "", - var from: String = "", - var invitationUrl: String = "", - var type: String = "", -): JsonEncoded diff --git 
a/tests/e2e-tests/src/main/kotlin/api_models/JsonEncoded.kt b/tests/e2e-tests/src/main/kotlin/api_models/JsonEncoded.kt deleted file mode 100644 index 560d6132f1..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/JsonEncoded.kt +++ /dev/null @@ -1,12 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable -import kotlinx.serialization.encodeToString -import kotlinx.serialization.json.Json - -@Serializable -sealed interface JsonEncoded { - fun toJsonString(): String { - return Json.encodeToString(this) - } -} diff --git a/tests/e2e-tests/src/main/kotlin/api_models/ManagedDid.kt b/tests/e2e-tests/src/main/kotlin/api_models/ManagedDid.kt deleted file mode 100644 index e5063ffc30..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/ManagedDid.kt +++ /dev/null @@ -1,15 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class ManagedDid( - var did: String = "", - var longFormDid: String = "", - var status: String = "", -): JsonEncoded - -object ManagedDidStatuses { - val PUBLISHED = "PUBLISHED" - val CREATED = "CREATED" -} diff --git a/tests/e2e-tests/src/main/kotlin/api_models/PresentationProof.kt b/tests/e2e-tests/src/main/kotlin/api_models/PresentationProof.kt deleted file mode 100644 index 2f617a3571..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/PresentationProof.kt +++ /dev/null @@ -1,21 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class PresentationProof( - var presentationId: String? = null, - var thid: String? = null, - var status: String? = null, - var connectionId: String? = null, - var proofs: List? = null, - var data: List? = null, - var role: String? = null, - var metaRetries: Int = 0, -): JsonEncoded - -object PresentationProofStatus { - const val REQUEST_RECEIVED = "RequestReceived" - const val REQUEST_REJECTED = "RequestRejected" - const val PRESENTATION_VERIFIED = "PresentationVerified" -} diff --git a/tests/e2e-tests/src/main/kotlin/api_models/PublicKey.kt b/tests/e2e-tests/src/main/kotlin/api_models/PublicKey.kt deleted file mode 100644 index 9e53427ea4..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/PublicKey.kt +++ /dev/null @@ -1,16 +0,0 @@ -package api_models - -import com.fasterxml.jackson.annotation.JsonValue - -data class PublicKey( - val id: String, - val purpose: Purpose, -) - -enum class Purpose(@JsonValue val value: String) { - AUTHENTICATION("authentication"), - ASSERTION_METHOD("assertionMethod"), - KEY_AGREEMENT("keyAgreement"), - CAPABILITY_INVOCATION("capabilityInvocation"), - CAPABILITY_DELEGATION("capabilityDelegation"), -} diff --git a/tests/e2e-tests/src/main/kotlin/api_models/Service.kt b/tests/e2e-tests/src/main/kotlin/api_models/Service.kt deleted file mode 100644 index 1fb68ee4f4..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/Service.kt +++ /dev/null @@ -1,7 +0,0 @@ -package api_models - -data class Service( - var id: String = "", - var serviceEndpoint: List = listOf(""), - var type: String = "", -) diff --git a/tests/e2e-tests/src/main/kotlin/api_models/UpdatePrismDidRequest.kt b/tests/e2e-tests/src/main/kotlin/api_models/UpdatePrismDidRequest.kt deleted file mode 100644 index d9681335e0..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/UpdatePrismDidRequest.kt +++ /dev/null @@ -1,14 +0,0 @@ -package api_models - -data class UpdatePrismDidRequest( - val actions: List, -) - -data class UpdatePrismDidAction( - val actionType: String? = null, - val addKey: PublicKey? 
= null, - val removeKey: PublicKey? = null, - val addService: Service? = null, - val removeService: Service? = null, - val updateService: Service? = null, -) diff --git a/tests/e2e-tests/src/main/kotlin/api_models/VerificationPolicy.kt b/tests/e2e-tests/src/main/kotlin/api_models/VerificationPolicy.kt deleted file mode 100644 index c3eb0caf21..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/VerificationPolicy.kt +++ /dev/null @@ -1,24 +0,0 @@ -package api_models - -data class VerificationPolicy( - var id: String? = null, - var nonce: String? = null, - var name: String? = null, - var description: String? = null, - var constraints: List? = null, - var createdAt: String? = null, - var updatedAt: String? = null, - var kind: String? = null, - var self: String? = null, -) - -data class VerificationPolicyInput( - var name: String? = null, - var description: String? = null, - var constraints: List? = null, -) - -data class Constraint( - var schemaId: String? = null, - var trustedIssuers: List? = null, -) diff --git a/tests/e2e-tests/src/main/kotlin/api_models/Wallet.kt b/tests/e2e-tests/src/main/kotlin/api_models/Wallet.kt deleted file mode 100644 index 4bb7283bc8..0000000000 --- a/tests/e2e-tests/src/main/kotlin/api_models/Wallet.kt +++ /dev/null @@ -1,10 +0,0 @@ -package api_models - -import kotlinx.serialization.Serializable - -@Serializable -data class CreateWalletRequest( - val name: String, - val seed: String, - val id: String, -): JsonEncoded diff --git a/tests/e2e-tests/src/test/kotlin/common/CredentialSchemas.kt b/tests/e2e-tests/src/test/kotlin/common/CredentialSchemas.kt deleted file mode 100644 index 70338cc29f..0000000000 --- a/tests/e2e-tests/src/test/kotlin/common/CredentialSchemas.kt +++ /dev/null @@ -1,51 +0,0 @@ -package common - -import api_models.CredentialSchema -import com.fasterxml.jackson.databind.ObjectMapper -import java.util.* - -object CredentialSchemas { - - val CREDENTIAL_SCHEMA_TYPE = "https://w3c-ccg.github.io/vc-json-schemas/schema/2.0/schema.json" - - val SCHEMA_TYPE = "https://json-schema.org/draft/2020-12/schema" - - val JSON_SCHEMA = """ - { - "${"$"}id": "https://example.com/student-schema-1.0", - "${"$"}schema": "$SCHEMA_TYPE", - "description": "Student schema", - "type": "object", - "properties": { - "name": { - "type": "string" - }, - "age": { - "type": "integer" - } - } - } - """.trimIndent() - - fun generate_with_name_suffix_and_author(suffix: String, author: String): CredentialSchema { - return CredentialSchema( - author = author, - name = "${UUID.randomUUID()} $suffix", - description = "Simple student credentials schema", - type = CREDENTIAL_SCHEMA_TYPE, - schema = ObjectMapper().readTree(JSON_SCHEMA), - tags = listOf("school", "students"), - version = "1.0.0", - ) - } - - val STUDENT_SCHEMA = CredentialSchema( - author = "did:prism:agent", - name = UUID.randomUUID().toString(), - description = "Simple student credentials schema", - type = CREDENTIAL_SCHEMA_TYPE, - schema = ObjectMapper().readTree(JSON_SCHEMA), - tags = listOf("school", "students"), - version = "1.0.0", - ) -} diff --git a/tests/e2e-tests/src/test/kotlin/common/Ensure.kt b/tests/e2e-tests/src/test/kotlin/common/Ensure.kt deleted file mode 100644 index 491437276d..0000000000 --- a/tests/e2e-tests/src/test/kotlin/common/Ensure.kt +++ /dev/null @@ -1,52 +0,0 @@ -package common - -import net.serenitybdd.screenplay.Question -import org.openqa.selenium.By -import java.time.LocalDate -import java.time.LocalTime -import net.serenitybdd.screenplay.ensure.enableSoftAssertions as 
EnableSoftAssertions -import net.serenitybdd.screenplay.ensure.reportSoftAssertions as ReportSoftAssertions -import net.serenitybdd.screenplay.ensure.that as That -import net.serenitybdd.screenplay.ensure.thatAmongst as ThatAmongst -import net.serenitybdd.screenplay.ensure.thatTheCurrentPage as ThatTheCurrentPage -import net.serenitybdd.screenplay.ensure.thatTheListOf as ThatTheListOf -import net.serenitybdd.screenplay.targets.Target as SerenityTarget - -object Ensure { - fun that(value: String?) = That(value) - fun that(value: LocalDate) = That(value) - fun that(value: LocalTime) = That(value) - fun that(value: Boolean) = That(value) - fun that(value: Float) = That(value) - fun that(value: Double) = That(value) - - fun that(value: Comparable) = That(value) - fun that(value: Collection) = That(value) - - fun that(question: Question, predicate: (actual: A) -> Boolean) = That(question, predicate) - fun that(description: String, question: Question, predicate: (actual: A) -> Boolean) = - That(description, question, predicate) - - fun > that(description: String, question: Question) = That(description, question) - fun > that(question: Question) = That(question) - - fun that(description: String, question: Question>) = That(description, question) - fun that(question: Question>) = That(question) - - fun thatTheListOf(description: String, question: Question>) = ThatTheListOf(description, question) - fun thatTheListOf(question: Question>) = ThatTheListOf(question) - - fun thatTheCurrentPage() = ThatTheCurrentPage() - fun that(value: SerenityTarget) = That(value) - fun that(value: By) = net.serenitybdd.screenplay.ensure.that(value) - - // Collection matchers - fun thatTheListOf(value: SerenityTarget) = ThatTheListOf(value) - fun thatTheListOf(value: By) = ThatTheListOf(value) - - fun thatAmongst(value: SerenityTarget) = ThatAmongst(value) - fun thatAmongst(value: By) = ThatAmongst(value) - - fun enableSoftAssertions() = EnableSoftAssertions() - fun reportSoftAssertions() = ReportSoftAssertions() -} diff --git a/tests/e2e-tests/src/test/kotlin/common/Environments.kt b/tests/e2e-tests/src/test/kotlin/common/Environments.kt deleted file mode 100644 index eebbe6db36..0000000000 --- a/tests/e2e-tests/src/test/kotlin/common/Environments.kt +++ /dev/null @@ -1,24 +0,0 @@ -package common - -object Environments { - val AGENT_AUTH_REQUIRED: Boolean = (System.getenv("AGENT_AUTH_REQUIRED") ?: "true").toBoolean() - val AGENT_AUTH_HEADER = System.getenv("AGENT_AUTH_HEADER") ?: "apikey" - val ACME_AUTH_KEY = System.getenv("ACME_AUTH_KEY") ?: "SECURE_ACME_AUTH_KEY_GREATER_16_SYMBOLS" - val ACME_AGENT_URL = System.getenv("ACME_AGENT_URL") ?: "http://localhost:8080/prism-agent" - val ACME_AGENT_WEBHOOK_HOST = System.getenv("ACME_AGENT_WEBHOOK_HOST") ?: "host.docker.internal" - val ACME_AGENT_WEBHOOK_PORT = (System.getenv("ACME_AGENT_WEBHOOK_PORT") ?: "9955").toInt() - val ACME_AGENT_WEBHOOK_URL = "http://$ACME_AGENT_WEBHOOK_HOST:$ACME_AGENT_WEBHOOK_PORT" - val BOB_AGENT_URL = System.getenv("BOB_AGENT_URL") ?: "http://localhost:8090/prism-agent" - val BOB_AUTH_KEY = System.getenv("BOB_AUTH_KEY") ?: "default" - val BOB_AGENT_WEBHOOK_HOST = System.getenv("BOB_AGENT_WEBHOOK_HOST") ?: "host.docker.internal" - val BOB_AGENT_WEBHOOK_PORT = (System.getenv("BOB_AGENT_WEBHOOK_PORT") ?: "9956").toInt() - val BOB_AGENT_WEBHOOK_URL = "http://$BOB_AGENT_WEBHOOK_HOST:$BOB_AGENT_WEBHOOK_PORT" - val FABER_AGENT_URL = System.getenv("FABER_AGENT_URL") ?: "http://localhost:8080/prism-agent" - val FABER_AUTH_KEY = 
System.getenv("FABER_AUTH_KEY") ?: "SECURE_FABER_AUTH_KEY_GREATER_16_SYMBOLS" - val FABER_AGENT_WEBHOOK_HOST = System.getenv("FABER_AGENT_WEBHOOK_HOST") ?: "host.docker.internal" - val FABER_AGENT_WEBHOOK_PORT = (System.getenv("FABER_AGENT_WEBHOOK_PORT") ?: "9957").toInt() - val FABER_AGENT_WEBHOOK_URL = "http://$FABER_AGENT_WEBHOOK_HOST:$FABER_AGENT_WEBHOOK_PORT" - val ADMIN_AGENT_URL = ACME_AGENT_URL - val ADMIN_AUTH_HEADER = System.getenv("ADMIN_AUTH_HEADER") ?: "x-admin-api-key" - val ADMIN_AUTH_TOKEN = System.getenv("ADMIN_AUTH_TOKEN") ?: "admin" -} diff --git a/tests/e2e-tests/src/test/kotlin/common/TestConstants.kt b/tests/e2e-tests/src/test/kotlin/common/TestConstants.kt deleted file mode 100644 index 21bf3c67e9..0000000000 --- a/tests/e2e-tests/src/test/kotlin/common/TestConstants.kt +++ /dev/null @@ -1,43 +0,0 @@ -package common - -import api_models.PublicKey -import api_models.Purpose -import api_models.Service -import java.time.Duration -import java.util.* - -object TestConstants { - val VERIFICATION_POLICIES = VerificationPolicies - val CREDENTIAL_SCHEMAS = CredentialSchemas - val RANDOM_CONSTAND_UUID = UUID.randomUUID().toString() - val DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN = Duration.ofSeconds(60L) - val PRISM_DID_AUTH_KEY = PublicKey("auth-1", Purpose.AUTHENTICATION) - val PRISM_DID_ASSERTION_KEY = PublicKey("assertion-1", Purpose.ASSERTION_METHOD) - val PRISM_DID_UPDATE_NEW_AUTH_KEY = PublicKey("auth-2", Purpose.AUTHENTICATION) - val PRISM_DID_SERVICE = Service( - "https://foo.bar.com", - listOf("https://foo.bar.com/"), - "LinkedDomains", - ) - val PRISM_DID_SERVICE_FOR_UPDATE = Service( - "https://update.com", - listOf("https://update.com/"), - "LinkedDomains", - ) - val PRISM_DID_SERVICE_TO_REMOVE = Service( - "https://remove.com", - listOf("https://remove.com/"), - "LinkedDomains", - ) - val PRISM_DID_UPDATE_NEW_SERVICE_URL = "https://bar.foo.com/" - val PRISM_DID_UPDATE_NEW_SERVICE = Service( - "https://new.service.com", - listOf("https://new.service.com/"), - "LinkedDomains", - ) - val EVENT_TYPE_CONNECTION_UPDATED = "ConnectionUpdated" - val EVENT_TYPE_ISSUE_CREDENTIAL_RECORD_UPDATED = "IssueCredentialRecordUpdated" - val EVENT_TYPE_PRESENTATION_UPDATED = "PresentationUpdated" - val EVENT_TYPE_DID_STATUS_UPDATED = "DIDStatusUpdated" - val WRONG_SEED = "wrong seed" -} diff --git a/tests/e2e-tests/src/test/kotlin/common/Utils.kt b/tests/e2e-tests/src/test/kotlin/common/Utils.kt deleted file mode 100644 index b4d3fa53fd..0000000000 --- a/tests/e2e-tests/src/test/kotlin/common/Utils.kt +++ /dev/null @@ -1,52 +0,0 @@ -package common - -import com.fasterxml.jackson.databind.ObjectMapper -import com.jayway.jsonpath.DocumentContext -import com.jayway.jsonpath.JsonPath -import net.serenitybdd.rest.SerenityRest -import org.awaitility.Awaitility -import org.awaitility.core.ConditionTimeoutException -import org.awaitility.kotlin.withPollInterval -import org.awaitility.pollinterval.FixedPollInterval -import java.time.Duration -import kotlin.reflect.KClass - -object Utils { - - fun lastResponseObject(path: String, clazz: KClass): T { - return SerenityRest.lastResponse().jsonPath().getObject(path, clazz.java) - } - - fun lastResponseList(path: String, clazz: KClass): List { - return SerenityRest.lastResponse().jsonPath().getList(path, clazz.java) - } - - fun lastResponseMap(path: String, keyType: KClass, valueType: KClass): Map { - return SerenityRest.lastResponse().jsonPath().getMap(path, keyType.java, valueType.java) - } - - fun toJsonPath(any: Any): DocumentContext { - val json = 
ObjectMapper().writeValueAsString(any) - return JsonPath.parse(json) - } - - fun wait( - blockToWait: () -> Boolean, - errorMessage: String, - poolInterval: FixedPollInterval = FixedPollInterval(Duration.ofMillis(500L)), - timeout: Duration = Duration.ofSeconds(120L), - ) { - try { - Awaitility.await().withPollInterval(poolInterval) - .pollInSameThread() - .atMost(timeout) - .until { - blockToWait() - } - } catch (err: ConditionTimeoutException) { - throw ConditionTimeoutException( - errorMessage, - ) - } - } -} diff --git a/tests/e2e-tests/src/test/kotlin/common/VerificationPolicies.kt b/tests/e2e-tests/src/test/kotlin/common/VerificationPolicies.kt deleted file mode 100644 index a79fff24ff..0000000000 --- a/tests/e2e-tests/src/test/kotlin/common/VerificationPolicies.kt +++ /dev/null @@ -1,25 +0,0 @@ -package common - -import api_models.Constraint -import api_models.VerificationPolicy - -object VerificationPolicies { - - val schemaId = "http://atalaprism.io/schemas/1.0/StudentCredential" - val trustedIssuer1 = "did:example:123456789abcdefghi" - val trustedIssuer2 = "did:example:123456789abcdefghj" - - val VERIFICATION_POLICY = VerificationPolicy( - name = "Trusted Issuer and SchemaID", - description = "Verification Policy with trusted issuer and schemaId", - constraints = listOf( - Constraint( - schemaId = schemaId, - trustedIssuers = listOf( - trustedIssuer1, - trustedIssuer2 - ) - ) - ) - ) -} diff --git a/tests/e2e-tests/src/test/kotlin/features/CommonSteps.kt b/tests/e2e-tests/src/test/kotlin/features/CommonSteps.kt deleted file mode 100644 index 718e1f565d..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/CommonSteps.kt +++ /dev/null @@ -1,153 +0,0 @@ -package features - -import api_models.Connection -import api_models.ConnectionState -import api_models.Credential -import common.Environments -import common.ListenToEvents -import common.Utils -import common.Utils.lastResponseList -import features.connection.ConnectionSteps -import features.did.PublishDidSteps -import features.issue_credentials.IssueCredentialsSteps -import features.multitenancy.EntitySteps -import features.multitenancy.EventsSteps -import features.multitenancy.WalletsSteps -import interactions.Get -import io.cucumber.java.AfterAll -import io.cucumber.java.BeforeAll -import io.cucumber.java.ParameterType -import io.cucumber.java.en.Given -import net.serenitybdd.screenplay.Actor -import net.serenitybdd.screenplay.actors.Cast -import net.serenitybdd.screenplay.actors.OnStage -import net.serenitybdd.screenplay.rest.abilities.CallAnApi -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.SC_OK - -@BeforeAll -fun initializeIssuerVerifierMultitenantAgent() { - val eventSteps = EventsSteps() - val cast = Cast() - cast.actorNamed("Admin", CallAnApi.at(Environments.ADMIN_AGENT_URL)) - cast.actorNamed("Acme", CallAnApi.at(Environments.ACME_AGENT_URL), ListenToEvents.at(Environments.ACME_AGENT_WEBHOOK_HOST, Environments.ACME_AGENT_WEBHOOK_PORT)) - cast.actorNamed("Bob", CallAnApi.at(Environments.BOB_AGENT_URL), ListenToEvents.at(Environments.BOB_AGENT_WEBHOOK_HOST, Environments.BOB_AGENT_WEBHOOK_PORT)) - cast.actorNamed("Faber", CallAnApi.at(Environments.FABER_AGENT_URL), ListenToEvents.at(Environments.FABER_AGENT_WEBHOOK_HOST, Environments.FABER_AGENT_WEBHOOK_PORT)) - OnStage.setTheStage(cast) - val walletSteps = WalletsSteps() - val entitySteps = EntitySteps() - - // Create issuer wallet and tenant - walletSteps.createNewWallet(cast.actorNamed("Admin"), 
"issuerWallet") - val issuerEntityId = entitySteps.createNewEntity( - cast.actorNamed("Admin"), walletId = Utils.lastResponseObject("id", String::class), name = "issuer" - ) - entitySteps.addNewApiKeyToEntity(cast.actorNamed("Admin"), issuerEntityId, Environments.ACME_AUTH_KEY) - - // Create verifier wallet - walletSteps.createNewWallet(cast.actorNamed("Admin"), "verifierWallet") - val verifierEntityId = entitySteps.createNewEntity( - cast.actorNamed("Admin"), walletId = Utils.lastResponseObject("id", String::class), name = "verifier" - ) - entitySteps.addNewApiKeyToEntity(cast.actorNamed("Admin"), verifierEntityId, Environments.FABER_AUTH_KEY) - cast.actors.forEach { actor -> - when(actor.name) { - "Acme" -> { - actor.remember("AUTH_KEY", Environments.ACME_AUTH_KEY) - } - "Bob" -> { - actor.remember("AUTH_KEY", Environments.BOB_AUTH_KEY) - } - "Faber" -> { - actor.remember("AUTH_KEY", Environments.FABER_AUTH_KEY) - } - } - } - eventSteps.registerNewWebhook(cast.actorNamed("Acme"), Environments.ACME_AGENT_WEBHOOK_URL) - eventSteps.registerNewWebhook(cast.actorNamed("Faber"), Environments.FABER_AGENT_WEBHOOK_URL) -} - -@AfterAll -fun clearStage() { - OnStage.drawTheCurtain() -} - -class CommonSteps { - @ParameterType(".*") - fun actor(actorName: String): Actor { - return OnStage.theActorCalled(actorName) - } - - @Given("{actor} has an issued credential from {actor}") - fun holderHasIssuedCredentialFromIssuer(holder: Actor, issuer: Actor) { - holder.attemptsTo( - Get.resource("/issue-credentials/records"), - ) - holder.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - val receivedCredential = lastResponseList("contents", Credential::class).findLast { credential -> - credential.protocolState == "CredentialReceived" - } - - if (receivedCredential != null) { - holder.remember("issuedCredential", receivedCredential) - } else { - val publishDidSteps = PublishDidSteps() - val issueSteps = IssueCredentialsSteps() - actorsHaveExistingConnection(issuer, holder) - publishDidSteps.createsUnpublishedDid(holder) - publishDidSteps.createsUnpublishedDid(issuer) - publishDidSteps.hePublishesDidToLedger(issuer) - issueSteps.acmeOffersACredential(issuer, holder, "short") - issueSteps.bobRequestsTheCredential(holder) - issueSteps.acmeIssuesTheCredential(issuer) - issueSteps.bobHasTheCredentialIssued(holder) - } - } - - @Given("{actor} and {actor} have an existing connection") - fun actorsHaveExistingConnection(inviter: Actor, invitee: Actor) { - inviter.attemptsTo( - Get.resource("/connections"), - ) - inviter.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - val inviterConnection = lastResponseList("contents", Connection::class).firstOrNull { - it.label == "Connection with ${invitee.name}" && it.state == ConnectionState.CONNECTION_RESPONSE_SENT - } - - var inviteeConnection: Connection? 
= null - if (inviterConnection != null) { - invitee.attemptsTo( - Get.resource("/connections"), - ) - invitee.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - inviteeConnection = lastResponseList("contents", Connection::class).firstOrNull { - it.theirDid == inviterConnection.myDid && it.state == ConnectionState.CONNECTION_RESPONSE_RECEIVED - } - } - - if (inviterConnection != null && inviteeConnection != null) { - inviter.remember("connection-with-${invitee.name}", inviterConnection) - invitee.remember("connection-with-${inviter.name}", inviteeConnection) - } else { - val connectionSteps = ConnectionSteps() - connectionSteps.inviterGeneratesAConnectionInvitation(inviter, invitee) - connectionSteps.inviteeReceivesTheConnectionInvitation(invitee, inviter) - connectionSteps.inviteeSendsAConnectionRequestToInviter(invitee, inviter) - connectionSteps.inviterReceivesTheConnectionRequest(inviter) - connectionSteps.inviteeReceivesTheConnectionResponse(invitee) - connectionSteps.inviterAndInviteeHaveAConnection(inviter, invitee) - } - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/connection/ConnectionSteps.kt b/tests/e2e-tests/src/test/kotlin/features/connection/ConnectionSteps.kt deleted file mode 100644 index 3d1d945762..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/connection/ConnectionSteps.kt +++ /dev/null @@ -1,161 +0,0 @@ -package features.connection - -import api_models.* -import common.ListenToEvents -import common.Utils.lastResponseObject -import common.Utils.wait -import interactions.Get -import interactions.Post -import io.cucumber.java.en.Then -import io.cucumber.java.en.When -import net.serenitybdd.screenplay.Actor -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.SC_CREATED -import org.apache.http.HttpStatus.SC_OK -import org.assertj.core.api.Assertions.assertThat -import org.hamcrest.CoreMatchers.* - -class ConnectionSteps { - - @When("{actor} generates a connection invitation to {actor}") - fun inviterGeneratesAConnectionInvitation(inviter: Actor, invitee: Actor) { - // Acme(Issuer) initiates a connection - // and sends it to Bob(Holder) out-of-band, e.g. 
using QR-code - val connectionLabel = "Connection with ${invitee.name}" - inviter.attemptsTo( - Post.to("/connections") - .with { - it.body("""{"label": "$connectionLabel"}""") - }, - ) - inviter.should( - ResponseConsequence.seeThatResponse { response -> - response.statusCode(SC_CREATED) - response.body("connectionId", notNullValue()) - response.body("createdAt", notNullValue()) - response.body("invitation", notNullValue()) - response.body("label", containsString(connectionLabel)) - response.body("state", containsString(ConnectionState.INVITATION_GENERATED)) - response.body("role", containsString("Inviter")) - }, - ) - // Acme remembers invitation URL to send it out of band to Bob - inviter.remember( - "invitationUrl", - lastResponseObject("", Connection::class) - .invitation.invitationUrl.split("=")[1], - ) - inviter.remember( - "invitation", - lastResponseObject("invitation", Invitation::class), - ) - - // Acme remembers its connection ID for further use - inviter.remember( - "connectionId", - lastResponseObject("", Connection::class) - .connectionId, - ) - inviter.remember("thid", lastResponseObject("", Connection::class).thid) - } - - @When("{actor} receives the connection invitation from {actor}") - fun inviteeReceivesTheConnectionInvitation(invitee: Actor, inviter: Actor) { - // Here out of band transfer of connection QR code is happening - // and Bob (Holder) gets an invitation URL - // they're accepting connection invitation by POST request specifying achieved invitation - // we demonstrate it by Bob remembering invitationUrl that Acme recalls - invitee.remember("invitationUrl", inviter.recall("invitationUrl")) - } - - @When("{actor} sends a connection request to {actor}") - fun inviteeSendsAConnectionRequestToInviter(invitee: Actor, inviter: Actor) { - // Bob accepts connection using achieved out-of-band invitation - invitee.attemptsTo( - Post.to("/connection-invitations") - .with { - it.body("""{"invitation": "${invitee.recall("invitationUrl")}"}""") - }, - ) - val acmeInvitation = inviter.recall("invitation") - invitee.should( - ResponseConsequence.seeThatResponse { response -> - response.statusCode(SC_OK) - response.body("connectionId", notNullValue()) - response.body("createdAt", notNullValue()) - response.body("myDid", notNullValue()) - response.body("theirDid", notNullValue()) - response.body("invitation.from", containsString(acmeInvitation.from)) - response.body("invitation.id", containsString(acmeInvitation.id)) - response.body("invitation.invitationUrl", containsString(acmeInvitation.invitationUrl)) - response.body("invitation.type", containsString(acmeInvitation.type)) - response.body("state", containsString(ConnectionState.CONNECTION_REQUEST_PENDING)) - response.body("role", containsString("Invitee")) - }, - ) - invitee.remember("connectionId", lastResponseObject("", Connection::class).connectionId) - invitee.remember("thid", lastResponseObject("", Connection::class).thid) - } - - @When("{actor} receives the connection request and sends back the response") - fun inviterReceivesTheConnectionRequest(inviter: Actor) { - wait( - { - val lastEvent = ListenToEvents.`as`(inviter).connectionEvents.lastOrNull { - it.data.thid == inviter.recall("thid") - } - lastEvent != null && - lastEvent.data.state == ConnectionState.CONNECTION_RESPONSE_SENT - }, - "Inviter connection didn't reach ${ConnectionState.CONNECTION_RESPONSE_SENT} state", - ) - } - - @When("{actor} receives the connection response") - fun inviteeReceivesTheConnectionResponse(invitee: Actor) { - // Bob (Holder) 
receives final connection response - wait( - { - val lastEvent = ListenToEvents.`as`(invitee).connectionEvents.lastOrNull { - it.data.thid == invitee.recall("thid") - } - lastEvent != null && - lastEvent.data.state == ConnectionState.CONNECTION_RESPONSE_RECEIVED - }, - "Invitee connection didn't reach ${ConnectionState.CONNECTION_RESPONSE_RECEIVED} state.", - ) - } - - @Then("{actor} and {actor} have a connection") - fun inviterAndInviteeHaveAConnection(inviter: Actor, invitee: Actor) { - // Connection established. Both parties exchanged their DIDs with each other - inviter.attemptsTo( - Get.resource("/connections/${inviter.recall("connectionId")}"), - ) - inviter.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - inviter.remember("connection-with-${invitee.name}", lastResponseObject("", Connection::class)) - - invitee.attemptsTo( - Get.resource("/connections/${invitee.recall("connectionId")}"), - ) - invitee.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - invitee.remember("connection-with-${inviter.name}", lastResponseObject("", Connection::class)) - - assertThat(inviter.recall("connection-with-${invitee.name}").myDid) - .isEqualTo(invitee.recall("connection-with-${inviter.name}").theirDid) - assertThat(inviter.recall("connection-with-${invitee.name}").theirDid) - .isEqualTo(invitee.recall("connection-with-${inviter.name}").myDid) - assertThat(inviter.recall("connection-with-${invitee.name}").state) - .isEqualTo(ConnectionState.CONNECTION_RESPONSE_SENT) - assertThat(invitee.recall("connection-with-${inviter.name}").state) - .isEqualTo(ConnectionState.CONNECTION_RESPONSE_RECEIVED) - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/credential_schemas/CredentialSchemasSteps.kt b/tests/e2e-tests/src/test/kotlin/features/credential_schemas/CredentialSchemasSteps.kt deleted file mode 100644 index 147b8d9a12..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/credential_schemas/CredentialSchemasSteps.kt +++ /dev/null @@ -1,176 +0,0 @@ -package features.credential_schemas - -import api_models.CredentialSchema -import com.fasterxml.jackson.databind.ObjectMapper -import common.TestConstants -import common.Utils.lastResponseObject -import common.Utils.toJsonPath -import io.cucumber.java.PendingException -import io.cucumber.java.en.Then -import io.cucumber.java.en.When -import io.restassured.path.json.JsonPath -import net.serenitybdd.screenplay.Actor -import interactions.Get -import interactions.Post -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.* -import org.hamcrest.CoreMatchers.* -import org.hamcrest.Matchers.containsString -import org.hamcrest.Matchers.emptyString -import java.util.* - -class CredentialSchemasSteps { - - @When("{actor} creates a new credential schema") - fun acmeCreatesANewCredentialSchema(actor: Actor) { - actor.attemptsTo( - Post.to("/schema-registry/schemas").with { - it.body(TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA.copy(author = actor.recall("shortFormDid"))) - }, - ) - } - - @Then("{actor} sees new credential schema is available") - fun newCredentialSchemaIsAvailable(actor: Actor) { - actor.should(ResponseConsequence.seeThatResponse("New schema created") { - it.statusCode(SC_CREATED) - it.body("guid", not(emptyString())) - it.body("id", not(emptyString())) - it.body("longId", not(emptyString())) - it.body("authored", not(emptyString())) - it.body("kind", containsString("CredentialSchema")) - it.body("name", 
containsString(TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA.name))
-            it.body("description", containsString(TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA.description))
-            it.body("version", containsString(TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA.version))
-            it.body("type", equalTo(TestConstants.CREDENTIAL_SCHEMAS.CREDENTIAL_SCHEMA_TYPE))
-            TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA.tags!!.forEach { tag ->
-                it.body("tags", hasItem(tag))
-            }
-            it.body(
-                "schema.\$id",
-                equalTo(TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA.schema!!.get("\$id").asText())
-            )
-
-            it.body(
-                "schema", equalTo<Map<String, Any>>(
-                    JsonPath(
-                        ObjectMapper().writeValueAsString(
-                            TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA.schema
-                        )
-                    ).getMap("")
-                )
-            )
-        })
-    }
-
-    @When("{actor} creates {int} new schemas")
-    fun acmeCreatesMultipleSchemas(actor: Actor, numberOfSchemas: Int) {
-        val createdSchemas: MutableList<CredentialSchema> = mutableListOf()
-        repeat(numberOfSchemas) { i: Int ->
-            actor.attemptsTo(
-                Post.to("/schema-registry/schemas").with {
-                    it.body(
-                        TestConstants.CREDENTIAL_SCHEMAS.generate_with_name_suffix_and_author(
-                            i.toString(),
-                            actor.recall("shortFormDid")
-                        )
-                    )
-                },
-            )
-            actor.should(
-                ResponseConsequence.seeThatResponse("New schema created") {
-                    it.statusCode(SC_CREATED)
-                },
-            )
-            createdSchemas.add(lastResponseObject("", CredentialSchema::class))
-        }
-        actor.remember("createdSchemas", createdSchemas)
-    }
-
-    @Then("{actor} can access all of them one by one")
-    fun theyCanBeAccessedWithPagination(actor: Actor) {
-        actor.recall<List<CredentialSchema>>("createdSchemas").forEach { schema ->
-            actor.attemptsTo(
-                Get.resource("/schema-registry/schemas/${schema.guid}"),
-            )
-            actor.should(
-                ResponseConsequence.seeThatResponse("Schema achieved") {
-                    it.statusCode(SC_OK)
-                },
-            )
-        }
-    }
-
-    @When("{actor} creates a new schema with some id")
-    fun acmeCreatesANewSchemaWithFixedId(actor: Actor) {
-        val wrongSchema = TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA
-        wrongSchema.guid = TestConstants.RANDOM_CONSTAND_UUID
-        actor.attemptsTo(
-            Post.to("/schema-registry/schemas").with {
-                it.body(wrongSchema)
-            },
-        )
-        actor.should(
-            ResponseConsequence.seeThatResponse("New schema created") {
-                it.statusCode(SC_CREATED)
-            },
-        )
-    }
-
-    @When("{actor} tries to create a new schema with identical id")
-    fun acmeTriesToCreateANewSchemaWithSameId(actor: Actor) {
-        val wrongSchema = TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA
-        wrongSchema.guid = TestConstants.RANDOM_CONSTAND_UUID
-        actor.attemptsTo(
-            Post.to("/schema-registry/schemas").with {
-                it.body(wrongSchema)
-            },
-        )
-    }
-
-    @Then("{actor} sees the request failure with identical id error")
-    fun idDuplicateErrorIsThrown(actor: Actor) {
-        try {
-            actor.should(
-                ResponseConsequence.seeThatResponse("New schema creation error: same UUID") {
-                    it.statusCode(SC_BAD_REQUEST)
-                },
-            )
-        } catch (err: AssertionError) {
-            println(err.message)
-            throw PendingException("BUG: New credential schema CAN be created with same UUID.")
-        }
-    }
-
-    @When("{actor} tries to create a new schema with {word} in field {word}")
-    fun acmeTriesToCreateANewSchemaWithField(actor: Actor, value: String, field: String) {
-        actor.attemptsTo(
-            Post.to("/schema-registry/schemas").with {
-                it.body(
-                    toJsonPath(TestConstants.CREDENTIAL_SCHEMAS.STUDENT_SCHEMA).set(field, value).jsonString(),
-                )
-            },
-        )
-    }
-
-    @When("{actor} tries to get schemas with {int} in parameter {word}")
-    fun acmeTriesToCreateANewSchemaWithParameter(actor: Actor, value: Int, parameter: String) {
-        actor.attemptsTo(
-
Get.resource("/schema-registry/schemas?$parameter=$value"), - ) - } - - @Then("{actor} sees the request with status {int}") - fun heSeesTheRequestFailureWithErrorStatus(actor: Actor, errorStatusCode: Int) { - try { - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(errorStatusCode) - }, - ) - } catch (err: AssertionError) { - println(err.message) - throw PendingException("BUG: credential schemas CAN be accessed with negative limit and offset.") - } - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/did/ManageDidSteps.kt b/tests/e2e-tests/src/test/kotlin/features/did/ManageDidSteps.kt deleted file mode 100644 index 84fd532c8c..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/did/ManageDidSteps.kt +++ /dev/null @@ -1,114 +0,0 @@ -package features.did - -import api_models.* -import common.Ensure -import common.TestConstants -import common.Utils.lastResponseList -import common.Utils.lastResponseObject -import common.Utils.toJsonPath -import io.cucumber.java.en.Given -import io.cucumber.java.en.Then -import io.cucumber.java.en.When -import net.serenitybdd.rest.SerenityRest.lastResponse -import net.serenitybdd.screenplay.Actor -import interactions.Get -import interactions.Post -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.SC_CREATED -import org.assertj.core.api.Assertions -import org.hamcrest.Matchers.* - -class ManageDidSteps { - - @Given("{actor} creates {int} PRISM DIDs") - fun createsMultipleManagedDids(actor: Actor, number: Int) { - repeat(number) { - createManageDid(actor) - } - actor.remember("number", number) - } - - @When("{actor} creates PRISM DID") - fun createManageDid(actor: Actor) { - val createDidRequest = createPrismDidRequest() - - actor.attemptsTo( - Post.to("/did-registrar/dids") - .with { - it.body(createDidRequest) - }, - ) - var createdDids = actor.recall>("createdDids") - if (createdDids == null) { - createdDids = mutableListOf() - } - createdDids.add(lastResponseObject("longFormDid", String::class)) - actor.remember("createdDids", createdDids) - } - - @When("{actor} tries to create PRISM DID with missing {word}") - fun triesToCreateManagedDidWithMissingField(actor: Actor, missingFieldPath: String) { - val createDidRequest = createPrismDidRequest() - val requestBody = toJsonPath(createDidRequest).delete(missingFieldPath).jsonString() - actor.attemptsTo( - Post.to("/did-registrar/dids") - .with { - it.body(requestBody) - }, - ) - } - - @When("{actor} tries to create a managed DID with value {word} in {word}") - fun trisToCreateManagedDidWithValueInField(actor: Actor, value: String, fieldPath: String) { - val createDidRequest = createPrismDidRequest() - val requestBody = toJsonPath(createDidRequest).set(fieldPath, value).jsonString() - actor.attemptsTo( - Post.to("/did-registrar/dids") - .with { - it.body(requestBody) - }, - ) - } - - @When("{actor} lists all PRISM DIDs") - fun iListManagedDids(actor: Actor) { - actor.attemptsTo( - Get.resource("/did-registrar/dids"), - ) - } - - @Then("{actor} sees PRISM DID was created successfully") - fun theDidShouldBeRegisteredSuccessfully(actor: Actor) { - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_CREATED) - it.body("longFormDid", not(emptyString())) - }, - ) - } - - @Then("{actor} sees the request has failed with error status {int}") - fun seesTheRequestHasFailedWithErrorStatus(actor: Actor, errorStatusCode: Int) { - Assertions.assertThat(lastResponse().statusCode).isEqualTo(errorStatusCode) - } - - 
@Then("{actor} sees the list contains all created DIDs") - fun seeTheListContainsAllCreatedDids(actor: Actor) { - val expectedDids = actor.recall>("createdDids") - val managedDidList = lastResponseList("contents.longFormDid", String::class) - actor.attemptsTo( - Ensure.that(managedDidList).containsElementsFrom(expectedDids) - ) - } - - private fun createPrismDidRequest(): CreatePrismDidRequest { - val publicKeys = listOf( - TestConstants.PRISM_DID_AUTH_KEY, - ) - val services = listOf( - TestConstants.PRISM_DID_SERVICE, - ) - val documentTemplate = DocumentTemplate(publicKeys, services) - return CreatePrismDidRequest(documentTemplate) - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/did/PublishDidSteps.kt b/tests/e2e-tests/src/test/kotlin/features/did/PublishDidSteps.kt deleted file mode 100644 index f95cf7f6da..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/did/PublishDidSteps.kt +++ /dev/null @@ -1,153 +0,0 @@ -package features.did - -import api_models.* -import common.ListenToEvents -import common.TestConstants -import common.Utils.lastResponseList -import common.Utils.lastResponseObject -import common.Utils.wait -import io.cucumber.java.en.Given -import io.cucumber.java.en.Then -import io.cucumber.java.en.When -import net.serenitybdd.screenplay.Actor -import interactions.Get -import interactions.Post -import net.serenitybdd.rest.SerenityRest -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.* -import org.assertj.core.api.Assertions.assertThat -import org.hamcrest.Matchers.* - -class PublishDidSteps { - - @Given("{actor} have published PRISM DID") - fun actorHavePublishedPrismDid(actor: Actor) { - actor.attemptsTo( - Get.resource("/did-registrar/dids"), - ) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - val publishedDids = lastResponseList("contents", ManagedDid::class).filter { - it.status == ManagedDidStatuses.PUBLISHED - } - val did = publishedDids.firstOrNull { - actor.attemptsTo( - Get.resource("/dids/${it.did}"), - ) - lastResponseObject("didDocumentMetadata.deactivated", String::class) == "false" - } - if (did == null) { - createsUnpublishedDid(actor) - hePublishesDidToLedger(actor) - } else { - actor.remember("shortFormDid", did.did) - } - } - - @Given("{actor} creates unpublished DID") - fun createsUnpublishedDid(actor: Actor) { - val publicKeys = listOf( - TestConstants.PRISM_DID_AUTH_KEY, - TestConstants.PRISM_DID_ASSERTION_KEY, - ) - val services = listOf( - TestConstants.PRISM_DID_SERVICE, - TestConstants.PRISM_DID_SERVICE_FOR_UPDATE, - TestConstants.PRISM_DID_SERVICE_TO_REMOVE, - ) - val documentTemplate = DocumentTemplate(publicKeys, services) - actor.attemptsTo( - Post.to("/did-registrar/dids") - .with { - it.body(CreatePrismDidRequest(documentTemplate)) - }, - ) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_CREATED) - it.body("longFormDid", not(emptyString())) - }, - ) - val longFormDid = lastResponseObject("longFormDid", String::class) - actor.remember("longFormDid", longFormDid) - - actor.attemptsTo( - Get.resource("/did-registrar/dids/$longFormDid"), - ) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - actor.remember( - "shortFormDid", - lastResponseObject("", ManagedDid::class).did, - ) - } - - @When("{actor} publishes DID to ledger") - fun hePublishesDidToLedger(actor: Actor) { - actor.attemptsTo( - Post.to("/did-registrar/dids/${actor.recall("shortFormDid")}/publications"), - 
) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_ACCEPTED) - it.body("scheduledOperation.didRef", not(emptyString())) - it.body("scheduledOperation.id", not(emptyString())) - }, - ) - wait( - { - val didEvent = - ListenToEvents.`as`(actor).didEvents.lastOrNull { - it.data.did == actor.recall("shortFormDid") - } - didEvent != null && didEvent.data.status == ManagedDidStatuses.PUBLISHED - }, - "ERROR: DID was not published to ledger!", - timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN, - ) - actor.attemptsTo( - Get.resource("/dids/${actor.recall("shortFormDid")}"), - ) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - it.body("didDocument.id", equalTo(actor.recall("shortFormDid"))) - }, - ) - } - - @Then("{actor} resolves DID document corresponds to W3C standard") - fun heSeesDidDocumentCorrespondsToW3cStandard(actor: Actor) { - val didDocument = lastResponseObject("", DidResolutionResult::class).didDocument!! - assertThat(didDocument) - .hasFieldOrProperty("assertionMethod") - .hasFieldOrProperty("authentication") - .hasFieldOrProperty("capabilityInvocation") - .hasFieldOrProperty("controller") - .hasFieldOrProperty("id") - .hasFieldOrProperty("keyAgreement") - .hasFieldOrProperty("service") - .hasFieldOrProperty("verificationMethod") - - val shortFormDid = actor.recall("shortFormDid") - - assertThat(didDocument.id == shortFormDid) - - assertThat(didDocument.authentication!![0]) - .isEqualTo("$shortFormDid#${TestConstants.PRISM_DID_AUTH_KEY.id}") - - assertThat(didDocument.verificationMethod!![0]) - .hasFieldOrPropertyWithValue("controller", shortFormDid) - .hasFieldOrProperty("publicKeyJwk") - - assertThat(lastResponseObject("", DidResolutionResult::class).didDocumentMetadata!!) - .hasFieldOrPropertyWithValue("deactivated", false) - .hasFieldOrProperty("canonicalId") - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/issue_credentials/IssueCredentialsSteps.kt b/tests/e2e-tests/src/test/kotlin/features/issue_credentials/IssueCredentialsSteps.kt deleted file mode 100644 index f57355234c..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/issue_credentials/IssueCredentialsSteps.kt +++ /dev/null @@ -1,133 +0,0 @@ -package features.issue_credentials - -import api_models.* -import common.ListenToEvents -import common.Utils.lastResponseObject -import common.Utils.wait -import io.cucumber.java.en.Then -import io.cucumber.java.en.When -import net.serenitybdd.screenplay.Actor -import interactions.Post -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.SC_CREATED -import org.apache.http.HttpStatus.SC_OK - -class IssueCredentialsSteps { - - var credentialEvent: CredentialEvent? 
= null - - @When("{actor} offers a credential to {actor} with {string} form DID") - fun acmeOffersACredential(issuer: Actor, holder: Actor, didForm: String) { - - val did: String = if (didForm == "short") - issuer.recall("shortFormDid") else issuer.recall("longFormDid") - - val newCredential = Credential( - schemaId = null, - validityPeriod = 3600.0, - automaticIssuance = false, - awaitConfirmation = false, - claims = linkedMapOf( - "firstName" to "FirstName", - "lastName" to "LastName", - ), - issuingDID = did, - connectionId = issuer.recall("connection-with-${holder.name}").connectionId, - ) - issuer.attemptsTo( - Post.to("/issue-credentials/credential-offers") - .with { - it.body(newCredential) - }, - ) - issuer.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_CREATED) - }, - ) - issuer.remember("thid", lastResponseObject("", Credential::class).thid) - holder.remember("thid", lastResponseObject("", Credential::class).thid) - } - - @When("{actor} receives the credential offer and accepts") - fun bobRequestsTheCredential(holder: Actor) { - wait( - { - credentialEvent = ListenToEvents.`as`(holder).credentialEvents.lastOrNull { - it.data.thid == holder.recall("thid") - } - credentialEvent != null && - credentialEvent!!.data.protocolState == CredentialState.OFFER_RECEIVED - }, - "Holder was unable to receive the credential offer from Issuer! Protocol state did not achieve OfferReceived state.", - ) - - val recordId = ListenToEvents.`as`(holder).credentialEvents.last().data.recordId - holder.remember("recordId", recordId) - - holder.attemptsTo( - Post.to("/issue-credentials/records/$recordId/accept-offer") - .with { - it.body(""" - { "subjectId": "${holder.recall("longFormDid")}" } - """.trimIndent()) - }, - ) - holder.should( - ResponseConsequence.seeThatResponse("Accept offer") { - it.statusCode(SC_OK) - }, - ) - } - - @When("{actor} issues the credential") - fun acmeIssuesTheCredential(issuer: Actor) { - wait( - { - credentialEvent = ListenToEvents.`as`(issuer).credentialEvents.lastOrNull { - it.data.thid == issuer.recall("thid") - } - credentialEvent != null && - credentialEvent!!.data.protocolState == CredentialState.REQUEST_RECEIVED - }, - "Issuer was unable to receive the credential request from Holder! Protocol state did not achieve RequestReceived state.", - ) - val recordId = credentialEvent!!.data.recordId - issuer.attemptsTo( - Post.to("/issue-credentials/records/$recordId/issue-credential"), - ) - issuer.should( - ResponseConsequence.seeThatResponse("Issue credential") { - it.statusCode(SC_OK) - }, - ) - - wait( - { - credentialEvent = ListenToEvents.`as`(issuer).credentialEvents.lastOrNull { - it.data.thid == issuer.recall("thid") - } - credentialEvent != null && - credentialEvent!!.data.protocolState == CredentialState.CREDENTIAL_SENT - }, - "Issuer was unable to issue the credential! " + - "Protocol state did not achieve ${CredentialState.CREDENTIAL_SENT} state.", - ) - } - - @Then("{actor} receives the issued credential") - fun bobHasTheCredentialIssued(holder: Actor) { - wait( - { - credentialEvent = ListenToEvents.`as`(holder).credentialEvents.lastOrNull { - it.data.thid == holder.recall("thid") - } - credentialEvent != null && - credentialEvent!!.data.protocolState == CredentialState.CREDENTIAL_RECEIVED - }, - "Holder was unable to receive the credential from Issuer! 
" + - "Protocol state did not achieve ${CredentialState.CREDENTIAL_RECEIVED} state.", - ) - holder.remember("issuedCredential", ListenToEvents.`as`(holder).credentialEvents.last().data) - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/multitenancy/EventsSteps.kt b/tests/e2e-tests/src/test/kotlin/features/multitenancy/EventsSteps.kt deleted file mode 100644 index 81acb9a978..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/multitenancy/EventsSteps.kt +++ /dev/null @@ -1,19 +0,0 @@ -package features.multitenancy - -import api_models.RegisterWebhookRequest -import interactions.Post -import net.serenitybdd.rest.SerenityRest -import net.serenitybdd.screenplay.Actor - -class EventsSteps { - fun registerNewWebhook(actor: Actor, webhookUrl: String) { - actor.attemptsTo( - Post.to("/events/webhooks") - .with { - it.body( - RegisterWebhookRequest(url = webhookUrl) - ) - }, - ) - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/present_proof/PresentProofSteps.kt b/tests/e2e-tests/src/test/kotlin/features/present_proof/PresentProofSteps.kt deleted file mode 100644 index 3641b9de23..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/present_proof/PresentProofSteps.kt +++ /dev/null @@ -1,133 +0,0 @@ -package features.present_proof - -import api_models.* -import common.ListenToEvents -import common.Utils.lastResponseObject -import common.Utils.wait -import interactions.Get -import io.cucumber.java.en.Then -import io.cucumber.java.en.When -import net.serenitybdd.screenplay.Actor -import interactions.Post -import interactions.Patch -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.SC_CREATED -import org.apache.http.HttpStatus.SC_OK - -class PresentProofSteps { - - var proofEvent: PresentationEvent? 
= null - - @When("{actor} sends a request for proof presentation to {actor}") - fun faberSendsARequestForProofPresentationToBob(faber: Actor, bob: Actor) { - faber.attemptsTo( - Post.to("/present-proof/presentations") - .with { - it.body( - """ - { - "description":"Request presentation of credential", - "connectionId": "${faber.recall("connection-with-${bob.name}").connectionId}", - "options":{ - "challenge": "11c91493-01b3-4c4d-ac36-b336bab5bddf", - "domain": "https://example-verifier.com" - }, - "proofs":[ - { - "schemaId": "https://schema.org/Person", - "trustIssuers": [ - "did:web:atalaprism.io/users/testUser" - ] - } - ] - } - """.trimIndent(), - ) - }, - ) - faber.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_CREATED) - }, - ) - - val presentationId = lastResponseObject("", PresentationProof::class).presentationId - faber.remember("presentationId", presentationId) - faber.attemptsTo( - Get.resource("/present-proof/presentations/${presentationId}"), - ) - faber.should( - ResponseConsequence.seeThatResponse("Get presentations") { - it.statusCode(SC_OK) - }, - ) - faber.remember("thid", lastResponseObject("", PresentationProof::class).thid) - bob.remember("thid", lastResponseObject("", PresentationProof::class).thid) - } - - @When("{actor} receives the request") - fun bobReceivesTheRequest(bob: Actor) { - wait( - { - proofEvent = ListenToEvents.`as`(bob).presentationEvents.lastOrNull { - it.data.thid == bob.recall("thid") - } - proofEvent != null && - proofEvent!!.data.status == PresentationProofStatus.REQUEST_RECEIVED - }, - "ERROR: Bob did not achieve any presentation request!", - ) - bob.remember("presentationId", proofEvent!!.data.presentationId) - } - - @When("{actor} makes the presentation of the proof to {actor}") - fun bobMakesThePresentationOfTheProof(bob: Actor, faber: Actor) { - bob.attemptsTo( - Patch.to("/present-proof/presentations/${bob.recall("presentationId")}").with { - it.body( - """ - { "action": "request-accept", "proofId": ["${bob.recall("issuedCredential").recordId}"] } - """.trimIndent(), - ) - }, - ) - } - - @When("{actor} rejects the proof") - fun bobRejectsProof(bob: Actor) { - bob.attemptsTo( - Patch.to("/present-proof/presentations/${bob.recall("presentationId")}").with { - it.body("""{ "action": "request-reject" }""") - }, - ) - } - - @Then("{actor} sees the proof is rejected") - fun bobSeesProofIsRejected(bob: Actor) { - wait( - { - proofEvent = ListenToEvents.`as`(bob).presentationEvents.lastOrNull { - it.data.thid == bob.recall("thid") - } - proofEvent != null && - proofEvent!!.data.status == PresentationProofStatus.REQUEST_REJECTED - }, - "ERROR: Faber did not receive presentation from Bob!", - ) - } - - @Then("{actor} has the proof verified") - fun faberHasTheProofVerified(faber: Actor) { - wait( - { - proofEvent = ListenToEvents.`as`(faber).presentationEvents.lastOrNull { - it.data.thid == faber.recall("thid") - } - - proofEvent != null && - proofEvent!!.data.status == PresentationProofStatus.PRESENTATION_VERIFIED - }, - "ERROR: presentation did not achieve PresentationVerified state!", - ) - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/system/SystemSteps.kt b/tests/e2e-tests/src/test/kotlin/features/system/SystemSteps.kt deleted file mode 100644 index 9835c7246b..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/system/SystemSteps.kt +++ /dev/null @@ -1,36 +0,0 @@ -package features.system - -import api_models.HealthInfo -import common.Utils.lastResponseObject -import io.cucumber.java.en.Then -import 
io.cucumber.java.en.When -import net.serenitybdd.screenplay.Actor -import interactions.Get -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus.SC_OK -import org.assertj.core.api.Assertions.assertThat - -class SystemSteps { - @When("{actor} makes a request to the health endpoint") - fun actorRequestsHealthEndpoint(actor: Actor) { - actor.attemptsTo( - Get.resource("/_system/health"), - ) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(SC_OK) - }, - ) - val healthResponse = lastResponseObject("", HealthInfo::class) - assertThat(healthResponse) - .hasFieldOrProperty("version") - .hasNoNullFieldsOrProperties() - actor.remember("version", healthResponse.version) - } - - @Then("{actor} knows what version of the service is running") - fun actorUnderstandsVersion(actor: Actor) { - assertThat(actor.recall("version")) - .isNotBlank() - } -} diff --git a/tests/e2e-tests/src/test/kotlin/features/verification_policies/VerificationPoliciesSteps.kt b/tests/e2e-tests/src/test/kotlin/features/verification_policies/VerificationPoliciesSteps.kt deleted file mode 100644 index 6f5d95280a..0000000000 --- a/tests/e2e-tests/src/test/kotlin/features/verification_policies/VerificationPoliciesSteps.kt +++ /dev/null @@ -1,96 +0,0 @@ -package features.verification_policies - -import api_models.VerificationPolicy -import api_models.VerificationPolicyInput -import common.TestConstants -import io.cucumber.java.en.Then -import io.cucumber.java.en.When -import net.serenitybdd.rest.SerenityRest -import net.serenitybdd.screenplay.Actor -import interactions.Put -import interactions.Post -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence -import org.apache.http.HttpStatus -import org.hamcrest.CoreMatchers -import org.hamcrest.Matchers -import java.util.* - - -class VerificationPoliciesSteps { - - @When("{actor} creates a new verification policy") - fun acmeCreatesANewVerificationPolicy(actor: Actor) { - actor.attemptsTo( - Post.to("/verification/policies").with { - it.body(TestConstants.VERIFICATION_POLICIES.VERIFICATION_POLICY) - }, - ) - } - - @Then("{actor} sees new verification policy is available") - fun newVerificationPolicyIsAvailable(actor: Actor) { - actor.should(ResponseConsequence.seeThatResponse("New policy created") { - it.statusCode(HttpStatus.SC_CREATED) - //it.body("", CoreMatchers.`is`(Matchers.emptyString())) - it.body("id", CoreMatchers.not(Matchers.emptyString())) - it.body("nonce", CoreMatchers.not(Matchers.emptyString())) - it.body("kind", Matchers.containsString("VerificationPolicy")) - it.body( - "name", - Matchers.containsString(TestConstants.VERIFICATION_POLICIES.VERIFICATION_POLICY.name) - ) - it.body( - "description", - Matchers.containsString(TestConstants.VERIFICATION_POLICIES.VERIFICATION_POLICY.description) - ) - TestConstants.VERIFICATION_POLICIES.VERIFICATION_POLICY.constraints!!.forEach { constraint -> - it.body("constraints.schemaId", CoreMatchers.hasItem(constraint.schemaId)) - it.body("constraints.trustedIssuers", CoreMatchers.hasItems(constraint.trustedIssuers!!)) - } - }) - val policy = SerenityRest.lastResponse().`as`(VerificationPolicy::class.java) - actor.remember("policy", policy) - } - - @When("{actor} updates a new verification policy") - fun acmeUpdatesAVerificationPolicy(actor: Actor) { - val policy = actor.recall("policy") - val updatePolicyInput = VerificationPolicyInput( - name = policy.name, - description = "updated description + ${UUID.randomUUID()}", - constraints = 
policy.constraints
-        )
-        actor.attemptsTo(
-            Put.to("/verification/policies/${policy.id}?nonce=${policy.nonce}").with {
-                it.body(updatePolicyInput)
-            },
-        )
-        actor.remember("updatedPolicyInput", updatePolicyInput)
-    }
-
-    @Then("{actor} sees the updated verification policy is available")
-    fun updatedVerificationPolicyIsAvailable(actor: Actor) {
-        val updatedPolicy = actor.forget<VerificationPolicy>("policy")
-        val updatePolicyInput = actor.forget<VerificationPolicyInput>("updatedPolicyInput")
-        actor.should(ResponseConsequence.seeThatResponse("Verification policy is updated") {
-            it.statusCode(HttpStatus.SC_OK)
-            it.body("id", CoreMatchers.`is`(Matchers.equalTo(updatedPolicy.id)))
-            it.body("nonce", CoreMatchers.not(Matchers.emptyString()))
-            it.body("kind", Matchers.containsString("VerificationPolicy"))
-            it.body(
-                "name",
-                Matchers.containsString(updatePolicyInput.name)
-            )
-            it.body(
-                "description",
-                Matchers.containsString(updatePolicyInput.description)
-            )
-            updatePolicyInput.constraints!!.forEach { constraint ->
-                it.body("constraints.schemaId", CoreMatchers.hasItem(constraint.schemaId))
-                it.body("constraints.trustedIssuers", CoreMatchers.hasItems(constraint.trustedIssuers!!))
-            }
-        })
-        val policy = SerenityRest.lastResponse().`as`(VerificationPolicy::class.java)
-        actor.remember("policy", policy)
-    }
-}
diff --git a/tests/e2e-tests/src/test/resources/cucumber.properties b/tests/e2e-tests/src/test/resources/cucumber.properties
deleted file mode 100644
index b48dd63bf1..0000000000
--- a/tests/e2e-tests/src/test/resources/cucumber.properties
+++ /dev/null
@@ -1 +0,0 @@
-cucumber.publish.quiet=true
diff --git a/tests/e2e-tests/src/test/resources/features/did_registrar/create_did.feature b/tests/e2e-tests/src/test/resources/features/did_registrar/create_did.feature
deleted file mode 100644
index 90d8b3b23a..0000000000
--- a/tests/e2e-tests/src/test/resources/features/did_registrar/create_did.feature
+++ /dev/null
@@ -1,39 +0,0 @@
-Feature: Create and publish DID
-
-@TEST_ATL-3838
-Scenario: Create PRISM DID
-    When Acme creates PRISM DID
-    Then He sees PRISM DID was created successfully
-
-@TEST_ATL-3839
-Scenario Outline: PRISM DID creation fails when required request fields are missing
-    Given Acme tries to create PRISM DID with missing <field>
-    Then He sees the request has failed with error status <error>
-Examples:
-    | field | error |
-    | documentTemplate | 400 |
-    | documentTemplate.publicKeys | 400 |
-    | documentTemplate.publicKeys[0].id | 400 |
-    | documentTemplate.publicKeys[0].purpose | 400 |
-    | documentTemplate.services | 400 |
-    | documentTemplate.services[0].id | 400 |
-    | documentTemplate.services[0].type | 400 |
-    | documentTemplate.services[0].serviceEndpoint | 400 |
-
-@TEST_ATL-3840
-Scenario Outline: PRISM DID creation fails with wrong formatted fields
-    Given Acme tries to create a managed DID with value <value> in <field>
-    Then He sees the request has failed with error status <error>
-Examples:
-    | field | value | error |
-    | documentTemplate.publicKeys[0].id | # | 422 |
-    | documentTemplate.publicKeys[0].purpose | potato | 400 |
-    | documentTemplate.services[0].id | # | 422 |
-    | documentTemplate.services[0].type | pot@to | 422 |
-    | documentTemplate.services[0].serviceEndpoint[0] | potato | 422 |
-
-@TEST_ATL-3842
-Scenario: Successfully publish DID to ledger
-    When Acme creates unpublished DID
-    And He publishes DID to ledger
-    Then He resolves DID document corresponds to W3C standard
diff --git a/tests/e2e-tests/src/test/resources/features/issue_credentials/issue_credentials.feature
b/tests/e2e-tests/src/test/resources/features/issue_credentials/issue_credentials.feature deleted file mode 100644 index 527e4fa667..0000000000 --- a/tests/e2e-tests/src/test/resources/features/issue_credentials/issue_credentials.feature +++ /dev/null @@ -1,23 +0,0 @@ -@RFC0453 @AIP20 -Feature: Issue Credentials Protocol - -@TEST_ATL-3849 -Scenario: Issuing credential with published PRISM DID to unpublished PRISM DID - Given Acme and Bob have an existing connection - When Acme creates unpublished DID - And He publishes DID to ledger - And Bob creates unpublished DID - And Acme offers a credential to Bob with "short" form DID - And Bob receives the credential offer and accepts - And Acme issues the credential - Then Bob receives the issued credential - -@TEST_ATL-3894 -Scenario: Issuing credential with unpublished PRISM DID to unpublished PRISM DID - Given Acme and Bob have an existing connection - When Acme creates unpublished DID - And Bob creates unpublished DID - And Acme offers a credential to Bob with "long" form DID - And Bob receives the credential offer and accepts - And Acme issues the credential - Then Bob receives the issued credential diff --git a/tests/e2e-tests/.gitignore b/tests/integration-tests/.gitignore similarity index 100% rename from tests/e2e-tests/.gitignore rename to tests/integration-tests/.gitignore diff --git a/tests/integration-tests/build.gradle.kts b/tests/integration-tests/build.gradle.kts new file mode 100644 index 0000000000..ea7b9a924f --- /dev/null +++ b/tests/integration-tests/build.gradle.kts @@ -0,0 +1,74 @@ +plugins { + idea + id("org.jetbrains.kotlin.jvm") version "1.9.0" + id("net.serenity-bdd.serenity-gradle-plugin") version "4.0.14" + id("org.jlleitschuh.gradle.ktlint") version "11.5.0" +} + +repositories { + mavenCentral() + maven { + url = uri("https://maven.pkg.github.com/input-output-hk/atala-automation/") + credentials { + username = System.getenv("ATALA_GITHUB_ACTOR") + password = System.getenv("ATALA_GITHUB_TOKEN") + } + } + maven { + url = uri("https://maven.pkg.github.com/hyperledger-labs/open-enterprise-agent/") + credentials { + username = System.getenv("ATALA_GITHUB_ACTOR") + password = System.getenv("ATALA_GITHUB_TOKEN") + } + } +} + +dependencies { + // Logging + implementation("org.slf4j:slf4j-log4j12:2.0.5") + // Beautify async waits + implementation("org.awaitility:awaitility-kotlin:4.2.0") + // Test engines and reports + testImplementation("junit:junit:4.13.2") + implementation("net.serenity-bdd:serenity-core:4.0.14") + implementation("net.serenity-bdd:serenity-cucumber:4.0.14") + implementation("net.serenity-bdd:serenity-screenplay-rest:4.0.14") + testImplementation("net.serenity-bdd:serenity-ensure:4.0.14") + // HTTP listener + implementation("io.ktor:ktor-server-netty:2.3.0") + implementation("io.ktor:ktor-client-apache:2.3.0") + // RestAPI client + implementation("io.iohk.atala.prism:prism-kotlin-client:1.18.0") + // Test helpers library + testImplementation("io.iohk.atala:atala-automation:0.3.0") + // Hoplite for configuration + implementation("com.sksamuel.hoplite:hoplite-core:2.7.5") + implementation("com.sksamuel.hoplite:hoplite-hocon:2.7.5") +} + +buildscript { + dependencies { + classpath("net.serenity-bdd:serenity-single-page-report:4.0.14") + classpath("net.serenity-bdd:serenity-json-summary-report:4.0.14") + } +} + +/** + * Add HTML one-pager and JSON summary report to be produced + */ +serenity { + reports = listOf("single-page-html", "json-summary") +} + +tasks.test { + testLogging.showStandardStreams = true + 
systemProperty("cucumber.filter.tags", System.getProperty("cucumber.filter.tags")) +} + +kotlin { + jvmToolchain(19) +} + +ktlint { + disabledRules.set(setOf("no-wildcard-imports")) +} diff --git a/tests/e2e-tests/gradle.properties b/tests/integration-tests/gradle.properties similarity index 100% rename from tests/e2e-tests/gradle.properties rename to tests/integration-tests/gradle.properties diff --git a/tests/e2e-tests/gradle/wrapper/gradle-wrapper.jar b/tests/integration-tests/gradle/wrapper/gradle-wrapper.jar similarity index 100% rename from tests/e2e-tests/gradle/wrapper/gradle-wrapper.jar rename to tests/integration-tests/gradle/wrapper/gradle-wrapper.jar diff --git a/tests/e2e-tests/gradle/wrapper/gradle-wrapper.properties b/tests/integration-tests/gradle/wrapper/gradle-wrapper.properties similarity index 100% rename from tests/e2e-tests/gradle/wrapper/gradle-wrapper.properties rename to tests/integration-tests/gradle/wrapper/gradle-wrapper.properties diff --git a/tests/e2e-tests/gradlew b/tests/integration-tests/gradlew similarity index 100% rename from tests/e2e-tests/gradlew rename to tests/integration-tests/gradlew diff --git a/tests/e2e-tests/gradlew.bat b/tests/integration-tests/gradlew.bat similarity index 100% rename from tests/e2e-tests/gradlew.bat rename to tests/integration-tests/gradlew.bat diff --git a/tests/e2e-tests/serenity.properties b/tests/integration-tests/serenity.properties similarity index 77% rename from tests/e2e-tests/serenity.properties rename to tests/integration-tests/serenity.properties index a4b2f36eb0..52ed622b2f 100644 --- a/tests/e2e-tests/serenity.properties +++ b/tests/integration-tests/serenity.properties @@ -1,4 +1,4 @@ -serenity.project.name=PRISM agent e2e tests +serenity.project.name=Open Enterprise Agent Integration tests serenity.reports.show.step.details=true serenity.console.colors=true simplified.stack.traces=false diff --git a/tests/integration-tests/settings.gradle.kts b/tests/integration-tests/settings.gradle.kts new file mode 100644 index 0000000000..e57032400b --- /dev/null +++ b/tests/integration-tests/settings.gradle.kts @@ -0,0 +1 @@ +rootProject.name = "integration-tests" diff --git a/tests/integration-tests/src/main/kotlin/models/AnoncredsSchema.kt b/tests/integration-tests/src/main/kotlin/models/AnoncredsSchema.kt new file mode 100644 index 0000000000..ae79a5d118 --- /dev/null +++ b/tests/integration-tests/src/main/kotlin/models/AnoncredsSchema.kt @@ -0,0 +1,17 @@ +package models + +import com.google.gson.annotations.SerializedName + +class AnoncredsSchema( + @SerializedName("name") + val name: String, + + @SerializedName("version") + val version: String, + + @SerializedName("issuerId") + val issuerId: String, + + @SerializedName("attrNames") + val attrNames: List +) diff --git a/tests/integration-tests/src/main/kotlin/models/Events.kt b/tests/integration-tests/src/main/kotlin/models/Events.kt new file mode 100644 index 0000000000..0295c3a713 --- /dev/null +++ b/tests/integration-tests/src/main/kotlin/models/Events.kt @@ -0,0 +1,48 @@ +package models + +import com.google.gson.JsonElement +import com.google.gson.annotations.SerializedName +import io.iohk.atala.prism.models.Connection +import io.iohk.atala.prism.models.IssueCredentialRecord +import io.iohk.atala.prism.models.ManagedDID +import io.iohk.atala.prism.models.PresentationStatus + +data class Event( + @SerializedName("type") var type: String, + @SerializedName("id") var id: String, + @SerializedName("ts") var ts: String, + @SerializedName("data") var data: 
JsonElement, + @SerializedName("walletId") var walletId: String +) + +data class ConnectionEvent( + @SerializedName("type") var type: String, + @SerializedName("id") var id: String, + @SerializedName("ts") var ts: String, + @SerializedName("data") var data: Connection, + @SerializedName("walletId") var walletId: String +) + +data class CredentialEvent( + @SerializedName("type") var type: String, + @SerializedName("id") var id: String, + @SerializedName("ts") var ts: String, + @SerializedName("data") var data: IssueCredentialRecord, + @SerializedName("walletId") var walletId: String +) + +data class PresentationEvent( + @SerializedName("type") var type: String, + @SerializedName("id") var id: String, + @SerializedName("ts") var ts: String, + @SerializedName("data") var data: PresentationStatus, + @SerializedName("walletId") var walletId: String +) + +data class DidEvent( + @SerializedName("type") var type: String, + @SerializedName("id") var id: String, + @SerializedName("ts") var ts: String, + @SerializedName("data") var data: ManagedDID, + @SerializedName("walletId") var walletId: String +) diff --git a/tests/integration-tests/src/main/kotlin/models/JsonSchema.kt b/tests/integration-tests/src/main/kotlin/models/JsonSchema.kt new file mode 100644 index 0000000000..dac0cef8a4 --- /dev/null +++ b/tests/integration-tests/src/main/kotlin/models/JsonSchema.kt @@ -0,0 +1,20 @@ +package models + +import com.google.gson.annotations.SerializedName + +data class JsonSchema( + @SerializedName("\$id") + var id: String = "", + + @SerializedName("\$schema") + var schema: String = "", + + @SerializedName("\$description") + var description: String = "", + + @SerializedName("type") + var type: String = "", + + @SerializedName("properties") + val properties: MutableMap = mutableMapOf() +) diff --git a/tests/integration-tests/src/main/kotlin/models/JsonSchemaProperty.kt b/tests/integration-tests/src/main/kotlin/models/JsonSchemaProperty.kt new file mode 100644 index 0000000000..dc118bf7b0 --- /dev/null +++ b/tests/integration-tests/src/main/kotlin/models/JsonSchemaProperty.kt @@ -0,0 +1,8 @@ +package models + +import com.google.gson.annotations.SerializedName + +data class JsonSchemaProperty( + @SerializedName("type") + var type: String = "" +) diff --git a/tests/e2e-tests/src/test/kotlin/common/ListenToEvents.kt b/tests/integration-tests/src/test/kotlin/common/ListenToEvents.kt similarity index 61% rename from tests/e2e-tests/src/test/kotlin/common/ListenToEvents.kt rename to tests/integration-tests/src/test/kotlin/common/ListenToEvents.kt index b221e3ce12..3efae51c49 100644 --- a/tests/e2e-tests/src/test/kotlin/common/ListenToEvents.kt +++ b/tests/integration-tests/src/test/kotlin/common/ListenToEvents.kt @@ -1,6 +1,7 @@ package common -import api_models.* +import com.google.gson.GsonBuilder +import io.iohk.atala.automation.restassured.CustomGsonObjectMapperFactory import io.ktor.http.* import io.ktor.server.application.* import io.ktor.server.engine.* @@ -8,19 +9,21 @@ import io.ktor.server.netty.* import io.ktor.server.request.* import io.ktor.server.response.* import io.ktor.server.routing.* -import kotlinx.serialization.decodeFromString -import kotlinx.serialization.json.Json +import models.* import net.serenitybdd.screenplay.Ability import net.serenitybdd.screenplay.Actor import net.serenitybdd.screenplay.HasTeardown -import java.lang.IllegalArgumentException +import java.net.URL +import java.time.OffsetDateTime open class ListenToEvents( - private val host: String, - private val port: Int, -): 
Ability, HasTeardown { + private val url: URL +) : Ability, HasTeardown { private val server: ApplicationEngine + private val gson = GsonBuilder() + .registerTypeAdapter(OffsetDateTime::class.java, CustomGsonObjectMapperFactory.OffsetDateTimeDeserializer()) + .create() var connectionEvents: MutableList = mutableListOf() var credentialEvents: MutableList = mutableListOf() @@ -31,13 +34,13 @@ open class ListenToEvents( application.routing { post("/") { val eventString = call.receiveText() - val event = Json.decodeFromString(eventString) + val event = gson.fromJson(eventString, Event::class.java) when (event.type) { - TestConstants.EVENT_TYPE_CONNECTION_UPDATED -> connectionEvents.add(Json.decodeFromString(eventString)) - TestConstants.EVENT_TYPE_ISSUE_CREDENTIAL_RECORD_UPDATED -> credentialEvents.add(Json.decodeFromString(eventString)) - TestConstants.EVENT_TYPE_PRESENTATION_UPDATED -> presentationEvents.add(Json.decodeFromString(eventString)) + TestConstants.EVENT_TYPE_CONNECTION_UPDATED -> connectionEvents.add(gson.fromJson(eventString, ConnectionEvent::class.java)) + TestConstants.EVENT_TYPE_ISSUE_CREDENTIAL_RECORD_UPDATED -> credentialEvents.add(gson.fromJson(eventString, CredentialEvent::class.java)) + TestConstants.EVENT_TYPE_PRESENTATION_UPDATED -> presentationEvents.add(gson.fromJson(eventString, PresentationEvent::class.java)) TestConstants.EVENT_TYPE_DID_STATUS_UPDATED -> { - didEvents.add(Json.decodeFromString(eventString)) + didEvents.add(gson.fromJson(eventString, DidEvent::class.java)) } else -> { throw IllegalArgumentException("ERROR: unknown event type ${event.type}") @@ -49,8 +52,8 @@ open class ListenToEvents( } companion object { - fun at(host: String, port: Int): ListenToEvents { - return ListenToEvents(host, port) + fun at(url: URL): ListenToEvents { + return ListenToEvents(url) } fun `as`(actor: Actor): ListenToEvents { @@ -61,14 +64,15 @@ open class ListenToEvents( init { server = embeddedServer( Netty, - port = port, - host = if (host == "host.docker.internal") "0.0.0.0" else host, - module = {route(this)}) + port = url.port, + host = if (url.host == "host.docker.internal") "0.0.0.0" else url.host, + module = { route(this) } + ) .start(wait = false) } override fun toString(): String { - return "Listen HTTP port at ${host}:${port}" + return "Listen HTTP port at $url" } override fun tearDown() { diff --git a/tests/integration-tests/src/test/kotlin/common/TestConstants.kt b/tests/integration-tests/src/test/kotlin/common/TestConstants.kt new file mode 100644 index 0000000000..36258df158 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/common/TestConstants.kt @@ -0,0 +1,90 @@ +package common + +import io.iohk.atala.prism.models.* +import models.JsonSchema +import models.JsonSchemaProperty +import java.time.Duration +import java.util.* + +object TestConstants { + val TEST_VERIFICATION_POLICY = VerificationPolicyInput( + name = "Trusted Issuer and SchemaID", + description = "Verification Policy with trusted issuer and schemaId", + constraints = listOf( + VerificationPolicyConstraint( + schemaId = "http://atalaprism.io/schemas/1.0/StudentCredential", + trustedIssuers = listOf( + "did:example:123456789abcdefghi", + "did:example:123456789abcdefghj" + ) + ) + ) + ) + val CREDENTIAL_SCHEMA_TYPE = "https://w3c-ccg.github.io/vc-json-schemas/schema/2.0/schema.json" + + val SCHEMA_TYPE_JSON = "https://json-schema.org/draft/2020-12/schema" + + val jsonSchema = JsonSchema( + id = "https://example.com/student-schema-1.0", + schema = SCHEMA_TYPE_JSON, + description = "Student 
schema", + type = "object", + properties = mutableMapOf( + "name" to JsonSchemaProperty(type = "string"), + "age" to JsonSchemaProperty(type = "integer") + ) + ) + + fun generate_with_name_suffix_and_author(suffix: String, author: String): CredentialSchemaInput { + return CredentialSchemaInput( + author = author, + name = "${UUID.randomUUID()} $suffix", + description = "Simple student credentials schema", + type = CREDENTIAL_SCHEMA_TYPE, + schema = jsonSchema, + tags = listOf("school", "students"), + version = "1.0.0" + ) + } + + val STUDENT_SCHEMA = CredentialSchemaInput( + author = "did:prism:agent", + name = UUID.randomUUID().toString(), + description = "Simple student credentials schema", + type = CREDENTIAL_SCHEMA_TYPE, + schema = jsonSchema, + tags = listOf("school", "students"), + version = "1.0.0" + ) + val RANDOM_CONSTAND_UUID = UUID.randomUUID().toString() + val DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN = Duration.ofSeconds(60L) + val PRISM_DID_AUTH_KEY = ManagedDIDKeyTemplate("auth-1", Purpose.AUTHENTICATION) + val PRISM_DID_ASSERTION_KEY = ManagedDIDKeyTemplate("assertion-1", Purpose.ASSERTION_METHOD) + val PRISM_DID_UPDATE_NEW_AUTH_KEY = ManagedDIDKeyTemplate("auth-2", Purpose.AUTHENTICATION) + val PRISM_DID_SERVICE = Service( + "https://foo.bar.com", + listOf("LinkedDomains"), + Json("https://foo.bar.com/") + ) + val PRISM_DID_SERVICE_FOR_UPDATE = Service( + "https://update.com", + listOf("LinkedDomains"), + Json("https://update.com/") + ) + val PRISM_DID_SERVICE_TO_REMOVE = Service( + "https://remove.com", + listOf("LinkedDomains"), + Json("https://remove.com/") + ) + val PRISM_DID_UPDATE_NEW_SERVICE_URL = "https://bar.foo.com/" + val PRISM_DID_UPDATE_NEW_SERVICE = Service( + "https://new.service.com", + listOf("LinkedDomains"), + Json("https://new.service.com/") + ) + val EVENT_TYPE_CONNECTION_UPDATED = "ConnectionUpdated" + val EVENT_TYPE_ISSUE_CREDENTIAL_RECORD_UPDATED = "IssueCredentialRecordUpdated" + val EVENT_TYPE_PRESENTATION_UPDATED = "PresentationUpdated" + val EVENT_TYPE_DID_STATUS_UPDATED = "DIDStatusUpdated" + val WRONG_SEED = "wrong seed" +} diff --git a/tests/integration-tests/src/test/kotlin/common/Utils.kt b/tests/integration-tests/src/test/kotlin/common/Utils.kt new file mode 100644 index 0000000000..2ccbd5b5c1 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/common/Utils.kt @@ -0,0 +1,29 @@ +package common + +import org.awaitility.Awaitility +import org.awaitility.core.ConditionTimeoutException +import org.awaitility.kotlin.withPollInterval +import org.awaitility.pollinterval.FixedPollInterval +import java.time.Duration + +object Utils { + fun wait( + blockToWait: () -> Boolean, + errorMessage: String, + poolInterval: FixedPollInterval = FixedPollInterval(Duration.ofMillis(500L)), + timeout: Duration = Duration.ofSeconds(120L) + ) { + try { + Awaitility.await().withPollInterval(poolInterval) + .pollInSameThread() + .atMost(timeout) + .until { + blockToWait() + } + } catch (err: ConditionTimeoutException) { + throw ConditionTimeoutException( + errorMessage + ) + } + } +} diff --git a/tests/integration-tests/src/test/kotlin/config/AgentConf.kt b/tests/integration-tests/src/test/kotlin/config/AgentConf.kt new file mode 100644 index 0000000000..3918ef0960 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/config/AgentConf.kt @@ -0,0 +1,11 @@ +package config + +import com.sksamuel.hoplite.ConfigAlias +import java.net.URL + +data class AgentConf( + val url: URL, + @ConfigAlias("webhook_url") val webhookUrl: URL?, + var apikey: String?, + 
@ConfigAlias("multi-tenant") val multiTenant: Boolean?, +) diff --git a/tests/integration-tests/src/test/kotlin/config/Config.kt b/tests/integration-tests/src/test/kotlin/config/Config.kt new file mode 100644 index 0000000000..c4abe8b62d --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/config/Config.kt @@ -0,0 +1,9 @@ +package config + +data class Config( + val global: GlobalConf, + val issuer: AgentConf, + val holder: AgentConf, + val verifier: AgentConf, + val admin: AgentConf +) diff --git a/tests/integration-tests/src/test/kotlin/config/GlobalConf.kt b/tests/integration-tests/src/test/kotlin/config/GlobalConf.kt new file mode 100644 index 0000000000..e3e6d89a34 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/config/GlobalConf.kt @@ -0,0 +1,10 @@ +package config + +import com.sksamuel.hoplite.ConfigAlias + +data class GlobalConf( + @ConfigAlias("auth_required") val authRequired: Boolean, + @ConfigAlias("auth_header") val authHeader: String, + @ConfigAlias("admin_auth_header") val adminAuthHeader: String, + @ConfigAlias("admin_apikey") val adminApiKey: String +) diff --git a/tests/integration-tests/src/test/kotlin/features/CommonSteps.kt b/tests/integration-tests/src/test/kotlin/features/CommonSteps.kt new file mode 100644 index 0000000000..95a9ab8762 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/CommonSteps.kt @@ -0,0 +1,210 @@ +package features + +import com.sksamuel.hoplite.ConfigLoader +import common.ListenToEvents +import config.AgentConf +import config.Config +import features.connection.ConnectionSteps +import features.credentials.IssueCredentialsSteps +import features.did.PublishDidSteps +import features.multitenancy.EventsSteps +import interactions.Get +import io.cucumber.java.AfterAll +import io.cucumber.java.BeforeAll +import io.cucumber.java.ParameterType +import io.cucumber.java.en.Given +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.* +import io.restassured.RestAssured +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import net.serenitybdd.screenplay.actors.Cast +import net.serenitybdd.screenplay.actors.OnStage +import net.serenitybdd.screenplay.rest.abilities.CallAnApi +import org.apache.http.HttpStatus +import org.apache.http.HttpStatus.SC_OK +import java.util.* +import kotlin.random.Random + +@OptIn(ExperimentalStdlibApi::class) +fun createWalletAndEntity(agentConf: AgentConf) { + val config = ConfigLoader().loadConfigOrThrow("/tests.conf") + val createWalletResponse = RestAssured + .given().body( + CreateWalletRequest( + name = UUID.randomUUID().toString(), + seed = Random.nextBytes(64).toHexString(), + id = UUID.randomUUID() + ) + ) + .header(config.global.adminAuthHeader, config.global.adminApiKey) + .post("${agentConf.url}/wallets") + .thenReturn() + Ensure.that(createWalletResponse.statusCode).isEqualTo(HttpStatus.SC_CREATED) + val wallet = createWalletResponse.body.jsonPath().getObject("", WalletDetail::class.java) + val tenantResponse = RestAssured + .given().body( + CreateEntityRequest( + name = UUID.randomUUID().toString(), + walletId = wallet.id + ) + ) + .header(config.global.adminAuthHeader, config.global.adminApiKey) + .post("${agentConf.url}/iam/entities") + .thenReturn() + Ensure.that(tenantResponse.statusCode).isEqualTo(HttpStatus.SC_CREATED) + val entity = tenantResponse.body.jsonPath().getObject("", EntityResponse::class.java) + val addApiKeyResponse = + RestAssured + .given().body( + 
ApiKeyAuthenticationRequest( + entityId = entity.id, + apiKey = agentConf.apikey!! + ) + ) + .header(config.global.adminAuthHeader, config.global.adminApiKey) + .post("${agentConf.url}/iam/apikey-authentication") + .thenReturn() + Ensure.that(addApiKeyResponse.statusCode).isEqualTo(HttpStatus.SC_CREATED) + val registerIssuerWebhookResponse = + RestAssured + .given().body( + CreateWebhookNotification( + url = agentConf.webhookUrl!!.toExternalForm() + ) + ) + .header(config.global.authHeader, agentConf.apikey) + .post("${agentConf.url}/events/webhooks") + .thenReturn() + Ensure.that(registerIssuerWebhookResponse.statusCode).isEqualTo(HttpStatus.SC_CREATED) +} + +@BeforeAll +fun initAgents() { + val cast = Cast() + val config = ConfigLoader().loadConfigOrThrow("/tests.conf") + cast.actorNamed( + "Admin", + CallAnApi.at(config.admin.url.toExternalForm()) + ) + cast.actorNamed( + "Acme", + CallAnApi.at(config.issuer.url.toExternalForm()), + ListenToEvents.at(config.issuer.webhookUrl!!) + ) + cast.actorNamed( + "Bob", + CallAnApi.at(config.holder.url.toExternalForm()), + ListenToEvents.at(config.holder.webhookUrl!!) + ) + cast.actorNamed( + "Faber", + CallAnApi.at(config.verifier.url.toExternalForm()), + ListenToEvents.at(config.verifier.webhookUrl!!) + ) + OnStage.setTheStage(cast) + + // Create issuer wallet and tenant + if (config.issuer.multiTenant!!) { + createWalletAndEntity(config.issuer) + } + // Create verifier wallet + if (config.verifier.multiTenant!!) { + createWalletAndEntity(config.verifier) + } + + cast.actors.forEach { actor -> + when (actor.name) { + "Acme" -> { + actor.remember("AUTH_KEY", config.issuer.apikey) + } + "Bob" -> { + actor.remember("AUTH_KEY", config.holder.apikey) + } + "Faber" -> { + actor.remember("AUTH_KEY", config.verifier.apikey) + } + } + } +} + +@AfterAll +fun clearStage() { + OnStage.drawTheCurtain() +} + +class CommonSteps { + @ParameterType(".*") + fun actor(actorName: String): Actor { + return OnStage.theActorCalled(actorName) + } + + @Given("{actor} has an issued credential from {actor}") + fun holderHasIssuedCredentialFromIssuer(holder: Actor, issuer: Actor) { + holder.attemptsTo( + Get.resource("/issue-credentials/records") + ) + holder.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + val receivedCredential = SerenityRest.lastResponse().get().contents!!.findLast { credential -> + credential.protocolState == IssueCredentialRecord.ProtocolState.CREDENTIAL_RECEIVED + && credential.credentialFormat == IssueCredentialRecord.CredentialFormat.JWT + } + + if (receivedCredential != null) { + holder.remember("issuedCredential", receivedCredential) + } else { + val publishDidSteps = PublishDidSteps() + val issueSteps = IssueCredentialsSteps() + actorsHaveExistingConnection(issuer, holder) + publishDidSteps.createsUnpublishedDid(holder) + publishDidSteps.createsUnpublishedDid(issuer) + publishDidSteps.hePublishesDidToLedger(issuer) + issueSteps.acmeOffersACredential(issuer, holder, "short") + issueSteps.holderReceivesCredentialOffer(holder) + issueSteps.holderAcceptsCredentialOfferForJwt(holder) + issueSteps.acmeIssuesTheCredential(issuer) + issueSteps.bobHasTheCredentialIssued(holder) + } + } + + @Given("{actor} and {actor} have an existing connection") + fun actorsHaveExistingConnection(inviter: Actor, invitee: Actor) { + inviter.attemptsTo( + Get.resource("/connections") + ) + inviter.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + val inviterConnection = 
SerenityRest.lastResponse().get().contents!!.firstOrNull { + it.label == "Connection with ${invitee.name}" && it.state == Connection.State.CONNECTION_RESPONSE_SENT + } + + var inviteeConnection: Connection? = null + if (inviterConnection != null) { + invitee.attemptsTo( + Get.resource("/connections") + ) + invitee.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + inviteeConnection = SerenityRest.lastResponse().get().contents!!.firstOrNull { + it.theirDid == inviterConnection.myDid && it.state == Connection.State.CONNECTION_RESPONSE_RECEIVED + } + } + + if (inviterConnection != null && inviteeConnection != null) { + inviter.remember("connection-with-${invitee.name}", inviterConnection) + invitee.remember("connection-with-${inviter.name}", inviteeConnection) + } else { + val connectionSteps = ConnectionSteps() + connectionSteps.inviterGeneratesAConnectionInvitation(inviter, invitee) + connectionSteps.inviteeSendsAConnectionRequestToInviter(invitee, inviter) + connectionSteps.inviterReceivesTheConnectionRequest(inviter) + connectionSteps.inviteeReceivesTheConnectionResponse(invitee) + connectionSteps.inviterAndInviteeHaveAConnection(inviter, invitee) + } + } +} diff --git a/tests/integration-tests/src/test/kotlin/features/connection/ConnectionSteps.kt b/tests/integration-tests/src/test/kotlin/features/connection/ConnectionSteps.kt new file mode 100644 index 0000000000..301ff5064e --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/connection/ConnectionSteps.kt @@ -0,0 +1,135 @@ +package features.connection + +import common.ListenToEvents +import common.Utils.wait +import interactions.Get +import interactions.Post +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.AcceptConnectionInvitationRequest +import io.iohk.atala.prism.models.Connection +import io.iohk.atala.prism.models.CreateConnectionRequest +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus.SC_CREATED +import org.apache.http.HttpStatus.SC_OK +import org.assertj.core.api.Assertions.assertThat + +class ConnectionSteps { + + @When("{actor} generates a connection invitation to {actor}") + fun inviterGeneratesAConnectionInvitation(inviter: Actor, invitee: Actor) { + // Acme(Issuer) initiates a connection + // and sends it to Bob(Holder) out-of-band, e.g. 
using QR-code + val connectionLabel = "Connection with ${invitee.name}" + inviter.attemptsTo( + Post.to("/connections") + .with { + it.body( + CreateConnectionRequest(label = connectionLabel) + ) + } + ) + + val connection = SerenityRest.lastResponse().get() + + inviter.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED), + Ensure.that(connection.label!!).isEqualTo(connectionLabel), + Ensure.that(connection.state).isEqualTo(Connection.State.INVITATION_GENERATED), + Ensure.that(connection.role).isEqualTo(Connection.Role.INVITER) + ) + + // Acme remembers connection to send it out of band to Bob + inviter.remember("connection", connection) + } + + @When("{actor} sends a connection request to {actor}") + fun inviteeSendsAConnectionRequestToInviter(invitee: Actor, inviter: Actor) { + // Bob accepts connection using achieved out-of-band invitation + val inviterConnection = inviter.recall("connection") + invitee.attemptsTo( + Post.to("/connection-invitations") + .with { + it.body( + AcceptConnectionInvitationRequest( + inviterConnection.invitation.invitationUrl.split("=")[1] + ) + ) + } + ) + val inviteeConnection = SerenityRest.lastResponse().get() + + invitee.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK), + Ensure.that(inviteeConnection.invitation.from).isEqualTo(inviterConnection.invitation.from), + Ensure.that(inviteeConnection.invitation.id).isEqualTo(inviterConnection.invitation.id), + Ensure.that(inviteeConnection.invitation.invitationUrl).isEqualTo(inviterConnection.invitation.invitationUrl), + Ensure.that(inviteeConnection.invitation.type).isEqualTo(inviterConnection.invitation.type), + Ensure.that(inviteeConnection.state).isEqualTo(Connection.State.CONNECTION_REQUEST_PENDING), + Ensure.that(inviteeConnection.role).isEqualTo(Connection.Role.INVITEE) + ) + + invitee.remember("connection", inviteeConnection) + } + + @When("{actor} receives the connection request and sends back the response") + fun inviterReceivesTheConnectionRequest(inviter: Actor) { + wait( + { + val lastEvent = ListenToEvents.`as`(inviter).connectionEvents.lastOrNull { + it.data.thid == inviter.recall("connection").thid + } + lastEvent != null && + lastEvent.data.state == Connection.State.CONNECTION_RESPONSE_SENT + }, + "Inviter connection didn't reach ${Connection.State.CONNECTION_RESPONSE_SENT} state" + ) + } + + @When("{actor} receives the connection response") + fun inviteeReceivesTheConnectionResponse(invitee: Actor) { + // Bob (Holder) receives final connection response + wait( + { + val lastEvent = ListenToEvents.`as`(invitee).connectionEvents.lastOrNull { + it.data.thid == invitee.recall("connection").thid + } + lastEvent != null && + lastEvent.data.state == Connection.State.CONNECTION_RESPONSE_RECEIVED + }, + "Invitee connection didn't reach ${Connection.State.CONNECTION_RESPONSE_RECEIVED} state." + ) + } + + @Then("{actor} and {actor} have a connection") + fun inviterAndInviteeHaveAConnection(inviter: Actor, invitee: Actor) { + // Connection established. 
Both parties exchanged their DIDs with each other + inviter.attemptsTo( + Get.resource("/connections/${inviter.recall("connection").connectionId}") + ) + inviter.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + inviter.remember("connection-with-${invitee.name}", SerenityRest.lastResponse().get()) + + invitee.attemptsTo( + Get.resource("/connections/${invitee.recall("connection").connectionId}") + ) + invitee.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + invitee.remember("connection-with-${inviter.name}", SerenityRest.lastResponse().get()) + + assertThat(inviter.recall("connection-with-${invitee.name}").myDid) + .isEqualTo(invitee.recall("connection-with-${inviter.name}").theirDid) + assertThat(inviter.recall("connection-with-${invitee.name}").theirDid) + .isEqualTo(invitee.recall("connection-with-${inviter.name}").myDid) + assertThat(inviter.recall("connection-with-${invitee.name}").state) + .isEqualTo(Connection.State.CONNECTION_RESPONSE_SENT) + assertThat(invitee.recall("connection-with-${inviter.name}").state) + .isEqualTo(Connection.State.CONNECTION_RESPONSE_RECEIVED) + } +} diff --git a/tests/integration-tests/src/test/kotlin/features/credentials/IssueCredentialsSteps.kt b/tests/integration-tests/src/test/kotlin/features/credentials/IssueCredentialsSteps.kt new file mode 100644 index 0000000000..8238474631 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/credentials/IssueCredentialsSteps.kt @@ -0,0 +1,248 @@ +package features.credentials + +import common.ListenToEvents +import common.Utils.wait +import interactions.Post +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.* +import models.AnoncredsSchema +import models.CredentialEvent +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import net.serenitybdd.screenplay.rest.abilities.CallAnApi +import org.apache.http.HttpStatus.SC_CREATED +import org.apache.http.HttpStatus.SC_OK +import java.util.* + +class IssueCredentialsSteps { + + var credentialEvent: CredentialEvent? 
= null + + @When("{actor} offers a credential to {actor} with {string} form DID") + fun acmeOffersACredential(issuer: Actor, holder: Actor, didForm: String) { + val did: String = if (didForm == "short") { + issuer.recall("shortFormDid") + } else { + issuer.recall("longFormDid") + } + + val credentialOfferRequest = CreateIssueCredentialRecordRequest( + schemaId = null, + claims = linkedMapOf( + "firstName" to "FirstName", + "lastName" to "LastName" + ), + issuingDID = did, + connectionId = issuer.recall("connection-with-${holder.name}").connectionId, + validityPeriod = 3600.0, + credentialFormat = "JWT", + automaticIssuance = false + ) + + issuer.attemptsTo( + Post.to("/issue-credentials/credential-offers") + .with { + it.body(credentialOfferRequest) + } + ) + + val credentialRecord = SerenityRest.lastResponse().get() + + issuer.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) + ) + + issuer.remember("thid", credentialRecord.thid) + holder.remember("thid", credentialRecord.thid) + } + + @When("{actor} creates anoncred schema") + fun acmeCreatesAnoncredSchema(issuer: Actor) { + issuer.attemptsTo( + Post.to("/schema-registry/schemas") + .with { + it.body( + CredentialSchemaInput( + author = issuer.recall("shortFormDid"), + name = UUID.randomUUID().toString(), + description = "Simple student credentials schema", + type = "AnoncredSchemaV1", + schema = AnoncredsSchema( + name = "StudentCredential", + version = "1.0", + issuerId = issuer.recall("shortFormDid"), + attrNames = listOf("name", "age") + ), + tags = listOf("school", "students"), + version = "1.0.0" + ) + ) + } + ) + issuer.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) + ) + val schema = SerenityRest.lastResponse().get() + issuer.remember("anoncredsSchema", schema) + } + + @When("{actor} creates anoncred credential definition") + fun acmeCreatesAnoncredCredentialDefinition(issuer: Actor) { + val schemaRegistryUrl = issuer.usingAbilityTo(CallAnApi::class.java).resolve("/schema-registry/schemas") + .replace("localhost", "host.docker.internal") + issuer.attemptsTo( + Post.to("/credential-definition-registry/definitions") + .with { + it.body( + CredentialDefinitionInput( + name = "StudentCredential", + version = "1.0.0", + schemaId = "$schemaRegistryUrl/${issuer.recall("anoncredsSchema").guid}", + description = "Simple student credentials definition", + author = issuer.recall("shortFormDid"), + signatureType = "CL", + tag = "student", + supportRevocation = false, + ) + ) + } + ) + issuer.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) + ) + val credentialDefinition = SerenityRest.lastResponse().get() + issuer.remember("anoncredsCredentialDefinition", credentialDefinition) + } + + @When("{actor} offers anoncred to {actor}") + fun acmeOffersAnoncredToBob(issuer: Actor, holder: Actor) { + val credentialOfferRequest = CreateIssueCredentialRecordRequest( + credentialDefinitionId = issuer.recall("anoncredsCredentialDefinition").guid, + claims = linkedMapOf( + "name" to "Bob", + "age" to "21" + ), + issuingDID = issuer.recall("shortFormDid"), + connectionId = issuer.recall("connection-with-${holder.name}").connectionId, + validityPeriod = 3600.0, + credentialFormat = "AnonCreds", + automaticIssuance = false + ) + + issuer.attemptsTo( + Post.to("/issue-credentials/credential-offers") + .with { + it.body(credentialOfferRequest) + } + ) + + val credentialRecord = SerenityRest.lastResponse().get() + + issuer.attemptsTo( + 
Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) + ) + + issuer.remember("thid", credentialRecord.thid) + holder.remember("thid", credentialRecord.thid) + } + + @When("{actor} receives the credential offer") + fun holderReceivesCredentialOffer(holder: Actor) { + wait( + { + credentialEvent = ListenToEvents.`as`(holder).credentialEvents.lastOrNull { + it.data.thid == holder.recall("thid") + } + credentialEvent != null && + credentialEvent!!.data.protocolState == IssueCredentialRecord.ProtocolState.OFFER_RECEIVED + }, + "Holder was unable to receive the credential offer from Issuer! " + + "Protocol state did not achieve ${IssueCredentialRecord.ProtocolState.OFFER_RECEIVED} state." + ) + + val recordId = ListenToEvents.`as`(holder).credentialEvents.last().data.recordId + holder.remember("recordId", recordId) + } + + @When("{actor} accepts credential offer for JWT") + fun holderAcceptsCredentialOfferForJwt(holder: Actor) { + holder.attemptsTo( + Post.to("/issue-credentials/records/${holder.recall("recordId")}/accept-offer") + .with { + it.body( + AcceptCredentialOfferRequest(holder.recall("longFormDid")) + ) + } + ) + holder.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + } + + @When("{actor} accepts credential offer for anoncred") + fun holderAcceptsCredentialOfferForAnoncred(holder: Actor) { + holder.attemptsTo( + Post.to("/issue-credentials/records/${holder.recall("recordId")}/accept-offer") + .with { + it.body( + "{}" + ) + } + ) + holder.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + } + + @When("{actor} issues the credential") + fun acmeIssuesTheCredential(issuer: Actor) { + wait( + { + credentialEvent = ListenToEvents.`as`(issuer).credentialEvents.lastOrNull { + it.data.thid == issuer.recall("thid") + } + credentialEvent != null && + credentialEvent!!.data.protocolState == IssueCredentialRecord.ProtocolState.REQUEST_RECEIVED + }, + "Issuer was unable to receive the credential request from Holder! Protocol state did not achieve RequestReceived state." + ) + val recordId = credentialEvent!!.data.recordId + issuer.attemptsTo( + Post.to("/issue-credentials/records/$recordId/issue-credential") + ) + issuer.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + + wait( + { + credentialEvent = ListenToEvents.`as`(issuer).credentialEvents.lastOrNull { + it.data.thid == issuer.recall("thid") + } + credentialEvent != null && + credentialEvent!!.data.protocolState == IssueCredentialRecord.ProtocolState.CREDENTIAL_SENT + }, + "Issuer was unable to issue the credential! " + + "Protocol state did not achieve ${IssueCredentialRecord.ProtocolState.CREDENTIAL_SENT} state." + ) + } + + @Then("{actor} receives the issued credential") + fun bobHasTheCredentialIssued(holder: Actor) { + wait( + { + credentialEvent = ListenToEvents.`as`(holder).credentialEvents.lastOrNull { + it.data.thid == holder.recall("thid") + } + credentialEvent != null && + credentialEvent!!.data.protocolState == IssueCredentialRecord.ProtocolState.CREDENTIAL_RECEIVED + }, + "Holder was unable to receive the credential from Issuer! " + + "Protocol state did not achieve ${IssueCredentialRecord.ProtocolState.CREDENTIAL_RECEIVED} state." 
+ ) + holder.remember("issuedCredential", ListenToEvents.`as`(holder).credentialEvents.last().data) + } +} diff --git a/tests/e2e-tests/src/test/kotlin/features/did/DeactivateDidSteps.kt b/tests/integration-tests/src/test/kotlin/features/did/DeactivateDidSteps.kt similarity index 55% rename from tests/e2e-tests/src/test/kotlin/features/did/DeactivateDidSteps.kt rename to tests/integration-tests/src/test/kotlin/features/did/DeactivateDidSteps.kt index d5c56a029f..29e7a7434e 100644 --- a/tests/e2e-tests/src/test/kotlin/features/did/DeactivateDidSteps.kt +++ b/tests/integration-tests/src/test/kotlin/features/did/DeactivateDidSteps.kt @@ -1,31 +1,33 @@ package features.did -import common.ListenToEvents import common.TestConstants -import common.Utils.lastResponseObject import common.Utils.wait +import interactions.Get +import interactions.Post import io.cucumber.java.en.Then import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.DIDOperationResponse +import io.iohk.atala.prism.models.DIDResolutionResult +import net.serenitybdd.rest.SerenityRest import net.serenitybdd.screenplay.Actor -import interactions.Get -import interactions.Post -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence import org.apache.http.HttpStatus -import org.hamcrest.Matchers class DeactivateDidSteps { @When("{actor} deactivates PRISM DID") fun actorIssuesDeactivateDidOperation(actor: Actor) { actor.attemptsTo( - Post.to("/did-registrar/dids/${actor.recall("shortFormDid")}/deactivations"), + Post.to("/did-registrar/dids/${actor.recall("shortFormDid")}/deactivations") ) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(HttpStatus.SC_ACCEPTED) - it.body("scheduledOperation.didRef", Matchers.not(Matchers.emptyString())) - it.body("scheduledOperation.id", Matchers.not(Matchers.emptyString())) - }, + + val didOperationResponse = SerenityRest.lastResponse().get() + + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(HttpStatus.SC_ACCEPTED), + Ensure.that(didOperationResponse.scheduledOperation.didRef).isNotEmpty(), + Ensure.that(didOperationResponse.scheduledOperation.id).isNotEmpty() ) } @@ -34,12 +36,12 @@ class DeactivateDidSteps { wait( { actor.attemptsTo( - Get.resource("/dids/${actor.recall("shortFormDid")}"), + Get.resource("/dids/${actor.recall("shortFormDid")}") ) - lastResponseObject("didDocumentMetadata.deactivated", String::class) == "true" + SerenityRest.lastResponse().get().didDocumentMetadata.deactivated!! 
}, "ERROR: DID deactivate operation did not succeed on the ledger!", - timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN, + timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN ) } } diff --git a/tests/integration-tests/src/test/kotlin/features/did/ManageDidSteps.kt b/tests/integration-tests/src/test/kotlin/features/did/ManageDidSteps.kt new file mode 100644 index 0000000000..d041db6987 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/did/ManageDidSteps.kt @@ -0,0 +1,85 @@ +package features.did + +import interactions.Get +import interactions.Post +import io.cucumber.java.en.Given +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.* +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus.SC_CREATED + +class ManageDidSteps { + + @Given("{actor} creates {int} PRISM DIDs") + fun createsMultipleManagedDids(actor: Actor, number: Int) { + repeat(number) { + createManageDid(actor) + } + actor.remember("number", number) + } + + @When("{actor} creates PRISM DID") + fun createManageDid(actor: Actor) { + val createDidRequest = createPrismDidRequest() + + actor.attemptsTo( + Post.to("/did-registrar/dids") + .with { + it.body(createDidRequest) + } + ) + + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) + ) + + var createdDids = actor.recall>("createdDids") + if (createdDids == null) { + createdDids = mutableListOf() + } + + val managedDid = SerenityRest.lastResponse().get() + + createdDids.add(managedDid.longFormDid!!) + actor.remember("createdDids", createdDids) + } + + @When("{actor} lists all PRISM DIDs") + fun iListManagedDids(actor: Actor) { + actor.attemptsTo( + Get.resource("/did-registrar/dids") + ) + } + + @Then("{actor} sees PRISM DID was created successfully") + fun theDidShouldBeRegisteredSuccessfully(actor: Actor) { + val managedDid = SerenityRest.lastResponse().get() + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED), + Ensure.that(managedDid.longFormDid!!).isNotEmpty() + ) + } + + @Then("{actor} sees the list contains all created DIDs") + fun seeTheListContainsAllCreatedDids(actor: Actor) { + val expectedDids = actor.recall>("createdDids") + val managedDidList = SerenityRest.lastResponse().get().contents!! + .filter { it.status == "CREATED" }.map { it.longFormDid!! 
} + actor.attemptsTo( + Ensure.that(managedDidList).containsElementsFrom(expectedDids) + ) + } + + private fun createPrismDidRequest(): CreateManagedDidRequest = CreateManagedDidRequest( + CreateManagedDidRequestDocumentTemplate( + publicKeys = listOf(ManagedDIDKeyTemplate("auth-1", Purpose.AUTHENTICATION)), + services = listOf( + Service("https://foo.bar.com", listOf("LinkedDomains"), Json("https://foo.bar.com/")) + ) + ) + ) +} diff --git a/tests/integration-tests/src/test/kotlin/features/did/PublishDidSteps.kt b/tests/integration-tests/src/test/kotlin/features/did/PublishDidSteps.kt new file mode 100644 index 0000000000..76167565e3 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/did/PublishDidSteps.kt @@ -0,0 +1,140 @@ +package features.did + +import common.ListenToEvents +import common.TestConstants +import common.Utils.wait +import interactions.Get +import interactions.Post +import io.cucumber.java.en.Given +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.* +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus +import org.apache.http.HttpStatus.SC_CREATED +import org.apache.http.HttpStatus.SC_OK + +class PublishDidSteps { + + @Given("{actor} have published PRISM DID") + fun actorHavePublishedPrismDid(actor: Actor) { + actor.attemptsTo( + Get.resource("/did-registrar/dids") + ) + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + val publishedDids = SerenityRest.lastResponse().get().contents!!.filter { + // TODO: fix openapi spec to have statuses as enum + it.status == "PUBLISHED" + } + val did = publishedDids.firstOrNull { + actor.attemptsTo( + Get.resource("/dids/${it.did}") + ) + !SerenityRest.lastResponse().get().didDocumentMetadata.deactivated!! 
+ } + if (did == null) { + createsUnpublishedDid(actor) + hePublishesDidToLedger(actor) + } else { + actor.remember("shortFormDid", did.did) + } + } + + @Given("{actor} creates unpublished DID") + fun createsUnpublishedDid(actor: Actor) { + val createDidRequest = CreateManagedDidRequest( + CreateManagedDidRequestDocumentTemplate( + publicKeys = listOf( + ManagedDIDKeyTemplate("auth-1", Purpose.AUTHENTICATION), + ManagedDIDKeyTemplate("assertion-1", Purpose.ASSERTION_METHOD) + ), + services = listOf( + Service("https://foo.bar.com", listOf("LinkedDomains"), Json("https://foo.bar.com/")), + Service("https://update.com", listOf("LinkedDomains"), Json("https://update.com/")), + Service("https://remove.com", listOf("LinkedDomains"), Json("https://remove.com/")) + ) + ) + ) + actor.attemptsTo( + Post.to("/did-registrar/dids") + .with { + it.body(createDidRequest) + } + ) + + val managedDid = SerenityRest.lastResponse().get() + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED), + Ensure.that(managedDid.longFormDid!!).isNotEmpty() + ) + + actor.remember("longFormDid", managedDid.longFormDid) + + actor.attemptsTo( + Get.resource("/did-registrar/dids/${managedDid.longFormDid}") + ) + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + val did = SerenityRest.lastResponse().get() + actor.remember( + "shortFormDid", + did.did + ) + } + + @When("{actor} publishes DID to ledger") + fun hePublishesDidToLedger(actor: Actor) { + actor.attemptsTo( + Post.to("/did-registrar/dids/${actor.recall("shortFormDid")}/publications") + ) + val didOperationResponse = SerenityRest.lastResponse().get() + + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(HttpStatus.SC_ACCEPTED), + Ensure.that(didOperationResponse.scheduledOperation.didRef).isNotEmpty(), + Ensure.that(didOperationResponse.scheduledOperation.id).isNotEmpty() + ) + + wait( + { + val didEvent = + ListenToEvents.`as`(actor).didEvents.lastOrNull { + it.data.did == actor.recall("shortFormDid") + } + didEvent != null && didEvent.data.status == "PUBLISHED" + }, + "ERROR: DID was not published to ledger!", + timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN + ) + actor.attemptsTo( + Get.resource("/dids/${actor.recall("shortFormDid")}") + ) + + val didDocument = SerenityRest.lastResponse().get().didDocument!! + + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK), + Ensure.that(didDocument.id).isEqualTo(actor.recall("shortFormDid")) + ) + } + + @Then("{actor} resolves DID document corresponds to W3C standard") + fun heSeesDidDocumentCorrespondsToW3cStandard(actor: Actor) { + val didResolutionResult = SerenityRest.lastResponse().get() + val didDocument = didResolutionResult.didDocument!! 
+ val shortFormDid = actor.recall("shortFormDid") + actor.attemptsTo( + Ensure.that(didDocument.id).isEqualTo(shortFormDid), + Ensure.that(didDocument.authentication!![0]) + .isEqualTo("$shortFormDid#${TestConstants.PRISM_DID_AUTH_KEY.id}"), + Ensure.that(didDocument.verificationMethod!![0].controller).isEqualTo(shortFormDid), + Ensure.that(didResolutionResult.didDocumentMetadata.deactivated!!).isFalse() + ) + } +} diff --git a/tests/e2e-tests/src/test/kotlin/features/did/UpdateDidSteps.kt b/tests/integration-tests/src/test/kotlin/features/did/UpdateDidSteps.kt similarity index 63% rename from tests/e2e-tests/src/test/kotlin/features/did/UpdateDidSteps.kt rename to tests/integration-tests/src/test/kotlin/features/did/UpdateDidSteps.kt index 3d566291de..dad0341d95 100644 --- a/tests/e2e-tests/src/test/kotlin/features/did/UpdateDidSteps.kt +++ b/tests/integration-tests/src/test/kotlin/features/did/UpdateDidSteps.kt @@ -1,69 +1,72 @@ package features.did -import api_models.* import common.TestConstants -import common.Utils.lastResponseList import common.Utils.wait +import interactions.Get +import interactions.Post import io.cucumber.java.en.Then import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.* +import net.serenitybdd.rest.SerenityRest import net.serenitybdd.screenplay.Actor -import interactions.Get -import interactions.Post -import net.serenitybdd.screenplay.rest.questions.ResponseConsequence import org.apache.http.HttpStatus -import org.hamcrest.Matchers.emptyString -import org.hamcrest.Matchers.not class UpdateDidSteps { @When("{actor} updates PRISM DID by adding new keys") fun actorUpdatesPrismDidByAddingNewKeys(actor: Actor) { - val updatePrismDidAction = UpdatePrismDidAction( - actionType = "ADD_KEY", - addKey = TestConstants.PRISM_DID_UPDATE_NEW_AUTH_KEY, + val updatePrismDidAction = UpdateManagedDIDRequestAction( + actionType = ActionType.ADD_KEY, + ManagedDIDKeyTemplate("auth-2", Purpose.AUTHENTICATION) ) actor.remember("updatePrismDidAction", updatePrismDidAction) } @When("{actor} updates PRISM DID by removing keys") fun actorUpdatesPrismDidByRemovingKeys(actor: Actor) { - val updatePrismDidAction = UpdatePrismDidAction( - actionType = "REMOVE_KEY", - removeKey = TestConstants.PRISM_DID_AUTH_KEY, + val updatePrismDidAction = UpdateManagedDIDRequestAction( + actionType = ActionType.REMOVE_KEY, + removeKey = RemoveEntryById("auth-1") ) actor.remember("updatePrismDidAction", updatePrismDidAction) } @When("{actor} updates PRISM DID with new services") fun actorUpdatesPrismDidWithNewServices(actor: Actor) { - val updatePrismDidAction = UpdatePrismDidAction( - actionType = "ADD_SERVICE", - addService = TestConstants.PRISM_DID_UPDATE_NEW_SERVICE, + val updatePrismDidAction = UpdateManagedDIDRequestAction( + actionType = ActionType.ADD_SERVICE, + addService = Service( + "https://new.service.com", + listOf("LinkedDomains"), + Json("https://new.service.com/") + ) ) actor.remember("updatePrismDidAction", updatePrismDidAction) } @When("{actor} updates PRISM DID by removing services") fun actorUpdatesPrismDidByRemovingServices(actor: Actor) { - val updatePrismDidAction = UpdatePrismDidAction( - actionType = "REMOVE_SERVICE", - removeService = TestConstants.PRISM_DID_UPDATE_NEW_SERVICE, + val updatePrismDidAction = UpdateManagedDIDRequestAction( + actionType = ActionType.REMOVE_SERVICE, + removeService = RemoveEntryById("https://new.service.com") ) 
actor.remember("updatePrismDidAction", updatePrismDidAction) } @When("{actor} updates PRISM DID by updating services") fun actorUpdatesPrismDidByUpdatingServices(actor: Actor) { - val newService = Service( + val newService = UpdateManagedDIDServiceAction( id = TestConstants.PRISM_DID_SERVICE_FOR_UPDATE.id, type = TestConstants.PRISM_DID_SERVICE_FOR_UPDATE.type, - serviceEndpoint = listOf( - TestConstants.PRISM_DID_UPDATE_NEW_SERVICE_URL, - ), + serviceEndpoint = Json( + TestConstants.PRISM_DID_UPDATE_NEW_SERVICE_URL + ) ) - val updatePrismDidAction = UpdatePrismDidAction( - actionType = "UPDATE_SERVICE", - updateService = newService, + val updatePrismDidAction = UpdateManagedDIDRequestAction( + actionType = ActionType.UPDATE_SERVICE, + updateService = newService ) actor.remember("updatePrismDidAction", updatePrismDidAction) } @@ -73,15 +76,14 @@ class UpdateDidSteps { actor.attemptsTo( Post.to("/did-registrar/dids/${actor.recall("shortFormDid")}/updates") .with { - it.body(UpdatePrismDidRequest(listOf(actor.recall("updatePrismDidAction")))) - }, + it.body(UpdateManagedDIDRequest(listOf(actor.recall("updatePrismDidAction")))) + } ) - actor.should( - ResponseConsequence.seeThatResponse { - it.statusCode(HttpStatus.SC_ACCEPTED) - it.body("scheduledOperation.didRef", not(emptyString())) - it.body("scheduledOperation.id", not(emptyString())) - }, + val didOperationResponse = SerenityRest.lastResponse().get() + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(HttpStatus.SC_ACCEPTED), + Ensure.that(didOperationResponse.scheduledOperation.didRef).isNotEmpty(), + Ensure.that(didOperationResponse.scheduledOperation.id).isNotEmpty() ) } @@ -90,10 +92,10 @@ class UpdateDidSteps { wait( { actor.attemptsTo( - Get.resource("/dids/${actor.recall("shortFormDid")}"), + Get.resource("/dids/${actor.recall("shortFormDid")}") ) - val authUris = lastResponseList("didDocument.authentication", String::class) - val verificationMethods = lastResponseList("didDocument.verificationMethod.id", String::class) + val authUris = SerenityRest.lastResponse().get().didDocument!!.authentication!! + val verificationMethods = SerenityRest.lastResponse().get().didDocument!!.verificationMethod!!.map { it.id } authUris.any { it == "${actor.recall("shortFormDid")}#${TestConstants.PRISM_DID_UPDATE_NEW_AUTH_KEY.id}" } && verificationMethods.any { @@ -101,7 +103,7 @@ class UpdateDidSteps { } }, "ERROR: DID UPDATE operation did not succeed on the ledger!", - timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN, + timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN ) } @@ -110,10 +112,10 @@ class UpdateDidSteps { wait( { actor.attemptsTo( - Get.resource("/dids/${actor.recall("shortFormDid")}"), + Get.resource("/dids/${actor.recall("shortFormDid")}") ) - val authUris = lastResponseList("didDocument.authentication", String::class) - val verificationMethods = lastResponseList("didDocument.verificationMethod.id", String::class) + val authUris = SerenityRest.lastResponse().get().didDocument!!.authentication!! 
+ val verificationMethods = SerenityRest.lastResponse().get().didDocument!!.verificationMethod!!.map { it.id } authUris.none { it == "${actor.recall("shortFormDid")}#${TestConstants.PRISM_DID_AUTH_KEY.id}" } && verificationMethods.none { @@ -121,7 +123,7 @@ class UpdateDidSteps { } }, "ERROR: DID UPDATE operation did not succeed on the ledger!", - timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN, + timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN ) } @@ -130,15 +132,15 @@ class UpdateDidSteps { wait( { actor.attemptsTo( - Get.resource("/dids/${actor.recall("shortFormDid")}"), + Get.resource("/dids/${actor.recall("shortFormDid")}") ) - val serviceIds = lastResponseList("didDocument.service.id", String::class) + val serviceIds = SerenityRest.lastResponse().get().didDocument!!.service!!.map { it.id } serviceIds.any { it == "${actor.recall("shortFormDid")}#${TestConstants.PRISM_DID_UPDATE_NEW_SERVICE.id}" } }, "ERROR: DID UPDATE operation did not succeed on the ledger!", - timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN, + timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN ) } @@ -147,15 +149,15 @@ class UpdateDidSteps { wait( { actor.attemptsTo( - Get.resource("/dids/${actor.recall("shortFormDid")}"), + Get.resource("/dids/${actor.recall("shortFormDid")}") ) - val serviceIds = lastResponseList("didDocument.service.id", String::class) + val serviceIds = SerenityRest.lastResponse().get().didDocument!!.service!!.map { it.id } serviceIds.none { it == "${actor.recall("shortFormDid")}#${TestConstants.PRISM_DID_UPDATE_NEW_SERVICE.id}" } }, "ERROR: DID UPDATE operation did not succeed on the ledger!", - timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN, + timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN ) } @@ -164,13 +166,13 @@ class UpdateDidSteps { wait( { actor.attemptsTo( - Get.resource("/dids/${actor.recall("shortFormDid")}"), + Get.resource("/dids/${actor.recall("shortFormDid")}") ) - val service = lastResponseList("didDocument.service", Service::class) - service.any { it.serviceEndpoint.contains(TestConstants.PRISM_DID_UPDATE_NEW_SERVICE_URL) } + val service = SerenityRest.lastResponse().get().didDocument!!.service!! 
+ service.any { it.serviceEndpoint.value.contains(TestConstants.PRISM_DID_UPDATE_NEW_SERVICE_URL) } }, "ERROR: DID UPDATE operation did not succeed on the ledger!", - timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN, + timeout = TestConstants.DID_UPDATE_PUBLISH_MAX_WAIT_5_MIN ) } } diff --git a/tests/e2e-tests/src/test/kotlin/features/multitenancy/EntitySteps.kt b/tests/integration-tests/src/test/kotlin/features/multitenancy/EntitySteps.kt similarity index 53% rename from tests/e2e-tests/src/test/kotlin/features/multitenancy/EntitySteps.kt rename to tests/integration-tests/src/test/kotlin/features/multitenancy/EntitySteps.kt index 3e45e7d4f3..4a0c88e297 100644 --- a/tests/e2e-tests/src/test/kotlin/features/multitenancy/EntitySteps.kt +++ b/tests/integration-tests/src/test/kotlin/features/multitenancy/EntitySteps.kt @@ -1,10 +1,11 @@ package features.multitenancy -import api_models.CreateEntityRequest -import api_models.AddApiKeyRequest -import common.Ensure -import common.Utils import interactions.Post +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.ApiKeyAuthenticationRequest +import io.iohk.atala.prism.models.CreateEntityRequest +import io.iohk.atala.prism.models.EntityResponse import net.serenitybdd.rest.SerenityRest import net.serenitybdd.screenplay.Actor import org.apache.http.HttpStatus.SC_CREATED @@ -14,9 +15,10 @@ class EntitySteps { fun createNewEntity( actor: Actor, - walletId: String, + walletId: UUID, name: String = "", - id: String = UUID.randomUUID().toString()): String { + id: UUID = UUID.randomUUID() + ): EntityResponse { actor.attemptsTo( Post.to("/iam/entities") .with { @@ -24,31 +26,31 @@ class EntitySteps { CreateEntityRequest( walletId = walletId, name = name, - id = id, + id = id ) ) - }, + } ) actor.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_CREATED) + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) ) - return Utils.lastResponseObject("id", String::class) + return SerenityRest.lastResponse().get() } - fun addNewApiKeyToEntity(actor: Actor, entityId: String, apiKey: String) { + fun addNewApiKeyToEntity(actor: Actor, entityId: UUID, apiKey: String) { actor.attemptsTo( Post.to("/iam/apikey-authentication") .with { it.body( - AddApiKeyRequest( + ApiKeyAuthenticationRequest( entityId = entityId, - apiKey = apiKey, + apiKey = apiKey ) ) - }, + } ) actor.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_CREATED) + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) ) } } diff --git a/tests/integration-tests/src/test/kotlin/features/multitenancy/EventsSteps.kt b/tests/integration-tests/src/test/kotlin/features/multitenancy/EventsSteps.kt new file mode 100644 index 0000000000..0348674dc4 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/multitenancy/EventsSteps.kt @@ -0,0 +1,24 @@ +package features.multitenancy + +import interactions.Post +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.CreateWebhookNotification +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus + +class EventsSteps { + fun registerNewWebhook(actor: Actor, webhookUrl: String) { + actor.attemptsTo( + Post.to("/events/webhooks") + .with { + it.body( + CreateWebhookNotification(url = webhookUrl) + ) + } + ) + + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(HttpStatus.SC_CREATED) + ) + } +} diff --git 
a/tests/e2e-tests/src/test/kotlin/features/multitenancy/WalletsSteps.kt b/tests/integration-tests/src/test/kotlin/features/multitenancy/WalletsSteps.kt similarity index 58% rename from tests/e2e-tests/src/test/kotlin/features/multitenancy/WalletsSteps.kt rename to tests/integration-tests/src/test/kotlin/features/multitenancy/WalletsSteps.kt index b6815edb9e..d3f3d47d71 100644 --- a/tests/e2e-tests/src/test/kotlin/features/multitenancy/WalletsSteps.kt +++ b/tests/integration-tests/src/test/kotlin/features/multitenancy/WalletsSteps.kt @@ -1,21 +1,22 @@ package features.multitenancy -import api_models.CreateWalletRequest -import common.Ensure import common.TestConstants -import common.Utils import interactions.Get import interactions.Post import io.cucumber.java.en.Given import io.cucumber.java.en.Then import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.CreateWalletRequest +import io.iohk.atala.prism.models.WalletDetail +import io.iohk.atala.prism.models.WalletDetailPage import net.serenitybdd.rest.SerenityRest import net.serenitybdd.screenplay.Actor import org.apache.http.HttpStatus.* import java.util.* import kotlin.random.Random - class WalletsSteps { @OptIn(ExperimentalStdlibApi::class) @@ -23,7 +24,8 @@ class WalletsSteps { actor: Actor, name: String = "test-wallet", seed: String = Random.nextBytes(64).toHexString(), - id: String = UUID.randomUUID().toString()) { + id: UUID = UUID.randomUUID() + ): WalletDetail { actor.attemptsTo( Post.to("/wallets") .with { @@ -31,33 +33,32 @@ class WalletsSteps { CreateWalletRequest( name = name, seed = seed, - id = id, + id = id ) ) - }, + } ) + return SerenityRest.lastResponse().get() } @When("{actor} creates new wallet with name {string}") fun iCreateNewWalletWithName(acme: Actor, name: String) { - createNewWallet(acme, name) + val wallet = createNewWallet(acme, name) acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_CREATED) - .withReportedError("Response status code is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) ) - acme.remember("walletId", Utils.lastResponseObject("id", String::class)) + acme.remember("walletId", wallet.id) } @When("{actor} creates new wallet with unique id") fun acmeCreateNewWalletWithId(acme: Actor) { - val uniqueId = UUID.randomUUID().toString() + val uniqueId = UUID.randomUUID() acme.remember("uniqueId", uniqueId) - createNewWallet(acme, id = uniqueId) + val wallet = createNewWallet(acme, id = uniqueId) acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_CREATED) - .withReportedError("Response status code is not correct!"), - Ensure.that(Utils.lastResponseObject("id", String::class)).isEqualTo(uniqueId) - .withReportedError("Wallet id is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED), + Ensure.that(wallet.id).isEqualTo(uniqueId) + .withReportedError("Wallet id is not correct!") ) } @@ -65,8 +66,7 @@ class WalletsSteps { fun acmeCreateNewWalletWithTheSameId(acme: Actor) { createNewWallet(acme, id = acme.recall("uniqueId")) acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_BAD_REQUEST) - .withReportedError("Response status code is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_BAD_REQUEST) ) } @@ -76,8 +76,7 @@ class WalletsSteps { acme.remember("uniqueName", name) createNewWallet(acme, name = name) 
acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_CREATED) - .withReportedError("Response status code is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) ) } @@ -85,8 +84,7 @@ class WalletsSteps { fun acmeCreatesNewWalletWithTheSameUniqueName(acme: Actor) { createNewWallet(acme, name = acme.recall("uniqueName")) acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_CREATED) - .withReportedError("Response status code is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) ) } @@ -96,13 +94,15 @@ class WalletsSteps { Get.resource("/wallets/${acme.recall("walletId")}") .with { it.queryParam("name", name) - }, + } ) + val wallet = SerenityRest.lastResponse().get<WalletDetail>() + acme.attemptsTo( - Ensure.that(Utils.lastResponseObject("name", String::class)).isEqualTo(name) + Ensure.that(wallet.name).isEqualTo(name) .withReportedError("Wallet name is not correct!"), - Ensure.that(Utils.lastResponseObject("id", String::class)).isEqualTo(acme.recall("walletId")) - .withReportedError("Wallet id is not correct!"), + Ensure.that(wallet.id).isEqualTo(acme.recall("walletId")) + .withReportedError("Wallet id is not correct!") ) } @@ -112,33 +112,30 @@ class WalletsSteps { Get.resource("/wallets") ) acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_OK) - .withReportedError("Response status code is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) ) - val wallets = Utils.lastResponseList("contents.name", String::class).filter { it == acme.recall("uniqueName") } + val wallets = SerenityRest.lastResponse().get<WalletDetailPage>().contents!!.filter { it.name == acme.recall("uniqueName") } acme.attemptsTo( Ensure.that(wallets.size).isEqualTo(2) - .withReportedError("Two wallets with the same name were not created!"), + .withReportedError("Two wallets with the same name were not created!") ) } @Then("{actor} should have only one wallet and second operation should fail") fun acmeShouldHaveOnlyOneWalletAndSecondOperationShouldFail(acme: Actor) { acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_BAD_REQUEST) - .withReportedError("Response status code is not correct!") + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_BAD_REQUEST) ) acme.attemptsTo( Get.resource("/wallets") ) acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_OK) - .withReportedError("Response status code is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) ) - val wallets = Utils.lastResponseList("contents.id", String::class).filter { it == acme.recall("uniqueId") } + val wallets = SerenityRest.lastResponse().get<WalletDetailPage>().contents!!.filter { it.id == acme.recall("uniqueId") } acme.attemptsTo( Ensure.that(wallets.size).isEqualTo(1) - .withReportedError("Only one wallet should be created with the same id!"), + .withReportedError("Only one wallet should be created with the same id!") ) } @@ -150,8 +147,7 @@ class WalletsSteps { @Then("{actor} should see the error and wallet should not be created") fun acmeShouldSeeTheErrorAndWalletShouldNotBeCreated(acme: Actor) { acme.attemptsTo( - Ensure.that(SerenityRest.lastResponse().statusCode).isEqualTo(SC_BAD_REQUEST) - .withReportedError("Response status code is not correct!"), + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_BAD_REQUEST) ) } } diff --git a/tests/integration-tests/src/test/kotlin/features/proofs/PresentProofSteps.kt
b/tests/integration-tests/src/test/kotlin/features/proofs/PresentProofSteps.kt new file mode 100644 index 0000000000..c1fa5976e2 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/proofs/PresentProofSteps.kt @@ -0,0 +1,124 @@ +package features.proofs + +import common.ListenToEvents +import common.Utils.wait +import interactions.Patch +import interactions.Post +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.* +import models.PresentationEvent +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus.SC_CREATED + +class PresentProofSteps { + + private var proofEvent: PresentationEvent? = null + + @When("{actor} sends a request for proof presentation to {actor}") + fun faberSendsARequestForProofPresentationToBob(faber: Actor, bob: Actor) { + val presentationRequest = RequestPresentationInput( + connectionId = faber.recall<Connection>("connection-with-${bob.name}").connectionId, + options = Options( + challenge = "11c91493-01b3-4c4d-ac36-b336bab5bddf", + domain = "https://example-verifier.com" + ), + proofs = listOf( + ProofRequestAux( + schemaId = "https://schema.org/Person", + trustIssuers = listOf("did:web:atalaprism.io/users/testUser") + ) + ) + ) + faber.attemptsTo( + Post.to("/present-proof/presentations") + .with { + it.body( + presentationRequest + ) + } + ) + faber.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) + ) + val presentationStatus = SerenityRest.lastResponse().get<PresentationStatus>() + faber.remember("thid", presentationStatus.thid) + bob.remember("thid", presentationStatus.thid) + } + + @When("{actor} receives the request") + fun bobReceivesTheRequest(bob: Actor) { + wait( + { + proofEvent = ListenToEvents.`as`(bob).presentationEvents.lastOrNull { + it.data.thid == bob.recall("thid") + } + proofEvent != null && + proofEvent!!.data.status == PresentationStatus.Status.REQUEST_RECEIVED + }, + "ERROR: Bob did not receive any presentation request!" + ) + bob.remember("presentationId", proofEvent!!.data.presentationId) + } + + @When("{actor} makes the presentation of the proof to {actor}") + fun bobMakesThePresentationOfTheProof(bob: Actor, faber: Actor) { + val requestPresentationAction = RequestPresentationAction( + proofId = listOf(bob.recall<IssueCredentialRecord>("issuedCredential").recordId), + action = RequestPresentationAction.Action.REQUEST_MINUS_ACCEPT + ) + + bob.attemptsTo( + Patch.to("/present-proof/presentations/${bob.recall("presentationId")}").with { + it.body( + requestPresentationAction + ) + } + ) + } + + @When("{actor} rejects the proof") + fun bobRejectsProof(bob: Actor) { + bob.attemptsTo( + Patch.to("/present-proof/presentations/${bob.recall("presentationId")}").with { + it.body( + RequestPresentationAction( + action = RequestPresentationAction.Action.REQUEST_MINUS_REJECT + ) + ) + } + ) + } + + @Then("{actor} sees the proof is rejected") + fun bobSeesProofIsRejected(bob: Actor) { + wait( + { + proofEvent = ListenToEvents.`as`(bob).presentationEvents.lastOrNull { + it.data.thid == bob.recall("thid") + } + proofEvent != null && + proofEvent!!.data.status == PresentationStatus.Status.REQUEST_REJECTED + }, + "ERROR: Bob did not see the proof as rejected!"
+ ) + } + + @Then("{actor} has the proof verified") + fun faberHasTheProofVerified(faber: Actor) { + wait( + { + proofEvent = ListenToEvents.`as`(faber).presentationEvents.lastOrNull { + it.data.thid == faber.recall("thid") + } + + proofEvent != null && + proofEvent!!.data.status == PresentationStatus.Status.PRESENTATION_VERIFIED + }, + "ERROR: presentation did not achieve PresentationVerified state!" + ) + } +} diff --git a/tests/integration-tests/src/test/kotlin/features/schemas/CredentialSchemasSteps.kt b/tests/integration-tests/src/test/kotlin/features/schemas/CredentialSchemasSteps.kt new file mode 100644 index 0000000000..b1aad0e237 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/schemas/CredentialSchemasSteps.kt @@ -0,0 +1,84 @@ +package features + +import common.TestConstants +import interactions.Get +import interactions.Post +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.CredentialSchemaResponse +import models.JsonSchema +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus.SC_CREATED +import org.apache.http.HttpStatus.SC_OK + +class CredentialSchemasSteps { + + @When("{actor} creates a new credential schema") + fun acmeCreatesANewCredentialSchema(actor: Actor) { + actor.attemptsTo( + Post.to("/schema-registry/schemas").with { + it.body( + TestConstants.STUDENT_SCHEMA.copy(author = actor.recall("shortFormDid")) + ) + } + ) + } + + @Then("{actor} sees new credential schema is available") + fun newCredentialSchemaIsAvailable(actor: Actor) { + val credentialSchema = SerenityRest.lastResponse().get<CredentialSchemaResponse>() + val jsonSchema = SerenityRest.lastResponse().get<JsonSchema>("schema") + + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED), + Ensure.that(credentialSchema.guid).isNotNull(), + Ensure.that(credentialSchema.id).isNotNull(), + Ensure.that(credentialSchema.longId!!).isNotNull(), + Ensure.that(credentialSchema.authored).isNotNull(), + Ensure.that(credentialSchema.kind).isEqualTo("CredentialSchema"), + Ensure.that(credentialSchema.name).contains(TestConstants.STUDENT_SCHEMA.name), + Ensure.that(credentialSchema.description).contains(TestConstants.STUDENT_SCHEMA.description!!), + Ensure.that(credentialSchema.version).contains(TestConstants.STUDENT_SCHEMA.version), + Ensure.that(credentialSchema.type).isEqualTo(TestConstants.CREDENTIAL_SCHEMA_TYPE), + Ensure.that(credentialSchema.tags!!).containsExactlyInAnyOrderElementsFrom(TestConstants.STUDENT_SCHEMA.tags!!), + Ensure.that(jsonSchema.toString()).isEqualTo(TestConstants.jsonSchema.toString()) + ) + } + + @When("{actor} creates {int} new schemas") + fun acmeCreatesMultipleSchemas(actor: Actor, numberOfSchemas: Int) { + val createdSchemas: MutableList<CredentialSchemaResponse> = mutableListOf() + repeat(numberOfSchemas) { i: Int -> + actor.attemptsTo( + Post.to("/schema-registry/schemas").with { + it.body( + TestConstants.generate_with_name_suffix_and_author( + i.toString(), + actor.recall("shortFormDid") + ) + ) + } + ) + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_CREATED) + ) + createdSchemas.add(SerenityRest.lastResponse().get<CredentialSchemaResponse>()) + } + actor.remember("createdSchemas", createdSchemas) + } + + @Then("{actor} can access all of them one by one") + fun theyCanBeAccessedWithPagination(actor: Actor) { + actor.recall<List<CredentialSchemaResponse>>("createdSchemas").forEach { schema -> + actor.attemptsTo(
Get.resource("/schema-registry/schemas/${schema.guid}") + ) + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(SC_OK) + ) + } + } +} diff --git a/tests/integration-tests/src/test/kotlin/features/system/SystemSteps.kt b/tests/integration-tests/src/test/kotlin/features/system/SystemSteps.kt new file mode 100644 index 0000000000..aaee61d9ec --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/system/SystemSteps.kt @@ -0,0 +1,31 @@ +package features.system + +import interactions.Get +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.HealthInfo +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus + +class SystemSteps { + @When("{actor} makes a request to the health endpoint") + fun actorRequestsHealthEndpoint(actor: Actor) { + actor.attemptsTo( + Get.resource("/_system/health") + ) + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(HttpStatus.SC_OK) + ) + } + + @Then("{actor} knows what version of the service is running") + fun actorUnderstandsVersion(actor: Actor) { + val healthResponse = SerenityRest.lastResponse().get() + actor.attemptsTo( + Ensure.that(healthResponse.version).isNotBlank() + ) + } +} diff --git a/tests/integration-tests/src/test/kotlin/features/verificationpolicies/VerificationPoliciesSteps.kt b/tests/integration-tests/src/test/kotlin/features/verificationpolicies/VerificationPoliciesSteps.kt new file mode 100644 index 0000000000..0b1ad616d9 --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/features/verificationpolicies/VerificationPoliciesSteps.kt @@ -0,0 +1,100 @@ +package features.verificationpolicies + +import common.TestConstants +import interactions.Get +import interactions.Post +import interactions.Put +import io.cucumber.java.en.Then +import io.cucumber.java.en.When +import io.iohk.atala.automation.extensions.get +import io.iohk.atala.automation.serenity.ensure.Ensure +import io.iohk.atala.prism.models.VerificationPolicy +import io.iohk.atala.prism.models.VerificationPolicyInput +import net.serenitybdd.rest.SerenityRest +import net.serenitybdd.screenplay.Actor +import org.apache.http.HttpStatus +import java.util.* + +class VerificationPoliciesSteps { + + @When("{actor} creates a new verification policy") + fun acmeCreatesANewVerificationPolicy(actor: Actor) { + actor.attemptsTo( + Post.to("/verification/policies").with { + it.body( + TestConstants.TEST_VERIFICATION_POLICY + ) + } + ) + actor.attemptsTo( + Ensure.thatTheLastResponse().statusCode().isEqualTo(HttpStatus.SC_CREATED) + ) + } + + @Then("{actor} sees new verification policy is available") + fun newVerificationPolicyIsAvailable(actor: Actor) { + val policy = SerenityRest.lastResponse().get() + actor.attemptsTo( + Ensure.that(policy.id).isNotNull(), + Ensure.that(policy.nonce).isNotNull(), + Ensure.that(policy.kind).contains("VerificationPolicy"), + Ensure.that(policy.name).contains(TestConstants.TEST_VERIFICATION_POLICY.name), + Ensure.that(policy.description).contains(TestConstants.TEST_VERIFICATION_POLICY.description) + ) + + policy.constraints!!.forEach { + actor.attemptsTo( + Ensure.that(it.schemaId).isEqualTo(TestConstants.TEST_VERIFICATION_POLICY.constraints!!.first().schemaId), + Ensure.that(it.trustedIssuers!!) 
+ .containsExactlyInAnyOrderElementsFrom( + TestConstants.TEST_VERIFICATION_POLICY.constraints!!.first().trustedIssuers!! + ) + ) + } + actor.remember("policy", policy) + } + + @When("{actor} updates a new verification policy") + fun acmeUpdatesAVerificationPolicy(actor: Actor) { + val policy = actor.recall<VerificationPolicy>("policy") + val updatePolicyInput = VerificationPolicyInput( + name = policy.name, + description = "updated description + ${UUID.randomUUID()}", + constraints = policy.constraints + ) + actor.attemptsTo( + Put.to("/verification/policies/${policy.id}?nonce=${policy.nonce}").with { + it.body(updatePolicyInput) + } + ) + actor.remember("updatedPolicyInput", updatePolicyInput) + } + + @Then("{actor} sees the updated verification policy is available") + fun updatedVerificationPolicyIsAvailable(actor: Actor) { + val updatePolicyInput = actor.forget<VerificationPolicyInput>("updatedPolicyInput") + + actor.attemptsTo( + Get.resource("/verification/policies/${actor.recall<VerificationPolicy>("policy").id}") + ) + val policy = SerenityRest.lastResponse().get<VerificationPolicy>() + + actor.attemptsTo( + Ensure.that(policy.id).isNotNull(), + Ensure.that(policy.nonce).isNotNull(), + Ensure.that(policy.kind).contains("VerificationPolicy"), + Ensure.that(policy.name).contains(updatePolicyInput.name), + Ensure.that(policy.description).contains(updatePolicyInput.description) + ) + + policy.constraints!!.forEach { + actor.attemptsTo( + Ensure.that(it.schemaId).isEqualTo(updatePolicyInput.constraints!!.first().schemaId), + Ensure.that(it.trustedIssuers!!) + .containsExactlyInAnyOrderElementsFrom( + updatePolicyInput.constraints!!.first().trustedIssuers!! + ) + ) + } + } +} diff --git a/tests/integration-tests/src/test/kotlin/interactions/AuthRestInteraction.kt b/tests/integration-tests/src/test/kotlin/interactions/AuthRestInteraction.kt new file mode 100644 index 0000000000..881c193dae --- /dev/null +++ b/tests/integration-tests/src/test/kotlin/interactions/AuthRestInteraction.kt @@ -0,0 +1,25 @@ +package interactions + +import com.sksamuel.hoplite.ConfigLoader +import config.Config +import io.ktor.util.* +import io.restassured.specification.RequestSpecification +import net.serenitybdd.screenplay.Actor +import net.serenitybdd.screenplay.rest.interactions.RestInteraction + +abstract class AuthRestInteraction : RestInteraction() { + + private val config = ConfigLoader().loadConfigOrThrow<Config>("/tests.conf") + + fun <T : Actor?> specWithAuthHeaders(actor: T): RequestSpecification { + val spec = rest() + if (actor!!.name.toLowerCasePreservingASCIIRules().contains("admin")) { + spec.header(config.global.adminAuthHeader, config.global.adminApiKey) + } else { + if (config.global.authRequired) { + spec.header(config.global.authHeader, actor.recall("AUTH_KEY")) + } + } + return spec + } +} diff --git a/tests/e2e-tests/src/test/kotlin/interactions/Delete.kt b/tests/integration-tests/src/test/kotlin/interactions/Delete.kt similarity index 50% rename from tests/e2e-tests/src/test/kotlin/interactions/Delete.kt rename to tests/integration-tests/src/test/kotlin/interactions/Delete.kt index b70dd3da7b..4727abaad3 100644 --- a/tests/e2e-tests/src/test/kotlin/interactions/Delete.kt +++ b/tests/integration-tests/src/test/kotlin/interactions/Delete.kt @@ -1,30 +1,18 @@ package interactions -import common.Environments -import io.ktor.util.* import net.serenitybdd.annotations.Step import net.serenitybdd.screenplay.Actor import net.serenitybdd.screenplay.Tasks import net.serenitybdd.screenplay.rest.abilities.CallAnApi -import net.serenitybdd.screenplay.rest.interactions.RestInteraction - /** * This class is
a copy of the class Delete from serenity rest interactions * to add a custom authentication header to the request on-the-fly. */ -open class Delete(private val resource: String) : RestInteraction() { +open class Delete(private val resource: String) : AuthRestInteraction() { @Step("{0} executes a DELETE on the resource #resource") override fun <T : Actor?> performAs(actor: T) { - val spec = rest() - if (actor!!.name.toLowerCasePreservingASCIIRules().contains("admin")) { - spec.header(Environments.ADMIN_AUTH_HEADER, Environments.ADMIN_AUTH_TOKEN) - } else { - if (Environments.AGENT_AUTH_REQUIRED) { - spec.header(Environments.AGENT_AUTH_HEADER, actor.recall("AUTH_KEY")) - } - } - spec.delete(CallAnApi.`as`(actor).resolve(resource)) + specWithAuthHeaders(actor).delete(CallAnApi.`as`(actor).resolve(resource)) } companion object { diff --git a/tests/e2e-tests/src/test/kotlin/interactions/Get.kt b/tests/integration-tests/src/test/kotlin/interactions/Get.kt similarity index 50% rename from tests/e2e-tests/src/test/kotlin/interactions/Get.kt rename to tests/integration-tests/src/test/kotlin/interactions/Get.kt index f049f85d1f..c62b416cd8 100644 --- a/tests/e2e-tests/src/test/kotlin/interactions/Get.kt +++ b/tests/integration-tests/src/test/kotlin/interactions/Get.kt @@ -1,29 +1,18 @@ package interactions -import common.Environments -import io.ktor.util.* import net.serenitybdd.annotations.Step import net.serenitybdd.screenplay.Actor import net.serenitybdd.screenplay.Tasks import net.serenitybdd.screenplay.rest.abilities.CallAnApi -import net.serenitybdd.screenplay.rest.interactions.RestInteraction /** * This class is a copy of the class Get from serenity rest interactions * to add a custom authentication header to the request on-the-fly. */ -open class Get(private val resource: String) : RestInteraction() { +open class Get(private val resource: String) : AuthRestInteraction() { @Step("{0} executes a GET on the resource #resource") override fun <T : Actor?> performAs(actor: T) { - val spec = rest() - if (actor!!.name.toLowerCasePreservingASCIIRules().contains("admin")) { - spec.header(Environments.ADMIN_AUTH_HEADER, Environments.ADMIN_AUTH_TOKEN) - } else { - if (Environments.AGENT_AUTH_REQUIRED) { - spec.header(Environments.AGENT_AUTH_HEADER, actor.recall("AUTH_KEY")) - } - } - spec.get(CallAnApi.`as`(actor).resolve(resource)) + specWithAuthHeaders(actor).get(CallAnApi.`as`(actor).resolve(resource)) } companion object { diff --git a/tests/e2e-tests/src/test/kotlin/interactions/Patch.kt b/tests/integration-tests/src/test/kotlin/interactions/Patch.kt similarity index 50% rename from tests/e2e-tests/src/test/kotlin/interactions/Patch.kt rename to tests/integration-tests/src/test/kotlin/interactions/Patch.kt index a48f463301..fbf874bbf5 100644 --- a/tests/e2e-tests/src/test/kotlin/interactions/Patch.kt +++ b/tests/integration-tests/src/test/kotlin/interactions/Patch.kt @@ -1,29 +1,18 @@ package interactions -import common.Environments -import io.ktor.util.* import net.serenitybdd.annotations.Step import net.serenitybdd.screenplay.Actor import net.serenitybdd.screenplay.Tasks import net.serenitybdd.screenplay.rest.abilities.CallAnApi -import net.serenitybdd.screenplay.rest.interactions.RestInteraction /** * This class is a copy of the class Patch from serenity rest interactions * to add a custom authentication header to the request on-the-fly.
*/ -open class Patch(private val resource: String) : RestInteraction() { +open class Patch(private val resource: String) : AuthRestInteraction() { @Step("{0} executes a PATCH on the resource #resource") override fun <T : Actor?> performAs(actor: T) { - val spec = rest() - if (actor!!.name.toLowerCasePreservingASCIIRules().contains("admin")) { - spec.header(Environments.ADMIN_AUTH_HEADER, Environments.ADMIN_AUTH_TOKEN) - } else { - if (Environments.AGENT_AUTH_REQUIRED) { - spec.header(Environments.AGENT_AUTH_HEADER, actor.recall("AUTH_KEY")) - } - } - spec.patch(CallAnApi.`as`(actor).resolve(resource)) + specWithAuthHeaders(actor).patch(CallAnApi.`as`(actor).resolve(resource)) } companion object { diff --git a/tests/e2e-tests/src/test/kotlin/interactions/Post.kt b/tests/integration-tests/src/test/kotlin/interactions/Post.kt similarity index 50% rename from tests/e2e-tests/src/test/kotlin/interactions/Post.kt rename to tests/integration-tests/src/test/kotlin/interactions/Post.kt index 2104827f94..9b692867db 100644 --- a/tests/e2e-tests/src/test/kotlin/interactions/Post.kt +++ b/tests/integration-tests/src/test/kotlin/interactions/Post.kt @@ -1,29 +1,18 @@ package interactions -import common.Environments -import io.ktor.util.* import net.serenitybdd.annotations.Step import net.serenitybdd.screenplay.Actor import net.serenitybdd.screenplay.Tasks import net.serenitybdd.screenplay.rest.abilities.CallAnApi -import net.serenitybdd.screenplay.rest.interactions.RestInteraction /** * This class is a copy of the class Post from serenity rest interactions * to add a custom authentication header to the request on-the-fly. */ -open class Post(private val resource: String) : RestInteraction() { +open class Post(private val resource: String) : AuthRestInteraction() { @Step("{0} executes a POST on the resource #resource") override fun <T : Actor?> performAs(actor: T) { - val spec = rest() - if (actor!!.name.toLowerCasePreservingASCIIRules().contains("admin")) { - spec.header(Environments.ADMIN_AUTH_HEADER, Environments.ADMIN_AUTH_TOKEN) - } else { - if (Environments.AGENT_AUTH_REQUIRED) { - spec.header(Environments.AGENT_AUTH_HEADER, actor.recall("AUTH_KEY")) - } - } - spec.post(CallAnApi.`as`(actor).resolve(resource)) + specWithAuthHeaders(actor).post(CallAnApi.`as`(actor).resolve(resource)) } companion object { diff --git a/tests/e2e-tests/src/test/kotlin/interactions/Put.kt b/tests/integration-tests/src/test/kotlin/interactions/Put.kt similarity index 50% rename from tests/e2e-tests/src/test/kotlin/interactions/Put.kt rename to tests/integration-tests/src/test/kotlin/interactions/Put.kt index 9326f9b0e5..4c2eeeeaa9 100644 --- a/tests/e2e-tests/src/test/kotlin/interactions/Put.kt +++ b/tests/integration-tests/src/test/kotlin/interactions/Put.kt @@ -1,30 +1,18 @@ package interactions -import common.Environments -import io.ktor.util.* import net.serenitybdd.annotations.Step import net.serenitybdd.screenplay.Actor import net.serenitybdd.screenplay.Tasks import net.serenitybdd.screenplay.rest.abilities.CallAnApi -import net.serenitybdd.screenplay.rest.interactions.RestInteraction - /** * This class is a copy of the class Put from serenity rest interactions * to add a custom authentication header to the request on-the-fly.
*/ -open class Put(private val resource: String) : RestInteraction() { +open class Put(private val resource: String) : AuthRestInteraction() { @Step("{0} executes a PUT on the resource #resource") override fun <T : Actor?> performAs(actor: T) { - val spec = rest() - if (actor!!.name.toLowerCasePreservingASCIIRules().contains("admin")) { - spec.header(Environments.ADMIN_AUTH_HEADER, Environments.ADMIN_AUTH_TOKEN) - } else { - if (Environments.AGENT_AUTH_REQUIRED) { - spec.header(Environments.AGENT_AUTH_HEADER, actor.recall("AUTH_KEY")) - } - } - spec.put(CallAnApi.`as`(actor).resolve(resource)) + specWithAuthHeaders(actor).put(CallAnApi.`as`(actor).resolve(resource)) } companion object { diff --git a/tests/e2e-tests/src/test/kotlin/runners/E2eTestsRunner.kt b/tests/integration-tests/src/test/kotlin/runners/IntegrationTestsRunner.kt similarity index 71% rename from tests/e2e-tests/src/test/kotlin/runners/E2eTestsRunner.kt rename to tests/integration-tests/src/test/kotlin/runners/IntegrationTestsRunner.kt index daab43ba4f..4613f9d205 100644 --- a/tests/e2e-tests/src/test/kotlin/runners/E2eTestsRunner.kt +++ b/tests/integration-tests/src/test/kotlin/runners/IntegrationTestsRunner.kt @@ -6,14 +6,14 @@ import org.junit.runner.RunWith @CucumberOptions( features = [ - "src/test/resources/features", + "src/test/resources/features" ], glue = ["features"], snippets = CucumberOptions.SnippetType.CAMELCASE, plugin = [ "pretty", - "json:target/serenity-reports/cucumber_report.json", - ], + "json:target/serenity-reports/cucumber_report.json" + ] ) @RunWith(CucumberWithSerenity::class) -class E2eTestsRunner +class IntegrationTestsRunner diff --git a/tests/integration-tests/src/test/resources/cucumber.properties b/tests/integration-tests/src/test/resources/cucumber.properties new file mode 100644 index 0000000000..170a3740bb --- /dev/null +++ b/tests/integration-tests/src/test/resources/cucumber.properties @@ -0,0 +1,2 @@ +cucumber.publish.quiet=true +cucumber.object-factory=io.iohk.atala.automation.serenity.objectfactory.AtalaObjectFactory \ No newline at end of file diff --git a/tests/e2e-tests/src/test/resources/features/connection/connection.feature b/tests/integration-tests/src/test/resources/features/connection/connection.feature similarity index 82% rename from tests/e2e-tests/src/test/resources/features/connection/connection.feature rename to tests/integration-tests/src/test/resources/features/connection/connection.feature index 6254ba419f..360a135220 100644 --- a/tests/e2e-tests/src/test/resources/features/connection/connection.feature +++ b/tests/integration-tests/src/test/resources/features/connection/connection.feature @@ -1,9 +1,7 @@ Feature: Agents connection -@TEST_ATL-3834 Scenario: Establish a connection between two agents When Acme generates a connection invitation to Bob - And Bob receives the connection invitation from Acme And Bob sends a connection request to Acme And Acme receives the connection request and sends back the response And Bob receives the connection response diff --git a/tests/integration-tests/src/test/resources/features/credentials/issue_credentials.feature b/tests/integration-tests/src/test/resources/features/credentials/issue_credentials.feature new file mode 100644 index 0000000000..8f85e9d332 --- /dev/null +++ b/tests/integration-tests/src/test/resources/features/credentials/issue_credentials.feature @@ -0,0 +1,36 @@ +@RFC0453 @AIP20 +Feature: Issue Credentials Protocol + +Scenario: Issuing credential with published PRISM DID + Given Acme and Bob have an existing connection
When Acme creates unpublished DID + And He publishes DID to ledger + And Bob creates unpublished DID + And Acme offers a credential to Bob with "short" form DID + And Bob receives the credential offer + And Bob accepts credential offer for JWT + And Acme issues the credential + Then Bob receives the issued credential + +Scenario: Issuing credential with unpublished PRISM DID + Given Acme and Bob have an existing connection + When Acme creates unpublished DID + And Bob creates unpublished DID + And Acme offers a credential to Bob with "long" form DID + And Bob receives the credential offer + And Bob accepts credential offer for JWT + And Acme issues the credential + Then Bob receives the issued credential + +Scenario: Issuing anoncred with published PRISM DID + Given Acme and Bob have an existing connection + When Acme creates unpublished DID + And He publishes DID to ledger + And Bob creates unpublished DID + And Acme creates anoncred schema + And Acme creates anoncred credential definition + And Acme offers anoncred to Bob + And Bob receives the credential offer + And Bob accepts credential offer for anoncred + And Acme issues the credential + Then Bob receives the issued credential diff --git a/tests/integration-tests/src/test/resources/features/did/create_did.feature b/tests/integration-tests/src/test/resources/features/did/create_did.feature new file mode 100644 index 0000000000..f987c46390 --- /dev/null +++ b/tests/integration-tests/src/test/resources/features/did/create_did.feature @@ -0,0 +1,10 @@ +Feature: Create and publish DID + +Scenario: Create PRISM DID + When Acme creates PRISM DID + Then He sees PRISM DID was created successfully + +Scenario: Successfully publish DID to ledger + When Acme creates unpublished DID + And He publishes DID to ledger + Then He resolves DID document corresponds to W3C standard diff --git a/tests/e2e-tests/src/test/resources/features/did_registrar/deactivate_did.feature b/tests/integration-tests/src/test/resources/features/did/deactivate_did.feature similarity index 92% rename from tests/e2e-tests/src/test/resources/features/did_registrar/deactivate_did.feature rename to tests/integration-tests/src/test/resources/features/did/deactivate_did.feature index 3c80b55c14..9f98dfa7a3 100644 --- a/tests/e2e-tests/src/test/resources/features/did_registrar/deactivate_did.feature +++ b/tests/integration-tests/src/test/resources/features/did/deactivate_did.feature @@ -1,7 +1,6 @@ @DLT Feature: Deactivate DID -@TEST_ATL-3837 Scenario: Deactivate DID Given Acme have published PRISM DID When Acme deactivates PRISM DID diff --git a/tests/e2e-tests/src/test/resources/features/did_registrar/listing_did.feature b/tests/integration-tests/src/test/resources/features/did/listing_did.feature similarity index 92% rename from tests/e2e-tests/src/test/resources/features/did_registrar/listing_did.feature rename to tests/integration-tests/src/test/resources/features/did/listing_did.feature index 14bffb3eab..cddac4ecd2 100644 --- a/tests/e2e-tests/src/test/resources/features/did_registrar/listing_did.feature +++ b/tests/integration-tests/src/test/resources/features/did/listing_did.feature @@ -1,6 +1,5 @@ Feature: DID listing -@TEST_ATL-3841 Scenario: Listing multiple PRISM DIDs Given Acme creates 5 PRISM DIDs When He lists all PRISM DIDs diff --git a/tests/e2e-tests/src/test/resources/features/did_registrar/update_did.feature b/tests/integration-tests/src/test/resources/features/did/update_did.feature similarity index 92% rename from 
tests/e2e-tests/src/test/resources/features/did_registrar/update_did.feature rename to tests/integration-tests/src/test/resources/features/did/update_did.feature index 906d6b1472..349ae0628a 100644 --- a/tests/e2e-tests/src/test/resources/features/did_registrar/update_did.feature +++ b/tests/integration-tests/src/test/resources/features/did/update_did.feature @@ -2,34 +2,28 @@ Feature: Update DID Background: Published DID is created - #@PRECOND_ATL-3843 Given Acme have published PRISM DID -@TEST_ATL-3844 Scenario: Update PRISM DID by adding new services When Acme updates PRISM DID with new services And He submits PRISM DID update operation Then He sees PRISM DID was successfully updated with new services -@TEST_ATL-3845 Scenario: Update PRISM DID by removing services When Acme updates PRISM DID by removing services And He submits PRISM DID update operation Then He sees PRISM DID was successfully updated by removing services -@TEST_ATL-3846 Scenario: Update PRISM DID by updating services When Acme updates PRISM DID by updating services And He submits PRISM DID update operation Then He sees PRISM DID was successfully updated by updating services -@TEST_ATL-3847 Scenario: Update PRISM DID by adding new keys When Acme updates PRISM DID by adding new keys And He submits PRISM DID update operation Then He sees PRISM DID was successfully updated with new keys -@TEST_ATL-3848 Scenario: Update PRISM DID by removing keys When Acme updates PRISM DID by removing keys And He submits PRISM DID update operation diff --git a/tests/e2e-tests/src/test/resources/features/multitenancy/wallets.feature b/tests/integration-tests/src/test/resources/features/multitenancy/wallets.feature similarity index 100% rename from tests/e2e-tests/src/test/resources/features/multitenancy/wallets.feature rename to tests/integration-tests/src/test/resources/features/multitenancy/wallets.feature diff --git a/tests/e2e-tests/src/test/resources/features/present_proof/present_proof.feature b/tests/integration-tests/src/test/resources/features/proofs/present_proof.feature similarity index 95% rename from tests/e2e-tests/src/test/resources/features/present_proof/present_proof.feature rename to tests/integration-tests/src/test/resources/features/proofs/present_proof.feature index dcb0865754..406908d490 100644 --- a/tests/e2e-tests/src/test/resources/features/present_proof/present_proof.feature +++ b/tests/integration-tests/src/test/resources/features/proofs/present_proof.feature @@ -1,6 +1,5 @@ Feature: Present Proof Protocol -@TEST_ATL-3850 Scenario: Holder presents credential proof to verifier Given Faber and Bob have an existing connection And Bob has an issued credential from Acme @@ -9,7 +8,6 @@ Scenario: Holder presents credential proof to verifier And Bob makes the presentation of the proof to Faber Then Faber has the proof verified -@TEST_ATL-3881 Scenario: Verifier rejects holder proof Given Faber and Bob have an existing connection And Bob has an issued credential from Acme @@ -18,7 +16,6 @@ Scenario: Verifier rejects holder proof And Bob rejects the proof Then Bob sees the proof is rejected -@TEST_ATL-4968 Scenario: Holder presents proof to verifier which is the issuer itself Given Acme and Bob have an existing connection And Bob has an issued credential from Acme diff --git a/tests/e2e-tests/src/test/resources/features/credential_schemas/credential_schemas.feature b/tests/integration-tests/src/test/resources/features/schemas/credential_schemas.feature similarity index 92% rename from 
tests/e2e-tests/src/test/resources/features/credential_schemas/credential_schemas.feature rename to tests/integration-tests/src/test/resources/features/schemas/credential_schemas.feature index 11a73b6600..1cd58174fc 100644 --- a/tests/e2e-tests/src/test/resources/features/credential_schemas/credential_schemas.feature +++ b/tests/integration-tests/src/test/resources/features/schemas/credential_schemas.feature @@ -1,12 +1,10 @@ Feature: Credential schemas -@TEST_ATL-3835 Scenario: Successful schema creation When Acme creates unpublished DID And Acme creates a new credential schema Then He sees new credential schema is available -@TEST_ATL-3836 Scenario Outline: Multiple schema creation When Acme creates unpublished DID And Acme creates new schemas diff --git a/tests/e2e-tests/src/test/resources/features/system/health_endpoint.feature b/tests/integration-tests/src/test/resources/features/system/health_endpoint.feature similarity index 93% rename from tests/e2e-tests/src/test/resources/features/system/health_endpoint.feature rename to tests/integration-tests/src/test/resources/features/system/health_endpoint.feature index ba03a7903c..ea5229aee6 100644 --- a/tests/e2e-tests/src/test/resources/features/system/health_endpoint.feature +++ b/tests/integration-tests/src/test/resources/features/system/health_endpoint.feature @@ -1,7 +1,6 @@ @system @smoke Feature: Agent Health Endpoint -@TEST_ATL-3833 Scenario: The runtime version can be retrieved from the Health Endpoint When Acme makes a request to the health endpoint Then Acme knows what version of the service is running diff --git a/tests/e2e-tests/src/test/resources/features/verification_policies/verification_policies.feature b/tests/integration-tests/src/test/resources/features/verificationpolicies/verification_policies.feature similarity index 70% rename from tests/e2e-tests/src/test/resources/features/verification_policies/verification_policies.feature rename to tests/integration-tests/src/test/resources/features/verificationpolicies/verification_policies.feature index 0b14be8b8a..e72204109f 100644 --- a/tests/e2e-tests/src/test/resources/features/verification_policies/verification_policies.feature +++ b/tests/integration-tests/src/test/resources/features/verificationpolicies/verification_policies.feature @@ -1,4 +1,3 @@ -@TEST_ATL-2487 Feature: Verification Policies Scenario: Successful verification policy creation @@ -6,5 +5,3 @@ Scenario: Successful verification policy creation Then He sees new verification policy is available When He updates a new verification policy Then He sees the updated verification policy is available - When He updates a new verification policy - Then He sees the updated verification policy is available diff --git a/tests/e2e-tests/src/test/resources/log4j.properties b/tests/integration-tests/src/test/resources/log4j.properties similarity index 100% rename from tests/e2e-tests/src/test/resources/log4j.properties rename to tests/integration-tests/src/test/resources/log4j.properties diff --git a/tests/e2e-tests/src/test/resources/scripts/add_test_execution.py b/tests/integration-tests/src/test/resources/scripts/add_test_execution.py similarity index 100% rename from tests/e2e-tests/src/test/resources/scripts/add_test_execution.py rename to tests/integration-tests/src/test/resources/scripts/add_test_execution.py diff --git a/tests/e2e-tests/src/test/resources/scripts/requirements.txt b/tests/integration-tests/src/test/resources/scripts/requirements.txt similarity index 100% rename from 
tests/e2e-tests/src/test/resources/scripts/requirements.txt rename to tests/integration-tests/src/test/resources/scripts/requirements.txt diff --git a/tests/integration-tests/src/test/resources/tests.conf b/tests/integration-tests/src/test/resources/tests.conf new file mode 100644 index 0000000000..bd1065b8e6 --- /dev/null +++ b/tests/integration-tests/src/test/resources/tests.conf @@ -0,0 +1,32 @@ +global { + auth_required = true + auth_header = "${AUTH_HEADER:-apikey}" + admin_auth_header = "${ADMIN_AUTH_HEADER:-x-admin-api-key}" + admin_apikey = "${ADMIN_API_KEY:-admin}" +} + +admin { + url = "${ADMIN_AGENT_URL:-http://localhost:8080/prism-agent}" + apikey = "${ISSUER_API_KEY:-${random.string(16)}}" +} + +issuer { + url = "${ISSUER_AGENT_URL:-http://localhost:8080/prism-agent}" + webhook_url = "${ISSUER_WEBHOOK_URL:-http://host.docker.internal:9955}" + apikey = "${ISSUER_API_KEY:-${random.string(16)}}" + multi-tenant = true +} + +verifier { + url = "${VERIFIER_AGENT_URL:-http://localhost:8080/prism-agent}" + webhook_url = "${VERIFIER_WEBHOOK_URL:-http://host.docker.internal:9957}" + apikey = "${VERIFIER_API_KEY:-${random.string(16)}}" + multi-tenant = true +} + +holder { + url = "${HOLDER_AGENT_URL:-http://localhost:8090/prism-agent}" + webhook_url = "${HOLDER_WEBHOOK_URL:-http://host.docker.internal:9956}" + apikey = "${HOLDER_API_KEY:-default}" + multi-tenant = false +} diff --git a/version.sbt b/version.sbt index 180428aea1..bc21f09057 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -ThisBuild / version := "1.17.0-SNAPSHOT" +ThisBuild / version := "1.18.0-SNAPSHOT"