diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..0f38032d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,39 @@ +--- +name: Bug Report +about: Create a report to help us improve +title: '' +labels: 'bug' +assignees: '' + +--- + +# Bug Report + +## Describe the Bug +_A clear and concise description of the bug._ + +### Expected Behavior +_A clear and concise description of what you expected to happen._ + +### Observed Behavior +_A clear and concise description of what happened instead._ + +## Steps to Reproduce +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +## Context Information +_Add any other context about the problem here._ + +- Version used [e.g. EDC v1.0.0] +- OS: [e.g. iOS, Windows] +- ... + +## Detailed Description +_If applicable, add screenshots and logs to help explain your problem._ + +## Possible Implementation +_Do you already know the root cause and how to fix it? Feel free to share your thoughts._
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..bd9dfe4e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,2 @@ +--- +blank_issues_enabled: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..292266bd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,19 @@ +--- +name: Feature Request +about: Help us with new ideas +title: '' +labels: '' +assignees: '' + +--- + +# Feature Request + +## Which Areas Would Be Affected? +_e.g., DPF, CI, build, transfer, etc._ + +## Why Is the Feature Desired? +_Are there any requirements?_ + +## Solution Proposal +_If possible, provide a (brief!) solution proposal._
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..55a8bd93 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,15 @@ +## What this PR changes/adds + +_Briefly describe WHAT your PR changes and which features it adds/modifies._ + +## Why it does that + +_Briefly state why the change was necessary._ + +## Further notes + +_List other areas of code that have changed but are not necessarily linked to the main feature. This could be method signature changes, package declarations, bugs that were encountered and were fixed inline, etc._ + +## Linked Issue(s) + +Closes # <-- _insert Issue number if one exists_ \ No newline at end of file
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..2fea6be6 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,37 @@ +# Builds the mdBook documentation and deploys it to GitHub Pages + +name: Deploy to GitHub Pages + +# Controls when the action will run.
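+# It runs on pushes to the development branch and publishes the generated ./book
+# directory (peaceiris/actions-gh-pages deploys to the gh-pages branch by default).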
+on: + # Triggers the workflow on push events, but only for the development branch + push: + branches: [ development ] + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: mdBook Action + uses: peaceiris/actions-mdbook@v1 + + - name: Install D2 (Diagram generator) + run: curl -fsSL https://d2lang.com/install.sh | sh -s -- + + - name: Install mdbook-d2 integration + run: cargo install mdbook-d2 --locked + + - name: Build book + run: mdbook build + + - name: Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./book + +permissions: + contents: write
diff --git a/.github/workflows/release-publish.yml b/.github/workflows/release-publish.yml new file mode 100644 index 00000000..ff15867f --- /dev/null +++ b/.github/workflows/release-publish.yml @@ -0,0 +1,77 @@ +name: Release +on: + push: + branches: + - master + - alpha + - beta +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: write + packages: write + id-token: write + outputs: + new_tag_version: ${{ steps.tag_version.outputs.new_tag_version }} + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Setup Node.js + uses: actions/setup-node@v1 + with: + node-version: 18 + - name: Install dependencies + run: npm ci + - name: Dry run to get the next release version + id: tag_version + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + export NEXT_TAG_VERSION=$(npx semantic-release --dry-run | grep 'The next release version is' | sed -E 's/.* ([[:alnum:].\-]+)$/\1/') + echo "new_tag_version=${NEXT_TAG_VERSION}" >> $GITHUB_OUTPUT + - name: Release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: npx semantic-release + + publish-docker-images: + runs-on: ubuntu-latest + needs: release + if: ${{ needs.release.outputs.new_tag_version != '' }} + permissions: + contents: read + packages: write + id-token: write + + strategy: + matrix: + include: + - context: "ch-app" + directory: "clearing-house-app" + dockerfile: "Dockerfile" + - context: "ch-edc" + directory: "clearing-house-edc" + dockerfile: "launchers/connector-prod/Dockerfile" + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Login to GitHub Container Registry + run: echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin + + - name: Build Docker image + env: + DOCKER_IMAGE_TAG: ${{ needs.release.outputs.new_tag_version }} + run: | + cd ${{ matrix.directory }} + docker build -t ghcr.io/${{ github.repository }}/${{ matrix.context }}:$DOCKER_IMAGE_TAG -f ${{ matrix.dockerfile }} .
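+ # The image is tagged ghcr.io/<owner>/<repo>/<context>:<version>, where <version> is the
+ # semantic-release version computed by the release job (for example,
+ # ghcr.io/ids-basecamp/clearinghouse/ch-app:1.0.0-beta.3).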
+ + - name: Push Docker image + env: + DOCKER_IMAGE_TAG: ${{ needs.release.outputs.new_tag_version }} + run: docker push ghcr.io/${{ github.repository }}/${{ matrix.context }}:$DOCKER_IMAGE_TAG + diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml deleted file mode 100644 index dac798e7..00000000 --- a/.github/workflows/rust.yml +++ /dev/null @@ -1,74 +0,0 @@ -name: Rust - -on: - push: - branches: - - master - pull_request: - branches: [ master ] - -env: - CARGO_TERM_COLOR: always - IMAGE_NAME_LS: ids-ch-logging-service - IMAGE_NAME_DA: ids-ch-document-api - IMAGE_NAME_KA: ids-ch-keyring-api - - -jobs: - build: - runs-on: ubuntu-20.04 - steps: - - uses: actions/checkout@v3 - - name: Install minimal stable - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - # TODO: do not use caching for actual release builds, aka ones that start with v* - - uses: Swatinem/rust-cache@v2 - - name: Build clearing-house-api - run: | - cd clearing-house-app - eval "$(ssh-agent -s)" - ssh-add - <<< "${{ secrets.IDS_CLEARING_HOUSE_CORE_TOKEN }}" - cargo build --release - - - name: Build build images - run: | - docker build . --file docker/logging-service.Dockerfile --tag $IMAGE_NAME_LS - docker build . --file docker/document-api.Dockerfile --tag $IMAGE_NAME_DA - docker build . --file docker/keyring-api.Dockerfile --tag $IMAGE_NAME_KA - - - name: Log into registry - run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin - - - name: Push image - run: | - IMAGE_ID_LS=ghcr.io/Fraunhofer-AISEC/$IMAGE_NAME_LS - IMAGE_ID_DA=ghcr.io/Fraunhofer-AISEC/$IMAGE_NAME_DA - IMAGE_ID_KA=ghcr.io/Fraunhofer-AISEC/$IMAGE_NAME_KA - - # Change all uppercase to lowercase - IMAGE_ID_LS=$(echo $IMAGE_ID_LS | tr '[A-Z]' '[a-z]') - IMAGE_ID_DA=$(echo $IMAGE_ID_DA | tr '[A-Z]' '[a-z]') - IMAGE_ID_KA=$(echo $IMAGE_ID_KA | tr '[A-Z]' '[a-z]') - - # Strip git ref prefix from version - VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') - - # Strip "v" prefix from tag name - [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//') - - # Use Docker `latest` tag convention - [ "$VERSION" == "master" ] && VERSION=latest - - docker tag $IMAGE_NAME_LS $IMAGE_ID_LS:$VERSION - docker push $IMAGE_ID_LS:$VERSION - - docker tag $IMAGE_NAME_DA $IMAGE_ID_DA:$VERSION - docker push $IMAGE_ID_DA:$VERSION - - docker tag $IMAGE_NAME_KA $IMAGE_ID_KA:$VERSION - docker push $IMAGE_ID_KA:$VERSION - - diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..4cc5188b --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,59 @@ +name: test + +on: + pull_request: + branches: + - master + - beta + - alpha + - development + +jobs: + app-unit-tests: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + + - name: Build and Test + run: | + cd clearing-house-app + cargo build + cargo test + + edc-unit-tests: + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Setup JDK 17 + uses: actions/setup-java@v2 + with: + java-version: '17' + distribution: 'temurin' + + - name: Run Unit Tests + run: | + cd clearing-house-edc + ./gradlew test jacocoTestReport + + - name: Add Coverage Report + id: jacoco + uses: madrapps/jacoco-report@v1.6.1 + with: + paths: | + ${{ github.workspace 
}}/clearing-house-edc/core/build/reports/jacoco/test/jacocoTestReport.xml, + ${{ github.workspace }}/clearing-house-edc/extensions/multipart/build/reports/jacoco/test/jacocoTestReport.xml + token: ${{ secrets.GITHUB_TOKEN }} + min-coverage-overall: 70 + min-coverage-changed-files: 80
diff --git a/.gitignore b/.gitignore index 86bfd074..fb4e6cba 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,13 @@ -.settings/* +/**/.settings +/**/.classpath .project target data *.log .idea/ -**/*.lock -**/*.iml \ No newline at end of file +node_modules/ +**/*.iml +.vscode/ +book/ +ca/ +.DS_Store
diff --git a/.releaserc b/.releaserc new file mode 100644 index 00000000..e4ce0ea1 --- /dev/null +++ b/.releaserc @@ -0,0 +1,26 @@ +{ + "branches": ["+([0-9])?(.{+([0-9]),x}).x", "master", {"name": "beta", "prerelease": true}, {"name": "alpha", "prerelease": true}], + "plugins": [ + [ + "@semantic-release/commit-analyzer", + { + "preset": "angular", + "releaseRules": [ + { "type": "docs", "release": "patch" }, + { "type": "refactor", "release": "patch" }, + { "scope": "no-release", "release": false } + ] + } + ], + "@semantic-release/release-notes-generator", + "@semantic-release/changelog", + "@semantic-release/git", + [ + "@semantic-release/github", + { + "successComment": false, + "releasedLabels": false + } + ] + ] +}
diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..99379d94 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,108 @@ +# [1.0.0-beta.3](https://github.com/ids-basecamp/clearinghouse/compare/v1.0.0-beta.2...v1.0.0-beta.3) (2024-03-15) + + +### Bug Fixes + +* change tests for `referringConnector` test ([25cd379](https://github.com/ids-basecamp/clearinghouse/commit/25cd379c969c8747f618fc5166e97957631504e8)) + + +### Features + +* **ch-app:** Add create_process test and fix an issue ([8cfb5e1](https://github.com/ids-basecamp/clearinghouse/commit/8cfb5e18feea759aeb4425cb900453f86f07c15f)) +* **ch-app:** Add testcontainers for Integration tests with database ([679b06b](https://github.com/ids-basecamp/clearinghouse/commit/679b06b95d8e7ac58019fd21e678c6725c79083e)) +* enable pedantic linter and fix clippy findings where appropriate ([df0a5d4](https://github.com/ids-basecamp/clearinghouse/commit/df0a5d40ed50ea45f90383c21666b51fb89bdddd)) +* uses now `referringConnector` instead of `SKI:AKI` ([b472344](https://github.com/ids-basecamp/clearinghouse/commit/b472344d7bb9e9f63dc4c97bbf3545e7d761d8f6)) + +# [1.0.0-beta.2](https://github.com/ids-basecamp/clearinghouse/compare/v1.0.0-beta.1...v1.0.0-beta.2) (2024-02-19) + + +### Bug Fixes + +* **ch-app:** copy migrations in Dockerfile ([b91926c](https://github.com/ids-basecamp/clearinghouse/commit/b91926cd6dbde60e1e13813949587d3a6f3e3f4c)) +* **ch-app:** Fix 3 vulnerabilities: GHSA-rjhf-4mh8-9xjq, GHSA-xphf-cx8h-7q9g, GHSA-3mv5-343c-w2qg ([2ca4dfa](https://github.com/ids-basecamp/clearinghouse/commit/2ca4dfae59aa65061f818d579d81eb7f09325576)) +* **ch-app:** Fixed uuid <-> str mismatch in document, which resulted in failed query ([0571bd1](https://github.com/ids-basecamp/clearinghouse/commit/0571bd1d720d89d9c3b9d3758d70197faca4f04c)) +* changed repository in package.json ([5b0b15c](https://github.com/ids-basecamp/clearinghouse/commit/5b0b15cdf5f44ffe6e38b556c6573d19a9ffce7e)) +* **ci:** change docker image tag to reflect new repo and impl job matrix ([60379b4](https://github.com/ids-basecamp/clearinghouse/commit/60379b464c8e00591555462cce1d4820619b274f)) +* disable tokenFormat check
([c920b82](https://github.com/ids-basecamp/clearinghouse/commit/c920b825219edeae317d874f6cb723d1016ecabc)) +* GITHUB_TOKEN permissions for release job ([b2678aa](https://github.com/ids-basecamp/clearinghouse/commit/b2678aaa49bb9d2d0259413567704b7670635bc1)) + + +### Features + +* **ch-app:** Add postgres implementation ([#96](https://github.com/ids-basecamp/clearinghouse/issues/96)) ([842ff00](https://github.com/ids-basecamp/clearinghouse/commit/842ff0058b0b6d1ca4b3d62a6747d0bfcf025bb8)) +* **ch-app:** Implement [#91](https://github.com/ids-basecamp/clearinghouse/issues/91) ([965b4c2](https://github.com/ids-basecamp/clearinghouse/commit/965b4c2cbba0580006f9e40834470f3e225354b6)) + +# 1.0.0-beta.1 (2023-11-23) + + +### Bug Fixes + +* **app:** Fix build on development branch ([32bfea3](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/32bfea389a3f0f43907f3c5e7afa66105f25cf60)) +* **app:** Fix build on development branch ([851146e](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/851146eb3c546f6813d3209beee367b84ee1ffaa)) +* **app:** Fix warnings and build on development branch ([89f39f7](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/89f39f784180b4bd26813f33e7787d0744fe975c)) +* **ch-app:** Add error log and removed assert ([0d07fe5](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/0d07fe55c3a83a2b4d22adde2e7c70ddc44b2c06)) +* **ch-app:** Bump dependencies ([6f273bb](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/6f273bbd5b8c0503f2061aee944b95c692a2a3f1)) +* **ch-app:** Fix all clippy warnings ([812f3e8](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/812f3e868bfb4c17c5a18765bacaf7826ef99532)) +* **ch-app:** Fix integration test case log ([bcc6a56](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/bcc6a5604162d6d4166f00e57587e9bab049c565)) +* **ch-app:** Fix security issue through updating dependencies ([2613559](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/26135597ccc4a8f9f040f496732fb7e275504ce9)) +* **ch-app:** Reenable new serde crates, due to resolved issues with precompiled binaries ([e2784b9](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/e2784b9b642987cc1ddb9ffa2ca7057cb6382d25)) +* **ch-app:** Updated dependencies to fix security vulnerability ([fe19cdf](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/fe19cdf8c153a1108759a27f689ed3fdc2197ff4)) +* **ch-edc:** add missing vault filesystem ([e845269](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/e845269a2149f9b02b5dac71c4f40649052a8d12)) +* **ch-edc:** add multistage dockerfile ([8e8026e](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/8e8026e39059debc5df27f24b58829c081c58da0)) +* **ci:** Delete .github/workflows/rust.yml to fix failing CI ([3a8d5a1](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/3a8d5a15c08151ea2d43f70d7a25ecb4f4555424)) +* **ci:** disable rust workflow (dublicate build) ([9af75cf](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/9af75cf760173fda5d1fad4bf4ddbefd21224413)) +* **ci:** Fix rust.yml workflow ([0a474c0](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/0a474c0904a74f258978b1bd0ed2278edd8c8db1)) +* **ci:** Fix unauthorized push ([57d4e02](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/57d4e02ebee80c04f359d577fd87af2a70e0b7ce)) +* **ci:** Fix unauthorized push ([453ce88](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/453ce8810ddd5970f0d7c349f142ea5f24db8b8a)) +* **ci:** simplified ch-edc docker build 
([f0cb1e1](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f0cb1e149160b945e6e03d2426e6b40165c6fb55)) +* **ci:** updated test job to run from root ([04cecce](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/04cecce30c0c787847ca199788d40e1daf07092f)) +* **config:** Fixed config and added unit test to verify correct functionality ([76765e6](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/76765e687c3cac025f33fd902d28a6caec764e2f)) +* **core:** Disable integration tests, fix warnings and make the build reproducible ([ecd3078](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/ecd3078b92d8061588f58537133c5b56074b91f9)) +* **core:** Disable integration tests, fix warnings and make the build reproducible ([c69b246](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/c69b246cf365c06ccfb23bdf0c85f0506f4a023e)) +* quick start docker-compose.yml snytax ([0d83989](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/0d8398932fb4fde1b454d2117ef567cc85ddc0c0)) +* removed workingdir since cd is used ([34e2b9a](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/34e2b9ad64c1e95e969450c412745412b852d716)) +* **tests:** add __ENV for hostname and token ([209244c](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/209244c551e8e9fd4eed5e00b620a271e5fd57e9)) +* updating .gitignore to exclude vscode files ([1ce073f](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/1ce073fef0b2e70d97c58d1b14a7dec104bed3a1)) + + +### Features + +* AppSender, LoggingMessageDelegate, LogMessageHandler tests implemented ([5127591](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/5127591162bec3ee6e92227ffbb80f36ffa08f62)) +* basic endpoint functions working ([f1726e7](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f1726e74574a596e1216d4cf468af1ccfd07443e)) +* **ch-app:** Add and debug integration test ([cef068b](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/cef068b2e41916a05101dab5e3255114a49a95c8)) +* **ch-app:** Add CreateProcessResponse as JSON ([002845a](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/002845aa0729887853954118032084c6e5606354)) +* **ch-app:** add Dockerfile and GH action ([f64aa14](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f64aa14c802e91a34b85437d07d79eba756ea504)) +* **ch-app:** Add docs for installation of ch-app ([293500d](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/293500d45f2bccbae47d4ae0dfdbf01851ea4f03)) +* **ch-app:** Added tests, refactored unwrap ([b3f8ede](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/b3f8edec027aa8168f64fd552ec7bed0e7f4ac30)) +* **ch-app:** Bump Cargo edition to 2021 and remove unused imports ([007281f](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/007281f3e7f436606c04c41edab917c432e7e0c8)) +* **ch-app:** Bump Cargo edition to 2021 and remove unused imports ([6a3934e](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/6a3934e089f775bf434821d0e672e63daf34676c)) +* **ch-app:** Created services for Keyring- and Document-Service inside logging service and adjusted the handlers ([4bb512f](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/4bb512f68f1137a3c89cca7bbd4ee6055525b1ed)) +* **ch-app:** Created services for Keyring- and Document-Service inside logging service and adjusted the handlers ([f1a8e59](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f1a8e5969006156c931ce39a7225b8e3acea56a5)) +* **ch-app:** feature flag sentry 
([918a903](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/918a9035ac1e61a0faa8716143f25886d049dae2)) +* **ch-app:** Finished error-handling in keyring service and introduces 'doc_type' feature ([387498c](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/387498c15ff2bd8c2890625dd92d8d3be1250b42)) +* **ch-app:** Finished refactoring document-service error-handling ([8965f5e](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/8965f5e8a1ccbfdf8c36040f3736a3dd7fee7929)) +* **ch-app:** Refactor logging-api to use a service as well ([4259c65](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/4259c65cfca978f3ad77c8d37fec85bd3fbaa90f)) +* **ch-app:** Refactor logging-api to use a service as well ([f1beee0](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f1beee0bd6ed48277d02a385b25d232f7ee5740a)) +* **ch-app:** Remove Blockchain, add integration tests ([ffdfbad](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/ffdfbadd10769b99f392617f0d691fcd45dcdafb)) +* **ch-app:** Removed ApiResponse, fixed warnings and hid more doc_type related functions ([fc710b7](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/fc710b7afc2f8ff28729ee88315fd74777476c05)) +* **ch-app:** Removed certs folder ([2779f6c](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/2779f6c5fc2f550e9e35af9c60b2ca7426d52036)) +* **ch-app:** Setup tracing as logger and replace rocket as logger; setup config ([c9d8e6f](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/c9d8e6f99fba95ab83816911293cc1885f866fae)) +* **ch-app:** Setup tracing as logger and replace rocket as logger; setup config ([356665a](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/356665a46bd6de165b0fd227b845d10d6e1fcb0e)) +* **ch-app:** Use JWKS from endpoint to validate receipt ([11a7314](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/11a7314f2bfc9236561770623a98239bf71b088e)) +* **ci:** add test job for CH app ([807bcdf](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/807bcdf5fad95456dfcd008fcee990983facd711)) +* create connector and extension modules ([fa47ff8](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/fa47ff8f18feeefd77fdcf6be9cfe266981f358b)) +* Create TestUtils with mock and start to create application tests ([f1612e0](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f1612e027f9815ad9525c7f78aab876baf1f64a1)) +* **doc:** Add internal description to docs ([4e89ba6](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/4e89ba6755095d30d23df8caec3463561112cafe)) +* **docker:** Optimised docker image with distroless image ([d046826](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/d046826132c1e6cc3e60f2c31e2d4f8c397fe01b)) +* **docs:** add d2 diagramming integration to workflow ([24e87ef](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/24e87efc96516a22dc1edc4d89662cebd537d2bf)) +* **docs:** add mdbook for documentation ([0cf4ada](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/0cf4adaa5494a8ae3bc679ee0387b90bc3079e38)) +* **docs:** Enable GitHub Pages generation ([36bfaa3](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/36bfaa3f569ee86be8f8cc072cb951aeaca8e295)) +* externalization of environments variables ([f8e187e](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f8e187e59c32483c8250252683804f0b86643de7)) +* readme added ([4d382b5](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/4d382b5877dda24b6143b08a47549d3c29a61d71)) +* release action 
([98f1448](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/98f1448795003bf6fc823fccda7f0e14fe8b7cb0)) +* release action ([4710fc0](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/4710fc0bde1a63ca6af2042a56b81b68c73860b1)) +* **release:** add more release types ([cd59461](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/cd59461fb2dfa5b8c95c80fbaa3bafd511e036c0)) +* semantic-release ([6fb29ff](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/6fb29ff39a86a34e2bda5ac400b1114643b4f906)) +* starting create objects and method ([f13f15e](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/f13f15e7e35c866f011a4474bc3bd5722d8a40b9)) +* **tests:** add load tests ([a88175b](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/a88175bb083ce0091459e8b47c4c27ac042f782b)) +* **tests:** add smoke tests ([e31f806](https://github.com/truzzt/ids-basecamp-clearinghouse/commit/e31f8066b08ebac341aa3b081056bbd110b72680)) diff --git a/README.md b/README.md index 6fa2b14d..07adb80a 100644 --- a/README.md +++ b/README.md @@ -1,101 +1,53 @@ -# IDS Clearing House -The IDS Clearing House Service is a prototype implementation of the [Clearing House](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md) component of the [Industrial Data Space](https://internationaldataspaces.org/). - -Data in the Clearing House is stored encrypted and practically immutable. There are multiple ways in which the Clearing House enforces Data Immutability: -- Using the `Logging Service` there is no way to update an already existing log entry in the database -- Log entries in the database include a hash value of the previous log entry, chaining together all log entries. Any change to a previous log entry would require rehashing all following log entries. -- The connector logging information in the Clearing House receives a signed receipt from the Clearing House that includes among other things a timestamp and the current chain hash. A single valid receipt in possession of any connector is enough to detect any change to data up to the time indicated in the receipt. - -## Architecture -The IDS Clearing House Service currently implements the [`Logging Service`](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md). Other services that comprise the [Clearing House](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md) may follow. The Clearing House Service consists of two parts: - -1. [`Clearing House App`](clearing-house-app) -2. [`Clearing House Processors`](clearing-house-processors) - -The `Clearing House App` is a REST API written in [Rust](https://www.rust-lang.org) that implements the business logic of the Clearing House. The `Clearing House Processors` is a library written in Java that integrates the `Clearing House App` into the [Trusted Connector](https://github.com/industrial-data-space/trusted-connector). The `Clearing House Processors` provide the `multipart` and `idscp2` endpoints described in the [IDS-G](https://github.com/International-Data-Spaces-Association/IDS-G/tree/main). These are used by the IDS connectors to interact with the Clearing House. 
Both `Clearing House App` and `Clearing House Processors` are needed to provide the `Clearing House Service`. - -## Requirements -- [OpenSSL](https://www.openssl.org) -- [MongoDB](https://www.mongodb.com) -- ([Docker](https://www.docker.com)) -- [Trusted Connector](https://github.com/industrial-data-space/trusted-connector) - -## Trusted Connector -The Clearing House Service API requires a Trusted Connector [Trusted Connector](https://github.com/industrial-data-space/trusted-connector) (Version 7.1.0+) for deployment. The process of setting up a Trusted Connector is described [here](https://industrial-data-space.github.io/trusted-connector-documentation/docs/getting_started/). Using a docker image of the Trusted Connector should be sufficient for most deployments: - -`docker pull fraunhoferaisec/trusted-connector-core:7.2.0` - -The Clearing House Processors are written in Java for use in the Camel Component of the Trusted Connector. To configure the Trusted Connector for the Clearing House Service API, it needs access to the following files inside the docker container (e.g. mounted as a volume): -- `clearing-house-processors.jar`: The Clearing House Processors need to be placed in the `/root/jars` folder of the Trusted Connector. The jar file needs to be [build](clearing-house-processors#building-from-source) from the Clearing House Processors using `gradle`. -- [`clearing-house-routes.xml`](clearing-house-processors/src/routes/clearing-house-routes.xml): The camel routes required by the Clearing House need to be placed in the `/root/deploy` folder of the Trusted Connector. -- [`application.yml`](docker/application.yml): This is a new configuration file of Trusted Connector 7.0.0+. The file version in this repository enables the use of some of the environment variables documented in the next section. - -Besides those files that are specific for the configuration of the Clearing House Service API, the Trusted Connector requires other files for its configuration, e.g. a truststore and a keystore with appropriate key material. Please refer to the [Documentation](https://industrial-data-space.github.io/trusted-connector-documentation/) of the Trusted Connector for more information. Also, please check the [Examples](https://github.com/industrial-data-space/trusted-connector/tree/master/examples) as they contain up-to-date configurations for the Trusted Connector. - -#### Environment Variables -The Clearing House Processors can override some standard configuration settings of the Trusted Connector using environment variables. If these variables are not set, the Clearing House Processors will use the standard values provided by the Trusted Connector. Some of the variables are mandatory and have to be set: -- `TC_DAPS_URL`: The url of the DAPS used by the Clearing House. The Trusted Connector uses `https://daps.aisec.fraunhofer.de/v3` as the default DAPS url. -- `TC_KEYSTORE_PW`: The password of the key store mounted in the Trusted Connector. Defaults to `password`. -- `TC_TRUSTSTORE_PW`: The password of the trust store mounted in the Trusted Connector. Defaults to `password`. 
-- `TC_CH_ISSUER_CONNECTOR`(mandatory): Issuer connector needed for IDS Messages as specified by the [InfoModel](https://github.com/International-Data-Spaces-Association/InformationModel) -- `TC_CH_AGENT`(mandatory): Server agent needed for IDS Messages as specified by the [InfoModel](https://github.com/International-Data-Spaces-Association/InformationModel) -- `SERVICE_SHARED_SECRET`(mandatory): Shared secret, see Configuration section -- `SERVICE_ID_TC` (mandatory): Internal ID of the `Trusted Connector` that is used by the `Logging Service` to identify the `Trusted Connector`. -- `SERVICE_ID_LOG` (mandatory): Internal ID of the `Logging Service`. - - -#### Example Configuration (docker-compose) -``` -tc-core: - container_name: "tc-core" - image: fraunhoferaisec/trusted-connector-core:7.1.0 - tty: true - stdin_open: true - volumes: - - /var/run/docker.sock:/var/run/docker.sock - - ./data/trusted-connector/application.yml:/root/etc/application.yml - - ./data/trusted-connector/allow-all-flows.pl:/root/deploy/allow-all-flows.pl - - ./data/trusted-connector/ch-ids.p12:/root/etc/keystore.p12 - - ./data/trusted-connector/truststore.p12:/root/etc/truststore.p12 - - ./data/trusted-connector/clearing-house-processors-0.10.0.jar:/root/jars/clearing-house-processors.jar - - ./data/trusted-connector/routes/clearing-house-routes.xml:/root/deploy/clearing-house-routes.xml - environment: - TC_DAPS_URL: https:// - SERVICE_SHARED_SECRET: - SERVICE_ID_TC: - SERVICE_ID_LOG: - - ports: - - "8443:8443" - - "9999:9999" - - "29292:29292" +

+# Welcome to ids-basecamp-clearinghouse 👋
+
+_Badges: Version · Documentation · License: Apache-2.0_
+ +> The ids-basecamp-clearinghouse is an implementation of the [Clearing House](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md) component of the [International Data Spaces](https://internationaldataspaces.org/). +
+## Quick Start +Please refer to the quick start [guide](https://truzzt.github.io/ids-basecamp-clearinghouse/content/admin-guide/quick_start.html). +
+## Documentation +Please refer to the [documentation](https://truzzt.github.io/ids-basecamp-clearinghouse/). +
+## Run tests + +### CH_APP +```sh +npm run test:app ```
-## Docker Containers -The dockerfiles located [here](docker/) can be used to create containers for the services of the [`Clearing House App`](clearing-house-app). There are two types of dockerfiles: -1. Simple builds (e.g. [dockerfile](docker/keyring-api.Dockerfile)) that require you to build the Service APIs yourself using [Rust](https://www.rust-lang.org) -2. Multistage builds (e.g. [dockerfile](docker/keyring-api-multistage.Dockerfile)) that have a stage for building the rust code
+### CH_EDC +```sh +npm run test:edc +```
-To build the containers check out the repository and in the main directory execute
## Authors
-`docker build -f docker/ . -t `
+👤 **Maximilian Schönenberg** +👤 **Daniel Hommen** +👤 **Glaucio Jannotti** +👤 **Augusto Leal**
-### Container Dependencies -![Container Dependencies](doc/images/ch_container_dependencies.png)
-### Configuration -Please read the configuration section of the service ([`Logging Service`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#logging-service), [`Document API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#document-api), [`Keyring API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#keyring-api)) you are trying to run, before using `docker run` oder `docker-compose`. All Containers build with the provided dockerfiles require at least one volume: -1. The configuration file `Rocket.toml` is expected at `/server/Rocket.toml`
+## 🤝 Contributing
-Containers of the Keyring API require an additional volume:
+Contributions, issues and feature requests are welcome!
Feel free to check the [issues page](https://github.com/truzzt/ids-basecamp-clearinghouse/issues).
-2. `/server/init_db` needs to contain the `default_doc_type.json`
+## Show your support
-Containers of the Logging Service require an additional volume:
+Give a ⭐️ if this project helped you!
-3. The folder containing the signing key needs to match the path configured for the signing key in `Rocket.toml`, e.g. `/sever/keys`
+## 📝 License
-## Shared Secret -The Clearing House services use signed JWTs with HMAC and a shared secret to ensure a minimal integrity of the requests received. The `Trusted Connector` as well as the services ([`Logging Service`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#logging-service), [`Document API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#document-api), [`Keyring API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#keyring-api)) need to have access to the shared secret.
+This project is [Apache-2.0](https://github.com/truzzt/ids-basecamp-clearinghouse/blob/development/LICENSE) licensed.
-For production use please consider using additional protection measures.
+*** +_This README was generated with ❤️ by [readme-md-generator](https://github.com/kefranabg/readme-md-generator)_
diff --git a/book.toml b/book.toml new file mode 100644 index 00000000..848f6342 --- /dev/null +++ b/book.toml @@ -0,0 +1,26 @@ +[book] +authors = ["schoenenberg", "dhommen"] +language = "en" +multilingual = false +src = "docs" +title = "Documentation" + +[preprocessor.d2] + +# path to d2 binary. +# optional. default is "d2" (i.e. on the path). +path = "d2" + +# layout engine for diagrams. See https://github.com/terrastruct/d2#plugins. +# optional. default is "dagre". +layout = "dagre" + +# whether to use inline svg when rendering. +# if 'false', separate files will be generated in src/ and referenced. +# optional. default is 'true' +inline = true + +# output directory relative to `src/` for generated diagrams. +# This is ignored if 'inline' is 'true'. +# optional. default is "d2". +#output-dir = "d2" \ No newline at end of file
diff --git a/clearing-house-app/Cargo.lock b/clearing-house-app/Cargo.lock new file mode 100644 index 00000000..ec9a1cdf --- /dev/null +++ b/clearing-house-app/Cargo.lock @@ -0,0 +1,3524 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing.
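+# The lockfile is checked in so that clearing-house-app builds resolve exactly the
+# dependency versions recorded below (reproducible builds).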
+version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "getrandom", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" + +[[package]] +name = "async-trait" +version = "0.1.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "axum" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1236b4b292f6c4d6dc34604bb5120d85c3fe1d1aa596bd5cc52ca054d13e7b9e" +dependencies = [ + "async-trait", + "axum-core", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.2.0", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + 
"tower-service", + "tracing", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "biscuit" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28865439fc81744500265d96c920985ceb6b612ef8564d43f1cc78e7a6c89e26" +dependencies = [ + "chrono", + "data-encoding", + "num-bigint", + "num-traits", + "once_cell", + "ring 0.16.20", + "serde", + "serde_json", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" +dependencies = [ + "serde", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bollard-stubs" +version = "1.42.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed59b5c00048f48d7af971b71f800fdf23e858844a6f9e4d32ca72e9399e7864" +dependencies = [ + "serde", + "serde_with", +] + +[[package]] +name = "bson" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce21468c1c9c154a85696bb25c20582511438edb6ad67f846ba1378ffdd80222" +dependencies = [ + "ahash", + "base64 0.13.1", + "bitvec", + "hex", + "indexmap", + "js-sys", + "once_cell", + "rand", + "serde", + "serde_bytes", + "serde_json", + "time", + "uuid", +] + +[[package]] +name = "bumpalo" +version = "3.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "cc" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets 0.52.4", +] + +[[package]] +name = "clearing-house-app" +version = "0.10.0" +dependencies = [ + "anyhow", + "async-trait", + "axum", + "base64 0.21.7", + "biscuit", + "chrono", + "config", + "futures", + "hyper 1.2.0", + "mongodb", + "num-bigint", + "once_cell", + "openssh-keys", + "rand", + "ring 0.16.20", + "sentry", + "serde", + "serde_json", + "serial_test", + "sqlx", + "tempfile", + "testcontainers", + "testcontainers-modules", + "thiserror", + "tokio", + "tower", + "tracing", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "config" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca" +dependencies = [ + "async-trait", + "lazy_static", + "nom", + "pathdiff", + "serde", + "toml", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crossbeam-queue" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "data-encoding" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "der" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version 0.4.0", + "syn 1.0.109", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "either" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +dependencies = [ + "serde", +] + +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "enum-as-inner" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "findshlibs" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64" +dependencies = [ + "cc", + "lazy_static", + "libc", + "winapi", +] + +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + +[[package]] +name = "flume" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +dependencies = [ + "futures-core", + "futures-sink", + "spin 0.9.8", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "h2" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap", + "slab", + 
"tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 1.1.0", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hashlink" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +dependencies = [ + "bytes", + "futures-core", + "http 1.1.0", + "http-body 1.0.0", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.6", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.2", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper 0.14.28", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.2.0", + "pin-project-lite", + "socket2 0.5.6", + "tokio", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = 
"2.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "ipconfig" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" +dependencies = [ + "socket2 0.5.6", + "widestring", + "windows-sys 0.48.0", + "winreg", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +dependencies = [ + "spin 0.5.2", +] + +[[package]] +name = "libc" +version = "0.2.153" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libsqlite3-sys" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" + +[[package]] +name = "lock_api" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + +[[package]] +name = "lru-cache" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] 
+name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "mongodb" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de59562e5c71656c098d8e966641b31da87b89dc3dcb6e761d3b37dcdfa0cb72" +dependencies = [ + "async-trait", + "base64 0.13.1", + "bitflags 1.3.2", + "bson", + "chrono", + "derivative", + "derive_more", + "futures-core", + "futures-executor", + "futures-io", + "futures-util", + "hex", + "hmac", + "lazy_static", + "md-5", + "openssl", + "openssl-probe", + "pbkdf2", + "percent-encoding", + "rand", + "rustc_version_runtime", + "rustls", + "rustls-pemfile", + "serde", + "serde_bytes", + "serde_with", + "sha-1", + "sha2", + "socket2 0.4.10", + "stringprep", + "strsim", + "take_mut", + "thiserror", + "tokio", + "tokio-openssl", + "tokio-rustls", + "tokio-util", + "trust-dns-proto", + "trust-dns-resolver", + "typed-builder", + "uuid", + "webpki-roots", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d869c01cc0c455284163fd0092f1f93835385ccab5a98a0dcc497b2f8bf055a9" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssh-keys" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c75a0ec2d1b302412fb503224289325fcc0e44600176864804c7211b055cfd58" +dependencies = [ + "base64 0.21.7", + "byteorder", + "md-5", + "sha2", + "thiserror", +] + +[[package]] +name = "openssl" +version = "0.10.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +dependencies = [ + "bitflags 2.4.2", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = 
"openssl-sys" +version = "0.9.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dda2b0f344e78efc2facf7d195d098df0dd72151b26ab98da807afc26c198dff" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "os_info" +version = "3.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52a07930afc1bd77ac9e1101dc18d3fc4986c6568e939c31d1c26657eb0ccbf5" +dependencies = [ + "log", + "serde", + "windows-sys 0.52.0", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.48.5", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" 
+version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro2" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.6", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "reqwest" +version = "0.11.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bf93c4af7a8bb7d879d51cebe797356ff10ae8516ace542b5182d9dcac10b2" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.3.24", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + +[[package]] +name = "resolv-conf" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" +dependencies = [ + "hostname", + "quick-error", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "rsa" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver 0.9.0", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver 1.0.22", +] + +[[package]] +name = "rustc_version_runtime" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", +] + +[[package]] +name = "rustix" +version = "0.38.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +dependencies = [ + 
"bitflags 2.4.2", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.21.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "ryu" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + +[[package]] +name = "semver" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + +[[package]] +name = "sentry" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "766448f12e44d68e675d5789a261515c46ac6ccd240abdd451a9c46c84a49523" +dependencies = [ + "httpdate", + "native-tls", + "reqwest", + "sentry-backtrace", + "sentry-contexts", + "sentry-core", + "sentry-debug-images", + 
"sentry-panic", + "sentry-tracing", + "tokio", + "ureq", +] + +[[package]] +name = "sentry-backtrace" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32701cad8b3c78101e1cd33039303154791b0ff22e7802ed8cc23212ef478b45" +dependencies = [ + "backtrace", + "once_cell", + "regex", + "sentry-core", +] + +[[package]] +name = "sentry-contexts" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ddd2a91a13805bd8dab4ebf47323426f758c35f7bf24eacc1aded9668f3824" +dependencies = [ + "hostname", + "libc", + "os_info", + "rustc_version 0.4.0", + "sentry-core", + "uname", +] + +[[package]] +name = "sentry-core" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1189f68d7e7e102ef7171adf75f83a59607fafd1a5eecc9dc06c026ff3bdec4" +dependencies = [ + "once_cell", + "rand", + "sentry-types", + "serde", + "serde_json", +] + +[[package]] +name = "sentry-debug-images" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4d0a615e5eeca5699030620c119a094e04c14cf6b486ea1030460a544111a7" +dependencies = [ + "findshlibs", + "once_cell", + "sentry-core", +] + +[[package]] +name = "sentry-panic" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1c18d0b5fba195a4950f2f4c31023725c76f00aabb5840b7950479ece21b5ca" +dependencies = [ + "sentry-backtrace", + "sentry-core", +] + +[[package]] +name = "sentry-tracing" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3012699a9957d7f97047fd75d116e22d120668327db6e7c59824582e16e791b2" +dependencies = [ + "sentry-backtrace", + "sentry-core", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "sentry-types" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7173fd594569091f68a7c37a886e202f4d0c1db1e1fa1d18a051ba695b2e2ec" +dependencies = [ + "debugid", + "hex", + "rand", + "serde", + "serde_json", + "thiserror", + "time", + "url", + "uuid", +] + +[[package]] +name = "serde" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_bytes" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "serde_json" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" +dependencies = [ + "indexmap", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "serial_test" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ad9342b3aaca7cb43c45c097dd008d4907070394bd0751a0aa8817e5a018d" +dependencies = [ + "dashmap", + "futures", + "lazy_static", + "log", + "parking_lot", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b93fb4adc70021ac1b47f7d45e8cc4169baaa7ea58483bc5b721d19a26202212" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" + +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlformat" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" +dependencies = [ + "itertools", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" +dependencies = [ + "ahash", + "atoi", + "byteorder", + "bytes", + "chrono", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashlink", + "hex", + "indexmap", + "log", + "memchr", + "once_cell", + "paste", + "percent-encoding", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlformat", + "thiserror", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid", + "webpki-roots", +] + +[[package]] +name = "sqlx-macros" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 1.0.109", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" +dependencies = [ + "dotenvy", + "either", + "heck", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 1.0.109", + "tempfile", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" +dependencies = [ + "atoi", + "base64 0.21.7", + "bitflags 2.4.2", + "byteorder", + "bytes", + "chrono", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "tracing", + "uuid", + 
"whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" +dependencies = [ + "atoi", + "base64 0.21.7", + "bitflags 2.4.2", + "byteorder", + "chrono", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" +dependencies = [ + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "sqlx-core", + "tracing", + "url", + "urlencoding", + "uuid", +] + +[[package]] +name = "stringprep" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +dependencies = [ + "finl_unicode", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "take_mut" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = 
"3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "testcontainers" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d2931d7f521af5bae989f716c3fa43a6af9af7ec7a5e21b59ae40878cec00" +dependencies = [ + "bollard-stubs", + "futures", + "hex", + "hmac", + "log", + "rand", + "serde", + "serde_json", + "sha2", +] + +[[package]] +name = "testcontainers-modules" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d0334776e1e8ee7c504a922c5236daf865ffe413aa630d84ae91dcce0b10bc3" +dependencies = [ + "testcontainers", +] + +[[package]] +name = "thiserror" +version = "1.0.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.5.6", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-openssl" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ffab79df67727f6acf57f1ff743091873c24c579b1e2ce4d8f53e47ded4d63d" +dependencies = [ + "futures-util", + "openssl", + "openssl-sys", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "trust-dns-proto" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.2.3", + "ipnet", + "lazy_static", + "log", + "rand", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "url", +] + +[[package]] +name = "trust-dns-resolver" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" +dependencies = [ + "cfg-if", + "futures-util", + "ipconfig", + "lazy_static", + "log", + "lru-cache", + "parking_lot", + "resolv-conf", + "smallvec", + "thiserror", + "tokio", + "trust-dns-proto", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typed-builder" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "uname" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" +dependencies = [ + "libc", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "ureq" +version = "2.9.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "11f214ce18d8b2cbe84ed3aa6486ed3f5b285cf8d8fbdbce9f3f767a724adc35" +dependencies = [ + "base64 0.21.7", + "log", + "native-tls", + "once_cell", + "url", +] + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna 0.5.0", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "uuid" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +dependencies = [ + "getrandom", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "whoami" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +dependencies = [ + "redox_syscall", + "wasite", +] + +[[package]] +name = "widestring" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "653f141f39ec16bba3c5abe400a0c60da7468261cc2cbf36805022876bc721a8" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + 
"windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + 
+[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" diff --git a/clearing-house-app/Cargo.toml b/clearing-house-app/Cargo.toml index c9bdecc5..2ea06ca3 100644 --- a/clearing-house-app/Cargo.toml +++ b/clearing-house-app/Cargo.toml @@ -1,8 +1,72 @@ -[workspace] - -members = [ - "core-lib", - "document-api", - "keyring-api", - "logging-service" +[package] +name = "clearing-house-app" +version = "0.10.0" +license = "Apache-2.0" +repository = "https://github.com/ids-basecamp/clearinghouse" +authors = [ + "Mark Gall ", + "Georg Bramm ", + "Maximilian Schönenberg " ] +edition = "2021" + +[dependencies] +# JWT +biscuit = "0.6.0" +# Database +mongodb = { version = ">= 2.7.0", features = ["openssl-tls"], optional = true } +# Serialization +serde = { version = "> 1.0.184", features = ["derive"] } +serde_json = "1" +# Error handling +anyhow = "1" +# Time handling +chrono = { version = "0.4.26", features = ["serde", "clock", "std"], default-features = false } +# Encryption and hashing +ring = "0.16.20" +# Config reader +config = { version = "0.13.3", default-features = false, features = ["toml"] } +# Logging/Tracing +tracing = "0.1" +tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } +# Random number generation +rand = "0.8.5" +# lazy initialization of static variables +once_cell = "1.18.0" +# Base64 encoding +base64 = "0.21.7" +# UUID generation +uuid = { version = "1", features = ["serde", "v4"] } +# Big integer handling (RSA key modulus and exponent) +num-bigint = "0.4.3" +# Generating fingerprint of RSA keys +openssh-keys = "0.6.2" +# Async runtime +tokio = { version = ">= 1.32.0", features = ["macros", "rt-multi-thread", "signal"] } +# HTTP server +axum = { version = "0.7.4", features = ["json", "http2"] } +# Helper to allow defining traits for async functions +async-trait = "0.1.73" +# Helper for working with futures +futures = "0.3.29" +# Helper for creating custom error types +thiserror = "1.0.48" +# Optional: Sentry integration +sentry = { version = "0.32.1", optional = true } +sqlx = { version = "0.7.3", features = ["runtime-tokio-rustls", "postgres", "chrono", "uuid"], optional = true } + +[dev-dependencies] +# Controlling execution of unit test cases, which could interfere with each other +serial_test = "3" +# Tempfile creation for testing +tempfile = "3.8" +tower = { version = "0.4", features = ["util"] } +hyper = { version = "1", features = ["full"] } +testcontainers = "0.15.0" +testcontainers-modules = { version = "0.3.4", features = ["postgres"] } + +[features] +default = ["postgres"] +sentry = ["dep:sentry"] +mongodb = ["dep:mongodb"] +postgres = ["dep:sqlx"] diff --git a/clearing-house-app/Dockerfile b/clearing-house-app/Dockerfile new file mode 100644 index 00000000..12dec4a9 --- /dev/null +++ b/clearing-house-app/Dockerfile @@ -0,0 +1,27 @@ +# Use an official Rust runtime as a parent image +FROM rust:latest AS build-env 
+ +# Set the working directory inside the container +WORKDIR /usr/src/chapp + +# Copy the Cargo.toml, Cargo.lock and config.toml files first to leverage the Docker layer cache +COPY Cargo.toml Cargo.lock config.toml ./ + +# Copy the source code into the container +COPY migrations ./migrations +COPY src ./src + +# Build the Rust application in release mode +RUN cargo build --release + +FROM gcr.io/distroless/cc-debian12 + +# Expose any necessary ports (if your Rust app listens on a port) +EXPOSE 8000 + +WORKDIR /app +COPY init_db /app/init_db +COPY config.toml /app/ + +COPY --from=build-env /usr/src/chapp/target/release/clearing-house-app /app/ +CMD ["/app/clearing-house-app"] diff --git a/clearing-house-app/build.rs b/clearing-house-app/build.rs new file mode 100644 index 00000000..d5068697 --- /dev/null +++ b/clearing-house-app/build.rs @@ -0,0 +1,5 @@ +// generated by `sqlx migrate build-script` +fn main() { + // trigger recompilation when a new migration is added + println!("cargo:rerun-if-changed=migrations"); +} diff --git a/clearing-house-app/certs/daps-dev.der b/clearing-house-app/certs/daps-dev.der deleted file mode 100644 index 22bf0dfb..00000000 Binary files a/clearing-house-app/certs/daps-dev.der and /dev/null differ diff --git a/clearing-house-app/certs/daps.der b/clearing-house-app/certs/daps.der deleted file mode 100644 index b6d6d837..00000000 Binary files a/clearing-house-app/certs/daps.der and /dev/null differ diff --git a/clearing-house-app/config.toml b/clearing-house-app/config.toml new file mode 100644 index 00000000..c61e1b93 --- /dev/null +++ b/clearing-house-app/config.toml @@ -0,0 +1,5 @@ +log_level = "INFO" # TRACE, DEBUG, INFO, WARN, ERROR +database_url = "postgres://my_user:my_password@localhost:5432/ch" +clear_db = true +signing_key = "keys/private_key.der" # Optional +performance_tracing = false diff --git a/clearing-house-app/core-lib/Cargo.toml b/clearing-house-app/core-lib/Cargo.toml deleted file mode 100644 index e3977e6f..00000000 --- a/clearing-house-app/core-lib/Cargo.toml +++ /dev/null @@ -1,34 +0,0 @@ -[package] -name = "core-lib" -version = "0.10.0" -authors = [ - "Mark Gall ", - "Georg Bramm ", -] -edition = "2018" - -[dependencies] -aes = "0.6.0" -aes-gcm-siv = "0.9.0" -# As of now there is no release of biscuit after 04.03.2022 which introduces Clone to JWKS -biscuit = { git = "https://github.com/lawliet89/biscuit", branch = "master" } -base64 = "0.9.3" -blake2-rfc = "0.2.18" -chrono = { version = "0.4", features = ["serde"] } -error-chain = "0.12.4" -fern = "0.5" -figment = { version = "0.10", features = ["yaml", "env"] } -generic-array = "0.14.4" -hex = "0.4.2" -log = "0.4" -mongodb ="2.3.0" -num-bigint = "0.4.3" -openssh-keys = "0.5.0" -percent-encoding = "2.1.0" -reqwest = { version="0.11.11", features = ["default", "json"]} -ring = "0.16.20" -rocket = { version = "0.5.0-rc.1", features = ["json"] } -serde = "1.0" -serde_derive = "1.0" -serde_json = "1.0" -uuid = { version = "0.8", features = ["serde", "v4"] } diff --git a/clearing-house-app/core-lib/certs b/clearing-house-app/core-lib/certs deleted file mode 120000 index 36343b9b..00000000 --- a/clearing-house-app/core-lib/certs +++ /dev/null @@ -1 +0,0 @@ -../certs \ No newline at end of file diff --git a/clearing-house-app/core-lib/config.yml b/clearing-house-app/core-lib/config.yml deleted file mode 100644 index ee371c32..00000000 --- a/clearing-house-app/core-lib/config.yml +++ /dev/null @@ -1,6 +0,0 @@ -database_url: 127.0.0.1 -database_port: 27017 - -document_api_url: 
http://localhost:8001 -keyring_api_url: http://localhost:8002 -daps_api_url: https://daps.aisec.fraunhofer.de diff --git a/clearing-house-app/core-lib/src/api/client/document_api.rs b/clearing-house-app/core-lib/src/api/client/document_api.rs deleted file mode 100644 index 1207edd9..00000000 --- a/clearing-house-app/core-lib/src/api/client/document_api.rs +++ /dev/null @@ -1,143 +0,0 @@ -use std::env; -use reqwest::Client; -use reqwest::StatusCode; -use reqwest::header::{HeaderValue, CONTENT_TYPE}; -use serde_json; -use crate::api::{ApiClient, DocumentReceipt, QueryResult}; -use crate::api::crypto::create_service_token; -use crate::constants::{ROCKET_DOC_API, DOCUMENT_API_URL, SERVICE_HEADER, ENV_DOCUMENT_SERVICE_ID}; -use crate::errors::*; -use crate::model::document::Document; -use crate::model::SortingOrder; -use crate::util::url_encode; - -#[derive(Clone)] -pub struct DocumentApiClient { - uri: String, - api_service_id: String, - caller_service_id: String -} - -impl ApiClient for DocumentApiClient { - fn new(uri: &str, service_id: &str) -> DocumentApiClient { - let uri = String::from(uri); - let api_id = match env::var(ENV_DOCUMENT_SERVICE_ID){ - Ok(id) => id, - Err(_e) => { - panic!("Service ID not configured. Please configure {}", ENV_DOCUMENT_SERVICE_ID); - } - }; - DocumentApiClient { - uri, - api_service_id: api_id.to_string(), - caller_service_id: service_id.to_string() - } - } - - fn get_conf_param() -> String { - String::from(DOCUMENT_API_URL) - } -} - -impl DocumentApiClient{ - - pub async fn get_document(&self, client_id: &str, pid: &String, id: &String) -> Result<Option<Document>>{ - let document_url = format!("{}{}/{}/{}", self.uri, ROCKET_DOC_API, url_encode(pid), url_encode(id)); - let client = Client::new(); - - let token = create_service_token(self.caller_service_id.as_str(), self.api_service_id.as_str(), client_id); - - debug!("calling {}", &document_url); - let response = client - .get(document_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .header(SERVICE_HEADER, &token) - .send().await?; - - debug!("Status Code: {}", &response.status()); - match response.status(){ - StatusCode::OK => { - let doc: Document = response.json().await?; - Ok(Some(doc)) - } - _ => Ok(None) - } - } - - pub async fn get_document_with_integrity_check(&self, client_id: &str, pid: &String, id: &String, hash: &String) -> Result<Document>{ - let document_url = format!("{}{}/{}/{}", self.uri, ROCKET_DOC_API, url_encode(pid), url_encode(id)); - let client = Client::new(); - - let token = create_service_token(self.caller_service_id.as_str(), self.api_service_id.as_str(), client_id); - - debug!("calling {}", &document_url); - let response = client - .get(document_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .header(SERVICE_HEADER, &token) - .query(&[("hash", hash.as_str())]) - .send().await?; - - debug!("Status Code: {}", &response.status()); - let doc: Document = response.json().await?; - Ok(doc) - } - - pub async fn get_documents(&self, client_id: &str, pid: &String, page: i32, size: i32, sort: SortingOrder, date_from: Option<String>, date_to: Option<String>) -> Result<QueryResult>{ - let document_url = format!("{}{}/{}", self.uri, ROCKET_DOC_API, url_encode(pid)); - let client = Client::new(); - debug!("calling {}", &document_url); - - let token = create_service_token(self.caller_service_id.as_str(), self.api_service_id.as_str(), client_id); - - let mut request = client - .get(document_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - 
.header(SERVICE_HEADER, &token) - - .query(&[("page", page)]) - .query(&[("size", size)]) - .query(&[("sort", sort)]); - - if date_from.is_some(){ - request = request.query(&[("date_from", date_from.unwrap())]); - } - - if date_to.is_some(){ - request = request.query(&[("date_to", date_to.unwrap())]); - } - - let response = request.send().await?; - - debug!("Status Code: {}", &response.status()); - let result: QueryResult = response.json().await?; - Ok(result) - } - - pub async fn create_document(&self, client_id: &str, doc: &Document) -> Result<DocumentReceipt> { - let document_url = format!("{}{}", self.uri, ROCKET_DOC_API); - let client = Client::new(); - - let json_data = serde_json::to_string(doc)?; - let token = create_service_token(self.caller_service_id.as_str(), self.api_service_id.as_str(), client_id); - - debug!("created jwt: {}", &token); - debug!("calling {}", &document_url); - let response = client - .post(document_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .header(SERVICE_HEADER, &token) - .body(json_data).send().await?; - - debug!("Status Code: {}", &response.status()); - match &response.status(){ - &StatusCode::CREATED => { - let receipt = response.json().await?; - println!("Payload: {:?}", receipt); - Ok(receipt) - }, - _ => bail!("Error while calling create_document(): status {} content {:?}", response.status(), response.text().await?) - } - - } - } \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/api/client/keyring_api.rs b/clearing-house-app/core-lib/src/api/client/keyring_api.rs deleted file mode 100644 index 5c946c0b..00000000 --- a/clearing-house-app/core-lib/src/api/client/keyring_api.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::env; -use reqwest::Client; -use reqwest::header::{CONTENT_TYPE, HeaderValue}; -use crate::api::ApiClient; -use crate::api::crypto::create_service_token; -use crate::errors::*; -use crate::constants::{ROCKET_KEYRING_API, KEYRING_API_URL, SERVICE_HEADER, ENV_KEYRING_SERVICE_ID}; -use crate::model::crypto::{KeyMap, KeyMapListItem, KeyCtList}; - -#[derive(Clone)] -pub struct KeyringApiClient { - uri: String, - api_service_id: String, - caller_service_id: String -} - -impl ApiClient for KeyringApiClient { - - fn new(uri: &str, service_id: &str) -> KeyringApiClient { - let uri = String::from(uri); - let api_id = match env::var(ENV_KEYRING_SERVICE_ID){ - Ok(id) => id, - Err(_e) => { - panic!("Service ID not configured. 
Please configure {}", ENV_KEYRING_SERVICE_ID); - } - }; - KeyringApiClient { - uri, - api_service_id: api_id.to_string(), - caller_service_id: service_id.to_string() - } - } - - fn get_conf_param() -> String { - String::from(KEYRING_API_URL) - } -} - -impl KeyringApiClient { - - /// Calls the keyring api to generate new aes keys - pub async fn generate_keys(&self, client_id: &str, pid: &str, dt_id: &str) -> Result { - let keys_url = format!("{}{}/generate_keys/{}", self.uri, ROCKET_KEYRING_API, pid); - let client = Client::new(); - - let token = create_service_token(self.caller_service_id.as_str(), self.api_service_id.as_str(), client_id); - - debug!("calling {}", &keys_url); - let result = client.get(keys_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .header(SERVICE_HEADER, &token) - .query(&[("dt_id", dt_id)]) - .send().await?; - - debug!("Status Code: {}", result.status()); - let key_map: KeyMap = result.json().await?; - trace!("Payload: {:?}", key_map); - Ok(key_map) - } - - /// Calls the keyring api to decrypt aes keys - pub async fn decrypt_keys(&self, client_id: &str, pid: &str, dt_id: &str, ct: &[u8]) -> Result{ - let keys_url = format!("{}{}/decrypt_keys/{}/{}", self.uri, ROCKET_KEYRING_API, pid, hex::encode_upper(ct)); - let client = Client::new(); - - let token = create_service_token(self.caller_service_id.as_str(), self.api_service_id.as_str(), client_id); - - debug!("calling {}", &keys_url); - let result = client.get(keys_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .header(SERVICE_HEADER, &token) - .query(&[("dt_id", dt_id)]) - .send().await?; - - debug!("Status Code: {}", &result.status()); - let key_map: KeyMap = result.json().await?; - trace!("Payload: {:?}", key_map); - Ok(key_map) - } - - /// Calls the keyring api to decrypt aes keys - pub async fn decrypt_multiple_keys(&self, client_id: &str, pid: &str, cts: &KeyCtList) -> Result>{ - let keys_url = format!("{}{}/decrypt_keys/{}", self.uri, ROCKET_KEYRING_API, pid); - let client = Client::new(); - - let json_data = serde_json::to_string(cts)?; - let token = create_service_token(self.caller_service_id.as_str(), self.api_service_id.as_str(), client_id); - - debug!("calling {}", &keys_url); - let result = client.get(keys_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .header(SERVICE_HEADER, &token) - .body(json_data) - .send().await?; - - debug!("Status Code: {}", &result.status()); - let key_maps: Vec = result.json().await?; - trace!("Payload: {:?}", key_maps); - Ok(key_maps) - } -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/api/client/mod.rs b/clearing-house-app/core-lib/src/api/client/mod.rs deleted file mode 100644 index a5bcba0a..00000000 --- a/clearing-house-app/core-lib/src/api/client/mod.rs +++ /dev/null @@ -1,96 +0,0 @@ -use std::env; -use rocket::fairing::{self, Fairing, Info, Kind}; -use rocket::{Rocket, Build}; -use crate::api::ApiClient; -use crate::api::client::keyring_api::KeyringApiClient; -use crate::api::client::document_api::DocumentApiClient; -use crate::constants::{ENV_DOCUMENT_SERVICE_ID, ENV_KEYRING_SERVICE_ID}; - -pub mod document_api; -pub mod keyring_api; - -#[derive(Clone, Debug)] -pub enum ApiClientEnum{ - Document, - Keyring -} - -#[derive(Clone, Debug)] -pub struct ApiClientConfigurator{ - api: ApiClientEnum, -} - -impl ApiClientConfigurator{ - pub fn new(api: ApiClientEnum) -> Self{ - ApiClientConfigurator{ - api - } - } -} - 
-#[rocket::async_trait] -impl Fairing for ApiClientConfigurator { - fn info(&self) -> Info { - match self.api { - ApiClientEnum::Document => { - Info { - name: "Configuring Document Api Client", - kind: Kind::Ignite - } - }, - ApiClientEnum::Keyring => { - Info { - name: "Configuring Keyring Api Client", - kind: Kind::Ignite - } - } - } - } - - async fn on_ignite(&self, rocket: Rocket<Build>) -> fairing::Result { - let config_key = match self.api { - ApiClientEnum::Document => { - debug!("Configuring Document Api Client..."); - DocumentApiClient::get_conf_param() - }, - ApiClientEnum::Keyring => { - debug!("Configuring Keyring Api Client..."); - KeyringApiClient::get_conf_param() - } - }; - let api_url: String = rocket.figment().extract_inner(&config_key).unwrap_or(String::new()); - if api_url.len() > 0 { - debug!("...found api url: {}", &api_url); - match self.api { - ApiClientEnum::Document => { - match env::var(ENV_DOCUMENT_SERVICE_ID){ - Ok(id) => { - let client: DocumentApiClient = ApiClient::new(&api_url, &id); - Ok(rocket.manage(client)) - }, - Err(_e) => { - error!("Service ID not configured. Please configure environment variable {}", ENV_DOCUMENT_SERVICE_ID); - Err(rocket) - } - } - }, - ApiClientEnum::Keyring => { - match env::var(ENV_KEYRING_SERVICE_ID){ - Ok(id) => { - let client: KeyringApiClient = ApiClient::new(&api_url, &id); - Ok(rocket.manage(client)) - }, - Err(_e) => { - error!("Service ID not configured. Please configure environment variable {}", ENV_KEYRING_SERVICE_ID); - Err(rocket) - } - } - } - } - } - else{ - error!("...api url not found in config file."); - Err(rocket) - } - } -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/api/crypto.rs b/clearing-house-app/core-lib/src/api/crypto.rs deleted file mode 100644 index c433d441..00000000 --- a/clearing-house-app/core-lib/src/api/crypto.rs +++ /dev/null @@ -1,170 +0,0 @@ -use std::env; -use std::fmt::{Display, Formatter}; -use biscuit::{ClaimPresenceOptions, ClaimsSet, Empty, jwa::SignatureAlgorithm, JWT, RegisteredClaims, SingleOrMultiple, Timestamp, ValidationOptions}; -use biscuit::jwk::{AlgorithmParameters, CommonParameters, JWKSet}; -use biscuit::{jws, jws::Secret}; -use biscuit::Presence::Required; -use biscuit::Validation::Validate; -use chrono::{Duration, Utc}; -use num_bigint::BigUint; -use ring::signature::KeyPair; -use rocket::http::Status; -use rocket::request::{Request, FromRequest, Outcome}; -use serde::{Deserialize,Serialize}; -use crate::errors::*; -use crate::constants::{ENV_SHARED_SECRET, SERVICE_HEADER}; -use crate::util::ServiceConfig; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ChClaims{ - pub client_id: String, -} - -impl ChClaims{ - pub fn new(client_id: &str) -> ChClaims{ - ChClaims{ - client_id: client_id.to_string(), - } - } -} - -impl Display for ChClaims{ - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "<{}>", self.client_id) - } -} - -#[derive(Debug)] -pub enum ChClaimsError { - Missing, - Invalid, -} - -#[rocket::async_trait] -impl<'r> FromRequest<'r> for ChClaims { - type Error = ChClaimsError; - - async fn from_request(request: &'r Request<'_>) -> Outcome<Self, Self::Error> { - match request.headers().get_one(&SERVICE_HEADER) { - None => Outcome::Failure((Status::BadRequest, ChClaimsError::Missing)), - Some(token) => { - debug!("...received service header: {:?}", token); - let service_config = request.rocket().state::<ServiceConfig>().unwrap(); - match decode_token::<ChClaims>(token, service_config.service_id.as_str()){ - Ok(claims) => { - debug!("...retrieved 
claims and succeed"); - Outcome::Success(claims) - }, - Err(_) => Outcome::Failure((Status::BadRequest, ChClaimsError::Invalid)) - } - } - } - } -} - -pub fn get_jwks(key_path: &str) -> Option>{ - let keypair = biscuit::jws::Secret::rsa_keypair_from_file(key_path).unwrap(); - - if let biscuit::jws::Secret::RsaKeyPair(a) = keypair{ - let pk_modulus = BigUint::from_bytes_be(a.as_ref().public_key().modulus().big_endian_without_leading_zero()); - let pk_e = BigUint::from_bytes_be(a.as_ref().public_key().exponent().big_endian_without_leading_zero()); - - let params = biscuit::jwk::RSAKeyParameters{ - n: pk_modulus, - e: pk_e, - ..Default::default() - }; - - let mut common = CommonParameters::default(); - common.key_id = get_fingerprint(key_path); - - let jwk = biscuit::jwk::JWK::{ - common, - algorithm: AlgorithmParameters::RSA(params), - additional: Empty::default(), - }; - - let jwks = biscuit::jwk::JWKSet::{ - keys: vec!(jwk) - }; - return Some(jwks) - } - None -} - -pub fn get_fingerprint(key_path: &str) -> Option{ - let keypair = biscuit::jws::Secret::rsa_keypair_from_file(key_path).unwrap(); - if let biscuit::jws::Secret::RsaKeyPair(a) = keypair { - let pk_modulus = a.as_ref().public_key().modulus().big_endian_without_leading_zero().to_vec(); - let pk_e = a.as_ref().public_key().exponent().big_endian_without_leading_zero().to_vec(); - - let pk = openssh_keys::PublicKey::from_rsa(pk_e, pk_modulus); - return Some(pk.fingerprint()) - } - None -} - -pub fn create_service_token(issuer: &str, audience: &str, client_id: &str) -> String{ - let private_claims = ChClaims::new(client_id); - create_token(issuer, audience, &private_claims) -} - -pub fn create_token Deserialize<'de>> (issuer: &str, audience: &str, private_claims: &T) -> String{ - let signing_secret = match env::var(ENV_SHARED_SECRET){ - Ok(secret) => { - Secret::Bytes(secret.to_string().into_bytes()) - }, - Err(_) => { - panic!("Shared Secret not configured. Please configure environment variable {}", ENV_SHARED_SECRET); - } - }; - let expiration_date = Utc::now() + Duration::minutes(5); - - let claims = ClaimsSet::{ - registered: RegisteredClaims{ - issuer: Some(issuer.to_string()), - issued_at: Some(Timestamp::from(Utc::now())), - audience: Some(SingleOrMultiple::Single(audience.to_string())), - expiry: Some(Timestamp::from(expiration_date)), - ..Default::default() - }, - private: private_claims.clone() - }; - - // Construct the JWT - let jwt = jws::Compact::new_decoded( - From::from(jws::RegisteredHeader { - algorithm: SignatureAlgorithm::HS256, - ..Default::default() - }), - claims.clone() - ); - - jwt.into_encoded(&signing_secret).unwrap().unwrap_encoded().to_string() -} - -pub fn decode_token Deserialize<'de>>(token: &str, audience: &str) -> Result{ - let signing_secret = match env::var(ENV_SHARED_SECRET){ - Ok(secret) => { - Secret::Bytes(secret.to_string().into_bytes()) - }, - Err(e) => { - error!("Shared Secret not configured. 
Please configure environment variable {}", ENV_SHARED_SECRET); - return Err(Error::from(e)) - } - }; - let jwt: jws::Compact<ClaimsSet<T>, Empty> = JWT::<_, Empty>::new_encoded(token); - let decoded_jwt = jwt.decode(&signing_secret,SignatureAlgorithm::HS256)?; - let mut val_options = ValidationOptions::default(); - let mut claim_presence_options = ClaimPresenceOptions::default(); - claim_presence_options.expiry = Required; - claim_presence_options.issuer = Required; - claim_presence_options.audience = Required; - claim_presence_options.issued_at = Required; - val_options.claim_presence_options = claim_presence_options; - val_options.issued_at = Validate(Duration::minutes(5)); - // Issuer is not validated. Wouldn't make much of a difference if we did - val_options.audience = Validate(audience.to_string()); - assert!(decoded_jwt.validate(val_options).is_ok()); - Ok(decoded_jwt.payload().unwrap().private.clone()) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/api/mod.rs b/clearing-house-app/core-lib/src/api/mod.rs deleted file mode 100644 index 6ed026d3..00000000 --- a/clearing-house-app/core-lib/src/api/mod.rs +++ /dev/null @@ -1,75 +0,0 @@ -use std::string::ToString; -use rocket::serde::json::Value; -use crate::model::document::Document; - -pub mod client; -pub mod crypto; - -pub trait ApiClient{ - fn new(url: &str, service_id: &str) -> Self; - fn get_conf_param() -> String; -} - -#[derive(Responder, Debug)] -pub enum ApiResponse { - #[response(status = 200)] - PreFlight(()), - #[response(status = 400, content_type = "text/plain")] - BadRequest(String), - #[response(status = 201, content_type = "json")] - SuccessCreate(Value), - #[response(status = 200, content_type = "json")] - SuccessOk(Value), - #[response(status = 204, content_type = "text/plain")] - SuccessNoContent(String), - #[response(status = 401, content_type = "text/plain")] - Unauthorized(String), - #[response(status = 403, content_type = "text/plain")] - Forbidden(String), - #[response(status = 404, content_type = "text/plain")] - NotFound(String), - #[response(status = 500, content_type = "text/plain")] - InternalError(String), -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct DocumentReceipt{ - pub timestamp: i64, - pub pid: String, - pub doc_id: String, - pub chain_hash: String, -} - -impl DocumentReceipt{ - pub fn new(timestamp: i64, pid: &str, doc_id: &str, chain_hash: &str) -> DocumentReceipt{ - DocumentReceipt{ - timestamp, - pid: pid.to_string(), - doc_id: doc_id.to_string(), - chain_hash: chain_hash.to_string(), - } - } -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct QueryResult{ - pub date_from: i64, - pub date_to: i64, - pub page: Option<i32>, - pub size: Option<i32>, - pub order: String, - pub documents: Vec<Document> -} - -impl QueryResult{ - pub fn new(date_from: i64, date_to: i64, page: Option<i32>, size: Option<i32>, order: String, documents: Vec<Document>) -> QueryResult{ - QueryResult{ - date_from, - date_to, - page, - size, - order, - documents - } - } -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/constants.rs b/clearing-house-app/core-lib/src/constants.rs deleted file mode 100644 index 664477a2..00000000 --- a/clearing-house-app/core-lib/src/constants.rs +++ /dev/null @@ -1,87 +0,0 @@ -// definition of daps constants -pub const DAPS_AUD: &'static str = "idsc:IDS_CONNECTORS_ALL"; -pub const DAPS_JWKS: &'static str = ".well-known/jwks.json"; -pub const DAPS_KID: &'static str = "default"; -pub const DAPS_AUTHHEADER: &'static str = "Authorization"; -pub const 
DAPS_AUTHBEARER: &'static str = "Bearer"; -pub const DAPS_CERTIFICATES: &'static str = "certs"; - -// definition of custom headers -pub const SERVICE_HEADER: &'static str = "CH-SERVICE"; - -// definition of config parameters (in config files) -pub const DATABASE_URL: &'static str = "database_url"; -pub const DOCUMENT_API_URL: &'static str = "document_api_url"; -pub const KEYRING_API_URL: &'static str = "keyring_api_url"; -pub const DAPS_API_URL: &'static str = "daps_api_url"; -pub const CLEAR_DB: &'static str = "clear_db"; - -// define here the config options from environment variables -pub const ENV_API_LOG_LEVEL: &'static str = "API_LOG_LEVEL"; -pub const ENV_SHARED_SECRET: &'static str = "SHARED_SECRET"; -pub const ENV_DOCUMENT_SERVICE_ID: &'static str = "SERVICE_ID_DOC"; -pub const ENV_KEYRING_SERVICE_ID: &'static str = "SERVICE_ID_KEY"; -pub const ENV_LOGGING_SERVICE_ID: &'static str = "SERVICE_ID_LOG"; - -// definition of rocket mount points -pub const ROCKET_DOC_API: &'static str = "/doc"; -pub const ROCKET_DOC_TYPE_API: &'static str = "/doctype"; -pub const ROCKET_POLICY_API: &'static str = "/policy"; -pub const ROCKET_STATISTICS: &'static str = "/statistics"; -pub const ROCKET_PROCESS_API: &'static str = "/process"; -pub const ROCKET_KEYRING_API: &'static str = "/keyring"; -pub const ROCKET_USER_API: &'static str = "/users"; - -// definition of service names -pub const DOCUMENT_DB_CLIENT: &'static str = "document-api"; -pub const KEYRING_DB_CLIENT: &'static str = "keyring-api"; -pub const PROCESS_DB_CLIENT: &'static str = "logging-service"; - -// definition of table names -pub const MONGO_DB: &'static str = "ch_ids"; -pub const DOCUMENT_DB: &'static str = "document"; -pub const KEYRING_DB: &'static str = "keyring"; -pub const PROCESS_DB: &'static str = "process"; -pub const MONGO_COLL_DOCUMENTS: &'static str = "documents"; -pub const MONGO_COLL_DOCUMENT_BUCKET: &'static str = "document_bucket"; -pub const MONGO_COLL_DOC_TYPES: &'static str = "doc_types"; -pub const MONGO_COLL_DOC_PARTS: &'static str = "parts"; -pub const MONGO_COLL_PROCESSES: &'static str = "processes"; -pub const MONGO_COLL_TRANSACTIONS: &'static str = "transactions"; -pub const MONGO_COLL_MASTER_KEY: &'static str = "keys"; - -// definition of database fields -pub const MONGO_ID: &'static str = "id"; -pub const MONGO_MKEY: &'static str = "msk"; -pub const MONGO_PID: &'static str = "pid"; -pub const MONGO_DT_ID: &'static str = "dt_id"; -pub const MONGO_NAME: &'static str = "name"; -pub const MONGO_OWNER: &'static str = "owner"; -pub const MONGO_TS: &'static str = "ts"; -pub const MONGO_TC: &'static str = "tc"; - -pub const MONGO_DOC_ARRAY: &'static str = "documents"; -pub const MONGO_COUNTER: &'static str = "counter"; -pub const MONGO_FROM_TS: &'static str = "from_ts"; -pub const MONGO_TO_TS: &'static str = "to_ts"; - -// definition of default database values -pub const DEFAULT_PROCESS_ID: &'static str = "default"; -pub const MAX_NUM_RESPONSE_ENTRIES: u64 = 1000; -pub const DEFAULT_NUM_RESPONSE_ENTRIES: u64 = 100; - -pub const DEFAULT_DOC_TYPE: &'static str = "IDS_MESSAGE"; - -// split string symbols for vec_to_string and string_to_vec -pub const SPLIT_QUOTE: &'static str = "'"; -pub const SPLIT_SIGN: &'static str = "~"; -pub const SPLIT_CT: &'static str = "::"; - -// definition of file names and folders -pub const FOLDER_DB: &'static str = "db_init"; -pub const FOLDER_DATA: &'static str = "data"; -pub const FILE_DOC: &'static str = "document.json"; -pub const FILE_DEFAULT_DOC_TYPE: &'static str = 
"init_db/default_doc_type.json"; - -// definition of special document parts -pub const PAYLOAD_PART: &'static str = "payload"; \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/db/mod.rs b/clearing-house-app/core-lib/src/db/mod.rs deleted file mode 100644 index 6d9db45a..00000000 --- a/clearing-house-app/core-lib/src/db/mod.rs +++ /dev/null @@ -1,22 +0,0 @@ -use mongodb::Client; -use mongodb::options::ClientOptions; -use crate::errors::*; - -pub trait DataStoreApi{ - fn new(client: Client) -> Self; -} - -pub async fn init_database_client(db_url: &str, client_name: Option) -> Result{ - let mut client_options; - - match ClientOptions::parse(&format!("{}", db_url)).await{ - Ok(co) => {client_options = co;} - Err(_) => { - bail!("Can't parse database connection string"); - } - }; - - client_options.app_name = client_name; - let client = Client::with_options(client_options)?; - Ok(T::new(client)) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/db/public_db.rs b/clearing-house-app/core-lib/src/db/public_db.rs deleted file mode 100644 index 0babcf2f..00000000 --- a/clearing-house-app/core-lib/src/db/public_db.rs +++ /dev/null @@ -1,176 +0,0 @@ -use crate::mongodb::{ - Bson, - db::ThreadedDatabase, - doc, - coll::options::FindOneAndUpdateOptions -}; -use crate::constants::{MONGO_ID, MONGO_PID, MONGO_DT_ID, MONGO_COLL_DOCUMENTS}; -use crate::db::DataStore; -use crate::errors::*; -use crate::model::document::EncryptedDocument; -use rocket_contrib::json; - -impl DataStore { - - // DOCUMENT - - pub fn add_document(&self, doc: EncryptedDocument) -> Result { - // The model collection - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - println!("add_document({:?})", json!(doc)); - let serialized_bson = mongodb::to_bson(&doc)?; - match serialized_bson.as_document(){ - Some(document) => { - match coll.insert_one(document.clone(), None) { - Ok(res) => { - println!("inserted document: acknowledged:{:?} inserted_id:{:?}", res.acknowledged, res.inserted_id); - Ok(true) - }, - Err(e) => { - bail!("error_ insertion of document failed: {}", e); - } - } - }, - _ => bail!("conversion to document failed!"), - } - } - - /// deletes model from db - pub fn delete_document(&self, id: &String) -> Result { - // The model collection - debug!("trying to delete entry with id '{}'", id); - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - let result = coll.delete_one(doc! { MONGO_ID: id }, None)?; - if result.deleted_count == 1{ - Ok(true) - } - else{ - debug!("deleted_count={}", result.deleted_count); - Ok(false) - } - } - - /// checks if the document exists - /// document ids are globally unique - pub fn exists_document(&self, id: &String) -> Result { - // The model collection - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - let result = coll.find_one(Some(doc! { MONGO_ID: id.clone() }), None)?; - match result { - Some(_r) => Ok(true), - None => { - debug!("document with id '{}' does not exist!", &id); - Ok(false) - } - } - } - - /// gets the model from the db - pub fn get_document(&self, id: &String, pid: &String) -> Result> { - debug!("Looking for doc: {}", &id); - // The model collection - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - let result = coll.find_one(Some(doc! 
{ MONGO_ID: id.clone(), MONGO_PID: pid.clone() }), None)?; - - match result { - Some(r) => { - let doc = mongodb::from_bson::<EncryptedDocument>(Bson::Document(r))?; - Ok(Some(doc)) - }, - None => { - Ok(None) - } - } - } - - /// gets documents for a single process from the db - pub fn get_documents_for_pid(&self, pid: &String) -> Result<Vec<EncryptedDocument>> { - // The model collection - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - // Create cursor that finds all documents - let mut cursor = coll.find(Some(doc! { MONGO_PID: pid.clone() }), None)?; - let mut result = vec!(); - - loop{ - if cursor.has_next()?{ - // we checked has_next() so unwrap() is safe to get to the Result - let d = cursor.next().unwrap()?; - let doc = mongodb::from_bson::<EncryptedDocument>(Bson::Document(d))?; - result.push(doc); - } - else{ - break; - } - } - Ok(result) - } - - /// gets documents of a specific document type for a single process from the db - pub fn get_documents_of_dt_for_pid(&self, dt_id: &String, pid: &String) -> Result<Vec<EncryptedDocument>> { - // The model collection - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - // Create cursor that finds all documents - let mut cursor = coll.find(Some(doc! { MONGO_PID: pid.clone(), MONGO_DT_ID: dt_id.clone() }), None)?; - let mut result = vec!(); - - loop{ - if cursor.has_next()?{ - // we checked has_next() so unwrap() is safe to get to the Result - let d = cursor.next().unwrap()?; - let doc = mongodb::from_bson::<EncryptedDocument>(Bson::Document(d))?; - result.push(doc); - } - else{ - break; - } - } - Ok(result) - } - - /// gets all documents from the db - pub fn get_all_documents(&self) -> Result<Vec<EncryptedDocument>> { - // The model collection - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - // Create cursor that finds all documents - let mut cursor = coll.find(None, None)?; - let mut result = vec!(); - - loop{ - if cursor.has_next()?{ - // we checked has_next() so unwrap() is safe to get to the Result - let d = cursor.next().unwrap()?; - let doc = mongodb::from_bson::<EncryptedDocument>(Bson::Document(d))?; - result.push(doc); - } - else{ - break; - } - } - Ok(result) - } - - /// update existing model in the db - pub fn update_document(&self, doc: EncryptedDocument) -> Result<bool> { - // The model collection - let coll = self.database.collection(MONGO_COLL_DOCUMENTS); - let serialized_doc = mongodb::to_bson(&doc).unwrap(); // Serialize - - let mut options = FindOneAndUpdateOptions::new(); - options.upsert = Some(true); - - let result = coll.find_one_and_replace(doc! 
{ MONGO_ID: doc.id.clone() }, - serialized_doc.as_document().unwrap().clone(), - Some(options))?; - match result { - Some(r) => { - let old_doc = mongodb::from_bson::<EncryptedDocument>(Bson::Document(r))?; - debug!("old model type was: {}", &old_doc.id); - Ok(true) - }, - None => { - warn!("model type with id {} could not be updated!", &doc.id); - Ok(false) - } - } - } -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/lib.rs b/clearing-house-app/core-lib/src/lib.rs deleted file mode 100644 index e14ed644..00000000 --- a/clearing-house-app/core-lib/src/lib.rs +++ /dev/null @@ -1,34 +0,0 @@ -extern crate biscuit; -extern crate chrono; -extern crate fern; -extern crate mongodb; -#[macro_use] extern crate rocket; -#[macro_use] extern crate serde_derive; - -#[macro_use] extern crate error_chain; -pub mod errors { - // Create the Error, ErrorKind, ResultExt, and Result types - error_chain!{ - foreign_links { - Conversion(std::num::TryFromIntError); - Figment(figment::Error); - HexError(hex::FromHexError); - Io(::std::io::Error) #[cfg(unix)]; - Mongodb(mongodb::error::Error); - MongodbBson(mongodb::bson::de::Error); - SetLogger(log::SetLoggerError); - ParseLogLevel(log::ParseLevelError); - Reqwest(reqwest::Error); - SerdeJson(serde_json::error::Error); - Uft8Error(std::string::FromUtf8Error); - BiscuitError(biscuit::errors::Error); - EnvVariable(::std::env::VarError); - } - } -} - -pub mod api; -pub mod constants; -pub mod db; -pub mod model; -pub mod util; diff --git a/clearing-house-app/core-lib/src/model/crypto.rs b/clearing-house-app/core-lib/src/model/crypto.rs deleted file mode 100644 index ef269ded..00000000 --- a/clearing-house-app/core-lib/src/model/crypto.rs +++ /dev/null @@ -1,80 +0,0 @@ -use std::collections::HashMap; - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct KeyEntry { - pub id: String, - pub key: Vec<u8>, - pub nonce: Vec<u8>, -} - -impl KeyEntry{ - pub fn new(id: String, key: Vec<u8>, nonce: Vec<u8>)-> KeyEntry{ - KeyEntry{ - id, - key, - nonce - } - } -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct KeyMap { - pub enc: bool, - pub keys: HashMap<String, KeyEntry>, - pub keys_enc: Option<Vec<u8>>, -} - -impl KeyMap{ - pub fn new(enc: bool, keys: HashMap<String, KeyEntry>, keys_enc: Option<Vec<u8>>) -> KeyMap{ - KeyMap{ - enc, - keys, - keys_enc - } - } - } - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct KeyCt{ - pub id: String, - pub ct: String -} - -impl KeyCt{ - pub fn new(id: String, ct: String) -> KeyCt{ - KeyCt{ - id, - ct - } - } -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct KeyCtList { - pub dt: String, - pub cts: Vec<KeyCt> -} - -impl KeyCtList{ - pub fn new(dt: String, cts: Vec<KeyCt>) -> KeyCtList{ - KeyCtList{ - dt, - cts - } - } -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct KeyMapListItem { - pub id: String, - pub map: KeyMap -} - -impl KeyMapListItem{ - pub fn new(id: String, map: KeyMap) -> KeyMapListItem{ - KeyMapListItem{ - id, - map - } - } -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/model/document.rs b/clearing-house-app/core-lib/src/model/document.rs deleted file mode 100644 index 6357e7db..00000000 --- a/clearing-house-app/core-lib/src/model/document.rs +++ /dev/null @@ -1,289 +0,0 @@ -use aes_gcm_siv::Aes256GcmSiv; -use aes_gcm_siv::aead::{Aead, NewAead}; -use blake2_rfc::blake2b::Blake2b; -use generic_array::GenericArray; -use std::collections::HashMap; -use uuid::Uuid; -use crate::errors::*; -use crate::constants::{SPLIT_CT}; -use crate::model::new_uuid; -use crate::model::crypto::{KeyEntry, KeyMap}; -use 
chrono::Local; - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct DocumentPart { - pub name: String, - pub content: Option<String>, -} - -impl DocumentPart{ - pub fn new(name: String, content: Option<String>) -> DocumentPart{ - DocumentPart{ - name, - content, - } - } - - pub fn encrypt(&self, key: &[u8], nonce: &[u8]) -> Result<Vec<u8>>{ - const EXP_KEY_SIZE: usize = 32; - const EXP_NONCE_SIZE: usize = 12; - // check key size - if key.len() != EXP_KEY_SIZE { - error!("Given key has size {} but expected {} bytes", key.len(), EXP_KEY_SIZE); - bail!("Incorrect key size") - } - // check nonce size - else if nonce.len() != EXP_NONCE_SIZE { - error!("Given nonce has size {} but expected {} bytes", nonce.len(), EXP_NONCE_SIZE); - bail!("Incorrect nonce size") - } - else{ - let key = GenericArray::from_slice(key); - let nonce = GenericArray::from_slice(nonce); - let cipher = Aes256GcmSiv::new(key); - - match &self.content{ - Some(pt) => { - let pt = format_pt_for_storage(&self.name, pt); - match cipher.encrypt(nonce, pt.as_bytes()){ - Ok(ct) => Ok(ct), - Err(e) => bail!("Error while encrypting {}", e) - } - }, - None => { - error!("Tried to encrypt empty document part."); - bail!("Nothing to encrypt"); - } - } - } - } - - pub fn decrypt(key: &[u8], nonce: &[u8], ct: &[u8]) -> Result<DocumentPart>{ - let key = GenericArray::from_slice(key); - let nonce = GenericArray::from_slice(nonce); - let cipher = Aes256GcmSiv::new(key); - - match cipher.decrypt(nonce, ct){ - Ok(pt) => { - let pt = String::from_utf8(pt)?; - let (name, content) = restore_pt_no_dt(&pt)?; - Ok(DocumentPart::new(name, Some(content))) - }, - Err(e) => { - bail!("Error while decrypting: {}", e) - } - } - } -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct Document { - #[serde(default = "new_uuid")] - pub id: String, - pub dt_id: String, - pub pid: String, - pub ts: i64, - pub tc: i64, - pub parts: Vec<DocumentPart>, -} - -/// Documents should have a globally unique id, setting the id manually is discouraged. -impl Document{ - pub fn create_uuid() -> String{ - Uuid::new_v4().to_hyphenated().to_string() - } - - // each part is encrypted using the part specific key from the key map - // the hash is set to "0". Chaining is not done here. - pub fn encrypt(&self, key_map: KeyMap) -> Result<EncryptedDocument> { - debug!("encrypting document of doc_type {}", self.dt_id); - let mut cts = vec!(); - - let keys = key_map.keys; - let key_ct; - match key_map.keys_enc{ - Some(ct) => { - key_ct = hex::encode(ct); - }, - None => { - bail!("Missing key ct"); - } - } - - for part in self.parts.iter() { - if part.content.is_none(){ - // no content, so we skip this one - continue; - } - // check if there's a key for this part - if !keys.contains_key(&part.name){ - error!("Missing key for part '{}'", &part.name); - bail!("Missing key for part '{}'", &part.name); - } - // get the key for this part - let key_entry = keys.get(&part.name).unwrap(); - let ct = part.encrypt(key_entry.key.as_slice(), key_entry.nonce.as_slice()); - if ct.is_err(){ - warn!("Encryption error. No ct received!"); - bail!("Encryption error. 
No ct received!"); - } - let ct_string = hex::encode_upper(ct.unwrap()); - - // key entry id is needed for decryption - cts.push(format!("{}::{}", key_entry.id, ct_string)); - } - cts.sort(); - - Ok(EncryptedDocument::new(self.id.clone(), self.pid.clone(), self.dt_id.clone(), self.ts, self.tc, key_ct, cts)) - } - - pub fn get_formatted_tc(&self) -> String{ - format_tc(self.tc) - } - - pub fn get_parts_map(&self) -> HashMap>{ - let mut p_map = HashMap::new(); - for part in self.parts.iter(){ - p_map.insert(part.name.clone(), part.content.clone()); - } - p_map - } - - pub fn new(pid: String, dt_id: String, tc: i64, parts: Vec) -> Document{ - Document{ - id: Document::create_uuid(), - dt_id, - pid, - ts: Local::now().timestamp(), - tc, - parts, - } - } - - fn restore(id: String, pid: String, dt_id: String, ts: i64, tc: i64, parts: Vec) -> Document{ - Document{ - id, - dt_id, - pid, - ts, - tc, - parts, - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct EncryptedDocument { - pub id: String, - pub pid: String, - pub dt_id: String, - pub ts: i64, - pub tc: i64, - pub hash: String, - pub keys_ct: String, - pub cts: Vec, -} - -impl EncryptedDocument{ - - /// Note: KeyMap keys need to be KeyEntry.ids in this case - // Decryption is done without checking the hashes. Do this before calling this method - pub fn decrypt(&self, keys: HashMap) -> Result{ - - let mut pts = vec!(); - for ct in self.cts.iter(){ - let ct_parts = ct.split(SPLIT_CT).collect::>(); - if ct_parts.len() != 2 { - bail!("Integrity violation! Ciphertexts modified"); - } - // get key and nonce - let key_entry = keys.get(ct_parts[0]); - if key_entry.is_none(){ - bail!("Key for id '{}' does not exist!", ct_parts[0]); - } - let key = key_entry.unwrap().key.as_slice(); - let nonce = key_entry.unwrap().nonce.as_slice(); - - // get ciphertext - //TODO: use error_chain? 
- let ct = hex::decode(ct_parts[1]).unwrap(); - - // decrypt - match DocumentPart::decrypt(key, nonce, ct.as_slice()){ - Ok(part) => pts.push(part), - Err(e) => { - bail!("Error while decrypting: {}", e); - } - } - } - - Ok(Document::restore(self.id.clone(), self.pid.clone(), self.dt_id.clone(), self.ts, self.tc, pts)) - } - - pub fn get_formatted_tc(&self) -> String{ - format_tc(self.tc) - } - - pub fn hash(&self) -> String{ - let mut hasher = Blake2b::new(64); - - hasher.update(self.id.as_bytes()); - hasher.update(self.pid.as_bytes()); - hasher.update(self.dt_id.as_bytes()); - hasher.update(self.get_formatted_tc().as_bytes()); - hasher.update(self.ts.to_string().as_bytes()); - hasher.update(self.hash.as_bytes()); - hasher.update(self.keys_ct.as_bytes()); - let mut cts = self.cts.clone(); - cts.sort(); - for ct in cts.iter() { - hasher.update(ct.as_bytes()); - } - - let res = base64::encode(&hasher.finalize()); - debug!("hashed cts: '{}'", &res); - res - } - - pub fn new(id: String, pid: String, dt_id: String, ts: i64, tc: i64, keys_ct: String, cts: Vec<String>) -> EncryptedDocument { - EncryptedDocument{ - id, - pid, - dt_id, - ts, - tc, - hash: String::from("0"), - keys_ct, - cts, - } - } -} - -/// companion to format_pt_for_storage -pub fn restore_pt(pt: &str) -> Result<(String, String, String)> { - trace!("Trying to restore plain text"); - let vec: Vec<&str> = pt.split(SPLIT_CT).collect(); - if vec.len() != 3{ - bail!("Could not restore plaintext"); - } - Ok((String::from(vec[0]), String::from(vec[1]), String::from(vec[2]))) -} - -/// companion to format_pt_for_storage_no_dt -pub fn restore_pt_no_dt(pt: &str) -> Result<(String, String)> { - trace!("Trying to restore plain text"); - let vec: Vec<&str> = pt.split(SPLIT_CT).collect(); - if vec.len() != 2{ - bail!("Could not restore plaintext"); - } - Ok((String::from(vec[0]), String::from(vec[1]))) -} - -/// formats the pt before encryption -fn format_pt_for_storage(field_name: &str, pt: &str) -> String { - format!("{}{}{}", field_name, SPLIT_CT, pt) -} - -fn format_tc(tc: i64) -> String{ - format!("{:08}", tc) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/model/mod.rs b/clearing-house-app/core-lib/src/model/mod.rs deleted file mode 100644 index f6e21242..00000000 --- a/clearing-house-app/core-lib/src/model/mod.rs +++ /dev/null @@ -1,105 +0,0 @@ -use chrono::{Datelike, Duration, Local, NaiveDate, NaiveDateTime, NaiveTime}; - -pub mod crypto; -pub mod document; -pub mod process; - -#[cfg(test)] mod tests; - -pub fn new_uuid() -> String { - use uuid::Uuid; - Uuid::new_v4().to_hyphenated().to_string() -} - -#[derive(Debug, Clone, Serialize, Deserialize, FromFormField)] -pub enum SortingOrder{ - #[field(value = "asc")] - #[serde(rename = "asc")] - Ascending, - #[field(value = "desc")] - #[serde(rename = "desc")] - Descending -} - -pub fn parse_date(date: Option<String>, to_date: bool) -> Option<NaiveDateTime>{ - let time_format; - if to_date{ - time_format = "23:59:59" - } - else{ - time_format = "00:00:00" - } - - match date{ - Some(d) => { - debug!("Parsing date: {}", &d); - match NaiveDateTime::parse_from_str(format!("{} {}",&d, &time_format).as_str(), "%Y-%m-%d %H:%M:%S"){ - Ok(date) => { - Some(date) - } - Err(e) => { - error!("Error occurred: {:#?}", e); - return None - } - } - } - None => None - } -} - -pub fn sanitize_dates(date_from: Option<NaiveDateTime>, date_to: Option<NaiveDateTime>) -> (NaiveDateTime, NaiveDateTime){ - let default_to_date = Local::now().naive_local(); - let d = NaiveDate::from_ymd(default_to_date.year(), default_to_date.month(), 
default_to_date.day()); - let t = NaiveTime::from_hms(0, 0, 0); - let default_from_date = NaiveDateTime::new(d,t) - Duration::weeks(2); - - println!("date_to: {:#?}", date_to); - println!("date_from: {:#?}", date_from); - - println!("Default date_to: {:#?}", default_to_date); - println!("Default date_from: {:#?}", default_from_date); - - // validation already rejected the case date_from > date_to - if date_from.is_some() && date_to.is_some(){ - return (date_from.unwrap(), date_to.unwrap()) - } - - // if to_date is missing, default to now - if date_from.is_some() && date_to.is_none(){ - return (date_from.unwrap(), default_to_date) - } - - // if both dates are none (the case to_date is none and from_date is_some should be caught by validation) - // return dates for default duration (last 2 weeks) - return (default_from_date, default_to_date) -} - -pub fn validate_dates(date_from: Option<NaiveDateTime>, date_to: Option<NaiveDateTime>) -> bool{ - let date_now = Local::now().naive_local(); - debug!("... validating dates: now: {:#?} , from: {:#?} , to: {:#?}", &date_now, &date_from, &date_to); - // date_from before now - if date_from.is_some() && date_from.as_ref().unwrap().clone() > date_now{ - debug!("oh no, date_from {:#?} is in the future! date_now is {:#?}", &date_from, &date_now); - return false; - } - - // date_to only if there is also date_from - if date_from.is_none() && date_to.is_some() { - return false; - } - - // date_to before or equals now - if date_to.is_some() && date_to.as_ref().unwrap().clone() >= date_now{ - debug!("oh no, date_to {:#?} is in the future! date_now is {:#?}", &date_to, &date_now); - return false; - } - - // date_from before date_to - if date_from.is_some() && date_to.is_some(){ - if date_from.unwrap() > date_to.unwrap() { - debug!("oh no, date_from {:#?} is before date_to {:#?}", &date_from, &date_to); - return false; - } - } - return true; -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/model/process.rs b/clearing-house-app/core-lib/src/model/process.rs deleted file mode 100644 index 12bc5718..00000000 --- a/clearing-house-app/core-lib/src/model/process.rs +++ /dev/null @@ -1,14 +0,0 @@ -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct Process { - pub id: String, - pub owners: Vec<String>, -} - -impl Process { - pub fn new(id: String, owners: Vec<String>) -> Process { - Process { - id, - owners - } - } -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/model/tests.rs b/clearing-house-app/core-lib/src/model/tests.rs deleted file mode 100644 index 0c60fb12..00000000 --- a/clearing-house-app/core-lib/src/model/tests.rs +++ /dev/null @@ -1,176 +0,0 @@ -use crate::model::crypto::{KeyEntry, KeyMap}; -use crate::model::document::{Document, DocumentPart, EncryptedDocument}; -use crate::errors::*; -use std::collections::HashMap; -use chrono::Utc; - -fn create_test_doc(dt_id: String) -> Document{ - let mut doc_parts = vec!(); - doc_parts.push(DocumentPart::new(String::from("part1"), Some(String::from("MODEL_VERSION")))); - doc_parts.push(DocumentPart::new(String::from("part2"), Some(String::from("CORRELATION_MESSAGE")))); - Document::new(Document::create_uuid(), dt_id, 3241, doc_parts) -} - -fn create_key_enc_map() -> KeyMap{ - let mut map = HashMap::new(); - let key1 = String::from("an example very very secret key."); - let key2 = String::from("another totally very secret key."); - let nonce1 = String::from("unique nonce"); - let nonce2 = String::from("second nonce"); - let key_ct = String::from("very secure key ct").into_bytes(); - - let e1 = 
KeyEntry::new(String::from("1"), key1.into_bytes(), nonce1.into_bytes()); - let e2 = KeyEntry::new(String::from("2"), key2.into_bytes(), nonce2.into_bytes()); - map.insert(String::from("part1"), e1); - map.insert(String::from("part2"), e2); - - return KeyMap::new(true, map, Some(key_ct)); -} - -fn create_key_dec_map() -> KeyMap{ - let mut map = HashMap::new(); - let key1 = String::from("an example very very secret key."); - let key2 = String::from("another totally very secret key."); - let nonce1 = String::from("unique nonce"); - let nonce2 = String::from("second nonce"); - - let e1 = KeyEntry::new(String::from("1"), key1.into_bytes(), nonce1.into_bytes()); - let e2 = KeyEntry::new(String::from("2"), key2.into_bytes(), nonce2.into_bytes()); - map.insert(String::from("1"), e1); - map.insert(String::from("2"), e2); - - return KeyMap::new(false, map, None); -} - -#[test] -fn test_document_part_encryption() -> Result<()>{ - - // prepare test data - let part = DocumentPart::new(String::from("model_version"), Some(String::from("MODEL_VERSION"))); - let expected_ct = hex::decode("7F80228F5187DBD7FC6F7DA93510905102D39EF790FB84097EAC541E9DABF3D035FB4E910E6F52E3DB31C935").unwrap(); - - // create key and nonce - let key = String::from("an example very very secret key."); - let nonce = String::from("unique nonce"); - - // encrypt - let ct = part.encrypt(key.as_bytes(), nonce.as_bytes())?; - - // check - assert_eq!(expected_ct, ct, "Ciphertext mismatch"); - Ok(()) -} - -#[test] -fn test_document_part_decryption() -> Result<()>{ - - // prepare test data - let ct = hex::decode("7F80228F5187DBD7FC6F7DA93510905102D39EF790FB84097EAC541E9DABF3D035FB4E910E6F52E3DB31C935").unwrap(); - let expected_part = DocumentPart::new(String::from("model_version"), Some(String::from("MODEL_VERSION"))); - - // create key and nonce - let key = String::from("an example very very secret key."); - let nonce = String::from("unique nonce"); - - // decrypt - let result = DocumentPart::decrypt(key.as_bytes(), nonce.as_bytes(), ct.as_slice())?; - - // check - assert_eq!(expected_part.name, result.name, "Field name mismatch"); - assert_eq!(expected_part.content, result.content, "Content mismatch"); - - Ok(()) -} - -#[test] -fn test_document_encryption() -> Result<()>{ - - // prepare test data - let dt = String::from("ids_message"); - let pid = String::from("test_pid"); - let doc = create_test_doc(dt.clone()); - let ts = Utc::now().timestamp(); - let key_ct = String::from("very secret key ciphertext"); - let mut cts = vec!(); - cts.push(String::from("1::4EBC3F1C2B8CB16C52E41424502FD112015D9C25919C2401514B5DD5B4233B65593CF0A4")); - cts.push(String::from("2::FE2195305E95B9F931660CBA20B4707A1D92123022371CEDD2E70A538A8771EE7540D9F34845BBAEECEC")); - let expected_doc = EncryptedDocument::new(doc.id.clone(), pid, dt, ts, 3241, key_ct, cts); - - // create KeyMap for encryption - let keys = create_key_enc_map(); - - // encrypt - let result = doc.clone().encrypt(keys)?; - - // ids should match - assert_eq!(result.id, expected_doc.id); - - //checking the cts - for i in 0..result.cts.len()-1{ - //println!("cts: {}", &result.cts[i]); - assert_eq!(expected_doc.cts[i], result.cts[i]); - assert_eq!(expected_doc.cts[i], result.cts[i]); - } - - Ok(()) -} - -#[test] -fn test_document_decryption() -> Result<()>{ - - // prepare test data - let mut cts = vec!(); - let ts = Utc::now().timestamp(); - cts.push(String::from("1::4EBC3F1C2B8CB16C52E41424502FD112015D9C25919C2401514B5DD5B4233B65593CF0A4")); - 
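
The fixture strings in these tests use the clearing house's serialized ciphertext format: a key id and a hex-encoded ciphertext joined by a separator (`SPLIT_CT` in core-lib, which the fixtures suggest is `::`). A minimal stand-alone sketch of how such an entry is taken apart before decryption, assuming only the `hex` crate:

```rust
// Hypothetical parser for the "<key id>::<hex ciphertext>" entries shown in
// these fixtures; the real decrypt path in document.rs performs the same
// split and hex::decode before handing the bytes to DocumentPart::decrypt.
fn parse_ct_entry(entry: &str) -> Result<(String, Vec<u8>), String> {
    let parts: Vec<&str> = entry.splitn(2, "::").collect();
    if parts.len() != 2 {
        return Err(format!("malformed ciphertext entry: {}", entry));
    }
    let ct = hex::decode(parts[1]).map_err(|e| e.to_string())?;
    Ok((parts[0].to_string(), ct))
}

fn main() {
    let entry = "1::4EBC3F1C2B8CB16C52E41424502FD112015D9C25919C2401514B5DD5B4233B65593CF0A4";
    let (key_id, ct) = parse_ct_entry(entry).unwrap();
    assert_eq!(key_id, "1");
    assert_eq!(ct.len(), 36); // 72 hex characters decode to 36 bytes
    println!("key {} -> {} ciphertext bytes", key_id, ct.len());
}
```
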
cts.push(String::from("2::FE2195305E95B9F931660CBA20B4707A1D92123022371CEDD2E70A538A8771EE7540D9F34845BBAEECEC")); - let dt = String::from("ids_message"); - let pid = String::from("test_pid"); - let key_ct = String::from("very secure key ct"); - let expected_doc = create_test_doc(dt.clone()); - let enc_doc = EncryptedDocument::new(expected_doc.id.clone(), pid, dt.clone(), ts, 3241, key_ct, cts); - - // create KeyMap for decryption - let dec_keys = create_key_dec_map(); - - // decrypt - let result = enc_doc.decrypt(dec_keys.keys)?; - - // ids should match - assert_eq!(result.id, expected_doc.id); - - //check document type - assert_eq!(result.dt_id, expected_doc.dt_id); - - //checking the parts - for i in 0..result.parts.len()-1{ - //println!("part: {} {}", result.parts[i].name, result.parts[i].content.as_ref().unwrap()); - assert_eq!(expected_doc.parts[i].name, result.parts[i].name); - assert_eq!(expected_doc.parts[i].content, result.parts[i].content); - } - - Ok(()) -} - -#[test] -fn test_encryption_hash() -> Result<()> { - - // prepare test data - let mut cts = vec!(); - let ts_fixed = 1630413850; - let expected_hash = String::from("eIiWaM874V6p3eeGnEEafDvcPJAzACKhXn0yEAVw0pnZNh+Lz7eLuMMtoIQ1mhY3huy0PN5h9ntZf3mBPcZkow=="); - cts.push(String::from("1::4EBC3F1C2B8CB16C52E41424502FD112015D9C25919C2401514B5DD5B4233B65593CF0A4")); - cts.push(String::from("2::FE2195305E95B9F931660CBA20B4707A1D92123022371CEDD2E70A538A8771EE7540D9F34845BBAEECEC")); - let dt = String::from("ids_message"); - let pid = String::from("test_pid"); - let tc = 3241; - let key_ct = String::from("very secure key ct"); - let mut expected_doc = create_test_doc(dt.clone()); - // need to fix otherwise random id - expected_doc.id = String::from("a9a30044-7dfd-476f-a217-db1dc27aeb75"); - - let enc_doc = EncryptedDocument::new(expected_doc.id.clone(), pid, dt.clone(), ts_fixed, tc, key_ct, cts); - let hash = enc_doc.hash(); - assert_eq!(expected_hash, hash); - - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/src/util.rs b/clearing-house-app/core-lib/src/util.rs deleted file mode 100644 index f9ddd22a..00000000 --- a/clearing-house-app/core-lib/src/util.rs +++ /dev/null @@ -1,83 +0,0 @@ -use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC}; -use std::env; -use std::fs::File; -use std::io::prelude::*; -use std::str::FromStr; - -use crate::constants::ENV_API_LOG_LEVEL; -use crate::errors::*; -use figment::{Figment, providers::{Format, Yaml}}; -use rocket::fairing::AdHoc; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ServiceConfig{ - pub service_id: String -} - -impl ServiceConfig{ - pub fn new(service_id: String) -> ServiceConfig{ - ServiceConfig{ - service_id - } - } -} - -pub fn load_from_test_config(key: &str, file: &str) -> String{ - Figment::new().merge(Yaml::file(file)).extract_inner(key).unwrap_or(String::new()) -} - -pub fn add_service_config(service_id: String) -> AdHoc{ - AdHoc::try_on_ignite("Adding Service Config", move |rocket| async move { - match env::var(&service_id){ - Ok(id) => { - Ok(rocket.manage(ServiceConfig::new(id))) - }, - Err(_e) => { - error!("Service ID not configured. 
Please configure environment variable {}", &service_id); - return Err(rocket) - } - } - }) -} - - -/// setup the fern logger and set log level to environment variable `ENV_API_LOG_LEVEL` -/// allowed levels: `Off`, `Error`, `Warn`, `Info`, `Debug`, `Trace` -pub fn setup_logger() -> Result<()> { - let log_level; - match env::var(ENV_API_LOG_LEVEL){ - Ok(l) => log_level = l.clone(), - Err(_e) => { - println!("Log level not set correctly. Logging disabled"); - log_level = String::from("Off") - } - }; - - fern::Dispatch::new() - .format(|out, message, record| { - out.finish(format_args!( - "{}[{}][{}] {}", - chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), - record.target(), - record.level(), - message - )) - }) - .level(log::LevelFilter::from_str(&log_level.as_str())?) - .chain(std::io::stdout()) - .chain(fern::log_file("output.log")?) - .apply()?; - Ok(()) -} - -pub fn read_file(file: &str) -> Result { - let mut f = File::open(file)?; - let mut data = String::new(); - f.read_to_string(&mut data)?; - drop(f); - Ok(data) -} - -pub fn url_encode(id: &str) -> String{ - utf8_percent_encode(id, NON_ALPHANUMERIC).to_string() -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/tests/integration/blockchain_api_client.rs b/clearing-house-app/core-lib/tests/integration/blockchain_api_client.rs deleted file mode 100644 index f4a38a58..00000000 --- a/clearing-house-app/core-lib/tests/integration/blockchain_api_client.rs +++ /dev/null @@ -1,42 +0,0 @@ -use core_lib::constants::{CONFIG_FILE, BLOCKCHAIN_API_URL}; -use core_lib::util; -use core_lib::errors::*; -use core_lib::api::client::blockchain_api::BlockchainApiClient; - -/// before running make sure the blockchain api is available -#[test] -fn test_store_hash() -> Result<()>{ - // configure client_api - let config = util::load_config(CONFIG_FILE); - let bc_api: BlockchainApiClient = util::configure_api(BLOCKCHAIN_API_URL, &config)?; - - let id = String::from("999"); - let cid = String::from("123"); - let hash = String::from("ABCD-EFGH"); - - assert_eq!(bc_api.store_hash(&id, &cid, &hash)?, true); - - Ok(()) -} - -#[test] -fn test_get_hash_list() -> Result<()>{ - // configure client_api - let config = util::load_config(CONFIG_FILE); - let bc_api: BlockchainApiClient = util::configure_api(BLOCKCHAIN_API_URL, &config)?; - - let id = String::from("999"); - let cid1 = String::from("123"); - let hash1 = String::from("ABCD-EFGH"); - let cid2 = String::from("5556"); - let hash2 = String::from("ZAZS-QWEA"); - - assert_eq!(bc_api.store_hash(&id, &cid1, &hash1)?, true); - assert_eq!(bc_api.store_hash(&id, &cid2, &hash2)?, true); - - let result = bc_api.get_hash_list(&id)?; - - assert_eq!(result.len(), 2); - - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/tests/integration/daps_api_client.rs b/clearing-house-app/core-lib/tests/integration/daps_api_client.rs deleted file mode 100644 index f0fe59a9..00000000 --- a/clearing-house-app/core-lib/tests/integration/daps_api_client.rs +++ /dev/null @@ -1,27 +0,0 @@ -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -// These tests are integration tests and need an up-and-running keyring-api -// Use config.yml to configure the urls correctly. -// Before running the tests make sure that there's a valid token in auth/mod.rs -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
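
Before the DAPS client tests below, a note on the logging used throughout these modules: `setup_logger()` in util.rs above reads the level from an environment variable and silently disables logging when it is unset. A minimal runnable sketch of that pattern, assuming fern 0.x and log 0.4; the variable name here is illustrative, not the `ENV_API_LOG_LEVEL` constant, and the file sink is omitted:

```rust
use std::str::FromStr;

// Env-driven fern logger, mirroring util::setup_logger() but trimmed to stdout.
fn init_logger(env_var: &str) -> Result<(), fern::InitError> {
    let level = std::env::var(env_var).unwrap_or_else(|_| String::from("Off"));
    fern::Dispatch::new()
        .format(|out, message, record| {
            out.finish(format_args!(
                "{}[{}][{}] {}",
                chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
                record.target(),
                record.level(),
                message
            ))
        })
        .level(log::LevelFilter::from_str(&level).unwrap_or(log::LevelFilter::Off))
        .chain(std::io::stdout())
        .apply()?;
    Ok(())
}

fn main() {
    init_logger("API_LOG_LEVEL").expect("logger init failed");
    log::info!("logger ready");
}
```
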
-use core_lib::api::ApiClient; -use core_lib::api::client::daps_api::DapsApiClient; -use core_lib::constants::{DAPS_API_URL, DAPS_KID}; -use core_lib::errors::*; -use core_lib::util; -use biscuit::jwk::{JWK, KeyType}; -use biscuit::Empty; -use crate::TEST_CONFIG; - -/// before running make sure the blockchain api is available -#[test] -fn test_get_jwks() -> Result<()>{ - // configure daps_api - let api_url = util::load_from_test_config(DAPS_API_URL, TEST_CONFIG); - let daps_api = DapsApiClient::new(&api_url); - // convert "default" key to HashMap - - let jwk: JWK = daps_api.get_jwks().unwrap().find(DAPS_KID).unwrap().clone(); - assert_eq!(KeyType::RSA, jwk.algorithm.key_type()); - assert_eq!(DAPS_KID, jwk.common.key_id.unwrap()); - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/tests/integration/database_client.rs b/clearing-house-app/core-lib/tests/integration/database_client.rs deleted file mode 100644 index b187b7e0..00000000 --- a/clearing-house-app/core-lib/tests/integration/database_client.rs +++ /dev/null @@ -1,191 +0,0 @@ -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -// These tests all access the db, so if you run the tests use -// cargo test -- --test-threads=1 -// otherwise they will interfere with each other -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -const TEST_CONFIG_FILE: &'static str = "./config.yml"; - -use core_lib::db::{DataStoreApi, DataStore}; -use core_lib::errors::*; -use core_lib::util; - -use crate::create_test_enc_document; - -fn db_setup() -> DataStore{ - let config = util::load_config(TEST_CONFIG_FILE); - - let db: DataStore = util::configure_db(&config).unwrap(); - if let Err(e) = db.clean_db(){ - panic!("Error while cleaning up database {:?}", e); - } - if let Err(e) = db.create_indexes(){ - panic!("Error while setting up database {:?}", e); - }; - db -} - -fn tear_down(db: DataStore){ - if let Err(e) = db.clean_db(){ - panic!("Error while tearing down database {:?}", e); - } -} - -// DOCUMENT -/// Testcase: Document exists in db and is found -#[test] -fn test_document_exists() -> Result<()>{ - // empty db and create tables - let db = db_setup(); - - // prepare test data - let pid = String::from("test_document_exists_pid"); - let dt_id = String::from("test_document_exists_dt"); - let id = String::from("test_document_exists_id"); - let doc = create_test_enc_document(&id, &pid, &dt_id); - db.add_document(doc.clone())?; - - // run the test - assert_eq!(db.exists_document(&id)?, true); - - // clean up - tear_down(db); - - Ok(()) -} - -/// Testcase: Document does not exist and is not found -#[test] -fn test_document_does_not_exist() -> Result<()>{ - // empty db and create tables - let db = db_setup(); - - // prepare test data - let pid = String::from("test_document_does_not_exist_pid"); - let dt_id = String::from("test_document_does_not_exist_dt"); - let id1 = String::from("test_document_does_not_exist_pid_id1"); - let id2 = String::from("test_document_does_not_exist_pid_id2"); - let doc = create_test_enc_document(&id1, &pid, &dt_id); - db.add_document(doc.clone())?; - - // run the test - assert_eq!(db.exists_document(&id2)?, false); - - // clean up - tear_down(db); - - Ok(()) -} - -/// Testcase: Document does not exist after delete -#[test] -fn test_delete_document_doc_is_deleted() -> Result<()>{ - // empty db and create tables - let db = db_setup(); - - // prepare test data - let pid = String::from("test_delete_document_doc_is_deleted_pid"); - let dt_id = 
String::from("test_delete_document_doc_is_deleted_dt"); - let id = String::from("test_delete_document_doc_is_deleted_id"); - let doc = create_test_enc_document(&id, &pid, &dt_id); - db.add_document(doc.clone())?; - - // db should be able to find the document - assert_eq!(db.exists_document(&id)?, true); - - // run the test - assert!(db.delete_document(&id)?); - - // db should not find document anymore - assert_eq!(db.exists_document(&id)?, false); - - // clean up - tear_down(db); - - Ok(()) -} - -/// Testcase: Other Documents still exist after delete -#[test] -fn test_delete_document_check_others() -> Result<()>{ - // empty db and create tables - let db = db_setup(); - - // prepare test data - let pid = String::from("test_delete_document_check_others_pid"); - let dt_id = String::from("test_delete_document_check_others_dt"); - let id1 = String::from("test_delete_document_check_others_id1"); - let id2 = String::from("test_delete_document_check_others_id2"); - let doc1 = create_test_enc_document(&id1, &pid, &dt_id); - let doc2 = create_test_enc_document(&id2, &pid, &dt_id); - db.add_document(doc1.clone())?; - db.add_document(doc2.clone())?; - - // db should be able to find both documents - assert_eq!(db.exists_document(&id1)?, true); - assert_eq!(db.exists_document(&id2)?, true); - - // run the test - assert!(db.delete_document(&id1)?); - - // db should still find the other document - assert_eq!(db.exists_document(&id2)?, true); - - // clean up - tear_down(db); - - Ok(()) -} - -/// Testcase: Document does not exist before delete -#[test] -fn test_delete_document_on_not_existing_doc() -> Result<()>{ - // empty db and create tables - let db = db_setup(); - - // prepare test data - let pid = String::from("test_delete_document_on_not_existing_doc_pid"); - let dt_id = String::from("test_delete_document_on_not_existing_doc_dt"); - let id1 = String::from("test_delete_document_on_not_existing_doc_id1"); - let id2 = String::from("test_delete_document_on_not_existing_doc_id2"); - let doc = create_test_enc_document(&id1, &pid, &dt_id); - db.add_document(doc.clone())?; - - // run the test - assert_eq!(db.delete_document(&id2)?, false); - - // clean up - tear_down(db); - - Ok(()) -} - -/// Testcase: Find the correct document -#[test] -fn test_get_document() -> Result<()>{ - // empty db and create tables - let db = db_setup(); - - // prepare test data - let pid = String::from("test_get_document_pid"); - let dt_id = String::from("test_get_document_dt"); - let id1 = String::from("test_get_document_id1"); - let id2 = String::from("test_get_document_id2"); - let doc1 = create_test_enc_document(&id1, &pid, &dt_id); - let doc2 = create_test_enc_document(&id2, &pid, &dt_id); - db.add_document(doc1.clone())?; - db.add_document(doc2.clone())?; - - // db should be able to find both documents - assert_eq!(db.exists_document(&id1)?, true); - assert_eq!(db.exists_document(&id2)?, true); - - // the test - let result = db.get_document(&id1, &pid)?; - assert_eq!(result.is_some(), true); - assert_eq!(result.unwrap().id, id1); - - // clean up - tear_down(db); - - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/tests/integration/document_api_client.rs b/clearing-house-app/core-lib/tests/integration/document_api_client.rs deleted file mode 100644 index 69f06708..00000000 --- a/clearing-house-app/core-lib/tests/integration/document_api_client.rs +++ /dev/null @@ -1,224 +0,0 @@ -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-// These tests are integration tests and need an up-and-running keyring-api and -// document-api. Use config.yml to configure the urls correctly. -// Before running the tests make sure that there's a valid token in auth/mod.rs -// Also note: Clean up will not work if a test fails. -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -use core_lib::api::ApiClient; -use core_lib::constants::DOCUMENT_API_URL; -use core_lib::util; -use core_lib::errors::*; -use core_lib::api::client::document_api::DocumentApiClient; -use crate::{TOKEN, create_test_document, delete_test_doc_type_from_keyring, insert_test_doc_type_into_keyring, TEST_CONFIG}; - -/// Testcase: Standard case: store document as first document for pid -#[test] -fn test_store_first_document() -> Result<()> { - // configure client_api - let api_url = util::load_from_test_config(DOCUMENT_API_URL, TEST_CONFIG); - let doc_api = DocumentApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_store_first_document_dt"); - let pid = String::from("test_store_first_document_pid"); - let expected_doc = create_test_document(&pid, &dt_id, 0); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - // run the test - let result = doc_api.create_document(&TOKEN.to_string(), &expected_doc)?; - assert_eq!(result.chain_hash, String::from("0")); - - // clean up - assert!(doc_api.delete_document(&TOKEN.to_string(), &expected_doc.pid, &expected_doc.id)?); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - Ok(()) -} - -/// Testcase: Standard case: store document as first document for pid -#[test] -fn test_store_chained_document() -> Result<()> { - // configure client_api - let api_url = util::load_from_test_config(DOCUMENT_API_URL, TEST_CONFIG); - let doc_api = DocumentApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_store_chained_document_dt"); - let pid = String::from("test_store_chained_document_pid"); - let first_doc = create_test_document(&pid, &dt_id, 0); - let second_doc = create_test_document(&pid, &dt_id, 1); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - // create test data in db - doc_api.create_document(&TOKEN.to_string(), &first_doc)?; - - // run the test - let result = doc_api.create_document(&TOKEN.to_string(), &second_doc)?; - assert_ne!(result.chain_hash, String::from("0")); - - // clean up - assert!(doc_api.delete_document(&TOKEN.to_string(), &first_doc.pid, &first_doc.id)?); - assert!(doc_api.delete_document(&TOKEN.to_string(), &second_doc.pid, &second_doc.id)?); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - Ok(()) -} - -/// Testcase: Standard case: retrieve document. 
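
The two store tests above pin down the chaining contract: the first document for a process carries the chain hash `"0"`, and every later document stores a digest of its predecessor. A toy illustration of that invariant, using the standard library's `DefaultHasher` purely as a stand-in for the Blake2b-512 digest computed by `EncryptedDocument::hash()`:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Debug)]
struct ChainedDoc {
    id: String,
    hash: String, // digest of the previous document, "0" for the first one
}

// Stand-in digest; the real code hashes all document fields with Blake2b.
fn digest(doc: &ChainedDoc) -> String {
    let mut h = DefaultHasher::new();
    doc.id.hash(&mut h);
    doc.hash.hash(&mut h);
    format!("{:016x}", h.finish())
}

fn append(chain: &mut Vec<ChainedDoc>, id: &str) {
    let hash = match chain.last() {
        Some(prev) => digest(prev), // chain to the predecessor
        None => String::from("0"),  // first document starts the chain
    };
    chain.push(ChainedDoc { id: id.to_string(), hash });
}

fn main() {
    let mut chain = Vec::new();
    append(&mut chain, "doc-1");
    append(&mut chain, "doc-2");
    assert_eq!(chain[0].hash, "0"); // cf. test_store_first_document
    assert_ne!(chain[1].hash, "0"); // cf. test_store_chained_document
    println!("{:#?}", chain);
}
```
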
-#[test] -fn test_get_document() -> Result<()>{ - // configure client_api - let api_url = util::load_from_test_config(DOCUMENT_API_URL, TEST_CONFIG); - let doc_api = DocumentApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_get_document_type_1"); - let pid = String::from("test_get_document_process_1"); - let expected_doc = create_test_document(&pid, &dt_id, 0); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - // create test data in db - doc_api.create_document(&TOKEN.to_string(), &expected_doc)?; - - // run test - let result = doc_api.get_document(&TOKEN.to_string(), &pid, &expected_doc.id)?.unwrap(); - println!("Result: {:?}", result); - - // checks - // ids should match - assert_eq!(result.id, expected_doc.id); - - // same document type - assert_eq!(result.dt_id, expected_doc.dt_id); - - // checking the parts - for i in 0..result.parts.len()-1{ - assert_eq!(expected_doc.parts[i].name, result.parts[i].name); - assert_eq!(expected_doc.parts[i].content, result.parts[i].content); - } - - // clean up - assert!(doc_api.delete_document(&TOKEN.to_string(), &expected_doc.pid, &expected_doc.id)?); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - Ok(()) -} - -/// Testcase: Retrieve all documents for pid, but there are no documents -#[test] -fn test_get_no_documents_for_pid() -> Result<()>{ - // configure client_api - let api_url = util::load_from_test_config(DOCUMENT_API_URL, TEST_CONFIG); - let doc_api = DocumentApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_get_no_documents_for_pid_type"); - let pid_with_doc = String::from("test_get_no_documents_for_pid_pid_1"); - let pid_without_doc = String::from("test_get_no_documents_for_pid_pid_2"); - let expected_doc = create_test_document(&pid_with_doc, &dt_id, 0); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid_with_doc, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid_with_doc, &dt_id)?; - - // create test data in db - doc_api.create_document(&TOKEN.to_string(), &expected_doc)?; - - // run test - let result = doc_api.get_documents_for_pid(&TOKEN.to_string(), &pid_without_doc)?; - println!("Result: {:?}", result); - - // check that there are no documents found - assert_eq!(result.len(), 0); - - // clean up - assert!(doc_api.delete_document(&TOKEN.to_string(), &expected_doc.pid, &expected_doc.id)?); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid_with_doc, &dt_id)?; - - Ok(()) -} - -/// Testcase: Standard case: Retrieve all documents for pid -//TODO -#[test] -fn test_get_documents_for_pid() -> Result<()>{ - // configure client_api - let api_url = util::load_from_test_config(DOCUMENT_API_URL, TEST_CONFIG); - let doc_api = DocumentApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_get_documents_for_pid_type"); - let pid = String::from("test_get_documents_for_pid_pid"); - let doc1 = create_test_document(&pid, &dt_id, 0); - let doc2 = create_test_document(&pid, &dt_id, 1); - let doc3 = create_test_document(&pid, &dt_id, 2); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - // create test 
data in db - doc_api.create_document(&TOKEN.to_string(), &doc1)?; - doc_api.create_document(&TOKEN.to_string(), &doc2)?; - doc_api.create_document(&TOKEN.to_string(), &doc3)?; - - // run test - let result = doc_api.get_documents_for_pid(&TOKEN.to_string(), &pid)?; - println!("Result: {:?}", result); - - // check that we got three documents back - assert_eq!(result.len(), 3); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - assert!(doc_api.delete_document(&TOKEN.to_string(), &pid, &doc1.id)?); - assert!(doc_api.delete_document(&TOKEN.to_string(), &pid, &doc2.id)?); - assert!(doc_api.delete_document(&TOKEN.to_string(), &pid, &doc3.id)?); - - - Ok(()) -} - -/// Testcase: Ensure that IDS ids can be used if they are url_encoded -#[test] -fn test_create_document_url_encoded_id() -> Result<()>{ - // configure client_api - let api_url = util::load_from_test_config(DOCUMENT_API_URL, TEST_CONFIG); - let doc_api = DocumentApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_create_document_url_encoded_id_type_3"); - let pid = String::from("test_create_document_url_encoded_id_process_3"); - let id = String::from("https://w3id.org/idsa/autogen/ResultMessage/71ad9d3a-3743-4966-afa3-f5b02ba91eaa"); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - let mut doc = create_test_document(&pid, &dt_id, 0); - doc.id = id.clone(); - - // run test - let hash = doc_api.create_document(&TOKEN.to_string(), &doc); - - // check that it's not an error - assert!(hash.is_ok()); - - // clean up - assert!(doc_api.delete_document(&TOKEN.to_string(), &doc.pid, &id)?); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/tests/integration/keyring_api_client.rs b/clearing-house-app/core-lib/tests/integration/keyring_api_client.rs deleted file mode 100644 index 53fc29d8..00000000 --- a/clearing-house-app/core-lib/tests/integration/keyring_api_client.rs +++ /dev/null @@ -1,87 +0,0 @@ -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -// These tests are integration tests and need an up-and-running keyring-api -// Use config.yml to configure the urls correctly. -// Before running the tests make sure that there's a valid token in auth/mod.rs -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
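
`test_create_document_url_encoded_id` above relies on IDS message URIs being percent-encoded before they are used as path segments, which is what `url_encode()` in core-lib's util.rs does. A self-contained round-trip sketch with the percent-encoding crate (2.x API); the decode half is added here purely for illustration:

```rust
use percent_encoding::{percent_decode_str, utf8_percent_encode, NON_ALPHANUMERIC};

// Same encoding as core-lib's url_encode(): every non-alphanumeric byte
// becomes %XX, so the result is safe inside a URL path segment.
fn url_encode(id: &str) -> String {
    utf8_percent_encode(id, NON_ALPHANUMERIC).to_string()
}

fn main() {
    let id = "https://w3id.org/idsa/autogen/ResultMessage/71ad9d3a-3743-4966-afa3-f5b02ba91eaa";
    let encoded = url_encode(id);
    assert!(!encoded.contains('/')); // slashes no longer break routing
    let decoded = percent_decode_str(&encoded).decode_utf8().unwrap();
    assert_eq!(decoded, id);
    println!("{}", encoded);
}
```
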
-use core_lib::api::ApiClient; -use core_lib::api::client::keyring_api::KeyringApiClient; -use core_lib::constants::KEYRING_API_URL; -use core_lib::errors::*; -use core_lib::util; -use crate::{TOKEN, delete_test_doc_type_from_keyring, insert_test_doc_type_into_keyring, TEST_CONFIG}; - -/// The tests in this module requires a running key-ring-api -/// Testcase: Generate keys for test document type and check if the key_map is plausible -#[test] -fn test_generate_keys() -> Result<()> { - // configure client_api - let api_url = util::load_from_test_config(KEYRING_API_URL, TEST_CONFIG); - let key_api = KeyringApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_dt"); - let pid = String::from("test_pid"); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - // get the keys from keyring api - let keys = key_api.generate_keys(&TOKEN.to_string(), &pid, &dt_id)?; - - println!("key_ct: {}", hex::encode_upper(keys.keys_enc.as_ref().unwrap())); - - // check that KeyMap is meant for encryption - assert_eq!(keys.enc, true); - - // check that there's a key_ct - assert!(keys.keys_enc.is_some()); - - // check that there are three keys (one for each part in the dt) - assert_eq!(keys.keys.keys().len(), 3); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - Ok(()) -} - -/// Testcase: Decrypt keys and check that they match the previously generated keys -#[test] -fn test_decrypt_keys() -> Result<()> { - // configure client_api - let api_url = util::load_from_test_config(KEYRING_API_URL, TEST_CONFIG); - let key_api = KeyringApiClient::new(&api_url); - - // prepare test data - let dt_id = String::from("test_dt"); - let pid = String::from("test_pid"); - // clean up doc type (in case of previous test failure) - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - insert_test_doc_type_into_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - // generate keys from keyring api - let keys = key_api.generate_keys(&TOKEN.to_string(), &pid, &dt_id)?; - - // decrypt the keys - let dec_keys = key_api.decrypt_keys(&TOKEN.to_string(), &pid, &dt_id, keys.keys_enc.as_ref().unwrap())?; - - // check that KeyMap is meant for decryption - assert_eq!(dec_keys.enc, false); - - // check that there's no key_ct - assert!(dec_keys.keys_enc.is_none()); - - // check that the keys match the previously generated ones - keys.keys.values().for_each( |entry| { - let dec_entry = dec_keys.keys.get(&entry.id).unwrap(); - assert_eq!(entry.key, dec_entry.key); - assert_eq!(entry.nonce, dec_entry.nonce); - assert_eq!(entry.id, dec_entry.id); - } - ); - - // tear down - delete_test_doc_type_from_keyring(&TOKEN.to_string(), &pid, &dt_id)?; - - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/tests/integration/main.rs b/clearing-house-app/core-lib/tests/integration/main.rs deleted file mode 100644 index 69e3b9a8..00000000 --- a/clearing-house-app/core-lib/tests/integration/main.rs +++ /dev/null @@ -1,89 +0,0 @@ -use reqwest::{Client, StatusCode}; -use reqwest::header::{CONTENT_TYPE, HeaderValue}; - -use core_lib::constants::ROCKET_DOC_TYPE_API; -use core_lib::errors::*; -use core_lib::model::document::{Document, DocumentPart}; - -/// Update this token to run tests successfully that require authentication -pub const TOKEN: &'static str = 
"eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzUyNDEyNzgsImlhdCI6MTYzNTI0MTI3OCwianRpIjoiT0RBNE5EazRNemsxT0RZMU16TXlOamN4TlE9PSIsImV4cCI6MTYzNTI0NDg3OCwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpUUlVTVF9TRUNVUklUWV9QUk9GSUxFIiwicmVmZXJyaW5nQ29ubmVjdG9yIjoiaHR0cDovL2NvbnN1bWVyLWNvcmUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiYzE1ZTY1NTgwODhkYmZlZjIxNWE0M2QyNTA3YmJkMTI0ZjQ0ZmI4ZmFjZDU2MWMxNDU2MWEyYzFhNjY5ZDBlMCIsInN1YiI6IkE1OjBDOkE1OkYwOjg0OkQ5OjkwOkJCOkJDOkQ5OjU3OjNBOjA0OkM4OjdGOjkzOkVEOjk3OkEyOjUyOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.iemDKZXE_RXFKkffqpweTAXBb6YX0spU0b5Ez1ncQzEyDNkJ5UtsZkwZz8WqfWOdPqMA74ShzLMwfEtao3DoO4DfWrvXFAYh8Y6hHJjHO44kPm4rUdcymUsVLXxcWd8Jszi6HjRHLaJ1-466s1akDQ7yQB0l8g9PP7BOlYr2I00HZ_b5wQOWtwT2PQxeWjkBzTgP8iycF7kIT6jgTHYDkOAwIdiMgNH_dPaxOPfxupz5vJQPuC1o9-IAyXtk-yC9GNI18YtjYpqizB-Nm5QGlUSSYMrB7tUKEc46471QaC4tR_LkYDrGnDtJHrH_fq0eEe6wIKoUcdt_VnI9Km-Hpw"; -pub const TEST_CONFIG: &'static str = "config.yml"; - - -mod document_api_client; -mod keyring_api_client; -mod daps_api_client; -mod token_validation; - -fn create_test_document(pid: &String, dt_id: &String, tc: i64) -> Document{ - let p1 = DocumentPart::new(String::from("name"), Some(String::from("This is document part name."))); - let p2 = DocumentPart::new(String::from("payload"), Some(String::from("This is document part payload."))); - let p3 = DocumentPart::new(String::from("connector"), Some(String::from("This is document part connector."))); - let pts = vec!(p1, p2, p3); - let d = Document::new(pid.clone(), dt_id.clone(),tc, pts); - d -} - -fn create_dt_json(dt_id: &String, pid: &String) -> String{ - let begin_dt = r#"{"id":""#; - let begin_pid = r#"","pid":""#; - let rest = r#"","parts":[{"name":"name"},{"name":"payload"},{"name":"connector"}]}"#; - - let mut json = String::from(begin_dt); - json.push_str(dt_id); - json.push_str(begin_pid); - json.push_str(pid); - json.push_str(rest); - return json -} - -fn insert_test_doc_type_into_keyring(token: &String, pid: &String, dt_id: &String) -> Result{ - let client = Client::new(); - let dt_url = format!("http://localhost:8002{}", ROCKET_DOC_TYPE_API); - - let json_data = create_dt_json(dt_id, pid); - - println!("json_data: {}", json_data); - - println!("calling {}", &dt_url); - let mut response = client - .post(dt_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .bearer_auth(token) - .body(json_data).send()?; - - println!("Status Code: {}", &response.status()); - match response.status(){ - StatusCode::CREATED => { - println!("Response: {}", response.text()?); - Ok(true) - }, - _ => { - panic!("Couldn't prepare doc type for test"); - } - } -} - -fn delete_test_doc_type_from_keyring(token: &String, pid: &String, dt_id: &String) -> Result{ - let client = Client::new(); - let dt_url = format!("http://localhost:8002{}/{}/{}", ROCKET_DOC_TYPE_API, pid, dt_id); - - println!("calling {}", &dt_url); - let mut response = client - .delete(dt_url.as_str()) - .header(CONTENT_TYPE, HeaderValue::from_static("application/json")) - .bearer_auth(token) - .send()?; - - println!("Status Code: {}", &response.status()); - match response.status(){ - StatusCode::NO_CONTENT => { - println!("Response: {}", 
response.text()?); - Ok(true) - }, - _ => { - println!("Couldn't delete document type"); - Ok(false) - } - } -} \ No newline at end of file diff --git a/clearing-house-app/core-lib/tests/integration/token_validation.rs b/clearing-house-app/core-lib/tests/integration/token_validation.rs deleted file mode 100644 index 4f2d92af..00000000 --- a/clearing-house-app/core-lib/tests/integration/token_validation.rs +++ /dev/null @@ -1,56 +0,0 @@ -use biscuit::jwa::SignatureAlgorithm; -use biscuit::jwk::JWKSet; -use biscuit::{CompactJson, Empty}; -use core_lib::api::ApiClient; -use core_lib::api::auth::{self, ApiKey}; -use core_lib::api::client::daps_api::DapsApiClient; -use core_lib::constants::DAPS_API_URL; -use core_lib::errors::*; -use core_lib::util; -use serde::{Deserialize, Serialize}; -use crate::{TOKEN, TEST_CONFIG}; - -#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -struct CustomClaims { - /// Recipient for which the JWT is intended - scopes: Vec, - #[serde(rename = "securityProfile")] - security_profile: String, - #[serde(rename = "@type")] - claim_type: String, - #[serde(rename = "@context")] - claim_context: String, - #[serde(rename = "transportCertsSha256")] - transport_certs_sha256: String, -} -impl CompactJson for CustomClaims { -} - -#[test] -fn test_valid_claims() -> Result<()>{ - // configure daps_api - let api_url = util::load_from_test_config(DAPS_API_URL, TEST_CONFIG); - let daps_api = DapsApiClient::new(&api_url); - // convert "default" key to HashMap - let jwks = daps_api.get_jwks()?; - let jwt: Result> = auth::validate_token(TOKEN, &jwks, Some(SignatureAlgorithm::RS256)); - assert!(jwt.is_ok(), "Token is invalid. Update test token!"); - let claims = jwt.unwrap().claims(); - assert_eq!(claims.private.scopes, vec!["idsc:IDS_CONNECTOR_ATTRIBUTES_ALL".to_string()]); - assert_eq!(claims.private.security_profile, "idsc:TRUST_SECURITY_PROFILE".to_string()); - assert_eq!(claims.private.transport_certs_sha256, "c15e6558088dbfef215a43d2507bbd124f44fb8facd561c14561a2c1a669d0e0".to_string()); - Ok(()) -} - -#[test] -fn test_invalid_claims() -> Result<()>{ - let invalid_token = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6ImRlZmF1bHQifQ.eyJpZHMtYXR0cmlidXRlcyI6eyJzZWN1cml0eV9wcm9maWxlIjp7ImF1ZGl0X2xvZ2dpbmciOjJ9fSwiaWRzX21lbWJlcnNoaXAiOnRydWUsImlkcy11cmkiOiJodHRwOi8vc29tZS11cmkiLCJ0cmFuc3BvcnRDZXJ0c1NoYTI1NiI6ImJhY2I4Nzk1NzU3MzBiYjA4M2YyODNmZDViNjdhOGNiODk2OTQ0ZDFiZTI4YzdiMzIxMTdjZmM3NTdjODFlOTYiLCJzY29wZXMiOlsiaWRzX2Nvbm5lY3RvciJdLCJhdWQiOiJJRFNfQ29ubmVjdG9yIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJzdWIiOiJDPURFLE89RnJhdW5ob2ZlcixPVT1BSVNFQyxDTj02ZTYxNThiNC02OWZmLTRkMDQtYTg0Yi1hNDI4NTY0YWU0ZTYiLCJuYmYiOjE1NjI5MjU1MDAsImV4cCI6MTU2MjkyOTEwMH0.V4GZq3ZFnFAULoCiwhXtpno1uLab-mmAwRchhb2w_k4v0VYQYgWsFGf1EJPX-0QJfz4_WtTS_nQMq-MG9fP-Pe9BVXY43Wb9UBrrlaxylwnYbV0BCgUc-T-0uWdtJkRoQDqySnNRzYDMOKxZcOTXLG5d4eOHUulgiHa2muUeWw_c7bV-DKzNxUCzinxCEEVaOpovArJhRHSGgLd-8UI6BA-xehNQu_lmcaQ2ut0_VT-njwkY98haowrvEVcN9yHTm2jrWv-ajrs9phiR24A4wUqPMysDYZzIq_F6RfUBWovuu534nfo5mBXlc1JpT2NydN_dE2FM9nAWPpJ6_BEZxg"; - // configure daps_api - let api_url = util::load_from_test_config(DAPS_API_URL, TEST_CONFIG); - let daps_api = DapsApiClient::new(&api_url); - // convert "default" key to HashMap - let jwks:JWKSet = daps_api.get_jwks()?; - let jwt: Result> = auth::validate_token(invalid_token, &jwks, Some(SignatureAlgorithm::RS256)); - assert!(jwt.is_err(), "Token is valid. 
this should not happen, really!"); - Ok(()) -} diff --git a/clearing-house-app/document-api/Cargo.toml b/clearing-house-app/document-api/Cargo.toml deleted file mode 100644 index 3cb09181..00000000 --- a/clearing-house-app/document-api/Cargo.toml +++ /dev/null @@ -1,26 +0,0 @@ -[package] -name = "document-api" -version = "0.10.0" -authors = [ - "Mark Gall ", - "Georg Bramm ", -] -edition = "2018" - -[dependencies] -biscuit = { git = "https://github.com/lawliet89/biscuit", branch = "master" } -chrono = { version = "0.4", features = ["serde"] } -core-lib = {path = "../core-lib" } -error-chain = "0.12.4" -fern = "0.5" -futures = "0.3.24" -hex = "0.4.3" -log = "0.4.14" -mongodb ="2.3.0" -rocket = { version = "0.5.0-rc.1", features = ["json"] } -rocket_cors = { git = "https://github.com/lawliet89/rocket_cors", branch = "master" } -serde = "1.0" -serde_derive = "1.0" -serde_json = "1.0" -tokio = "1.8.1" -tokio-test = "0.4.2" diff --git a/clearing-house-app/document-api/Rocket.toml b/clearing-house-app/document-api/Rocket.toml deleted file mode 100644 index 4df585dc..00000000 --- a/clearing-house-app/document-api/Rocket.toml +++ /dev/null @@ -1,20 +0,0 @@ -[global] -limits = { json = 5242880 } - -[debug] -address = "0.0.0.0" -port = 8001 -log_level = "normal" -limits = { forms = 32768 } -database_url = "mongodb://localhost:27017" -keyring_api_url = "http://localhost:8002" -clear_db = true - -[release] -address = "0.0.0.0" -port = 8001 -log_level = "normal" -limits = { forms = 32768 } -database_url = "mongodb://document-mongo:27017" -keyring_api_url = "http://keyring-api:8002" -clear_db = false diff --git a/clearing-house-app/document-api/certs b/clearing-house-app/document-api/certs deleted file mode 120000 index 36343b9b..00000000 --- a/clearing-house-app/document-api/certs +++ /dev/null @@ -1 +0,0 @@ -../certs \ No newline at end of file diff --git a/clearing-house-app/document-api/src/db/bucket.rs b/clearing-house-app/document-api/src/db/bucket.rs deleted file mode 100644 index 1f782399..00000000 --- a/clearing-house-app/document-api/src/db/bucket.rs +++ /dev/null @@ -1,53 +0,0 @@ -use core_lib::model::document::EncryptedDocument; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct DocumentBucket { - pub counter: u64, - pub pid: String, - pub dt_id: String, - pub from_ts: i64, - pub to_ts: i64, - pub documents: Vec -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct DocumentBucketSize { - pub capacity: i32, - pub size: i32, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DocumentBucketUpdate { - pub id: String, - pub ts: i64, - pub tc: i64, - pub hash: String, - pub keys_ct: String, - pub cts: Vec -} - -impl From<&EncryptedDocument> for DocumentBucketUpdate{ - fn from(doc: &EncryptedDocument) -> Self { - DocumentBucketUpdate{ - id: doc.id.clone(), - ts: doc.ts, - tc: doc.tc, - hash: doc.hash.clone(), - keys_ct: doc.keys_ct.clone(), - cts: doc.cts.to_vec() - } - } -} - -pub fn restore_from_bucket(pid: &String, dt_id: &String, bucket_update: DocumentBucketUpdate) -> EncryptedDocument{ - EncryptedDocument{ - id: bucket_update.id.clone(), - dt_id: dt_id.clone(), - pid: pid.clone(), - ts: bucket_update.ts, - tc: bucket_update.tc, - hash: bucket_update.hash.clone(), - keys_ct: bucket_update.keys_ct.clone(), - cts: bucket_update.cts.to_vec() - } -} \ No newline at end of file diff --git a/clearing-house-app/document-api/src/db/mod.rs b/clearing-house-app/document-api/src/db/mod.rs deleted file mode 100644 index e2afa409..00000000 --- 
a/clearing-house-app/document-api/src/db/mod.rs +++ /dev/null @@ -1,391 +0,0 @@ -use futures::stream::StreamExt; -use mongodb::{bson, Client, Database, IndexModel}; -use mongodb::bson::doc; -use mongodb::options::{AggregateOptions, CreateCollectionOptions, IndexOptions, UpdateOptions, WriteConcern}; -use rocket::{Build, Rocket}; -use rocket::fairing::{self, Fairing, Info, Kind}; -use chrono::NaiveDateTime; - -use core_lib::constants::{DATABASE_URL, DOCUMENT_DB, CLEAR_DB, MAX_NUM_RESPONSE_ENTRIES, MONGO_DT_ID, MONGO_ID, MONGO_PID, DOCUMENT_DB_CLIENT, MONGO_TC, MONGO_TS, MONGO_COLL_DOCUMENT_BUCKET, MONGO_TO_TS, MONGO_FROM_TS, MONGO_DOC_ARRAY, MONGO_COUNTER}; -use core_lib::db::{DataStoreApi, init_database_client}; -use core_lib::errors::*; -use core_lib::model::document::{Document, EncryptedDocument}; -use core_lib::model::SortingOrder; -use crate::db::bucket::{DocumentBucketSize, DocumentBucketUpdate, restore_from_bucket}; - -mod bucket; -#[cfg(test)] mod tests; - -#[derive(Clone, Debug)] -pub struct DatastoreConfigurator; - -#[rocket::async_trait] -impl Fairing for DatastoreConfigurator { - fn info(&self) -> Info { - Info { - name: "Configuring Document Database", - kind: Kind::Ignite - } - } - async fn on_ignite(&self, rocket: Rocket) -> fairing::Result { - let db_url: String = rocket.figment().extract_inner(DATABASE_URL).clone().unwrap(); - let clear_db = match rocket.figment().extract_inner(CLEAR_DB){ - Ok(value) => { - debug!("clear_db: '{}' found.", &value); - value - }, - Err(_) => { - false - } - }; - debug!("Using mongodb url: '{:#?}'", &db_url); - match init_database_client::(&db_url.as_str(), Some(DOCUMENT_DB_CLIENT.to_string())).await{ - Ok(datastore) => { - debug!("Check if database is empty..."); - match datastore.client.database(DOCUMENT_DB) - .list_collection_names(None) - .await{ - Ok(colls) => { - debug!("... found collections: {:#?}", &colls); - let number_of_colls = match colls.contains(&MONGO_COLL_DOCUMENT_BUCKET.to_string()){ - true => colls.len(), - false => 0 - }; - - if number_of_colls > 0 && clear_db{ - debug!("Database not empty and clear_db == true. Dropping database..."); - match datastore.client.database(DOCUMENT_DB).drop(None).await{ - Ok(_) => { - debug!("... done."); - } - Err(_) => { - debug!("... failed."); - return Err(rocket); - } - }; - } - if number_of_colls == 0 || clear_db{ - debug!("Database empty. Need to initialize..."); - let mut write_concern = WriteConcern::default(); - write_concern.journal = Some(true); - let mut options = CreateCollectionOptions::default(); - options.write_concern = Some(write_concern); - debug!("Create collection {} ...", MONGO_COLL_DOCUMENT_BUCKET); - match datastore.client.database(DOCUMENT_DB).create_collection(MONGO_COLL_DOCUMENT_BUCKET, options).await{ - Ok(_) => { - debug!("... done."); - } - Err(_) => { - debug!("... failed."); - return Err(rocket); - } - }; - - // This purpose of this index is to ensure that the transaction counter is unique - let mut index_options = IndexOptions::default(); - index_options.unique = Some(true); - let mut index_model = IndexModel::default(); - index_model.keys = doc!{format!("{}.{}",MONGO_DOC_ARRAY, MONGO_TC): 1}; - index_model.options = Some(index_options); - - debug!("Create unique index for {} ...", MONGO_COLL_DOCUMENT_BUCKET); - match datastore.client.database(DOCUMENT_DB).collection::(MONGO_COLL_DOCUMENT_BUCKET).create_index(index_model, None).await{ - Ok(result) => { - debug!("... index {} created", result.index_name); - } - Err(_) => { - debug!("... 
failed."); - return Err(rocket); - } - } - - // This creates a compound index over pid and the timestamp to enable paging using buckets - let mut compound_index_model = IndexModel::default(); - compound_index_model.keys = doc!{MONGO_PID: 1, MONGO_TS: 1}; - - debug!("Create unique index for {} ...", MONGO_COLL_DOCUMENT_BUCKET); - match datastore.client.database(DOCUMENT_DB).collection::(MONGO_COLL_DOCUMENT_BUCKET).create_index(compound_index_model, None).await{ - Ok(result) => { - debug!("... index {} created", result.index_name); - } - Err(_) => { - debug!("... failed."); - return Err(rocket); - } - } - } - debug!("... database initialized."); - Ok(rocket.manage(datastore)) - } - Err(_) => { - Err(rocket) - } - } - }, - Err(_) => Err(rocket) - } - } -} - -#[derive(Clone)] -pub struct DataStore { - client: Client, - database: Database -} - -impl DataStoreApi for DataStore { - fn new(client: Client) -> DataStore{ - DataStore { - client: client.clone(), - database: client.database(DOCUMENT_DB) - } - } -} - - -impl DataStore { - - pub async fn add_document(&self, doc: &EncryptedDocument) -> Result{ - debug!("add_document to bucket"); - let coll = self.database.collection::(MONGO_COLL_DOCUMENT_BUCKET); - let bucket_update = DocumentBucketUpdate::from(doc); - let mut update_options = UpdateOptions::default(); - update_options.upsert = Some(true); - let id = format!("^{}_", doc.pid.clone()); - let re = mongodb::bson::Regex{ - pattern: id, - options: String::new() - }; - - let query = doc!{"_id": re, MONGO_PID: doc.pid.clone(), MONGO_COUNTER: mongodb::bson::bson!({"$lt": MAX_NUM_RESPONSE_ENTRIES as i64})}; - - match coll.update_one(query, - doc! { - "$push": { - MONGO_DOC_ARRAY: mongodb::bson::to_bson(&bucket_update).unwrap(), - }, - "$inc": {"counter": 1}, - "$setOnInsert": { "_id": format!("{}_{}", doc.pid.clone(), doc.ts), MONGO_DT_ID: doc.dt_id.clone(), MONGO_FROM_TS: doc.ts}, - "$set": {MONGO_TO_TS: doc.ts}, - }, update_options).await{ - Ok(_r) => { - debug!("added new document: {:#?}", &_r.upserted_id); - Ok(true) - }, - Err(e) => { - error!("failed to store document: {:#?}", &e); - Err(Error::from(e)) - } - } - } - - /// checks if the document exists - /// document ids are globally unique - pub async fn exists_document(&self, id: &String) -> Result { - debug!("Check if document with id '{}' exists...", id); - let query = doc!{format!("{}.{}", MONGO_DOC_ARRAY, MONGO_ID): id.clone()}; - - let coll = self.database.collection::(MONGO_COLL_DOCUMENT_BUCKET); - match coll.count_documents(Some(query), None).await? { - 0 => { - debug!("Document with id '{}' does not exist!", &id); - Ok(false) - - }, - _ => { - debug!("... found."); - Ok(true) - } - } - } - - /// gets the model from the db - pub async fn get_document(&self, id: &String, pid: &String) -> Result> { - debug!("Trying to get doc with id {}...", id); - let coll = self.database.collection::(MONGO_COLL_DOCUMENT_BUCKET); - - let pipeline = vec![doc! {"$match":{ - MONGO_PID: pid.clone(), - format!("{}.{}", MONGO_DOC_ARRAY, MONGO_ID): id.clone() - }}, - doc! {"$unwind": format!("${}", MONGO_DOC_ARRAY)}, - doc! {"$addFields": {format!("{}.{}", MONGO_DOC_ARRAY, MONGO_PID): format!("${}", MONGO_PID), format!("{}.{}", MONGO_DOC_ARRAY, MONGO_DT_ID): format!("${}", MONGO_DT_ID)}}, - doc! {"$replaceRoot": { "newRoot": format!("${}", MONGO_DOC_ARRAY)}}, - doc! 
{"$match":{ MONGO_ID: id.clone()}}]; - - let mut results = coll.aggregate(pipeline, None).await?; - - if let Some(result) = results.next().await{ - let doc: EncryptedDocument = bson::from_document(result?)?; - return Ok(Some(doc)) - } - - return Ok(None) - } - - /// gets documents for a single process from the db - pub async fn get_document_with_previous_tc(&self, tc: i64) -> Result> { - let previous_tc = tc - 1; - debug!("Trying to get document for tc {} ...", previous_tc); - if previous_tc < 0 { - info!("... not entry exists."); - Ok(None) - } - else { - let coll = self.database.collection::(MONGO_COLL_DOCUMENT_BUCKET); - - let pipeline = vec![doc! {"$match":{ - format!("{}.{}", MONGO_DOC_ARRAY, MONGO_TC): previous_tc - }}, - doc! {"$unwind": format!("${}", MONGO_DOC_ARRAY)}, - doc! {"$addFields": {format!("{}.{}", MONGO_DOC_ARRAY, MONGO_PID): format!("${}", MONGO_PID), format!("{}.{}", MONGO_DOC_ARRAY, MONGO_DT_ID): format!("${}", MONGO_DT_ID)}}, - doc! {"$replaceRoot": { "newRoot": format!("${}", MONGO_DOC_ARRAY)}}, - doc! {"$match":{ MONGO_TC: previous_tc}}]; - - let mut results = coll.aggregate(pipeline, None).await?; - - return if let Some(result) = results.next().await { - debug!("Found {:#?}", &result); - let doc: EncryptedDocument = bson::from_document(result?)?; - Ok(Some(doc)) - } else { - warn!("Document with tc {} not found!", previous_tc); - Ok(None) - } - } - } - - /// gets a page of documents of a specific document type for a single process from the db defined by parameters page, size and sort - pub async fn get_documents_for_pid(&self, dt_id: &String, pid: &String, page: u64, size: u64, sort: &SortingOrder, date_from: &NaiveDateTime, date_to: &NaiveDateTime) -> Result> { - debug!("...trying to get page {} of size {} of documents for pid {} of dt {}...", pid, dt_id, page, size); - - match self.get_start_bucket_size(dt_id, pid, page, size, sort, date_from, date_to).await{ - Ok(bucket_size) => { - let offset = DataStore::get_offset(&bucket_size); - let start_bucket = DataStore::get_start_bucket(page, size, &bucket_size, offset); - trace!("...working with start_bucket {} and offset {} ...", start_bucket, offset); - let start_entry = DataStore::get_start_entry(page, size, start_bucket, &bucket_size, offset); - - trace!("...working with start_entry {} in start_bucket {} and offset {} ...", start_entry, start_bucket, offset); - - let skip_buckets = (start_bucket - 1) as i32; - let sort_order = match sort{ - SortingOrder::Ascending => { - 1 - }, - SortingOrder::Descending => { - - 1 - } - }; - - let pipeline = vec![doc! {"$match":{ - MONGO_PID: pid.clone(), - MONGO_DT_ID: dt_id.clone(), - MONGO_FROM_TS: {"$lte": date_to.timestamp()}, - MONGO_TO_TS: {"$gte": date_from.timestamp()} - }}, - doc! {"$sort" : {MONGO_FROM_TS: sort_order}}, - doc! {"$skip" : skip_buckets}, - // worst case: overlap between two buckets. - doc! {"$limit" : 2}, - doc! {"$unwind": format!("${}", MONGO_DOC_ARRAY)}, - doc! {"$replaceRoot": { "newRoot": "$documents"}}, - doc! {"$match":{ - MONGO_TS: {"$gte": date_from.timestamp(), "$lte": date_to.timestamp()} - }}, - doc! {"$sort" : {MONGO_TS: sort_order}}, - doc! {"$skip" : start_entry as i32}, - doc! 
{ "$limit": size as i32}]; - - let coll = self.database.collection::(MONGO_COLL_DOCUMENT_BUCKET); - - let mut options = AggregateOptions::default(); - options.allow_disk_use = Some(true); - let mut results = coll.aggregate(pipeline, options).await?; - - let mut docs = vec!(); - while let Some(result) = results.next().await{ - let doc: DocumentBucketUpdate = bson::from_document(result?)?; - docs.push(restore_from_bucket(pid, dt_id, doc)); - } - - return Ok(docs) - } - Err(e) => { - error!("Error while getting bucket offset!"); - Err(Error::from(e)) - } - } - } - - /// offset is necessary for duration queries. There, start_entries of bucket depend on timestamps which usually creates an offset in the bucket - async fn get_start_bucket_size(&self, dt_id: &String, pid: &String, page: u64, size: u64, sort: &SortingOrder, date_from: &NaiveDateTime, date_to: &NaiveDateTime) -> Result { - debug!("...trying to get the offset for page {} of size {} of documents for pid {} of dt {}...", pid, dt_id, page, size); - let sort_order = match sort{ - SortingOrder::Ascending => { - 1 - }, - SortingOrder::Descending => { - - 1 - } - }; - let coll = self.database.collection::(MONGO_COLL_DOCUMENT_BUCKET); - - debug!("... match with pid: {}, dt_it: {}, to_ts <= {}, from_ts >= {} ...", pid, dt_id, date_from.timestamp(), date_to.timestamp()); - let pipeline = vec![doc! {"$match":{ - MONGO_PID: pid.clone(), - MONGO_DT_ID: dt_id.clone(), - MONGO_FROM_TS: {"$lte": date_to.timestamp()}, - MONGO_TO_TS: {"$gte": date_from.timestamp()} - }}, - // sorting according to sorting order, so we get either the start or end - doc! {"$sort" : {MONGO_FROM_TS: sort_order}}, - doc! {"$limit" : 1}, - // count all relevant documents in the target bucket - doc! {"$unwind": format!("${}", MONGO_DOC_ARRAY)}, - doc! {"$match":{ - format!("{}.{}", MONGO_DOC_ARRAY, MONGO_TS): {"$lte": date_to.timestamp(), "$gte": date_from.timestamp()} - }}, - // modify result to return total number of docs in bucket and number of relevant docs in bucket - doc! { "$group": { "_id": {"total": "$counter"}, "size": { "$sum": 1 } } }, - doc! { "$project": {"_id":0, "capacity": "$_id.total", "size":true}}]; - - let mut options = AggregateOptions::default(); - options.allow_disk_use = Some(true); - let mut results = coll.aggregate(pipeline, options).await?; - let mut bucket_size = DocumentBucketSize{ - capacity: MAX_NUM_RESPONSE_ENTRIES as i32, - size: 0 - }; - while let Some(result) = results.next().await{ - debug!("... retrieved: {:#?}", &result); - let result_bucket: DocumentBucketSize = bson::from_document(result?)?; - bucket_size = result_bucket; - } - debug!("... 
sending offset: {:?}", bucket_size); - Ok(bucket_size) - } - - fn get_offset(bucket_size: &DocumentBucketSize) -> u64 { - return (bucket_size.capacity - bucket_size.size) as u64 % MAX_NUM_RESPONSE_ENTRIES - } - - fn get_start_bucket(page: u64, size: u64, bucket_size: &DocumentBucketSize, offset: u64) -> u64{ - let docs_to_skip = (page - 1) * size + offset + MAX_NUM_RESPONSE_ENTRIES - bucket_size.capacity as u64; - return (docs_to_skip / MAX_NUM_RESPONSE_ENTRIES) + 1 - } - - fn get_start_entry(page: u64, size: u64, start_bucket: u64, bucket_size: &DocumentBucketSize, offset: u64) -> u64{ - // docs to skip calculated by page * size - let docs_to_skip = (page - 1) * size + offset; - let mut start_entry = 0; - if start_bucket > 1 { - start_entry = docs_to_skip - bucket_size.capacity as u64; - if start_entry > 2 { - start_entry = start_entry - (start_bucket - 2) * MAX_NUM_RESPONSE_ENTRIES - } - } - return start_entry - } - -} diff --git a/clearing-house-app/document-api/src/db/tests.rs b/clearing-house-app/document-api/src/db/tests.rs deleted file mode 100644 index 4fa5b7ed..00000000 --- a/clearing-house-app/document-api/src/db/tests.rs +++ /dev/null @@ -1,193 +0,0 @@ -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -// These tests all access the db, so if you run the tests use -// cargo test -- --test-threads=1 -// otherwise they will interfere with each other -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -use core_lib::db::DataStoreApi; -use core_lib::errors::*; -use core_lib::model::document::EncryptedDocument; -use mongodb::Client; -use crate::db::DataStore; -use chrono::Utc; - -const DATABASE_URL: &'static str = "mongodb://127.0.0.1:27017"; - -async fn db_setup() -> DataStore { - let client = Client::with_uri_str(DATABASE_URL).await.unwrap(); - let db = DataStore::new(client); - db.database.drop(None).await.expect("Database Error"); - db -} - -async fn tear_down(db: DataStore){ - db.database.drop(None).await.expect("Database Error"); -} - -fn create_test_enc_document(id: &String, pid: &String, dt_id: &String) -> EncryptedDocument{ - let mut cts = vec!(); - cts.push(String::from("1::4EBC3F1C2B8CB16C52E41424502FD112015D9C25919C2401514B5DD5B4233B65593CF0A4")); - cts.push(String::from("2::FE2195305E95B9F931660CBA20B4707A1D92123022371CEDD2E70A538A8771EE7540D9F34845BBAEECEC")); - let key_ct = String::from("very secure key ct"); - let ts = Utc::now().timestamp(); - EncryptedDocument::new(id.clone(), pid.clone(), dt_id.clone(), ts, 3241, key_ct, cts) -} - -// DOCUMENT -/// Testcase: Document exists in db and is found -#[tokio::test] -async fn test_document_exists() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let pid = String::from("test_document_exists_pid"); - let dt_id = String::from("test_document_exists_dt"); - let id = String::from("test_document_exists_id"); - let doc = create_test_enc_document(&id, &pid, &dt_id); - db.add_document(doc.clone()).await?; - - // run the test - assert_eq!(db.exists_document(&id).await?, true); - - // clean up - tear_down(db).await; - - Ok(()) -} - -/// Testcase: Document does not exist and is not found -#[tokio::test] -async fn test_document_does_not_exist() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let pid = String::from("test_document_does_not_exist_pid"); - let dt_id = String::from("test_document_does_not_exist_dt"); - let id1 = String::from("test_document_does_not_exist_pid_id1"); - let id2 = 
String::from("test_document_does_not_exist_pid_id2"); - let doc = create_test_enc_document(&id1, &pid, &dt_id); - db.add_document(doc.clone()).await?; - - // run the test - assert_eq!(db.exists_document(&id2).await?, false); - - // clean up - tear_down(db).await; - - Ok(()) -} - -/// Testcase: Document does not exist after delete -#[tokio::test] -async fn test_delete_document_doc_is_deleted() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let pid = String::from("test_delete_document_doc_is_deleted_pid"); - let dt_id = String::from("test_delete_document_doc_is_deleted_dt"); - let id = String::from("test_delete_document_doc_is_deleted_id"); - let doc = create_test_enc_document(&id, &pid, &dt_id); - db.add_document(doc.clone()).await?; - - // db should be able to find the document - assert_eq!(db.exists_document(&id).await?, true); - - // run the test - assert!(db.delete_document(&id).await?); - - // db should not find document anymore - assert_eq!(db.exists_document(&id).await?, false); - - // clean up - tear_down(db).await; - - Ok(()) -} - -/// Testcase: Other Documents still exist after delete -#[tokio::test] -async fn test_delete_document_check_others() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let pid = String::from("test_delete_document_check_others_pid"); - let dt_id = String::from("test_delete_document_check_others_dt"); - let id1 = String::from("test_delete_document_check_others_id1"); - let id2 = String::from("test_delete_document_check_others_id2"); - let doc1 = create_test_enc_document(&id1, &pid, &dt_id); - let doc2 = create_test_enc_document(&id2, &pid, &dt_id); - db.add_document(doc1.clone()).await?; - db.add_document(doc2.clone()).await?; - - // db should be able to find both documents - assert_eq!(db.exists_document(&id1).await?, true); - assert_eq!(db.exists_document(&id2).await?, true); - - // run the test - assert!(db.delete_document(&id1).await?); - - // db should still find the other document - assert_eq!(db.exists_document(&id2).await?, true); - - // clean up - tear_down(db).await; - - Ok(()) -} - -/// Testcase: Document does not exist before delete -#[tokio::test] -async fn test_delete_document_on_not_existing_doc() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let pid = String::from("test_delete_document_on_not_existing_doc_pid"); - let dt_id = String::from("test_delete_document_on_not_existing_doc_dt"); - let id1 = String::from("test_delete_document_on_not_existing_doc_id1"); - let id2 = String::from("test_delete_document_on_not_existing_doc_id2"); - let doc = create_test_enc_document(&id1, &pid, &dt_id); - db.add_document(doc.clone()).await?; - - // run the test - assert_eq!(db.delete_document(&id2).await?, false); - - // clean up - tear_down(db).await; - - Ok(()) -} - -/// Testcase: Find the correct document -#[tokio::test] -async fn test_get_document() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let pid = String::from("test_get_document_pid"); - let dt_id = String::from("test_get_document_dt"); - let id1 = String::from("test_get_document_id1"); - let id2 = String::from("test_get_document_id2"); - let doc1 = create_test_enc_document(&id1, &pid, &dt_id); - let doc2 = create_test_enc_document(&id2, &pid, &dt_id); - db.add_document(doc1.clone()).await?; - db.add_document(doc2.clone()).await?; - - // db should be able to find both documents 
- assert_eq!(db.exists_document(&id1).await?, true); - assert_eq!(db.exists_document(&id2).await?, true); - - // the test - let result = db.get_document(&id1, &pid).await?; - assert_eq!(result.is_some(), true); - assert_eq!(result.unwrap().id, id1); - - // clean up - tear_down(db).await; - - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/document-api/src/doc_api.rs b/clearing-house-app/document-api/src/doc_api.rs deleted file mode 100644 index eb5edc9e..00000000 --- a/clearing-house-app/document-api/src/doc_api.rs +++ /dev/null @@ -1,312 +0,0 @@ -use rocket::State; -use chrono::Local; -use core_lib::{ - api::{ - ApiResponse, - client::keyring_api::KeyringApiClient, - crypto::ChClaims, - DocumentReceipt, - QueryResult, - }, - constants::{DEFAULT_DOC_TYPE, DEFAULT_NUM_RESPONSE_ENTRIES, MAX_NUM_RESPONSE_ENTRIES, PAYLOAD_PART, ROCKET_DOC_API}, - model::{ - crypto::{KeyCt, KeyCtList}, - document::Document, - parse_date, - sanitize_dates, - SortingOrder, - SortingOrder::{Ascending, Descending}, - validate_dates, - }, -}; -use rocket::fairing::AdHoc; -use rocket::serde::json::{json, Json}; -use std::convert::TryFrom; -use crate::db::DataStore; - - -#[post("/", format = "json", data = "<document>")] -async fn create_enc_document( - ch_claims: ChClaims, - db: &State<DataStore>, - key_api: &State<KeyringApiClient>, - document: Json<Document> -) -> ApiResponse { - trace!("...user '{:?}'", &ch_claims.client_id); - let doc: Document = document.into_inner(); - // data validation - let payload: Vec<String> = doc.parts.iter() - .filter(|p| String::from(PAYLOAD_PART) == p.name) - .map(|p| p.content.as_ref().unwrap().clone()).collect(); - if payload.len() > 1 { - return ApiResponse::BadRequest(String::from("Document contains two payloads!")); - } - else if payload.len() == 0 { - return ApiResponse::BadRequest(String::from("Document contains no payload!")); - } - - // check if doc id already exists - match db.exists_document(&doc.id).await { - Ok(true) => { - warn!("Document exists already!"); - ApiResponse::BadRequest(String::from("Document exists already!")) - }, - _ => { - debug!("Document does not exist!"); - debug!("getting keys"); - let keys; - match key_api.generate_keys(&ch_claims.client_id, &doc.pid, &doc.dt_id).await { - Ok(key_map) => { - keys = key_map; - debug!("got keys"); - }, - Err(e) => { - error!("Error while retrieving keys: {:?}", e); - return ApiResponse::InternalError(String::from("Error while retrieving keys!")) - }, - }; - - debug!("start encryption"); - let mut enc_doc; - match doc.encrypt(keys) { - Ok(ct) => { - debug!("got ct"); - enc_doc = ct - }, - Err(e) => { - error!("Error while encrypting: {:?}", e); - return ApiResponse::InternalError(String::from("Error while encrypting!")) - }, - }; - - // chain the document to previous documents - debug!("add the chain hash..."); - // get the document with the previous tc - match db.get_document_with_previous_tc(doc.tc).await{ - Ok(Some(previous_doc)) => { - enc_doc.hash = previous_doc.hash(); - }, - Ok(None) => { - if doc.tc == 0{ - info!("No entries found for pid {}. Beginning new chain!", &doc.pid); - } - else{ - // If this happens, db didn't find a tc entry that should exist.
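// Each new entry extends a hash chain: the handler stores the hash of the document
// with the previous transaction counter (tc) in enc_doc.hash, so a missing
// predecessor for tc > 0 means the chain cannot be extended, and the request is
// rejected below rather than logged as an unchained document.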
- return ApiResponse::InternalError(String::from("Error while creating the chain hash!")) - } - }, - Err(e) => { - error!("Error while creating the chain hash: {:?}", e); - return ApiResponse::InternalError(String::from("Error while creating the chain hash!")) - } - } - - // prepare the success result message - - - let receipt = DocumentReceipt::new(enc_doc.ts, &enc_doc.pid, &enc_doc.id, &enc_doc.hash); - - debug!("storing document ..."); - // store document - match db.add_document(&enc_doc).await { - Ok(_b) => ApiResponse::SuccessCreate(json!(receipt)), - Err(e) => { - error!("Error while adding: {:?}", e); - ApiResponse::InternalError(String::from("Error while storing document!")) - } - } - } - } -} - -#[get("/<pid>?<doc_type>&<page>&<size>&<sort>&<date_from>&<date_to>", format = "json")] -async fn get_enc_documents_for_pid( - ch_claims: ChClaims, - key_api: &State<KeyringApiClient>, - db: &State<DataStore>, - doc_type: Option<String>, - page: Option<i32>, - size: Option<i32>, - sort: Option<SortingOrder>, - date_from: Option<String>, - date_to: Option<String>, - pid: String) -> ApiResponse { - debug!("Trying to retrieve documents for pid '{}'...", &pid); - trace!("...user '{:?}'", &ch_claims.client_id); - debug!("...page: {:#?}, size:{:#?} and sort:{:#?}", page, size, sort); - - // Parameter validation for pagination: - // Valid pages start from 1 - // Max page number as of yet unknown - let sanitized_page = match page{ - Some(p) => { - if p > 0{ - u64::try_from(p).unwrap() - } - else{ - warn!("...invalid page requested. Falling back to 1."); - 1 - } - }, - None => 1 - }; - - // Valid sizes are between 1 and MAX_NUM_RESPONSE_ENTRIES (1000) - let sanitized_size = match size{ - Some(s) => { - if s > 0 && s <= i32::try_from(MAX_NUM_RESPONSE_ENTRIES).unwrap() { - u64::try_from(s).unwrap() - } - else{ - warn!("...invalid size requested. Falling back to default."); - DEFAULT_NUM_RESPONSE_ENTRIES - } - }, - None => DEFAULT_NUM_RESPONSE_ENTRIES - }; - - // Sorting order is already validated and defaults to descending - let sanitized_sort = match sort{ - Some(s) => { - s - }, - None => Descending - }; - - // Parsing the dates for duration queries - let parsed_date_from = parse_date(date_from, false); - let parsed_date_to = parse_date(date_to, true); - - // Validation of dates with various checks. If none given dates default to date_now (date_to) and (date_now - 2 weeks) (date_from) - if !validate_dates(parsed_date_from, parsed_date_to){ - debug!("date validation failed!"); - return ApiResponse::BadRequest(String::from("Invalid date parameter!")); - } - let (sanitized_date_from, sanitized_date_to) = sanitize_dates(parsed_date_from, parsed_date_to); - - //new behavior: if pages are "invalid" return {}. Do not adjust page - //either call db with type filter or without to get cts - let start = Local::now(); - debug!("... using pagination with page: {}, size:{} and sort:{:#?}", sanitized_page, sanitized_size, &sanitized_sort); - - let dt_id = match doc_type{ - Some(dt) => dt, - None => String::from(DEFAULT_DOC_TYPE), - }; - let cts = match db.get_documents_for_pid(&dt_id, &pid, sanitized_page, sanitized_size, &sanitized_sort, &sanitized_date_from, &sanitized_date_to).await{ - Ok(cts) => cts, - Err(e) => { - error!("Error while retrieving document: {:?}", e); - return ApiResponse::InternalError(format!("Error while retrieving document for {}", &pid)) - }, - }; - - let result_size = i32::try_from(sanitized_size).ok(); - let result_page = i32::try_from(sanitized_page).ok(); - let result_sort = match sanitized_sort{ - Ascending => String::from("asc"), - Descending => String::from("desc"), - }; - - let mut result = QueryResult::new(sanitized_date_from.timestamp(), sanitized_date_to.timestamp(), result_page, result_size, result_sort, vec!()); - - // The db might contain no documents in which case we get an empty vector - if cts.is_empty(){ - debug!("Queried empty pid: {}", &pid); - ApiResponse::SuccessOk(json!(result)) - } - else{ - // Documents found for pid, now decrypting them - debug!("Found {} documents. Getting keys from keyring...", cts.len()); - let key_cts: Vec<KeyCt> = cts.iter() - .map(|e| KeyCt::new(e.id.clone(), e.keys_ct.clone())).collect(); - // caution! we currently only support a single dt per call, so we use the first dt we found - let key_cts_list = KeyCtList::new(cts[0].dt_id.clone(), key_cts); - // decrypt cts - let key_maps = match key_api.decrypt_multiple_keys(&ch_claims.client_id, &pid, &key_cts_list).await{ - Ok(key_map) => { - key_map - } - Err(e) => { - error!("Error while retrieving keys from keyring: {:?}", e); - return ApiResponse::InternalError(format!("Error while retrieving keys from keyring")) - } - }; - debug!("... keys received. Starting decryption..."); - let pts_bulk: Vec<Document> = cts.iter().zip(key_maps.iter()) - .filter_map(|(ct, key_map)|{ - if ct.id != key_map.id{ - error!("Document and map don't match"); - }; - match ct.decrypt(key_map.map.keys.clone()){ - Ok(d) => Some(d), - Err(e) => { - warn!("Got empty document from decryption! {:?}", e); - None - } - } - }).collect(); - debug!("...done."); - let end = Local::now(); - let diff = end - start; - info!("Total time taken to run in ms: {}", diff.num_milliseconds()); - result.documents = pts_bulk; - ApiResponse::SuccessOk(json!(result)) - } -} - -/// Retrieve document with id for process with pid -#[get("/<pid>/<id>?<hash>", format = "json")] -async fn get_enc_document(ch_claims: ChClaims, key_api: &State<KeyringApiClient>, db: &State<DataStore>, pid: String, id: String, hash: Option<String>) -> ApiResponse { - trace!("...user '{:?}'", &ch_claims.client_id); - trace!("trying to retrieve document with id '{}' for pid '{}'", &id, &pid); - if hash.is_some(){ - debug!("integrity check with hash: {}", hash.as_ref().unwrap()); - } - - match db.get_document(&id, &pid).await{ - //TODO: would like to send "{}" instead of "null" when dt is not found - Ok(Some(ct)) => { - match hex::decode(&ct.keys_ct){ - Ok(key_ct) => { - match key_api.decrypt_keys(&ch_claims.client_id, &pid, &ct.dt_id, &key_ct).await{ - Ok(key_map) => { - //TODO check the hash - match ct.decrypt(key_map.keys){ - Ok(d) => ApiResponse::SuccessOk(json!(d)), - Err(e) => { - warn!("Got empty document from decryption! 
{:?}", e); - return ApiResponse::NotFound(format!("Document {} not found!", &id)) - } - } - } - Err(e) => { - error!("Error while retrieving keys from keyring: {:?}", e); - return ApiResponse::InternalError(format!("Error while retrieving keys")) - } - } - - }, - Err(e) => { - error!("Error while decoding ciphertext: {:?}", e); - return ApiResponse::InternalError(format!("Key Ciphertext corrupted")) - } - } - }, - Ok(None) => { - debug!("Nothing found in db!"); - return ApiResponse::NotFound(format!("Document {} not found!", &id)) - } - Err(e) => { - error!("Error while retrieving document: {:?}", e); - return ApiResponse::InternalError(format!("Error while retrieving document {}", &id)) - } - } -} - -pub fn mount_api() -> AdHoc { - AdHoc::on_ignite("Mounting Document API", |rocket| async { - rocket - .mount(ROCKET_DOC_API, routes![create_enc_document, get_enc_document, get_enc_documents_for_pid]) - }) -} \ No newline at end of file diff --git a/clearing-house-app/document-api/src/main.rs b/clearing-house-app/document-api/src/main.rs deleted file mode 100644 index 8da3956b..00000000 --- a/clearing-house-app/document-api/src/main.rs +++ /dev/null @@ -1,74 +0,0 @@ -#[macro_use] extern crate rocket; -#[macro_use] extern crate serde_derive; - -use core_lib::api::client::{ApiClientConfigurator, ApiClientEnum}; -use core_lib::util::{add_service_config, setup_logger}; -use rocket::fairing::AdHoc; -use rocket::http::Method; -use rocket::{Rocket, Build}; -use rocket_cors::{ - AllowedHeaders, AllowedOrigins, - CorsOptions -}; -use core_lib::constants::ENV_DOCUMENT_SERVICE_ID; -use crate::db::DatastoreConfigurator; - -mod doc_api; -mod db; - -fn add_cors_options() -> AdHoc { - AdHoc::on_ignite("Adding CORS rules", |rocket| async { - let allowed_origins = AllowedOrigins::some_exact(&[ - "http://127.0.0.1", - "http://127.0.0.1:4200", - "http://127.0.0.1:8001", - "http://localhost", - "http://localhost:4200", - "http://localhost:8001", - "http://document-gui", - "http://document-gui.local", - "https://127.0.0.1", - "https://127.0.0.1:4200", - "https://127.0.0.1:8001", - "https://localhost", - "https://localhost:4200", - "https://localhost:8001", - "https://document-gui", - "https://document-gui.local" - ]); - - let cors_options = CorsOptions { - allowed_origins, - allowed_methods: vec![Method::Get, Method::Post, Method::Options, Method::Delete].into_iter().map(From::from).collect(), - allowed_headers: AllowedHeaders::some(&[ - "Access-Control-Allow-Origin", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Headers", - "Accept", - "Authorization", - "Content-Type", - "Origin" - ]), - allow_credentials: true, - ..Default::default() - }.to_cors(); - - match cors_options { - Ok(cors) => rocket.attach(cors), - Err(_) => rocket - } - }) -} - -#[launch] -fn rocket() -> Rocket { - // setup logging - setup_logger().expect("Failure to set up the logger! 
Exiting..."); - - rocket::build() - .attach(doc_api::mount_api()) - .attach(add_cors_options()) - .attach(add_service_config(ENV_DOCUMENT_SERVICE_ID.to_string())) - .attach(DatastoreConfigurator) - .attach(ApiClientConfigurator::new(ApiClientEnum::Keyring)) -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/init_db/default_doc_type.json b/clearing-house-app/init_db/default_doc_type.json similarity index 100% rename from clearing-house-app/keyring-api/init_db/default_doc_type.json rename to clearing-house-app/init_db/default_doc_type.json diff --git a/clearing-house-app/keyring-api/Cargo.toml b/clearing-house-app/keyring-api/Cargo.toml deleted file mode 100644 index 7d3eda46..00000000 --- a/clearing-house-app/keyring-api/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "keyring-api" -version = "0.10.0" -authors = [ - "Mark Gall ", - "Georg Bramm " -] -edition = "2018" - -[dependencies] -aes = "0.6.0" -aes-gcm-siv = "0.9.0" -base64 = "0.9.3" -biscuit = { git = "https://github.com/lawliet89/biscuit", branch = "master" } -chrono = { version = "0.4", features = ["serde"] } -core-lib = {path = "../core-lib" } -error-chain = "0.12.4" -fern = "0.5" -generic-array = "0.14.4" -hex = "0.4.3" -hkdf = "0.10.0" -log = "0.4.14" -mongodb = "2.3.0" -openssl = "0.10.32" -rocket = { version = "0.5.0-rc.1", features = ["json"] } -sha2 = "0.9.3" -serde = "1.0" -serde_derive = "1.0" -serde_json = "1.0" -tokio = "1.8.1" -tokio-test = "0.4.2" -yaml-rust = "0.4" diff --git a/clearing-house-app/keyring-api/Rocket.toml b/clearing-house-app/keyring-api/Rocket.toml deleted file mode 100644 index a2173c76..00000000 --- a/clearing-house-app/keyring-api/Rocket.toml +++ /dev/null @@ -1,18 +0,0 @@ -[global] -limits = { json = 5242880 } - -[debug] -address = "0.0.0.0" -port = 8002 -log_level = "normal" -limits = { forms = 32768 } -database_url = "mongodb://localhost:27018" -clear_db = true - -[release] -address = "0.0.0.0" -port = 8002 -log_level = "normal" -limits = { forms = 32768 } -database_url = "mongodb://keyring-mongo:27017" -clear_db = false diff --git a/clearing-house-app/keyring-api/certs b/clearing-house-app/keyring-api/certs deleted file mode 120000 index 3c23f4b3..00000000 --- a/clearing-house-app/keyring-api/certs +++ /dev/null @@ -1 +0,0 @@ -../certs/ \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/api/doc_type_api.rs b/clearing-house-app/keyring-api/src/api/doc_type_api.rs deleted file mode 100644 index ff2cd06b..00000000 --- a/clearing-house-app/keyring-api/src/api/doc_type_api.rs +++ /dev/null @@ -1,106 +0,0 @@ -use core_lib::api::ApiResponse; -use core_lib::constants::{ROCKET_DOC_TYPE_API, DEFAULT_PROCESS_ID}; -use rocket::fairing::AdHoc; -use rocket::State; -use rocket::serde::json::{json,Json}; - -use crate::db::KeyStore; -use crate::model::doc_type::DocumentType; - -#[post("/", format = "json", data = "")] -async fn create_doc_type(db: &State, doc_type: Json) -> ApiResponse { - let doc_type: DocumentType = doc_type.into_inner(); - debug!("adding doctype: {:?}", &doc_type); - match db.exists_document_type(&doc_type.pid, &doc_type.id).await{ - Ok(true) => ApiResponse::BadRequest(String::from("doctype already exists!")), - Ok(false) => { - match db.add_document_type(doc_type.clone()).await{ - Ok(()) => ApiResponse::SuccessCreate(json!(doc_type)), - Err(e) => { - error!("Error while adding doctype: {:?}", e); - return ApiResponse::InternalError(String::from("Error while adding document type!")) - } - } - }, - Err(e) => { - error!("Error while 
adding document type: {:?}", e); - return ApiResponse::InternalError(String::from("Error while checking database!")) - } - } -} - -#[post("/<id>", format = "json", data = "<doc_type>")] -async fn update_doc_type(db: &State<KeyStore>, id: String, doc_type: Json<DocumentType>) -> ApiResponse { - let doc_type: DocumentType = doc_type.into_inner(); - match db.exists_document_type(&doc_type.pid, &doc_type.id).await{ - Ok(true) => ApiResponse::BadRequest(String::from("Doctype already exists!")), - Ok(false) => { - match db.update_document_type(doc_type, &id).await{ - Ok(id) => ApiResponse::SuccessOk(json!(id)), - Err(e) => { - error!("Error while updating doctype: {:?}", e); - return ApiResponse::InternalError(String::from("Error while storing document type!")) - } - } - }, - Err(e) => { - error!("Error while updating document type: {:?}", e); - return ApiResponse::InternalError(String::from("Error while checking database!")) - } - } -} - -#[delete("/<id>", format = "json")] -async fn delete_default_doc_type(db: &State<KeyStore>, id: String) -> ApiResponse{ - delete_doc_type(db, id, DEFAULT_PROCESS_ID.to_string()).await -} - -#[delete("/<id>/<pid>", format = "json")] -async fn delete_doc_type(db: &State<KeyStore>, id: String, pid: String) -> ApiResponse{ - match db.delete_document_type(&id, &pid).await{ - Ok(true) => ApiResponse::SuccessNoContent(String::from("Document type deleted!")), - Ok(false) => ApiResponse::NotFound(String::from("Document type does not exist!")), - Err(e) => { - error!("Error while deleting doctype: {:?}", e); - ApiResponse::InternalError(format!("Error while deleting document type with id {}!", id)) - } - } -} - -#[get("/<id>", format = "json")] -async fn get_default_doc_type(db: &State<KeyStore>, id: String) -> ApiResponse { - get_doc_type(db, id, DEFAULT_PROCESS_ID.to_string()).await -} - -#[get("/<id>/<pid>", format = "json")] -async fn get_doc_type(db: &State<KeyStore>, id: String, pid: String) -> ApiResponse { - match db.get_document_type(&id).await{ - //TODO: would like to send "{}" instead of "null" when dt is not found - Ok(dt) => ApiResponse::SuccessOk(json!(dt)), - Err(e) => { - error!("Error while retrieving doctype: {:?}", e); - ApiResponse::InternalError(format!("Error while retrieving document type with id {} and pid {}!", id, pid)) - } - } -} - -#[get("/", format = "json")] -async fn get_doc_types(db: &State<KeyStore>) -> ApiResponse { - match db.get_all_document_types().await { - //TODO: would like to send "{}" instead of "null" when dt is not found - Ok(dt) => ApiResponse::SuccessOk(json!(dt)), - Err(e) => { - error!("Error while retrieving default doctypes: {:?}", e); - ApiResponse::InternalError(format!("Error while retrieving all document types")) - } - } -} - -pub fn mount_api() -> AdHoc { - AdHoc::on_ignite("Mounting Document Type API", |rocket| async { - rocket - .mount(ROCKET_DOC_TYPE_API, routes![create_doc_type, - update_doc_type, delete_default_doc_type, delete_doc_type, - get_default_doc_type, get_doc_type, get_doc_types]) - }) -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/api/key_api.rs b/clearing-house-app/keyring-api/src/api/key_api.rs deleted file mode 100644 index 50f42dc9..00000000 --- a/clearing-house-app/keyring-api/src/api/key_api.rs +++ /dev/null @@ -1,173 +0,0 @@ -use core_lib::api::ApiResponse; -use core_lib::api::crypto::ChClaims; -use core_lib::constants::ROCKET_KEYRING_API; -use core_lib::model::crypto::{KeyCtList, KeyMapListItem}; -use rocket::fairing::AdHoc; -use rocket::State; -use rocket::serde::json::{json, Json}; - -use crate::db::KeyStore; -use crate::crypto::{generate_key_map, restore_key_map}; -
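// The routes below implement the keyring's key-management flow: generate_keys derives
// a fresh per-part key map for a document type via generate_key_map() and returns it
// together with the key seed encrypted under the master key; decrypt_keys and
// decrypt_key_map take stored key ciphertexts and re-derive exactly those keys via
// restore_key_map(). (Illustrative call, assuming the mount point configured by
// ROCKET_KEYRING_API: GET <mount>/generate_keys/<pid>?dt_id=<doc-type-id>.)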
-#[get("/generate_keys/<_pid>?<dt_id>", format = "json")] -async fn generate_keys(ch_claims: ChClaims, db: &State<KeyStore>, _pid: String, dt_id: String) -> ApiResponse { - trace!("generate_keys"); - trace!("...user '{:?}'", &ch_claims.client_id); - match db.get_msk().await{ - Ok(key) => { - // check that doc type exists for pid - match db.get_document_type(&dt_id).await{ - Ok(Some(dt)) => { - // generate new random key map - match generate_key_map(key, dt) { - Ok(key_map) => { - trace!("response: {:?}", &key_map); - return ApiResponse::SuccessCreate(json!(key_map)); - }, - Err(e) => { - error!("Error while generating key map: {}", e); - return ApiResponse::InternalError(String::from("Error while generating keys")); - } - } - } - Ok(None) =>{ - warn!("document type {} not found", &dt_id); - return ApiResponse::BadRequest(String::from("Document type not found!")); - } - Err(e) => { - warn!("Error while retrieving document type: {}", e); - return ApiResponse::InternalError(String::from("Error while retrieving document type")); - } - } - } - Err(e) => { - error!("Error while retrieving master key: {}", e); - return ApiResponse::InternalError(String::from("Error while generating keys")); - } - } -} - -#[get("/decrypt_keys/<_pid>", format = "json", data = "<key_cts>")] -async fn decrypt_keys(ch_claims: ChClaims, db: &State<KeyStore>, _pid: Option<String>, key_cts: Json<KeyCtList>) -> ApiResponse { - trace!("decrypt_keys"); - trace!("...user '{:?}'", &ch_claims.client_id); - let cts = key_cts.into_inner(); - debug!("number of cts to decrypt: {}", &cts.cts.len()); - - // get master key - match db.get_msk().await{ - Ok(m_key) => { - // check that doc type exists for pid - match db.get_document_type(&cts.dt).await{ - Ok(Some(dt)) => { - let mut dec_error_count = 0; - let mut map_error_count = 0; - // validate keys_ct input - let key_maps: Vec<KeyMapListItem> = cts.cts.iter().filter_map( - |key_ct| { - match hex::decode(key_ct.ct.clone()){ - Ok(key) => Some((key_ct.id.clone(), key)), - Err(e) => { - error!("Error while decoding key ciphertext: {}", e); - dec_error_count = dec_error_count + 1; - None - } - } - } - ).filter_map( - |(id, key)| { - match restore_key_map(m_key.clone(), dt.clone(), key){ - Ok(key_map) => { - Some(KeyMapListItem::new(id, key_map)) - }, - Err(e) => { - error!("Error while generating key map: {}", e); - map_error_count = map_error_count + 1; - None - } - } - } - ) - .collect(); - - let error_count = map_error_count + dec_error_count; - - // Currently, we don't tolerate errors while decrypting keys - if error_count > 0 { - return ApiResponse::InternalError(String::from("Error while decrypting keys")); - } - else{ - return ApiResponse::SuccessOk(json!(key_maps)); - } - } - Ok(None) =>{ - warn!("document type {} not found", &cts.dt); - return ApiResponse::BadRequest(String::from("Document type not found!")); - } - Err(e) => { - warn!("Error while retrieving document type: {}", e); - return ApiResponse::NotFound(String::from("Document type not found!")); - } - } - } - Err(e) => { - error!("Error while retrieving master key: {}", e); - return ApiResponse::InternalError(String::from("Error while decrypting keys")); - } - } - -} - -#[get("/decrypt_keys/<_pid>/<keys_ct>?<dt_id>", format = "json")] -async fn decrypt_key_map(ch_claims: ChClaims, db: &State<KeyStore>, keys_ct: String, _pid: Option<String>, dt_id: String) -> ApiResponse { - trace!("decrypt_key_map"); - trace!("...user '{:?}'", &ch_claims.client_id); - trace!("ct: {}", &keys_ct); - // get master key - match db.get_msk().await{ - Ok(key) => { - // check that doc type exists for pid - match db.get_document_type(&dt_id).await{ - Ok(Some(dt)) => { - // validate keys_ct input - let keys_ct = match hex::decode(keys_ct){ - Ok(key) => key, - Err(e) => { - error!("Error while decoding key ciphertext: {}", e); - return ApiResponse::InternalError(String::from("Error while decrypting keys")); - } - }; - - match restore_key_map(key, dt, keys_ct){ - Ok(key_map) => { - return ApiResponse::SuccessOk(json!(key_map)); - }, - Err(e) => { - error!("Error while generating key map: {}", e); - return ApiResponse::InternalError(String::from("Error while restoring keys")); - } - } - } - Ok(None) =>{ - warn!("document type {} not found", &dt_id); - return ApiResponse::BadRequest(String::from("Document type not found!")); - } - Err(e) => { - warn!("Error while retrieving document type: {}", e); - return ApiResponse::NotFound(String::from("Document type not found!")); - } - } - } - Err(e) => { - error!("Error while retrieving master key: {}", e); - return ApiResponse::InternalError(String::from("Error while decrypting keys")); - } - } -} - -pub fn mount_api() -> AdHoc { - AdHoc::on_ignite("Mounting Keyring API", |rocket| async { - rocket - .mount(ROCKET_KEYRING_API, routes![decrypt_key_map, decrypt_keys, generate_keys]) - }) -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/api/mod.rs b/clearing-house-app/keyring-api/src/api/mod.rs deleted file mode 100644 index 46db62bd..00000000 --- a/clearing-house-app/keyring-api/src/api/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod doc_type_api; -pub mod key_api; diff --git a/clearing-house-app/keyring-api/src/crypto.rs b/clearing-house-app/keyring-api/src/crypto.rs deleted file mode 100644 index 584c1286..00000000 --- a/clearing-house-app/keyring-api/src/crypto.rs +++ /dev/null @@ -1,161 +0,0 @@ -use aes_gcm_siv::Aes256GcmSiv; -use aes_gcm_siv::aead::{Aead, NewAead}; -use core_lib::errors::*; -use core_lib::model::crypto::{KeyEntry, KeyMap}; -use generic_array::GenericArray; -use hkdf::Hkdf; -use openssl::rand::rand_bytes; -use sha2::Sha256; -use std::collections::HashMap; -use crate::model::doc_type::DocumentType; -use crate::model::crypto::MasterKey; - -const EXP_KEY_SIZE: usize = 32; -const EXP_NONCE_SIZE: usize = 12; -const EXP_BUFF_SIZE: usize = 44; - -fn initialize_kdf() -> (String, Hkdf<Sha256>) { - let salt = generate_random_seed(); - let ikm = generate_random_seed(); - let (master_key, kdf) = Hkdf::<Sha256>::extract(Some(&salt), &ikm); - (hex::encode_upper(master_key), kdf) } - -pub fn generate_random_seed() -> Vec<u8>{ - let mut buf = [0u8; 256]; - rand_bytes(&mut buf).unwrap(); - buf.to_vec() -} - -fn derive_key_map(kdf: Hkdf<Sha256>, dt: DocumentType, enc: bool) -> HashMap<String, KeyEntry>{ - let mut key_map = HashMap::new(); - let mut okm = [0u8; EXP_BUFF_SIZE]; - let mut i = 0; - dt.parts.iter() - .for_each(|p| { - if kdf.expand(p.name.clone().as_bytes(), &mut okm).is_ok() { - let map_key = match enc{ - true => p.name.clone(), - false => i.to_string() - }; - key_map.insert(map_key, KeyEntry::new(i.to_string(), okm[..EXP_KEY_SIZE].to_vec(), okm[EXP_KEY_SIZE..].to_vec())); - } - i = i + 1; - }); - key_map -} - -pub fn generate_key_map(mkey: MasterKey, dt: DocumentType) -> Result<KeyMap>{ - debug!("generating encryption key_map for doc type: '{}'", &dt.id); - let (secret, doc_kdf) = initialize_kdf(); - let key_map = derive_key_map(doc_kdf, dt, true); - - debug!("encrypting the key seed"); - let kdf = restore_kdf(&mkey.key)?; - let mut okm = [0u8; EXP_BUFF_SIZE]; - if kdf.expand(hex::decode(mkey.salt)?.as_slice(), &mut okm).is_err(){ - bail!("Error while generating key"); -
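// One HKDF expand fills okm with EXP_BUFF_SIZE = 44 bytes: a 32-byte key
// (EXP_KEY_SIZE) followed by a 12-byte nonce (EXP_NONCE_SIZE), exactly the key and
// nonce sizes AES-256-GCM-SIV expects in encrypt_secret()/decrypt_secret() below.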
} - match encrypt_secret(&okm[..EXP_KEY_SIZE], &okm[EXP_KEY_SIZE..], secret){ - Ok(ct) => Ok(KeyMap::new(true, key_map, Some(ct))), - Err(e) => { - error!("Error while encrypting key seed: {:?}", e); - bail!("Error while encrypting key seed!"); - } - } -} - -pub fn restore_key_map(mkey: MasterKey, dt: DocumentType, keys_ct: Vec) -> Result{ - debug!("decrypting the key seed"); - let kdf = restore_kdf(&mkey.key)?; - let mut okm = [0u8; EXP_BUFF_SIZE]; - if kdf.expand(hex::decode(mkey.salt)?.as_slice(), &mut okm).is_err(){ - bail!("Error while generating key"); - } - - match decrypt_secret(&okm[..EXP_KEY_SIZE], &okm[EXP_KEY_SIZE..], &keys_ct){ - Ok(key_seed) => { - // generate new random key map - restore_keys(&key_seed, dt) - } - Err(e) => { - error!("Error while decrypting key ciphertext: {}", e); - bail!("Error while decrypting keys"); - } - } -} - -pub fn restore_keys(secret: &String, dt: DocumentType) -> Result{ - debug!("restoring decryption key_map for doc type: '{}'", &dt.id); - let kdf = restore_kdf(secret)?; - let key_map = derive_key_map(kdf, dt, false); - - Ok(KeyMap::new(false, key_map, None)) -} - -fn restore_kdf(secret: &String) -> Result>{ - debug!("restoring kdf from secret"); - let prk = match hex::decode(secret){ - Ok(key) => key, - Err(e) => { - error!("Error while decoding master key: {}", e); - bail!("Error while encrypting key seed!"); - } - }; - - match Hkdf::::from_prk(prk.as_slice()){ - Ok(kdf) => Ok(kdf), - Err(e) => { - error!("Error while instantiating hkdf: {}", e); - bail!("Error while encrypting key seed!") - } - } -} - -pub fn encrypt_secret(key: &[u8], nonce: &[u8], secret: String) -> Result>{ - // check key size - if key.len() != EXP_KEY_SIZE { - error!("Given key has size {} but expected {} bytes", key.len(), EXP_KEY_SIZE); - bail!("Incorrect key size") - } - // check nonce size - else if nonce.len() != EXP_NONCE_SIZE { - error!("Given nonce has size {} but expected {} bytes", nonce.len(), EXP_NONCE_SIZE); - bail!("Incorrect nonce size") - } - else{ - let key = GenericArray::from_slice(key); - let nonce = GenericArray::from_slice(nonce); - let cipher = Aes256GcmSiv::new(key); - - match cipher.encrypt(nonce, secret.as_bytes()){ - Ok(ct) => { - Ok(ct) - } - Err(e) => bail!("Error while encrypting {}", e) - } - } -} - -pub fn decrypt_secret(key: &[u8], nonce: &[u8], ct: &[u8]) -> Result{ - debug!("key len = {}", key.len()); - debug!("ct len = {}", ct.len()); - let key = GenericArray::from_slice(key); - let nonce = GenericArray::from_slice(nonce); - let cipher = Aes256GcmSiv::new(key); - - debug!("key: {}", hex::encode_upper(key)); - debug!("nonce: {}", hex::encode_upper(nonce)); - - debug!("ct len = {}", ct.len()); - debug!("nonce len = {}", nonce.len()); - match cipher.decrypt(nonce, ct){ - Ok(pt) => { - let pt = String::from_utf8(pt)?; - Ok(pt) - }, - Err(e) => { - bail!("Error while decrypting: {}", e) - } - } -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/db/crypto.rs b/clearing-house-app/keyring-api/src/db/crypto.rs deleted file mode 100644 index 492ec4c0..00000000 --- a/clearing-house-app/keyring-api/src/db/crypto.rs +++ /dev/null @@ -1,33 +0,0 @@ -use crate::crypto::generate_random_seed; -use hkdf::Hkdf; -use sha2::Sha256; -use core_lib::model::new_uuid; - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct MasterKey { - pub id: String, - pub key: String, - pub salt: Vec -} - -impl MasterKey{ - pub fn new(id: String, key: String, salt: Vec)-> MasterKey{ - MasterKey{ - id, - key, - salt - } - } - - pub fn 
new_random() -> MasterKey{ - let key_salt = generate_random_seed(); - let ikm = generate_random_seed(); - let (master_key, _) = Hkdf::::extract(Some(&key_salt), &ikm); - - MasterKey{ - id: new_uuid(), - key: hex::encode_upper(master_key), - salt: generate_random_seed() - } - } -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/db/doc_type.rs b/clearing-house-app/keyring-api/src/db/doc_type.rs deleted file mode 100644 index d2944b87..00000000 --- a/clearing-house-app/keyring-api/src/db/doc_type.rs +++ /dev/null @@ -1,87 +0,0 @@ -use core_lib::constants::{MONGO_ID, MONGO_PID, MONGO_COLL_DOC_TYPES}; -use core_lib::errors::*; -use rocket::futures::TryStreamExt; -use mongodb::bson::doc; - -use crate::db::KeyStore; -use crate::model::doc_type::DocumentType; - -impl KeyStore { - // DOCTYPE - pub async fn add_document_type(&self, doc_type: DocumentType) -> Result<()> { - let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); - match coll.insert_one(doc_type.clone(), None).await { - Ok(_r) => { - debug!("added new document type: {}", &_r.inserted_id); - Ok(()) - }, - Err(e) => { - error!("failed to log document type {}", &doc_type.id); - Err(Error::from(e)) - } - } - } - - //TODO: Do we need to check that no documents of this type exist before we remove it from the db? - pub async fn delete_document_type(&self, id: &String, pid: &String) -> Result { - let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); - let result = coll.delete_many(doc! { MONGO_ID: id, MONGO_PID: pid }, None).await?; - if result.deleted_count >= 1 { - Ok(true) - } else { - Ok(false) - } - } - - - /// checks if the model exits - pub async fn exists_document_type(&self, pid: &String, dt_id: &String) -> Result { - let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); - let result = coll.find_one(Some(doc! { MONGO_ID: dt_id, MONGO_PID: pid }), None).await?; - match result { - Some(_r) => Ok(true), - None => { - debug!("document type with id {} and pid {:?} does not exist!", &dt_id, &pid); - Ok(false) - } - } - } - - pub async fn get_all_document_types(&self) -> Result> { - let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); - let result = coll.find(None, None).await? - .try_collect().await.unwrap_or_else(|_| vec![]); - Ok(result) - } - - pub async fn get_document_type(&self, dt_id: &String) -> Result> { - let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); - debug!("get_document_type for dt_id: '{}'", dt_id); - match coll.find_one(Some(doc! { MONGO_ID: dt_id}), None).await{ - Ok(result) => Ok(result), - Err(e) => { - error!("error while getting document type with id {}!", dt_id); - Err(Error::from(e)) - } - } - } - - pub async fn update_document_type(&self, doc_type: DocumentType, id: &String) -> Result { - let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); - match coll.replace_one(doc! 
{ MONGO_ID: id}, doc_type, None).await{ - Ok(r) => { - if r.matched_count != 1 || r.modified_count != 1{ - warn!("while replacing doc type {} matched '{}' dts and modified '{}'", id, r.matched_count, r.modified_count); - } - else{ - debug!("while replacing doc type {} matched '{}' dts and modified '{}'", id, r.matched_count, r.modified_count); - } - Ok(true) - }, - Err(e) => { - error!("error while updating document type with id {}: {:#?}", id, e); - Ok(false) - } - } - } -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/db/mod.rs b/clearing-house-app/keyring-api/src/db/mod.rs deleted file mode 100644 index 91451619..00000000 --- a/clearing-house-app/keyring-api/src/db/mod.rs +++ /dev/null @@ -1,175 +0,0 @@ -use core_lib::constants::{MONGO_COLL_MASTER_KEY, KEYRING_DB, FILE_DEFAULT_DOC_TYPE, DATABASE_URL, CLEAR_DB, KEYRING_DB_CLIENT}; -use core_lib::db::{DataStoreApi, init_database_client}; -use core_lib::errors::*; -use core_lib::util::read_file; -use mongodb::{Client, Database}; -use rocket::fairing::{self, Fairing, Info, Kind}; -use rocket::futures::TryStreamExt; -use rocket::{Rocket, Build}; -use std::process::exit; - -use crate::model::crypto::MasterKey; -use crate::model::doc_type::DocumentType; - - -pub(crate) mod doc_type; -#[cfg(test)] mod tests; - -#[derive(Clone, Debug)] -pub struct KeyStore { - client: Client, - database: Database -} - -impl DataStoreApi for KeyStore { - fn new(client: Client) -> KeyStore{ - KeyStore { - client: client.clone(), - database: client.database(KEYRING_DB) - } - } -} - -#[derive(Clone, Debug)] -pub struct KeyringDbConfigurator; - -#[rocket::async_trait] -impl Fairing for KeyringDbConfigurator { - fn info(&self) -> Info { - Info { - name: "Configuring Keyring Database", - kind: Kind::Ignite - } - } - async fn on_ignite(&self, rocket: Rocket) -> fairing::Result { - let db_url: String = rocket.figment().extract_inner(DATABASE_URL).clone().unwrap(); - let clear_db = match rocket.figment().extract_inner(CLEAR_DB) { - Ok(value) => { - debug!("clear_db: '{}' found.", &value); - value - }, - Err(_) => { - false - } - }; - debug!("Using database url: '{:#?}'", &db_url); - - match init_database_client::(&db_url.as_str(), Some(KEYRING_DB_CLIENT.to_string())).await { - Ok(keystore) => { - debug!("Check if database is empty..."); - match keystore.client.database(KEYRING_DB) - .list_collection_names(None) - .await { - Ok(colls) => { - debug!("... found collections: {:#?}", &colls); - if colls.len() > 0 && clear_db { - debug!("Database not empty and clear_db == true. Dropping database..."); - match keystore.client.database(KEYRING_DB).drop(None).await { - Ok(_) => { - debug!("... done."); - } - Err(_) => { - debug!("... failed."); - return Err(rocket); - } - }; - } - if colls.len() == 0 || clear_db { - debug!("Database empty. Need to initialize..."); - debug!("Adding initial document type..."); - match serde_json::from_str::(&read_file(FILE_DEFAULT_DOC_TYPE).unwrap_or(String::new())) { - Ok(dt) => { - match keystore.add_document_type(dt).await { - Ok(_) => { - debug!("... done."); - }, - Err(e) => { - error!("Error while adding initial document type: {:#?}", e); - return Err(rocket); - } - } - } - _ => { - error!("Error while loading initial document type"); - return Err(rocket); - } - }; - debug!("Creating master key..."); - // create master key - match keystore.store_master_key(MasterKey::new_random()).await { - Ok(true) => { - debug!("... done."); - }, - _ => { - error!("... 
failed to create master key"); - return Err(rocket); - } - }; - } - debug!("... database initialized."); - Ok(rocket.manage(keystore)) - } - Err(_) => { - Err(rocket) - } - } - }, - Err(_) => Err(rocket) - } - } -} - -impl KeyStore { - - /// Only one master key may exist in the database. - pub async fn store_master_key(&self, key: MasterKey) -> Result{ - debug!("Storing new master key..."); - let coll = self.database.collection::(MONGO_COLL_MASTER_KEY); - debug!("... but first check if there's already one."); - let result= coll.find(None, None).await - .expect("Error retrieving the master keys") - .try_collect().await.unwrap_or_else(|_| vec![]); - - if result.len() > 1{ - error!("Master Key table corrupted!"); - exit(1); - } - if result.len() == 1{ - error!("Master key already exists!"); - Ok(false) - } - else{ - //let db_key = bson::to_bson(&key) - // .expect("failed to serialize master key for database"); - match coll.insert_one(key, None).await{ - Ok(_r) => { - Ok(true) - }, - Err(e) => { - error!("master key could not be stored: {:?}", &e); - panic!("master key could not be stored") - } - } - } - } - - /// Only one master key may exist in the database. - pub async fn get_msk(&self) -> Result { - let coll = self.database.collection::(MONGO_COLL_MASTER_KEY); - let result= coll.find(None, None).await - .expect("Error retrieving the master keys") - .try_collect().await.unwrap_or_else(|_| vec![]); - - if result.len() > 1{ - error!("Master Key table corrupted!"); - exit(1); - } - if result.len() == 1{ - Ok(result[0].clone()) - } - else { - error!("Master Key missing!"); - exit(1); - } - } -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/db/tests.rs b/clearing-house-app/keyring-api/src/db/tests.rs deleted file mode 100644 index af31c016..00000000 --- a/clearing-house-app/keyring-api/src/db/tests.rs +++ /dev/null @@ -1,110 +0,0 @@ -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -// These tests all access the db, so if you run the tests use -// cargo test -- --test-threads=1 -// otherwise they will interfere with each other -// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
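// db_setup() and tear_down() below both drop the shared database at 127.0.0.1:27018,
// so concurrent test threads would wipe each other's state mid-test; hence the
// single-threaded invocation above.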
-use core_lib::errors::*; -use mongodb::Client; - -use crate::db::{DataStoreApi, KeyStore}; -use crate::model::doc_type::DocumentType; - -const DATABASE_URL: &'static str = "mongodb://127.0.0.1:27018"; - -async fn db_setup() -> KeyStore { - let client = Client::with_uri_str(DATABASE_URL).await.unwrap(); - let db = KeyStore::new(client); - db.database.drop(None).await.expect("Database Error"); - db -} - -async fn tear_down(db: KeyStore){ - db.database.drop(None).await.expect("Database Error"); -} - -/// Testcase: Document type exists -#[tokio::test] -async fn test_document_type_exists() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let dt = DocumentType::new(String::from("test_document_type_exists_dt_dt"), String::from("test_document_type_exists_dt_pid"), vec!()); - db.add_document_type(dt.clone()).await?; - - // run the test: db should find document type - assert_eq!(db.exists_document_type(&dt.pid, &dt.id).await?, true); - - // clean up - tear_down(db).await; - - Ok(()) -} - - -/// Testcase: Document type exists for other pid and is not found -#[tokio::test] -async fn test_document_type_exists_for_other_pid() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data - let dt = DocumentType::new(String::from("test_document_type_exists_for_other_pid_dt"), String::from("test_document_type_exists_for_other_pid_pid"), vec!()); - let wrong_pid = String::from("the_wrong_pid"); - db.add_document_type(dt.clone()).await?; - - // run the test: db should not find the document type - assert_eq!(db.exists_document_type(&wrong_pid, &dt.id).await?, false); - - // clean up - tear_down(db).await; - - Ok(()) -} - -/// Testcase: Delete on document type with correct pid results in deletion of document type -#[tokio::test] -async fn test_delete_document_type_correct_pid() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data and insert into db - let dt = DocumentType::new(String::from("test_delete_document_type_correct_pid_id"), String::from("test_delete_document_type_correct_pid_pid"), vec!()); - let dt2 = DocumentType::new(String::from("test_delete_document_type_correct_pid_id"), String::from("test_delete_document_type_correct_pid_pid_2"), vec!()); - db.add_document_type(dt.clone()).await?; - db.add_document_type(dt2.clone()).await?; - - // run the test - db.delete_document_type(&dt.id, &dt.pid).await?; - - // db should not find document type - assert_eq!(db.exists_document_type(&dt.pid, &dt.id).await?, false); - - // clean up - tear_down(db).await; - - Ok(()) -} - -/// Testcase: Delete on document type with wrong pid results not in the deletion of document type -#[tokio::test] -async fn test_delete_document_type_wrong_pid() -> Result<()>{ - // empty db and create tables - let db = db_setup().await; - - // prepare test data and insert into db - let dt = DocumentType::new(String::from("test_delete_document_type_correct_pid_id"), String::from("test_delete_document_type_correct_pid_pid"), vec!()); - let wrong_pid = String::from("the_wrong_pid"); - db.add_document_type(dt.clone()).await?; - - // run the test - db.delete_document_type(&dt.id, &wrong_pid).await?; - - // db should still find document type - assert_eq!(db.exists_document_type(&dt.pid, &dt.id).await?, true); - - // clean up - tear_down(db).await; - - Ok(()) -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/main.rs b/clearing-house-app/keyring-api/src/main.rs deleted file mode 
100644 index 5074728a..00000000 --- a/clearing-house-app/keyring-api/src/main.rs +++ /dev/null @@ -1,26 +0,0 @@ -#[macro_use] extern crate error_chain; -#[macro_use] extern crate rocket; -#[macro_use] extern crate serde_derive; - -use core_lib::util::{add_service_config, setup_logger}; -use rocket::{Build, Rocket}; -use core_lib::constants::ENV_KEYRING_SERVICE_ID; -use crate::db::KeyringDbConfigurator; - -mod api; -mod db; -mod crypto; -mod model; -#[cfg(test)] mod tests; - -#[launch] -fn rocket() -> Rocket { - // setup logging - setup_logger().expect("Failure to set up the logger! Exiting..."); - - rocket::build() - .attach(add_service_config(ENV_KEYRING_SERVICE_ID.to_string())) - .attach(api::key_api::mount_api()) - .attach(api::doc_type_api::mount_api()) - .attach(KeyringDbConfigurator) -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/model/crypto.rs b/clearing-house-app/keyring-api/src/model/crypto.rs deleted file mode 100644 index a700c76b..00000000 --- a/clearing-house-app/keyring-api/src/model/crypto.rs +++ /dev/null @@ -1,29 +0,0 @@ -use crate::crypto::generate_random_seed; -use hkdf::Hkdf; -use sha2::Sha256; -use core_lib::model::new_uuid; - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct MasterKey { - pub id: String, - pub key: String, - pub salt: String -} - -impl MasterKey{ - pub fn new(id: String, key: String, salt: String)-> MasterKey{ - MasterKey{ - id, - key, - salt - } - } - - pub fn new_random() -> MasterKey{ - let key_salt = generate_random_seed(); - let ikm = generate_random_seed(); - let (master_key, _) = Hkdf::::extract(Some(&key_salt), &ikm); - - MasterKey::new(new_uuid(), hex::encode_upper(master_key), hex::encode_upper(generate_random_seed())) - } -} \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/model/doc_type.rs b/clearing-house-app/keyring-api/src/model/doc_type.rs deleted file mode 100644 index 33a7f702..00000000 --- a/clearing-house-app/keyring-api/src/model/doc_type.rs +++ /dev/null @@ -1,29 +0,0 @@ -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct DocumentType { - pub id: String, - pub pid: String, - pub parts: Vec, -} - -impl DocumentType { - pub fn new(id: String, pid: String, parts: Vec) -> DocumentType { - DocumentType{ - id, - pid, - parts, - } - } -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct DocumentTypePart { - pub name: String, -} - -impl DocumentTypePart { - pub fn new(name: String) -> DocumentTypePart{ - DocumentTypePart{ - name - } - } -} diff --git a/clearing-house-app/keyring-api/src/model/mod.rs b/clearing-house-app/keyring-api/src/model/mod.rs deleted file mode 100644 index d1c0859d..00000000 --- a/clearing-house-app/keyring-api/src/model/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod doc_type; -pub(crate) mod crypto; \ No newline at end of file diff --git a/clearing-house-app/keyring-api/src/tests.rs b/clearing-house-app/keyring-api/src/tests.rs deleted file mode 100644 index 241c9475..00000000 --- a/clearing-house-app/keyring-api/src/tests.rs +++ /dev/null @@ -1,117 +0,0 @@ -use core_lib::errors::*; -use crate::model::doc_type::{DocumentType, DocumentTypePart}; -use crate::crypto::{encrypt_secret, decrypt_secret, generate_key_map, restore_key_map}; -use crate::model::crypto::MasterKey; - -fn create_test_document_type() -> DocumentType{ - let mut parts = vec!(); - parts.push(DocumentTypePart::new(String::from("name"))); - parts.push(DocumentTypePart::new(String::from("message"))); - 
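// the third part: the key map derived from this type must end up with exactly
// three key/nonce entries (see the assertions in test_key_generation below)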
parts.push(DocumentTypePart::new(String::from("connector"))); - - DocumentType::new(String::from("test_dt_1"), String::from("test_pid_1"), parts) -} - -#[test] -fn test_key_generation() -> Result<()>{ - // prepare test data - let dt = create_test_document_type(); - let k = String::from("C36D50B35B5981C8F1FAD6738848BD5A4F77EF77B56A4E66F7961B9B7A642B2B"); - let salt = String::from("A6E804FF70117E606686EDD8516C95734E239453AB52AC6E3F916D1D861412B574A91B01ECE5F9E4A17B498EDA132792CC9A89C031470950F87AE402B8DDA581410D7E310A5E4204F1467A4E4C240CCB180A84A1B1DE2A06FDB4474C98E78026FDCFB862DE7AC60A4A6772268EE397AF18C28F41DD9A10471E469833EB2092E28AE8D3DD58D98ACC521FC87B99A19912F70376F7E3026C960F903FE7B44F1903A5E36313EE1A8A60B2E317A6443B9408ABBA2763BD3ED42F406F5F19551ED84ADDAD0CD8A652ED72F0040E44CCF3C6CF854D5EA6FBFE9267DB4EBFAD5DE9BA3055049D71CC64A90B081C2A37ED0B5FDDB88AE864436A7D1F14FCA1F969B67F9E"); - let id = String::from("86177e93-29aa-477a-b63f-03ccd9c5679d"); - let mkey = MasterKey::new(id, k, salt); - - // run the test - let keys = generate_key_map(mkey, dt)?; - - // Keymap generated for encryption - assert_eq!(keys.enc, true); - - // there should be 3 items in the hash map - assert_eq!(keys.keys.len(), 3); - - // no key should be the same as another - keys.keys.values().for_each(|i| { - keys.keys.values().for_each(|j|{ - if i.id.ne(&j.id){ - assert!(i.nonce.ne(&j.nonce)); - assert!(i.key.ne(&j.key)); - } - }); - }); - - Ok(()) -} - -#[test] -fn test_restoring_keymap() -> Result<()>{ - // prepare test data - let dt = create_test_document_type(); - let keys_ct = hex::decode("29D816635437C4487DACD93349F6B853EAD8C6F37250901A5BEEF1529E2358BBE634E6D1BD923ED0F2F842DB83139A9786796190DA8DF8F09F0384C8842BA0316079F857C71184C0C4E2A74622D0BED7").unwrap(); - let k = String::from("C36D50B35B5981C8F1FAD6738848BD5A4F77EF77B56A4E66F7961B9B7A642B2B"); - let salt = String::from("A6E804FF70117E606686EDD8516C95734E239453AB52AC6E3F916D1D861412B574A91B01ECE5F9E4A17B498EDA132792CC9A89C031470950F87AE402B8DDA581410D7E310A5E4204F1467A4E4C240CCB180A84A1B1DE2A06FDB4474C98E78026FDCFB862DE7AC60A4A6772268EE397AF18C28F41DD9A10471E469833EB2092E28AE8D3DD58D98ACC521FC87B99A19912F70376F7E3026C960F903FE7B44F1903A5E36313EE1A8A60B2E317A6443B9408ABBA2763BD3ED42F406F5F19551ED84ADDAD0CD8A652ED72F0040E44CCF3C6CF854D5EA6FBFE9267DB4EBFAD5DE9BA3055049D71CC64A90B081C2A37ED0B5FDDB88AE864436A7D1F14FCA1F969B67F9E"); - let id = String::from("86177e93-29aa-477a-b63f-03ccd9c5679d"); - let mkey = MasterKey::new(id, k, salt); - - let mut expected_keys = vec!(); - expected_keys.push(hex::decode("0FCBA316FA47AC0E3EFF4D69B7780925ED22CFF46FC1A731B4E9942FED67BA04").unwrap()); - expected_keys.push(hex::decode("DE888EF80B13390CA76387F18528F3B3948B8C446D70C09F7C2A1D2346CFE917").unwrap()); - expected_keys.push(hex::decode("2E6953A92D081C5189DED6FB9644606257A2839CD2159F77166DF246E236B67C").unwrap()); - - let mut expected_nonces = vec!(); - expected_nonces.push(hex::decode("6A63BE704DC9687FA3FDFF26").unwrap()); - expected_nonces.push(hex::decode("D0E2744835BD2FFECFFA9AE6").unwrap()); - expected_nonces.push(hex::decode("83587A962A24F94D907CF2B7").unwrap()); - - // run the test - let result = restore_key_map(mkey, dt, keys_ct)?; - - // Keymap generated for decryption - assert_eq!(result.enc, false); - - // there should be 3 items in the hash map - assert_eq!(result.keys.len(), 3); - - // check the derived keys and nonces - result.keys.values().for_each(|i| { - let index = i.id.parse::().unwrap(); - assert_eq!(i.key, expected_keys[index]); - assert_eq!(i.nonce, 
expected_nonces[index]); - }); - - Ok(()) -} - - -#[test] -fn test_encrypting_secret() -> Result<()>{ - // prepare test data - let key = hex::decode("9530D8826CCE9D6CF377B849D63C7155F78343120A303D55F1A9BECAF25E9713").unwrap(); - let nonce = hex::decode("2C0802076377687B9A403120").unwrap(); - let secret = String::from("1EB18B9FC8CBA07F2EA00BC00FBE468AB1D48E2E28F14FAD61EA3A38B41E2586"); - let expected_ct = hex::decode("CAE855AF0FD950A25F2D629A344F2B51530EE98990A77D4B49868C3EB497913A9E936D9DBF9487A77A7B36709C8F1AE43A40D779D7D56351A606675A04FCE5F8B7E80C06B3E9A47083C2E604AD5F681D").unwrap(); - - // run the test - let result = encrypt_secret(key.as_slice(), nonce.as_slice(), secret.clone())?; - - assert_eq!(expected_ct, result); - - Ok(()) -} - -#[test] -fn test_decrypting_secret() -> Result<()>{ - // prepare test data - let key = hex::decode("9530D8826CCE9D6CF377B849D63C7155F78343120A303D55F1A9BECAF25E9713").unwrap(); - let nonce = hex::decode("2C0802076377687B9A403120").unwrap(); - let ct = hex::decode("CAE855AF0FD950A25F2D629A344F2B51530EE98990A77D4B49868C3EB497913A9E936D9DBF9487A77A7B36709C8F1AE43A40D779D7D56351A606675A04FCE5F8B7E80C06B3E9A47083C2E604AD5F681D").unwrap(); - let expected_secret = String::from("1EB18B9FC8CBA07F2EA00BC00FBE468AB1D48E2E28F14FAD61EA3A38B41E2586"); - - // run the test - let result = decrypt_secret(key.as_slice(), nonce.as_slice(), ct.as_slice())?; - - // check the decryption - assert_eq!(expected_secret, result); - - Ok(()) -} diff --git a/clearing-house-app/logging-service/keys/.DS_Store b/clearing-house-app/keys/.DS_Store similarity index 100% rename from clearing-house-app/logging-service/keys/.DS_Store rename to clearing-house-app/keys/.DS_Store diff --git a/clearing-house-app/logging-service/keys/private_key.der b/clearing-house-app/keys/private_key.der similarity index 100% rename from clearing-house-app/logging-service/keys/private_key.der rename to clearing-house-app/keys/private_key.der diff --git a/clearing-house-app/logging-service/keys/private_key_2048.der b/clearing-house-app/keys/private_key_2048.der similarity index 100% rename from clearing-house-app/logging-service/keys/private_key_2048.der rename to clearing-house-app/keys/private_key_2048.der diff --git a/clearing-house-app/logging-service/Cargo.toml b/clearing-house-app/logging-service/Cargo.toml deleted file mode 100644 index 19b1333b..00000000 --- a/clearing-house-app/logging-service/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "logging-service" -version = "0.10.0" -authors = [ - "Mark Gall ", - "Georg Bramm ", -] -edition = "2018" - -[dependencies] -biscuit = { git = "https://github.com/lawliet89/biscuit", branch = "master" } -core-lib = { path = "../core-lib" } -chrono = { version = "0.4", features = ["serde"] } -error-chain = "0.12.1" -fern = "0.5" -log = "0.4" -mongodb ="2.3.0" -percent-encoding = "2.1.0" -rocket = { version = "0.5.0-rc.1", features = ["json"] } -serde = "1.0" -serde_derive = "1.0" -serde_json = "1.0" \ No newline at end of file diff --git a/clearing-house-app/logging-service/Rocket.toml b/clearing-house-app/logging-service/Rocket.toml deleted file mode 100644 index b068a570..00000000 --- a/clearing-house-app/logging-service/Rocket.toml +++ /dev/null @@ -1,26 +0,0 @@ -[global] -limits = { json = 5242880 } -connector_name = "https://clearinghouse.aisec.fraunhofer.de/" -infomodel_version = "4.0.0" -server_agent = "https://clearinghouse.aisec.fraunhofer.de" -signing_key = "keys/private_key.der" - -[debug] -address = "0.0.0.0" -port = 8000 -log_level = "normal" 
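# The debug profile targets a local MongoDB (port 27019) and clears the database on
# startup (clear_db = true); the release profile below persists it (clear_db = false).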
-limits = { forms = 32768 } -database_url = "mongodb://localhost:27019" -keyring_api_url = "http://localhost:8002" -document_api_url = "http://localhost:8001" -clear_db = true - -[release] -address = "0.0.0.0" -port = 8000 -log_level = "normal" -limits = { forms = 32768 } -database_url = "mongodb://logging-service-mongo:27017" -keyring_api_url = "http://keyring-api:8002" -document_api_url = "http://document-api:8001" -clear_db = false diff --git a/clearing-house-app/logging-service/certs b/clearing-house-app/logging-service/certs deleted file mode 120000 index 36343b9b..00000000 --- a/clearing-house-app/logging-service/certs +++ /dev/null @@ -1 +0,0 @@ -../certs \ No newline at end of file diff --git a/clearing-house-app/logging-service/src/db/mod.rs b/clearing-house-app/logging-service/src/db/mod.rs deleted file mode 100644 index 468fa5bd..00000000 --- a/clearing-house-app/logging-service/src/db/mod.rs +++ /dev/null @@ -1,213 +0,0 @@ -use core_lib::constants::{MONGO_ID, MONGO_COLL_PROCESSES, DATABASE_URL, CLEAR_DB, PROCESS_DB, PROCESS_DB_CLIENT, MONGO_COLL_TRANSACTIONS, MONGO_TC}; -use core_lib::db::{DataStoreApi, init_database_client}; -use core_lib::errors::*; -use core_lib::model::process::Process; -use mongodb::bson::doc; -use mongodb::{Client, Database}; -use rocket::fairing::{self, Fairing, Info, Kind}; -use rocket::futures::TryStreamExt; -use rocket::{Rocket, Build}; -use mongodb::options::{UpdateModifications, FindOneAndUpdateOptions, WriteConcern, CreateCollectionOptions}; -use crate::model::TransactionCounter; - -#[derive(Clone, Debug)] -pub struct ProcessStoreConfigurator; - -#[rocket::async_trait] -impl Fairing for ProcessStoreConfigurator { - fn info(&self) -> Info { - Info { - name: "Configuring Process Database", - kind: Kind::Ignite - } - } - async fn on_ignite(&self, rocket: Rocket) -> fairing::Result { - debug!("Preparing to initialize database..."); - let db_url: String = rocket.figment().extract_inner(DATABASE_URL).clone().unwrap(); - let clear_db = match rocket.figment().extract_inner(CLEAR_DB){ - Ok(value) => { - debug!("...clear_db: {} found. ", &value); - value - }, - Err(_) => { - false - } - }; - debug!("...using database url: '{:#?}'", &db_url); - - match init_database_client::(&db_url.as_str(), Some(PROCESS_DB_CLIENT.to_string())).await{ - Ok(process_store) => { - debug!("...check if database is empty..."); - match process_store.client.database(PROCESS_DB) - .list_collection_names(None) - .await{ - Ok(colls) => { - debug!("... found collections: {:#?}", &colls); - if colls.len() > 0 && clear_db{ - debug!("...database not empty and clear_db == true. Dropping database..."); - match process_store.client.database(PROCESS_DB).drop(None).await{ - Ok(_) => { - debug!("... done."); - } - Err(_) => { - debug!("... failed."); - return Err(rocket); - } - }; - } - if colls.len() == 0 || clear_db{ - debug!("..database empty. Need to initialize..."); - let mut write_concern = WriteConcern::default(); - write_concern.journal = Some(true); - let mut options = CreateCollectionOptions::default(); - options.write_concern = Some(write_concern); - debug!("...create collection {} ...", MONGO_COLL_TRANSACTIONS); - match process_store.client.database(PROCESS_DB).create_collection(MONGO_COLL_TRANSACTIONS, options).await{ - Ok(_) => { - debug!("... done."); - } - Err(_) => { - debug!("... failed."); - return Err(rocket); - } - }; - } - debug!("... 
database initialized."); - Ok(rocket.manage(process_store)) - } - Err(_) => { - Err(rocket) - } - } - }, - Err(_) => Err(rocket) - } - } -} - -#[derive(Clone)] -pub struct ProcessStore { - client: Client, - database: Database -} - -impl DataStoreApi for ProcessStore { - fn new(client: Client) -> ProcessStore{ - ProcessStore { - client: client.clone(), - database: client.database(PROCESS_DB) - } - } -} - -impl ProcessStore { - pub async fn get_transaction_counter(&self) -> Result>{ - debug!("Getting transaction counter..."); - let coll = self.database.collection::(MONGO_COLL_TRANSACTIONS); - match coll.find_one(None, None).await?{ - Some(t) => Ok(Some(t.tc)), - None => Ok(Some(0)) - } - } - - pub async fn increment_transaction_counter(&self) -> Result>{ - debug!("Getting transaction counter..."); - let coll = self.database.collection::(MONGO_COLL_TRANSACTIONS); - let mods = UpdateModifications::Document(doc!{"$inc": {MONGO_TC: 1 }}); - let mut opts = FindOneAndUpdateOptions::default(); - opts.upsert = Some(true); - match coll.find_one_and_update(doc!{}, mods, opts).await?{ - Some(t) => Ok(Some(t.tc)), - None => Ok(Some(0)) - } - } - - pub async fn get_processes(&self) -> Result> { - debug!("Trying to get all processes..."); - let coll = self.database.collection::(MONGO_COLL_PROCESSES); - let result = coll.find(None, None).await? - .try_collect().await.unwrap_or_else(|_| vec![]); - Ok(result) - } - - pub async fn delete_process(&self, pid: &String) -> Result { - debug!("Trying to delete process with pid '{}'...", pid); - let coll = self.database.collection::(MONGO_COLL_PROCESSES); - let result = coll.delete_one(doc! { MONGO_ID: pid }, None).await?; - if result.deleted_count == 1{ - debug!("... deleted one process."); - Ok(true) - } - else{ - warn!("deleted_count={}", result.deleted_count); - Ok(false) - } - } - - /// checks if the id exits - pub async fn exists_process(&self, pid: &String) -> Result { - debug!("Check if process with pid '{}' exists...", pid); - let coll = self.database.collection::(MONGO_COLL_PROCESSES); - let result = coll.find_one(Some(doc! { MONGO_ID: pid }), None).await?; - match result { - Some(_r) => { - debug!("... 
found."); - Ok(true) - }, - None => { - debug!("Process with pid '{}' does not exist!", pid); - Ok(false) - } - } - } - - pub async fn get_process(&self, pid: &String) -> Result> { - debug!("Trying to get process with id {}...", pid); - let coll = self.database.collection::(MONGO_COLL_PROCESSES); - match coll.find_one(Some(doc!{ MONGO_ID: pid }), None).await{ - Ok(process) => { - Ok(process) - }, - Err(e) => { - error!("Error while getting process: {:#?}!", &e); - Err(Error::from(e)) - } - } - } - - pub async fn is_authorized(&self, user: &String, pid: &String) -> Result{ - debug!("checking if user '{}' is authorized to access '{}'", user, pid); - return match self.get_process(&pid).await{ - Ok(Some(process)) => { - let authorized = process.owners.iter().any(|o| { - trace!("found owner {}", o); - user.eq(o) - }); - Ok(authorized) - } - Ok(None) => { - trace!("didn't find process"); - Ok(false) - }, - _ => { - Err(format!("User '{}' could not be authorized", &user).into()) - } - } - } - - // store process in db - pub async fn store_process(&self, process: Process) -> Result { - debug!("Storing process with pid {:#?}...", &process.id); - let coll = self.database.collection::(MONGO_COLL_PROCESSES); - match coll.insert_one(process, None).await { - Ok(_r) => { - debug!("...added new process: {}", &_r.inserted_id); - Ok(true) - }, - Err(e) => { - error!("...failed to store process: {:#?}", &e); - Err(Error::from(e)) - } - } - } -} \ No newline at end of file diff --git a/clearing-house-app/logging-service/src/logging_api.rs b/clearing-house-app/logging-service/src/logging_api.rs deleted file mode 100644 index 0704793c..00000000 --- a/clearing-house-app/logging-service/src/logging_api.rs +++ /dev/null @@ -1,404 +0,0 @@ -use core_lib::{ - api::{ - ApiResponse, - client::document_api::DocumentApiClient, - crypto::{ChClaims, get_jwks}, - }, - constants::{DEFAULT_NUM_RESPONSE_ENTRIES, MAX_NUM_RESPONSE_ENTRIES, DEFAULT_PROCESS_ID}, - model::{ - document::Document, - process::Process, - SortingOrder, - SortingOrder::Descending - } -}; -use rocket::serde::json::{json, Json}; -use rocket::fairing::AdHoc; -use rocket::form::validate::Contains; -use rocket::State; -use std::convert::TryFrom; - -use crate::model::{ids::{ - message::IdsMessage, - IdsQueryResult, - request::ClearingHouseMessage, -}, OwnerList, DataTransaction}; -use crate::db::ProcessStore; -use crate::model::constants::{ROCKET_CLEARING_HOUSE_BASE_API, ROCKET_LOG_API, ROCKET_QUERY_API, ROCKET_PROCESS_API, ROCKET_PK_API}; - -#[post( "/", format = "json", data = "")] -async fn log( - ch_claims: ChClaims, - db: &State, - doc_api: &State, - key_path: &State, - message: Json, - pid: String -) -> ApiResponse { - trace!("...user '{:?}'", &ch_claims.client_id); - let user = &ch_claims.client_id; - // Add non-InfoModel information to IdsMessage - let msg = message.into_inner(); - let mut m = msg.header; - m.payload = msg.payload; - m.payload_type = msg.payload_type; - m.pid = Some(pid.clone()); - - // validate that there is a payload - if m.payload.is_none() || (m.payload.is_some() && m.payload.as_ref().unwrap().trim().is_empty()){ - error!("Trying to log an empty payload!"); - return ApiResponse::BadRequest(String::from("No payload received for logging!")) - } - - // filter out calls for default process id and call application logic - match DEFAULT_PROCESS_ID.eq(pid.as_str()){ - true => { - warn!("Log to default pid '{}' not allowed", DEFAULT_PROCESS_ID); - ApiResponse::BadRequest(String::from("Document already exists")) - }, - false => { - // 
convenience: if process does not exist, we create it but only if no error occurred before - match db.get_process(&pid).await { - Ok(Some(_p)) => { - debug!("Requested pid '{}' exists. Nothing to create.", &pid); - } - Ok(None) => { - info!("Requested pid '{}' does not exist. Creating...", &pid); - // create a new process - let new_process = Process::new(pid.clone(), vec!(user.clone())); - - if db.store_process(new_process).await.is_err() { - error!("Error while creating process '{}'", &pid); - return ApiResponse::InternalError(String::from("Error while creating process")) - } - } - Err(_) => { - error!("Error while getting process '{}'", &pid); - return ApiResponse::InternalError(String::from("Error while getting process")) - } - } - - // now check if user is authorized to write to pid - match db.is_authorized(&user, &pid).await { - Ok(true) => info!("User authorized."), - Ok(false) => { - warn!("User is not authorized to write to pid '{}'", &pid); - warn!("This is the forbidden branch"); - return ApiResponse::Forbidden(String::from("User not authorized!")) - } - Err(_) => { - error!("Error while checking authorization of user '{}' for '{}'", &user, &pid); - return ApiResponse::InternalError(String::from("Error during authorization")) - } - } - - debug!("logging message for pid {}", &pid); - log_message(db, user, doc_api, key_path.inner().as_str(), m.clone()).await - } - } -} - -#[post( "/", format = "json", data = "")] -async fn create_process( - ch_claims: ChClaims, - db: &State, - message: Json, - pid: String -) -> ApiResponse { - let msg = message.into_inner(); - let mut m = msg.header; - m.payload = msg.payload; - m.payload_type = msg.payload_type; - - trace!("...user '{:?}'", &ch_claims.client_id); - let user = &ch_claims.client_id; - - // validate payload - let mut owners = vec!(user.clone()); - let payload = m.payload.clone().unwrap_or(String::new()); - if !payload.is_empty() { - trace!("OwnerList: '{:#?}'", &payload); - match serde_json::from_str::(&payload){ - Ok(owner_list) => { - for o in owner_list.owners{ - if !owners.contains(&o){ - owners.push(o); - } - } - }, - Err(e) => { - error!("Error while creating process '{}': {}", &pid, e); - return ApiResponse::BadRequest(String::from("Invalid owner list!")) - } - }; - }; - - // check if the pid already exists - match db.get_process(&pid).await{ - Ok(Some(p)) => { - warn!("Requested pid '{}' already exists.", &p.id); - if !p.owners.contains(user) { - ApiResponse::Forbidden(String::from("User not authorized!")) - } - else { - ApiResponse::BadRequest(String::from("Process already exists!")) - } - } - _ => { - // filter out calls for default process id - match DEFAULT_PROCESS_ID.eq(pid.as_str()) { - true => { - warn!("Log to default pid '{}' not allowed", DEFAULT_PROCESS_ID); - ApiResponse::BadRequest(String::from("Document already exists")) - }, - false => { - info!("Requested pid '{}' will have {} owners", &pid, owners.len()); - - // create process - info!("Requested pid '{}' does not exist. 
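> For reference, the payload that `create_process` parses is a plain `OwnerList` document; the calling client is always added as an owner, so a request like the following (connector IDs illustrative) yields a process with three owners:

```json
{
  "owners": ["urn:connector:consumer", "urn:connector:provider"]
}
```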
Creating...", &pid); - let new_process = Process::new(pid.clone(), owners); - - match db.store_process(new_process).await{ - Ok(_) => { - ApiResponse::SuccessCreate(json!(pid.clone())) - } - Err(e) => { - error!("Error while creating process '{}': {}", &pid, e); - ApiResponse::InternalError(String::from("Error while creating process")) - } - } - } - } - } - } -} - -async fn log_message( - db: &State, - user: &String, - doc_api: &State, - key_path: &str, - message: IdsMessage -) -> ApiResponse { - - debug!("transforming message to document..."); - let payload = message.payload.as_ref().unwrap().clone(); - // transform message to document - let mut doc = Document::from(message); - match db.get_transaction_counter().await{ - Ok(Some(tid)) => { - debug!("Storing document..."); - doc.tc = tid; - return match doc_api.create_document(&user, &doc).await{ - Ok(doc_receipt) => { - debug!("Increase transaction counter"); - match db.increment_transaction_counter().await{ - Ok(Some(_tid)) => { - debug!("Creating receipt..."); - let transaction = DataTransaction{ - transaction_id: doc.get_formatted_tc(), - timestamp: doc_receipt.timestamp, - process_id: doc_receipt.pid, - document_id: doc_receipt.doc_id, - payload, - chain_hash: doc_receipt.chain_hash, - client_id: user.clone(), - clearing_house_version: env!("CARGO_PKG_VERSION").to_string(), - }; - debug!("...done. Signing receipt..."); - ApiResponse::SuccessCreate(json!(transaction.sign(key_path))) - } - _ => { - error!("Error while incrementing transaction id!"); - ApiResponse::InternalError(String::from("Internal error while preparing transaction data")) - } - } - - }, - Err(e) => { - error!("Error while creating document: {:?}", e); - ApiResponse::BadRequest(String::from("Document already exists")) - } - } - }, - Ok(None) => { - println!("None!"); - ApiResponse::InternalError(String::from("Internal error while preparing transaction data")) - } - Err(e) => { - error!("Error while getting transaction id!"); - println!("{}", e); - ApiResponse::InternalError(String::from("Internal error while preparing transaction data")) - } - } -} - -#[post("/<_pid>", format = "json", rank=50)] -async fn unauth(_pid: Option) -> ApiResponse { - ApiResponse::Unauthorized(String::from("Token not valid!")) -} - -#[post("/<_pid>/<_id>", format = "json", rank=50)] -async fn unauth_id(_pid: Option, _id: Option) -> ApiResponse { - ApiResponse::Unauthorized(String::from("Token not valid!")) -} - -#[post("/?&&&&", format = "json", data = "")] -async fn query_pid( - ch_claims: ChClaims, - db: &State, - page: Option, - size: Option, - sort: Option, - date_to: Option, - date_from: Option, - doc_api: &State, - pid: String, - message: Json -) -> ApiResponse { - debug!("page: {:#?}, size:{:#?} and sort:{:#?}", page, size, sort); - - trace!("...user '{:?}'", &ch_claims.client_id); - let user = &ch_claims.client_id; - - // check if process exists - match db.exists_process(&pid).await { - Ok(true) => info!("User authorized."), - Ok(false) => return ApiResponse::NotFound(String::from("Process does not exist!")), - Err(_e) => { - error!("Error while checking process '{}' for user '{}'", &pid, &user); - return ApiResponse::InternalError(String::from("Cannot authorize user!")) - } - }; - - // now check if user is authorized to read infos in pid - match db.is_authorized(&user, &pid).await { - Ok(true) => { - info!("User authorized."); - }, - Ok(false) => { - warn!("User is not authorized to write to pid '{}'", &pid); - return ApiResponse::Forbidden(String::from("User not authorized!")) - } 
- Err(_) => { - error!("Error while checking authorization of user '{}' for '{}'", &user, &pid); - return ApiResponse::InternalError(String::from("Cannot authorize user!")) - } - } - - // sanity check for pagination - let sanitized_page = match page { - Some(p) => { - if p >= 0 { - p - } else { - warn!("...invalid page requested. Falling back to 0."); - 1 - } - }, - None => 1 - }; - - let sanitized_size = match size { - Some(s) => { - let converted_max = i32::try_from(MAX_NUM_RESPONSE_ENTRIES).unwrap(); - if s > converted_max { - warn!("...invalid size requested. Falling back to default."); - converted_max - } else { - if s > 0 { - s - } else { - warn!("...invalid size requested. Falling back to default."); - i32::try_from(DEFAULT_NUM_RESPONSE_ENTRIES).unwrap() - } - } - }, - None => i32::try_from(DEFAULT_NUM_RESPONSE_ENTRIES).unwrap() - }; - - let sanitized_sort = match sort { - Some(s) => s, - None => Descending - }; - - match doc_api.get_documents(&user, &pid, sanitized_page, sanitized_size, sanitized_sort, date_from, date_to).await { - Ok(r) => { - let messages: Vec = r.documents.iter().map(|d| IdsMessage::from(d.clone())).collect(); - let result = IdsQueryResult::new(r.date_from, r.date_to, r.page, r.size, r.order, messages); - ApiResponse::SuccessOk(json!(result)) - - }, - Err(e) => { - error!("Error while retrieving message: {:?}", e); - ApiResponse::InternalError(format!("Error while retrieving messages for pid {}!", &pid)) - } - } -} - -#[post("//", format = "json", data = "")] -async fn query_id(ch_claims: ChClaims, db: &State, doc_api: &State, pid: String, id: String, message: Json) -> ApiResponse { - - trace!("...user '{:?}'", &ch_claims.client_id); - let user = &ch_claims.client_id; - - // check if process exists - match db.exists_process(&pid).await { - Ok(true) => info!("User authorized."), - Ok(false) => return ApiResponse::NotFound(String::from("Process does not exist!")), - Err(_e) => { - error!("Error while checking process '{}' for user '{}'", &pid, &user); - return ApiResponse::InternalError(String::from("Cannot authorize user!")) - } - }; - - // now check if user is authorized to read infos in pid - match db.is_authorized(&user, &pid).await { - Ok(true) => { - info!("User authorized."); - }, - Ok(false) => { - warn!("User is not authorized to write to pid '{}'", &pid); - return ApiResponse::Forbidden(String::from("User not authorized!")) - } - Err(_) => { - error!("Error while checking authorization of user '{}' for '{}'", &user, &pid); - return ApiResponse::InternalError(String::from("Cannot authorize user!")) - } - } - - match doc_api.get_document(&user, &pid, &id).await { - Ok(Some(doc)) => { - // transform document to IDS message - let queried_message = IdsMessage::from(doc); - ApiResponse::SuccessOk(json!(queried_message)) - }, - Ok(None) => { - debug!("Queried a non-existing document: {}", &id); - ApiResponse::NotFound(format!("No message found with id {}!", &id)) - }, - Err(e) => { - error!("Error while retrieving message: {:?}", e); - ApiResponse::InternalError(format!("Error while retrieving message with id {}!", &id)) - } - } -} - -#[get("/.well-known/jwks.json", format = "json")] -async fn get_public_sign_key(key_path: &State) -> ApiResponse { - match get_jwks(key_path.as_str()){ - Some(jwks) => ApiResponse::SuccessOk(json!(jwks)), - None => ApiResponse::InternalError(String::from("Error reading signing key")) - } -} - -pub fn mount_api() -> AdHoc { - AdHoc::on_ignite("Mounting Clearing House API", |rocket| async { - rocket - .mount(format!("{}{}", 
ROCKET_CLEARING_HOUSE_BASE_API, ROCKET_LOG_API).as_str(), routes![log, unauth]) - .mount(format!("{}", ROCKET_PROCESS_API).as_str(), routes![create_process, unauth]) - .mount(format!("{}{}", ROCKET_CLEARING_HOUSE_BASE_API, ROCKET_QUERY_API).as_str(), - routes![query_id, query_pid, unauth, unauth_id]) - .mount(format!("{}", ROCKET_PK_API).as_str(), routes![get_public_sign_key]) - }) -} \ No newline at end of file diff --git a/clearing-house-app/logging-service/src/main.rs b/clearing-house-app/logging-service/src/main.rs deleted file mode 100644 index 6a953a57..00000000 --- a/clearing-house-app/logging-service/src/main.rs +++ /dev/null @@ -1,44 +0,0 @@ -#[macro_use] extern crate rocket; -#[macro_use] extern crate serde_derive; - -use std::env; -use std::path::Path; -use core_lib::api::client::{ApiClientConfigurator, ApiClientEnum}; -use core_lib::util::{add_service_config, setup_logger}; -use rocket::{Build, Rocket}; -use rocket::fairing::AdHoc; -use core_lib::constants::ENV_LOGGING_SERVICE_ID; - -use db::ProcessStoreConfigurator; -use model::constants::SIGNING_KEY; - -pub mod logging_api; -pub mod db; -pub mod model; - -pub fn add_signing_key() -> AdHoc { - AdHoc::try_on_ignite("Adding Signing Key", |rocket| async { - let private_key_path = rocket.figment().extract_inner(SIGNING_KEY).unwrap_or(String::from("keys/private_key.der")); - if Path::new(&private_key_path).exists(){ - Ok(rocket.manage(private_key_path)) - } - else{ - error!("Signing key not found! Aborting startup! Please configure signing_key!"); - return Err(rocket) - } - }) -} - -#[launch] -fn rocket() -> Rocket { - // setup logging - setup_logger().expect("Failure to set up the logger! Exiting..."); - - rocket::build() - .attach(ProcessStoreConfigurator) - .attach(add_signing_key()) - .attach(add_service_config(ENV_LOGGING_SERVICE_ID.to_string())) - .attach(ApiClientConfigurator::new(ApiClientEnum::Document)) - .attach(ApiClientConfigurator::new(ApiClientEnum::Keyring)) - .attach(logging_api::mount_api()) -} diff --git a/clearing-house-app/logging-service/src/model/constants.rs b/clearing-house-app/logging-service/src/model/constants.rs deleted file mode 100644 index 780cd304..00000000 --- a/clearing-house-app/logging-service/src/model/constants.rs +++ /dev/null @@ -1,11 +0,0 @@ -pub const CONTENT_TYPE: &'static str = "Content-Type"; -pub const APPLICATION_JSON: &'static str = "application/json"; -pub const SIGNING_KEY: &'static str = "signing_key"; - -pub const CLEARING_HOUSE_URL: &'static str = "clearing_house_url"; -pub const ROCKET_CLEARING_HOUSE_BASE_API: &'static str = "/messages"; -pub const ROCKET_PK_API: &'static str = "/"; -pub const ROCKET_PROCESS_API: &'static str = "/process"; -pub const ROCKET_QUERY_API: &'static str = "/query"; -pub const ROCKET_LOG_API: &'static str = "/log"; -pub const ROCKET_BLOCKCHAIN_BASE_API: &'static str = "/blockchain"; diff --git a/clearing-house-app/logging-service/src/model/ids/message.rs b/clearing-house-app/logging-service/src/model/ids/message.rs deleted file mode 100644 index 0c474655..00000000 --- a/clearing-house-app/logging-service/src/model/ids/message.rs +++ /dev/null @@ -1,377 +0,0 @@ -use std::collections::HashMap; -use core_lib::constants::DEFAULT_DOC_TYPE; -use core_lib::model::document::{Document, DocumentPart}; -use crate::model::ids::{InfoModelDateTime, InfoModelId, SecurityToken, MessageType}; - -const MESSAGE_ID: &'static str = "message_id"; -const MODEL_VERSION: &'static str = "model_version"; -const CORRELATION_MESSAGE: &'static str = "correlation_message"; -const 
TRANSFER_CONTRACT: &'static str = "transfer_contract"; -const ISSUED: &'static str = "issued"; -const ISSUER_CONNECTOR: &'static str = "issuer_connector"; -const CONTENT_VERSION: &'static str = "content_version"; -/// const RECIPIENT_CONNECTOR: &'static str = "recipient_connector"; // all messages should contain the CH connector, so we skip this information -const SENDER_AGENT: &'static str = "sender_agent"; -///const RECIPIENT_AGENT: &'static str = "recipient_agent"; // all messages should contain the CH agent, so we skip this information -const PAYLOAD: &'static str = "payload"; -const PAYLOAD_TYPE: &'static str = "payload_type"; - -pub const RESULT_MESSAGE: &'static str = "ResultMessage"; -pub const REJECTION_MESSAGE: &'static str = "RejectionMessage"; -pub const MESSAGE_PROC_NOTIFICATION_MESSAGE: &'static str = "MessageProcessedNotificationMessage"; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct IdsMessage { - //IDS name - #[serde(rename = "@context")] - // random id without context - pub context: Option>, - //IDS name - #[serde(rename = "@type")] - // random id without context - pub type_message: MessageType, - //IDS id name - #[serde(rename = "@id", alias = "id", skip_serializing_if = "Option::is_none")] - // random id without context - pub id: Option, - //skip for IDS - #[serde(skip)] - // process id - pub pid: Option, - //IDS name - #[serde(rename = "ids:modelVersion", alias="modelVersion")] - // Version of the Information Model against which the Message should be interpreted - pub model_version: String, - //IDS name - #[serde(rename = "ids:correlationMessage", alias="correlationMessage", skip_serializing_if = "Option::is_none")] - // Correlated message, e.g. a response to a previous request - pub correlation_message: Option, - //IDS name - #[serde(rename = "ids:issued", alias="issued")] - // Date of issuing the Message - pub issued: InfoModelDateTime, - //IDS name - #[serde(rename = "ids:issuerConnector", alias="issuerConnector")] - // The Connector which is the origin of the message - pub issuer_connector: InfoModelId, - //IDS name - #[serde(rename = "ids:senderAgent", alias="senderAgent")] - // The Agent which initiated the Message - pub sender_agent: String, - //IDS name - #[serde(rename = "ids:recipientConnector", alias="recipientConnector", skip_serializing_if = "Option::is_none")] - // The Connector which is the recipient of the message - pub recipient_connector: Option>, - //IDS name - #[serde(rename = "ids:recipientAgent", alias="recipientAgent", skip_serializing_if = "Option::is_none")] - // The Agent for which the Message is intended - pub recipient_agent: Option>, - //IDS name - #[serde(rename = "ids:transferContract", alias="transferContract", skip_serializing_if = "Option::is_none")] - // The contract which is (or will be) the legal basis of the data transfer - pub transfer_contract: Option, - //IDS name - #[serde(rename = "ids:contentVersion", alias="contentVersion", skip_serializing_if = "Option::is_none")] - // The contract which is (or will be) the legal basis of the data transfer - pub content_version: Option, - //IDS name - #[serde(rename = "ids:securityToken", alias="securityToken", skip_serializing)] - // Authorization - pub security_token: Option, - //IDS name - #[serde(rename = "ids:authorizationToken", alias="authorizationToken", skip_serializing_if = "Option::is_none")] - // Authorization - pub authorization_token: Option, - //IDS name - #[serde(skip_serializing_if = "Option::is_none")] - // Authorization - pub payload: Option, - //IDS name 
- #[serde(skip_serializing_if = "Option::is_none")] - // Authorization - pub payload_type: Option, -} - - -macro_rules! hashmap { - ($( $key: expr => $val: expr ),*) => {{ - let mut map = ::std::collections::HashMap::new(); - $( map.insert($key, $val); )* - map - }} -} - -impl Default for IdsMessage { - fn default() -> Self { - IdsMessage { - context: Some(hashmap![ - "ids".to_string() => "https://w3id.org/idsa/core/".to_string(), - "idsc".to_string() => "https://w3id.org/idsa/code/".to_string() - ] - ), - type_message: MessageType::Message, - id: Some(autogen("MessageProcessedNotification")), - pid: None, - model_version: "".to_string(), - correlation_message: None, - issued: InfoModelDateTime::new(), - issuer_connector: InfoModelId::new("".to_string()), - sender_agent: "https://w3id.org/idsa/core/ClearingHouse".to_string(), - recipient_connector: None, - recipient_agent: None, - transfer_contract: None, - content_version: None, - security_token: None, - authorization_token: None, - payload: None, - payload_type: None, - } - } -} - -impl IdsMessage { - pub fn processed(msg: IdsMessage) -> IdsMessage { - let mut message = IdsMessage::clone(msg); - message.id = Some(autogen(MESSAGE_PROC_NOTIFICATION_MESSAGE)); - message.type_message = MessageType::MessageProcessedNotification; - return message; - } - - pub fn return_result(msg: IdsMessage) -> IdsMessage { - let mut message = IdsMessage::clone(msg); - message.id = Some(autogen(RESULT_MESSAGE)); - message.type_message = MessageType::ResultMessage; - return message; - } - - pub fn error(msg: IdsMessage) -> IdsMessage { - let mut message = IdsMessage::clone(msg); - message.id = Some(autogen(REJECTION_MESSAGE)); - message.type_message = MessageType::RejectionMessage; - return message; - } - - fn clone(msg: IdsMessage) -> IdsMessage { - IdsMessage { - context: msg.context.clone(), - type_message: msg.type_message.clone(), - id: msg.id.clone(), - pid: msg.pid.clone(), - model_version: msg.model_version.clone(), - correlation_message: msg.correlation_message.clone(), - issued: msg.issued.clone(), - issuer_connector: msg.issuer_connector.clone(), - sender_agent: msg.sender_agent.clone(), - recipient_connector: msg.recipient_connector.clone(), - recipient_agent: msg.recipient_agent.clone(), - transfer_contract: msg.transfer_contract.clone(), - security_token: msg.security_token.clone(), - authorization_token: msg.authorization_token.clone(), - payload: msg.payload.clone(), - content_version: msg.content_version.clone(), - payload_type: msg.payload.clone() - } - } - - pub fn restore() -> IdsMessage{ - IdsMessage { - type_message: MessageType::LogMessage, - //TODO recipient_agent CH - ..Default::default() - } - } -} - -/// Conversion from Document to IdsMessage -/// -/// note: Documents are converted into LogMessages. The LogMessage contains -/// the payload and payload type, which is the data that was stored previously. -/// All other fields of the LogMessage are meta data about the logging, e.g. -/// when the message was logged, etc. 
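> Worth flagging in passing: `IdsMessage` already derives `Clone`, yet the private `clone` helper above re-implements it field by field and assigns `msg.payload` to `payload_type`. The derived impl cannot make that slip, as this miniature illustrates:

```rust
#[derive(Clone, Debug, PartialEq)]
struct Msg {
    payload: Option<String>,
    payload_type: Option<String>,
}

fn main() {
    let m = Msg {
        payload: Some("log entry".into()),
        payload_type: Some("application/json".into()),
    };
    // #[derive(Clone)] copies every field onto itself, so payload can
    // never leak into payload_type the way a hand-written clone can.
    assert_eq!(m.clone(), m);
}
```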
-/// -/// meta data that we also need to store -/// - message_id -/// - pid -/// - model_version -/// - correlation_message -/// - issued -/// - issuer_connector -/// - sender_agent -/// - transfer_contract -/// - content_version -/// - security_token -/// - authorization_token -/// - payload -/// - payload_type -impl From for IdsMessage { - - fn from(doc: Document) -> Self { - let mut m = IdsMessage::restore(); - // pid - m.pid = Some(doc.pid.clone()); - // message_id - let p_map = doc.get_parts_map(); - if let Some(v) = p_map.get(MESSAGE_ID) { - m.id = Some(v.as_ref().unwrap().clone()); - } - // model_version - if let Some(v) = p_map.get(MODEL_VERSION) { - m.model_version = v.as_ref().unwrap().clone(); - } - - // correlation_message - if let Some(v) = p_map.get(CORRELATION_MESSAGE) { - m.correlation_message = Some(v.as_ref().unwrap().clone()); - } - - // transfer_contract - if let Some(v) = p_map.get(TRANSFER_CONTRACT) { - m.transfer_contract = Some(v.as_ref().unwrap().clone()); - } - - // issued - if let Some(v) = p_map.get(ISSUED) { - match serde_json::from_str(v.as_ref().unwrap()) { - Ok(date_time) => { - m.issued = date_time; - }, - Err(e) => { - error!("Error while converting DateTimeStamp (field 'issued') from database: {}", e); - } - } - } - - // issuer_connector - if let Some(v) = p_map.get(ISSUER_CONNECTOR) { - m.issuer_connector = InfoModelId::SimpleId(v.as_ref().unwrap().clone()); - } - - // content_version - if let Some(v) = p_map.get(CONTENT_VERSION) { - m.content_version = Some(v.as_ref().unwrap().clone()); - } - - // sender_agent - if let Some(v) = p_map.get(SENDER_AGENT) { - m.sender_agent = v.clone().unwrap(); - } - - // payload - if let Some(v) = p_map.get(PAYLOAD) { - m.payload = Some(v.as_ref().unwrap().clone()); - } - - // payload_type - if let Some(v) = p_map.get(PAYLOAD_TYPE) { - m.payload_type = Some(v.as_ref().unwrap().clone()); - } - - //TODO: security_token - //TODO: authorization_token - - m - } -} - -/// Conversion from IdsMessage to Document -/// -/// most important part to store: -/// payload and payload type -/// -/// meta data that we also need to store -/// - message_id -/// - pid -/// - model_version -/// - correlation_message -/// - issued -/// - issuer_connector -/// - sender_agent -/// - transfer_contract -/// - content_version -/// - security_token -/// - authorization_token -/// - payload -/// - payload_type -impl From for Document { - fn from(m: IdsMessage) -> Self { - let mut doc_parts = vec![]; - - // message_id - let id = match m.id { - Some(m_id) => m_id, - None => autogen("Message"), - }; - - doc_parts.push(DocumentPart::new( - MESSAGE_ID.to_string(), - Some(id), - )); - - // model_version - doc_parts.push(DocumentPart::new( - MODEL_VERSION.to_string(), - Some(m.model_version), - )); - - // correlation_message - doc_parts.push(DocumentPart::new( - CORRELATION_MESSAGE.to_string(), - m.correlation_message, - )); - - // issued - doc_parts.push(DocumentPart::new( - ISSUED.to_string(), - serde_json::to_string(&m.issued).ok() - )); - - // issuer_connector - doc_parts.push(DocumentPart::new( - ISSUER_CONNECTOR.to_string(), - Some(m.issuer_connector.to_string()), - )); - - // sender_agent - doc_parts.push(DocumentPart::new( - SENDER_AGENT.to_string(), - Some(m.sender_agent.to_string()) - )); - - // transfer_contract - doc_parts.push(DocumentPart::new( - TRANSFER_CONTRACT.to_string(), - m.transfer_contract, - )); - - // content_version - doc_parts.push(DocumentPart::new( - CONTENT_VERSION.to_string(), - m.content_version, - )); - - // security_token - 
//TODO - - // authorization_token - //TODO - - // payload - doc_parts.push(DocumentPart::new( - PAYLOAD.to_string(), - m.payload.clone() - )); - - // payload_type - doc_parts.push(DocumentPart::new( - PAYLOAD_TYPE.to_string(), - m.payload_type.clone() - )); - - // pid - Document::new(m.pid.unwrap(), DEFAULT_DOC_TYPE.to_string(), -1, doc_parts) - } -} - -fn autogen(message: &str) -> String { - ["https://w3id.org/idsa/autogen/", message, "/", &Document::create_uuid()].concat() -} \ No newline at end of file diff --git a/clearing-house-app/logging-service/src/model/ids/mod.rs b/clearing-house-app/logging-service/src/model/ids/mod.rs deleted file mode 100644 index 4edd6255..00000000 --- a/clearing-house-app/logging-service/src/model/ids/mod.rs +++ /dev/null @@ -1,200 +0,0 @@ -use chrono::prelude::*; -use std::fmt; -use std::fmt::{Display, Formatter, Result}; -use crate::model::ids::message::IdsMessage; - -pub mod message; -pub mod request; - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct InfoModelComplexId { - //IDS name - #[serde(rename = "@id", alias="id", skip_serializing_if = "Option::is_none")] - // Correlated message, e.g. a response to a previous request - pub id: Option -} - -impl Display for InfoModelComplexId { - fn fmt(&self, f: &mut Formatter<'_>) -> Result { - match &self.id { - Some(id) => write!(f, "{}", serde_json::to_string(id).unwrap()), - None => write!(f, "") - } - } -} - -impl InfoModelComplexId { - pub fn new(id: String) -> InfoModelComplexId { - InfoModelComplexId { - id: Some(id) - } - } -} -impl From for InfoModelComplexId { - fn from(id: String) -> InfoModelComplexId { - InfoModelComplexId::new(id) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -#[serde(untagged)] -pub enum InfoModelId { - SimpleId(String), - ComplexId(InfoModelComplexId) -} - -impl InfoModelId { - pub fn new(id: String) -> InfoModelId { - InfoModelId::SimpleId(id) - } - pub fn complex(id: InfoModelComplexId) -> InfoModelId { - InfoModelId::ComplexId(id) - } -} - -impl fmt::Display for InfoModelId { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match self { - InfoModelId::SimpleId(id) => fmt.write_str(&id)?, - InfoModelId::ComplexId(id) => fmt.write_str(&id.to_string())? - } - Ok(()) - } -} -impl From for InfoModelId { - fn from(id: String) -> InfoModelId { - InfoModelId::SimpleId(id) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -#[serde(untagged)] -pub enum InfoModelDateTime { - ComplexTime(InfoModelTimeStamp), - Time(DateTime) -} - -impl InfoModelDateTime { - pub fn new() -> InfoModelDateTime { - InfoModelDateTime::Time(Local::now()) - } - pub fn complex() -> InfoModelDateTime { - InfoModelDateTime::ComplexTime(InfoModelTimeStamp::default()) - } -} - -impl fmt::Display for InfoModelDateTime { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - match self { - InfoModelDateTime::Time(value) => fmt.write_str(&value.to_string())?, - InfoModelDateTime::ComplexTime(value) => fmt.write_str(&value.to_string())? 
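> Because `InfoModelId` and `InfoModelDateTime` are `#[serde(untagged)]`, serde tries each variant in declaration order, so both spellings below deserialize into the same issuer field (URIs illustrative):

```json
[
  { "ids:issuerConnector": "https://example.org/connector" },
  { "ids:issuerConnector": { "@id": "https://example.org/connector" } }
]
```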
- } - Ok(()) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct InfoModelTimeStamp { - //IDS name - #[serde(rename = "@type", alias="type", skip_serializing_if = "Option::is_none")] - pub format: Option, - //IDS name - #[serde(rename = "@value", alias="value")] - pub value: DateTime, -} - -impl Default for InfoModelTimeStamp { - fn default() -> Self { - InfoModelTimeStamp { - format: Some("http://www.w3.org/2001/XMLSchema#dateTimeStamp".to_string()), - value: Local::now() - } - } -} -impl Display for InfoModelTimeStamp { - fn fmt(&self, f: &mut Formatter<'_>) -> Result { - match serde_json::to_string(&self) { - Ok(result) => write!(f, "{}", result), - Err(e) => { - error!("could not convert DateTimeStamp to json: {}", e); - write!(f, "") - } - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub enum MessageType { - #[serde(rename = "ids:Message")] - Message, - #[serde(rename = "ids:Query")] - Query, - #[serde(rename = "ids:LogMessage")] - LogMessage, - #[serde(rename = "ids:QueryMessage")] - QueryMessage, - #[serde(rename = "ids:RequestMessage")] - RequestMessage, - #[serde(rename = "ids:ResultMessage")] - ResultMessage, - #[serde(rename = "ids:RejectionMessage")] - RejectionMessage, - #[serde(rename = "ids:MessageProcessedNotificationMessage")] - MessageProcessedNotification, - #[serde(rename = "ids:DynamicAttributeToken")] - DAPSToken, - //otherwise - Other, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SecurityToken { - //IDS name - #[serde(rename = "@type")] - // random id without context - pub type_message: MessageType, - //IDS name - #[serde(rename = "@id", alias = "id", skip_serializing_if = "Option::is_none")] - pub id: Option, - //IDS name - #[serde(rename = "ids:tokenFormat", alias = "tokenFormat")] - pub token_format: Option, - //IDS name - #[serde(rename = "ids:tokenValue", alias = "tokenValue")] - pub token_value: String, -} - -impl SecurityToken { - pub fn new() -> SecurityToken { - SecurityToken { - type_message: MessageType::DAPSToken, - id: Some(String::new()), - token_format: None, - token_value: String::new(), - } - } -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -pub struct IdsQueryResult{ - pub date_from: String, - pub date_to: String, - pub page: i32, - pub size: i32, - pub order: String, - pub documents: Vec -} - -impl IdsQueryResult{ - pub fn new(date_from: i64, date_to: i64, page: Option, size: Option, order: String, documents: Vec) -> IdsQueryResult{ - - IdsQueryResult{ - date_from: NaiveDateTime::from_timestamp(date_from, 0).format("%Y-%m-%d %H:%M:%S").to_string(), - date_to: NaiveDateTime::from_timestamp(date_to, 0).format("%Y-%m-%d %H:%M:%S").to_string(), - page: page.unwrap_or(-1), - size: size.unwrap_or(-1), - order, - documents - } - } -} \ No newline at end of file diff --git a/clearing-house-app/logging-service/src/model/ids/request.rs b/clearing-house-app/logging-service/src/model/ids/request.rs deleted file mode 100644 index e56067db..00000000 --- a/clearing-house-app/logging-service/src/model/ids/request.rs +++ /dev/null @@ -1,19 +0,0 @@ -use crate::model::ids::message::IdsMessage; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ClearingHouseMessage { - pub header: IdsMessage, - pub payload: Option, - #[serde(rename = "payloadType")] - pub payload_type: Option, -} - -impl ClearingHouseMessage { - pub fn new(header: IdsMessage, payload: Option, payload_type: Option) -> ClearingHouseMessage{ - ClearingHouseMessage{ - header, - payload, - payload_type - } - } 
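> A side note on `IdsQueryResult::new`: `NaiveDateTime::from_timestamp` is deprecated in current chrono releases and panics on out-of-range input. Had this helper been kept, the non-panicking replacement (chrono 0.4.31+) would be along these lines:

```rust
use chrono::DateTime;

/// Format a unix timestamp the way IdsQueryResult does, but without the
/// deprecated, panicking constructor; out-of-range input yields None.
fn format_ts(secs: i64) -> Option<String> {
    DateTime::from_timestamp(secs, 0)
        .map(|dt| dt.format("%Y-%m-%d %H:%M:%S").to_string())
}
```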
-} \ No newline at end of file diff --git a/clearing-house-app/logging-service/src/model/mod.rs b/clearing-house-app/logging-service/src/model/mod.rs deleted file mode 100644 index f7a208b1..00000000 --- a/clearing-house-app/logging-service/src/model/mod.rs +++ /dev/null @@ -1,83 +0,0 @@ -use biscuit::{Empty, CompactJson}; -use biscuit::jws::{Compact, Header}; -use biscuit::jwa::SignatureAlgorithm; -use core_lib::api::crypto::get_fingerprint; - -pub mod constants; -pub mod ids; - -#[derive(Serialize, Deserialize)] -pub struct TransactionCounter{ - pub tc: i64 -} - -#[derive(Serialize, Deserialize)] -pub struct OwnerList{ - pub owners: Vec -} - -impl OwnerList{ - pub fn new(owners: Vec) -> OwnerList{ - OwnerList{ - owners, - } - } -} - -#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub struct Receipt { - pub data: Compact -} - -#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub struct DataTransaction { - pub transaction_id: String, - pub timestamp: i64, - pub process_id: String, - pub document_id: String, - pub payload: String, - pub chain_hash: String, - pub client_id: String, - pub clearing_house_version: String, -} - -impl CompactJson for DataTransaction{} - -impl DataTransaction{ - pub fn sign(&self, key_path: &str) -> Receipt{ - let jws = biscuit::jws::Compact::new_decoded(Header::from_registered_header(biscuit::jws::RegisteredHeader{ - algorithm: SignatureAlgorithm::PS512, - media_type: None, - key_id: get_fingerprint(key_path), - ..Default::default()}), self.clone()); - - let keypair = biscuit::jws::Secret::rsa_keypair_from_file(key_path).unwrap(); - println!("decoded JWS:{:#?}", &jws); - Receipt{ - data: jws.into_encoded(&keypair).unwrap() - } - } -} - -// convenience method for testing -impl From for DataTransaction{ - fn from(r: Receipt) -> Self { - match r.data.unverified_payload(){ - Ok(d) => d.clone(), - Err(e) => { - println!("Error occured: {:#?}", e); - DataTransaction{ - - transaction_id: "error".to_string(), - timestamp: 0, - process_id: "error".to_string(), - document_id: "error".to_string(), - payload: "error".to_string(), - chain_hash: "error".to_string(), - client_id: "error".to_string(), - clearing_house_version: "error".to_string(), - } - } - } - } -} \ No newline at end of file diff --git a/clearing-house-app/migrations/20240102094054_init.down.sql b/clearing-house-app/migrations/20240102094054_init.down.sql new file mode 100644 index 00000000..1d2918c4 --- /dev/null +++ b/clearing-house-app/migrations/20240102094054_init.down.sql @@ -0,0 +1,7 @@ +-- Add down migration script here +DROP TABLE IF EXISTS documents; +DROP TABLE IF EXISTS process_owners; +DROP INDEX IF EXISTS client_id_idx; +DROP TABLE IF EXISTS clients; +DROP INDEX IF EXISTS process_id_idx; +DROP TABLE IF EXISTS processes; \ No newline at end of file diff --git a/clearing-house-app/migrations/20240102094054_init.up.sql b/clearing-house-app/migrations/20240102094054_init.up.sql new file mode 100644 index 00000000..7aa69821 --- /dev/null +++ b/clearing-house-app/migrations/20240102094054_init.up.sql @@ -0,0 +1,44 @@ +-- Add up migration script here +CREATE TABLE processes +( + id SERIAL PRIMARY KEY, + process_id VARCHAR UNIQUE NOT NULL, + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_processes_process_id ON processes (process_id); + +CREATE TABLE clients +( + id SERIAL PRIMARY KEY, + client_id VARCHAR UNIQUE NOT NULL, + created_at TIMESTAMPTZ DEFAULT NOW() +); + +CREATE INDEX idx_clients_client_id ON clients (client_id); + +CREATE TABLE process_owners +( + 
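> The receipt produced by `DataTransaction::sign` is a PS512 JWS, and the `From<Receipt>` helper above deliberately reads it unverified (it is a test convenience). A consumer that actually validates the signature would decode it roughly as below, reusing the file's `Receipt` and `DataTransaction` types; the public-key path is an assumption, and in production the key would come from the JWKS the service publishes:

```rust
use biscuit::jwa::SignatureAlgorithm;
use biscuit::jws::Secret;

/// Verify a receipt against the clearing house's public signing key and
/// return the embedded transaction data.
fn verify_receipt(receipt: Receipt) -> Result<DataTransaction, biscuit::errors::Error> {
    // Assumed path to the DER public key matching the signing key.
    let key = Secret::public_key_from_file("keys/public_key.der")?;
    let decoded = receipt.data.into_decoded(&key, SignatureAlgorithm::PS512)?;
    Ok(decoded.payload()?.clone())
}
```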
process_id INTEGER NOT NULL REFERENCES processes (id), + client_id INTEGER NOT NULL REFERENCES clients (id), + PRIMARY KEY (process_id, client_id) +); + +CREATE TABLE documents +( + id UUID PRIMARY KEY, + process_id INTEGER NOT NULL REFERENCES processes (id), + created_at TIMESTAMP NOT NULL, + model_version VARCHAR NOT NULL, + correlation_message VARCHAR, + transfer_contract VARCHAR, + issued JSONB, + issuer_connector JSONB NOT NULL, + content_version VARCHAR, + recipient_connector JSONB, + sender_agent VARCHAR, + recipient_agent JSONB, + payload BYTEA, + payload_type VARCHAR, + message_id VARCHAR +); diff --git a/clearing-house-app/src/config.rs b/clearing-house-app/src/config.rs new file mode 100644 index 00000000..d35d2809 --- /dev/null +++ b/clearing-house-app/src/config.rs @@ -0,0 +1,144 @@ +use std::fmt::Display; + +/// Represents the configuration for the application +#[derive(Debug, serde::Deserialize)] +pub(crate) struct CHConfig { + pub(crate) database_url: String, + pub(crate) clear_db: bool, + #[serde(default)] + pub(crate) log_level: Option, + #[serde(default)] + pub(crate) signing_key: Option, + performance_tracing: Option, +} + +/// Contains the log level for the application +#[derive(Debug, PartialEq, serde::Deserialize)] +#[serde(rename_all = "UPPERCASE")] +pub(crate) enum LogLevel { + Trace, + Debug, + Info, + Warn, + Error, +} + +impl From for tracing::Level { + fn from(val: LogLevel) -> Self { + match val { + LogLevel::Trace => tracing::Level::TRACE, + LogLevel::Debug => tracing::Level::DEBUG, + LogLevel::Info => tracing::Level::INFO, + LogLevel::Warn => tracing::Level::WARN, + LogLevel::Error => tracing::Level::ERROR, + } + } +} + +impl Display for LogLevel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let str = match self { + LogLevel::Trace => String::from("TRACE"), + LogLevel::Debug => String::from("DEBUG"), + LogLevel::Info => String::from("INFO"), + LogLevel::Warn => String::from("WARN"), + LogLevel::Error => String::from("ERROR"), + }; + write!(f, "{str}") + } +} + +/// Read configuration from `config.toml` and environment variables. `config_file_override` can be +/// used to override the default config file, mainly for testing purposes. +pub(crate) fn read_config(config_file_override: Option<&std::path::Path>) -> CHConfig { + // Create config builder + let mut conf_builder = config::Config::builder(); + + // Override config file override path + conf_builder = if let Some(config_file) = config_file_override { + conf_builder.add_source(config::File::from(config_file)) + } else { + conf_builder.add_source(config::File::with_name("config.toml")) + }; + + // Add environment variables and finish + conf_builder = + conf_builder.add_source(config::Environment::with_prefix("CH_APP").prefix_separator("_")); + + // Finalize and deserialize + conf_builder + .build() + .expect("Failure to read configuration! Exiting...") + .try_deserialize::() + .expect("Failure to parse configuration! 
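> One mismatch between the new migration pair: the up script creates `idx_processes_process_id` and `idx_clients_client_id`, but the down script drops `process_id_idx` and `client_id_idx`. `IF EXISTS` hides the error, and the `DROP TABLE` statements remove the indexes anyway, but the explicit drops target names that never exist. The down script presumably intends:

```sql
-- Down migration using the index names the up script actually creates
DROP TABLE IF EXISTS documents;
DROP TABLE IF EXISTS process_owners;
DROP INDEX IF EXISTS idx_clients_client_id;
DROP TABLE IF EXISTS clients;
DROP INDEX IF EXISTS idx_processes_process_id;
DROP TABLE IF EXISTS processes;
```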
Exiting...") +} + +/// Configure logging based on environment variable `RUST_LOG` +pub(crate) fn configure_logging(config: &CHConfig) { + if std::env::var("RUST_LOG").is_err() { + if let Some(level) = &config.log_level { + std::env::set_var("RUST_LOG", level.to_string()); + } + } + + // setup logging + let mut subscriber_builder = tracing_subscriber::fmt() + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()); + + // Add performance tracing + if let Some(true) = config.performance_tracing { + subscriber_builder = + subscriber_builder.with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE); + } + + subscriber_builder.init(); +} + +#[cfg(test)] +mod test { + use serial_test::serial; + + /// Test reading config from environment variables + #[test] + #[serial] + fn test_read_config_from_env() { + std::env::set_var("CH_APP_DATABASE_URL", "mongodb://localhost:27117"); + std::env::set_var("CH_APP_CLEAR_DB", "true"); + std::env::set_var("CH_APP_LOG_LEVEL", "INFO"); + + let conf = super::read_config(None); + assert_eq!(conf.database_url, "mongodb://localhost:27117"); + assert!(conf.clear_db); + assert_eq!(conf.log_level, Some(super::LogLevel::Info)); + + // Cleanup + std::env::remove_var("CH_APP_DATABASE_URL"); + std::env::remove_var("CH_APP_CLEAR_DB"); + std::env::remove_var("CH_APP_LOG_LEVEL"); + } + + /// Test reading config from toml file + #[test] + #[serial] + fn test_read_config_from_toml() { + // Create tempfile + let file = tempfile::Builder::new().suffix(".toml").tempfile().expect("Failure to create tempfile"); + + // Write config to file + let toml = r#"database_url = "mongodb://localhost:27019" +clear_db = true +log_level = "ERROR" +"#; + + // Write to file + std::fs::write(file.path(), toml).expect("Failure to write config file!"); + + // Read config + let conf = super::read_config(Some(file.path())); + + // Test + assert_eq!(conf.database_url, "mongodb://localhost:27019"); + assert!(conf.clear_db); + assert_eq!(conf.log_level, Some(super::LogLevel::Error)); + } +} diff --git a/clearing-house-app/src/db/mod.rs b/clearing-house-app/src/db/mod.rs new file mode 100644 index 00000000..782737b3 --- /dev/null +++ b/clearing-house-app/src/db/mod.rs @@ -0,0 +1,51 @@ +#[cfg(feature = "mongodb")] +pub(crate) mod mongo_doc_store; +#[cfg(feature = "mongodb")] +pub(crate) mod mongo_process_store; +#[cfg(feature = "postgres")] +pub(crate) mod postgres_document_store; +#[cfg(feature = "postgres")] +pub(crate) mod postgres_process_store; + +use crate::model::document::Document; +use crate::model::process::Process; +use crate::model::SortingOrder; + +#[cfg(feature = "mongodb")] +pub async fn init_database_client( + db_url: &str, + client_name: Option, +) -> anyhow::Result { + let mut client_options = match mongodb::options::ClientOptions::parse(&db_url.to_string()).await + { + Ok(co) => co, + Err(_) => { + anyhow::bail!("Can't parse database connection string"); + } + }; + + client_options.app_name = client_name; + mongodb::Client::with_options(client_options).map_err(|e| e.into()) +} + +pub(crate) trait ProcessStore { + async fn get_processes(&self) -> anyhow::Result>; + async fn delete_process(&self, pid: &str) -> anyhow::Result; + async fn exists_process(&self, pid: &str) -> anyhow::Result; + async fn get_process(&self, pid: &str) -> anyhow::Result>; + async fn store_process(&self, process: Process) -> anyhow::Result<()>; +} + +pub(crate) trait DocumentStore { + async fn add_document(&self, doc: Document) -> anyhow::Result; + async fn exists_document(&self, id: &uuid::Uuid) 
-> anyhow::Result; + async fn get_document(&self, id: &str, pid: &str) -> anyhow::Result>; + async fn get_documents_for_pid( + &self, + pid: &str, + page: u64, + size: u64, + sort: &SortingOrder, + date: (&chrono::NaiveDateTime, &chrono::NaiveDateTime), + ) -> anyhow::Result>; +} diff --git a/clearing-house-app/src/db/mongo_doc_store.rs b/clearing-house-app/src/db/mongo_doc_store.rs new file mode 100644 index 00000000..c8d4c1b0 --- /dev/null +++ b/clearing-house-app/src/db/mongo_doc_store.rs @@ -0,0 +1,433 @@ +use crate::db::init_database_client; +use crate::db::mongo_doc_store::bucket::DocumentBucketSize; +use crate::model::constants::{ + DOCUMENT_DB, DOCUMENT_DB_CLIENT, MAX_NUM_RESPONSE_ENTRIES, MONGO_COLL_DOCUMENT_BUCKET, + MONGO_COUNTER, MONGO_DOC_ARRAY, MONGO_FROM_TS, MONGO_ID, MONGO_PID, MONGO_TO_TS, MONGO_TS, +}; +use crate::model::document::Document; +use crate::model::SortingOrder; +use anyhow::anyhow; +use futures::StreamExt; +use mongodb::bson::doc; +use mongodb::options::{AggregateOptions, CreateCollectionOptions, UpdateOptions, WriteConcern}; +use mongodb::{bson, Client, IndexModel}; + +#[derive(Clone, Debug)] +pub struct MongoDocumentStore { + database: mongodb::Database, +} + +impl MongoDocumentStore { + fn new(client: Client) -> MongoDocumentStore { + MongoDocumentStore { + database: client.database(DOCUMENT_DB), + } + } + + pub async fn init_datastore(db_url: &str, clear_db: bool) -> anyhow::Result { + debug!("Using mongodb url: '{:#?}'", &db_url); + match init_database_client(db_url, Some(DOCUMENT_DB_CLIENT.to_string())).await { + Ok(docstore) => { + debug!("Check if database is empty..."); + match docstore + .database(DOCUMENT_DB) + .list_collection_names(None) + .await + { + Ok(colls) => { + debug!("... found collections: {:#?}", &colls); + let number_of_colls = + match colls.contains(&MONGO_COLL_DOCUMENT_BUCKET.to_string()) { + true => colls.len(), + false => 0, + }; + + if number_of_colls > 0 && clear_db { + debug!("Database not empty and clear_db == true. Dropping database..."); + match docstore.database(DOCUMENT_DB).drop(None).await { + Ok(_) => { + debug!("... done."); + } + Err(_) => { + debug!("... failed."); + return Err(anyhow!("Failed to drop database")); + } + }; + } + if number_of_colls == 0 || clear_db { + debug!("Database empty. Need to initialize..."); + let mut write_concern = WriteConcern::default(); + write_concern.journal = Some(true); + let mut options = CreateCollectionOptions::default(); + options.write_concern = Some(write_concern); + debug!("Create collection {} ...", MONGO_COLL_DOCUMENT_BUCKET); + match docstore + .database(DOCUMENT_DB) + .create_collection(MONGO_COLL_DOCUMENT_BUCKET, options) + .await + { + Ok(_) => { + debug!("... done."); + } + Err(_) => { + debug!("... failed."); + return Err(anyhow!("Failed to create collection")); + } + }; + + // This purpose of this index is to ensure that the transaction counter is unique + /*let mut index_options = IndexOptions::default(); + index_options.unique = Some(true); + let mut index_model = IndexModel::default(); + index_model.keys = doc! {format!("{}.{}",MONGO_DOC_ARRAY, MONGO_TC): 1}; + index_model.options = Some(index_options); + + debug!("Create unique index for {} ...", MONGO_COLL_DOCUMENT_BUCKET); + match datastore + .client + .database(DOCUMENT_DB) + .collection::(MONGO_COLL_DOCUMENT_BUCKET) + .create_index(index_model, None) + .await + { + Ok(result) => { + debug!("... index {} created", result.index_name); + } + Err(_) => { + debug!("... 
failed."); + return Err(anyhow!("Failed to create index")); + } + }*/ + + // This creates a compound index over pid and the timestamp to enable paging using buckets + let mut compound_index_model = IndexModel::default(); + compound_index_model.keys = doc! {MONGO_PID: 1, MONGO_TS: 1}; + + debug!("Create unique index for {} ...", MONGO_COLL_DOCUMENT_BUCKET); + match docstore + .database(DOCUMENT_DB) + .collection::(MONGO_COLL_DOCUMENT_BUCKET) + .create_index(compound_index_model, None) + .await + { + Ok(result) => { + debug!("... index {} created", result.index_name); + } + Err(_) => { + debug!("... failed."); + return Err(anyhow!("Failed to create compound index")); + } + } + } + debug!("... database initialized."); + Ok(Self::new(docstore)) + } + Err(_) => Err(anyhow!("Failed to list collections")), + } + } + Err(_) => Err(anyhow!("Failed to initialize database client")), + } + } + + /// offset is necessary for duration queries. There, start_entries of bucket depend on timestamps which usually creates an offset in the bucket + #[tracing::instrument(skip_all)] + async fn get_start_bucket_size( + &self, + pid: &str, + page: u64, + size: u64, + sort: &SortingOrder, + (date_from, date_to): (&chrono::NaiveDateTime, &chrono::NaiveDateTime), + ) -> anyhow::Result { + debug!("...trying to get the offset for page {page} of size {size} of documents for pid {pid}..."); + let sort_order = match sort { + SortingOrder::Ascending => 1, + SortingOrder::Descending => -1, + }; + let coll = self + .database + .collection::(MONGO_COLL_DOCUMENT_BUCKET); + + debug!( + "... match with pid: {pid}, to_ts <= {}, from_ts >= {} ...", + date_to.timestamp(), + date_from.timestamp(), + ); + let pipeline = vec![ + doc! {"$match":{ + MONGO_PID: pid, + MONGO_FROM_TS: {"$lte": date_to.timestamp()}, + MONGO_TO_TS: {"$gte": date_from.timestamp()} + }}, + // sorting according to sorting order, so we get either the start or end + doc! {"$sort" : {MONGO_FROM_TS: sort_order}}, + doc! {"$limit" : 1}, + // count all relevant documents in the target bucket + doc! {"$unwind": format!("${}", MONGO_DOC_ARRAY)}, + doc! {"$match":{ + format!("{}.{}", MONGO_DOC_ARRAY, MONGO_TS): {"$lte": date_to.timestamp(), "$gte": date_from.timestamp()} + }}, + // modify result to return total number of docs in bucket and number of relevant docs in bucket + doc! { "$group": { "_id": {"total": "$counter"}, "size": { "$sum": 1 } } }, + doc! { "$project": {"_id":0, "capacity": "$_id.total", "size":true}}, + ]; + + let mut options = AggregateOptions::default(); + options.allow_disk_use = Some(true); + let mut results = coll.aggregate(pipeline, options).await?; + let mut bucket_size = DocumentBucketSize { + capacity: MAX_NUM_RESPONSE_ENTRIES as i32, + size: 0, + }; + while let Some(result) = results.next().await { + debug!("... retrieved: {:#?}", &result); + let result_bucket: DocumentBucketSize = bson::from_document(result?)?; + bucket_size = result_bucket; + } + debug!("... 
sending offset: {:?}", bucket_size); + Ok(bucket_size) + } + + #[tracing::instrument(skip_all)] + fn get_offset(bucket_size: &DocumentBucketSize) -> u64 { + (bucket_size.capacity - bucket_size.size) as u64 % MAX_NUM_RESPONSE_ENTRIES + } + + #[tracing::instrument(skip_all)] + fn get_start_bucket( + page: u64, + size: u64, + bucket_size: &DocumentBucketSize, + offset: u64, + ) -> u64 { + let docs_to_skip = + (page - 1) * size + offset + MAX_NUM_RESPONSE_ENTRIES - bucket_size.capacity as u64; + (docs_to_skip / MAX_NUM_RESPONSE_ENTRIES) + 1 + } + + #[tracing::instrument(skip_all)] + fn get_start_entry( + page: u64, + size: u64, + start_bucket: u64, + bucket_size: &DocumentBucketSize, + offset: u64, + ) -> u64 { + // docs to skip calculated by page * size + let docs_to_skip = (page - 1) * size + offset; + let mut start_entry = 0; + if start_bucket > 1 { + start_entry = docs_to_skip - bucket_size.capacity as u64; + if start_entry > 2 { + start_entry -= (start_bucket - 2) * MAX_NUM_RESPONSE_ENTRIES + } + } + start_entry + } +} + +impl super::DocumentStore for MongoDocumentStore { + #[tracing::instrument(skip_all)] + async fn add_document(&self, doc: Document) -> anyhow::Result { + debug!("add_document to bucket"); + let coll = self + .database + .collection::(MONGO_COLL_DOCUMENT_BUCKET); + let mut update_options = UpdateOptions::default(); + update_options.upsert = Some(true); + let id = format!("^{}_", doc.pid.clone()); + let re = mongodb::bson::Regex { + pattern: id, + options: String::new(), + }; + + let query = doc! {"_id": re, MONGO_PID: doc.pid.clone(), MONGO_COUNTER: mongodb::bson::bson!({"$lt": MAX_NUM_RESPONSE_ENTRIES as i64})}; + + match coll.update_one(query, + doc! { + "$push": { + MONGO_DOC_ARRAY: mongodb::bson::to_bson(&doc)?, + }, + "$inc": {"counter": 1}, + "$setOnInsert": { "_id": format!("{}_{}_{}", doc.pid.clone(), doc.ts.timestamp(), crate::util::new_uuid()), MONGO_FROM_TS: doc.ts.timestamp()}, + "$set": {MONGO_TO_TS: doc.ts.timestamp()}, + }, update_options).await { + Ok(_r) => { + debug!("added new document: {:#?}", &_r.upserted_id); + Ok(true) + } + Err(e) => { + error!("failed to store document: {:#?}", &e); + Err(e.into()) + } + } + } + + /// checks if the document exists + /// document ids are globally unique + #[tracing::instrument(skip_all)] + async fn exists_document(&self, id: &uuid::Uuid) -> anyhow::Result { + debug!("Check if document with id '{}' exists...", id.to_string()); + let query = doc! {format!("{}.{}", MONGO_DOC_ARRAY, MONGO_ID): id.to_string()}; + + let coll = self + .database + .collection::(MONGO_COLL_DOCUMENT_BUCKET); + match coll.count_documents(Some(query), None).await? { + 0 => { + debug!("Document with id '{}' does not exist!", &id.to_string()); + Ok(false) + } + _ => { + debug!("... found."); + Ok(true) + } + } + } + + /// gets the model from the db + #[tracing::instrument(skip_all)] + async fn get_document(&self, id: &str, pid: &str) -> anyhow::Result> { + debug!("Trying to get doc with id {}...", id); + let coll = self + .database + .collection::(MONGO_COLL_DOCUMENT_BUCKET); + + let pipeline = vec![ + doc! {"$match":{ + MONGO_PID: pid.to_owned(), + format!("{}.{}", MONGO_DOC_ARRAY, MONGO_ID): id.to_owned(), + }}, + doc! {"$unwind": format!("${}", MONGO_DOC_ARRAY)}, + doc! {"$addFields": {format!("{}.{}", MONGO_DOC_ARRAY, MONGO_PID): format!("${}", MONGO_PID)}}, + doc! {"$replaceRoot": { "newRoot": format!("${}", MONGO_DOC_ARRAY)}}, + doc! 
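> The three helpers above are easiest to follow with concrete numbers. Assume `MAX_NUM_RESPONSE_ENTRIES = 100` (the real value lives in `model/constants.rs`), a first matching bucket that is full (capacity 100) with only 40 documents inside the date window, and a query for page 3 with page size 50:

```rust
fn main() {
    const MAX: u64 = 100; // assumed value of MAX_NUM_RESPONSE_ENTRIES
    let (capacity, in_window) = (100u64, 40u64); // first matching bucket
    let (page, size) = (3u64, 50u64);

    let offset = (capacity - in_window) % MAX; // 60 docs outside the window
    let docs_to_skip = (page - 1) * size + offset + MAX - capacity; // 160
    let start_bucket = docs_to_skip / MAX + 1; // integer division: bucket 2
    // get_start_entry's further "(start_bucket - 2) * MAX" correction is
    // zero in this example, leaving:
    let start_entry = (page - 1) * size + offset - capacity; // 60

    assert_eq!((offset, start_bucket, start_entry), (60, 2, 60));
    // The pipeline then skips 1 bucket, unwinds the next two (a page can
    // straddle a bucket boundary, hence "$limit": 2), filters to the date
    // window, skips 60 entries and takes 50 -- in-window docs 100..150.
}
```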
{"$match":{ MONGO_ID: id.to_owned()}}, + ]; + + let mut results = coll.aggregate(pipeline, None).await?; + + if let Some(result) = results.next().await { + let doc: Document = bson::from_document(result?)?; + return Ok(Some(doc)); + } + + Ok(None) + } + + /// gets a page of documents of a specific document type for a single process from the db defined by parameters page, size and sort + #[tracing::instrument(skip_all)] + async fn get_documents_for_pid( + &self, + pid: &str, + page: u64, + size: u64, + sort: &SortingOrder, + (date_from, date_to): (&chrono::NaiveDateTime, &chrono::NaiveDateTime), + ) -> anyhow::Result> { + debug!("...trying to get page {page} of size {size} of documents for pid {pid}..."); + + match self + .get_start_bucket_size(pid, page, size, sort, (date_from, date_to)) + .await + { + Ok(bucket_size) => { + let offset = MongoDocumentStore::get_offset(&bucket_size); + let start_bucket = + MongoDocumentStore::get_start_bucket(page, size, &bucket_size, offset); + trace!( + "...working with start_bucket {} and offset {} ...", + start_bucket, + offset + ); + let start_entry = MongoDocumentStore::get_start_entry( + page, + size, + start_bucket, + &bucket_size, + offset, + ); + + trace!( + "...working with start_entry {} in start_bucket {} and offset {} ...", + start_entry, + start_bucket, + offset + ); + + let skip_buckets = (start_bucket - 1) as i32; + let sort_order = match sort { + SortingOrder::Ascending => 1, + SortingOrder::Descending => -1, + }; + + let pipeline = vec![ + doc! {"$match":{ + MONGO_PID: pid, + MONGO_FROM_TS: {"$lte": date_to.timestamp()}, + MONGO_TO_TS: {"$gte": date_from.timestamp()} + }}, + doc! {"$sort": {MONGO_FROM_TS: sort_order}}, + doc! {"$skip": skip_buckets}, + // worst case: overlap between two buckets. + doc! {"$limit": 2}, + doc! {"$unwind": format! ("${}", MONGO_DOC_ARRAY)}, + doc! {"$replaceRoot": { "newRoot": "$documents"}}, + doc! {"$match":{ + MONGO_TS: {"$gte": date_from.timestamp(), "$lte": date_to.timestamp()} + }}, + doc! {"$sort": {MONGO_TS: sort_order}}, + doc! {"$skip": start_entry as i32}, + doc! 
{"$limit": size as i32}, + ]; + + let coll = self + .database + .collection::(MONGO_COLL_DOCUMENT_BUCKET); + + let mut options = AggregateOptions::default(); + options.allow_disk_use = Some(true); + let mut results = coll.aggregate(pipeline, options).await?; + + let mut docs = vec![]; + while let Some(result) = results.next().await { + let doc: Document = bson::from_document(result?)?; + docs.push(doc); + } + + Ok(docs) + } + Err(e) => { + error!("Error while getting bucket offset!"); + Err(e) + } + } + } +} + +mod bucket { + use super::Document; + + #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] + pub struct DocumentBucket { + pub counter: u64, + pub pid: String, + pub dt_id: String, + pub from_ts: i64, + pub to_ts: i64, + pub documents: Vec, + } + + #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] + pub struct DocumentBucketSize { + pub capacity: i32, + pub size: i32, + } + + #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] + pub struct DocumentBucketUpdate { + pub id: String, + pub ts: i64, + pub keys_ct: String, + pub cts: Vec, + } +} diff --git a/clearing-house-app/src/db/mongo_process_store.rs b/clearing-house-app/src/db/mongo_process_store.rs new file mode 100644 index 00000000..1e355e9a --- /dev/null +++ b/clearing-house-app/src/db/mongo_process_store.rs @@ -0,0 +1,156 @@ +use crate::db::init_database_client; +use crate::model::constants::{ + MONGO_COLL_PROCESSES, MONGO_COLL_TRANSACTIONS, MONGO_ID, PROCESS_DB, PROCESS_DB_CLIENT, +}; +use crate::model::process::Process; +use anyhow::anyhow; +use futures::TryStreamExt; +use mongodb::bson::doc; +use mongodb::options::{CreateCollectionOptions, WriteConcern}; +use mongodb::{Client, Database}; + +#[derive(Clone, Debug)] +pub struct MongoProcessStore { + database: Database, +} + +impl MongoProcessStore { + fn new(client: Client) -> MongoProcessStore { + MongoProcessStore { + database: client.database(PROCESS_DB), + } + } + + pub async fn init_process_store(db_url: &str, clear_db: bool) -> anyhow::Result { + debug!("...using database url: '{:#?}'", &db_url); + + match init_database_client(db_url, Some(PROCESS_DB_CLIENT.to_string())).await { + Ok(process_store) => { + debug!("...check if database is empty..."); + match process_store + .database(PROCESS_DB) + .list_collection_names(None) + .await + { + Ok(colls) => { + debug!("... found collections: {:#?}", &colls); + if !colls.is_empty() && clear_db { + debug!( + "...database not empty and clear_db == true. Dropping database..." + ); + match process_store.database(PROCESS_DB).drop(None).await { + Ok(_) => { + debug!("... done."); + } + Err(_) => { + debug!("... failed."); + return Err(anyhow!("Failed to drop database")); + } + }; + } + if colls.is_empty() || clear_db { + debug!("..database empty. Need to initialize..."); + let mut write_concern = WriteConcern::default(); + write_concern.journal = Some(true); + let mut options = CreateCollectionOptions::default(); + options.write_concern = Some(write_concern); + debug!("...create collection {} ...", MONGO_COLL_TRANSACTIONS); + match process_store + .database(PROCESS_DB) + .create_collection(MONGO_COLL_TRANSACTIONS, options) + .await + { + Ok(_) => { + debug!("... done."); + } + Err(_) => { + debug!("... failed."); + return Err(anyhow!("Failed to create collection")); + } + }; + } + debug!("... 
+ Ok(Self::new(process_store)) + } + Err(_) => Err(anyhow!("Failed to list collections")), + } + } + Err(_) => Err(anyhow!("Failed to initialize database client")), + } + } +} + +impl super::ProcessStore for MongoProcessStore { + async fn get_processes(&self) -> anyhow::Result<Vec<Process>> { + debug!("Trying to get all processes..."); + let coll = self.database.collection::<Process>(MONGO_COLL_PROCESSES); + let result = coll + .find(None, None) + .await? + .try_collect() + .await + .unwrap_or_else(|_| vec![]); + Ok(result) + } + + async fn delete_process(&self, pid: &str) -> anyhow::Result<bool> { + debug!("Trying to delete process with pid '{}'...", pid); + let coll = self.database.collection::<Process>(MONGO_COLL_PROCESSES); + let result = coll.delete_one(doc! { MONGO_ID: pid }, None).await?; + if result.deleted_count == 1 { + debug!("... deleted one process."); + Ok(true) + } else { + warn!("deleted_count={}", result.deleted_count); + Ok(false) + } + } + + /// checks if the id exists + #[tracing::instrument(skip_all)] + async fn exists_process(&self, pid: &str) -> anyhow::Result<bool> { + debug!("Check if process with pid '{}' exists...", pid); + let coll = self.database.collection::<Process>(MONGO_COLL_PROCESSES); + let result = coll.find_one(Some(doc! { MONGO_ID: pid }), None).await?; + match result { + Some(_r) => { + debug!("... found."); + Ok(true) + } + None => { + debug!("Process with pid '{}' does not exist!", pid); + Ok(false) + } + } + } + + #[tracing::instrument(skip_all)] + async fn get_process(&self, pid: &str) -> anyhow::Result<Option<Process>> { + debug!("Trying to get process with id {}...", pid); + let coll = self.database.collection::<Process>(MONGO_COLL_PROCESSES); + match coll.find_one(Some(doc! { MONGO_ID: pid }), None).await { + Ok(process) => Ok(process), + Err(e) => { + error!("Error while getting process: {:#?}!", &e); + Err(e.into()) + } + } + } + + /// store process in db + #[tracing::instrument(skip_all)] + async fn store_process(&self, process: Process) -> anyhow::Result<()> { + debug!("Storing process with pid {:#?}...", &process.id); + let coll = self.database.collection::<Process>(MONGO_COLL_PROCESSES); + match coll.insert_one(process, None).await { + Ok(_r) => { + debug!("...added new process: {}", &_r.inserted_id); + Ok(()) + } + Err(e) => { + error!("...failed to store process: {:#?}", &e); + Err(e.into()) + } + } + } +} diff --git a/clearing-house-app/src/db/postgres_document_store.rs b/clearing-house-app/src/db/postgres_document_store.rs new file mode 100644 index 00000000..a4a6a238 --- /dev/null +++ b/clearing-house-app/src/db/postgres_document_store.rs @@ -0,0 +1,205 @@ +use crate::model::document::Document; +use crate::model::ids::{InfoModelDateTime, InfoModelId}; +use crate::model::SortingOrder; + +pub(crate) struct PostgresDocumentStore { + db: sqlx::PgPool, +} + +impl PostgresDocumentStore { + pub(crate) async fn new(db: sqlx::PgPool, clear_db: bool) -> Self { + if clear_db { + info!("Clearing database 'documents'"); + sqlx::query("TRUNCATE documents") + .execute(&db) + .await + .expect("Clearing database 'documents' failed"); + } + + Self { db } + } +} + +impl super::DocumentStore for PostgresDocumentStore { + async fn add_document(&self, doc: Document) -> anyhow::Result<bool> { + let doc = DocumentRow::from(doc); + + sqlx::query( + r"INSERT INTO documents + (id, process_id, created_at, model_version, correlation_message, + transfer_contract, issued, issuer_connector, content_version, recipient_connector, + sender_agent, recipient_agent, payload, payload_type, message_id) + VALUES + ($1, (SELECT id from
processes where process_id = $2), $3, $4, $5, + $6, $7, $8, $9, $10, + $11, $12, $13, $14, $15)", + ) + .bind(doc.id) // 1 + .bind(doc.process_id) // 2 + .bind(doc.created_at) // 3 + .bind(doc.model_version) // 4 + .bind(doc.correlation_message) // 5 + .bind(doc.transfer_contract) // 6 + .bind(doc.issued) // 7 + .bind(doc.issuer_connector) // 8 + .bind(doc.content_version) // 9 + .bind(doc.recipient_connector) // 10 + .bind(doc.sender_agent) // 11 + .bind(doc.recipient_agent) // 12 + .bind(doc.payload) // 13 + .bind(doc.payload_type) // 14 + .bind(doc.message_id) // 15 + .execute(&self.db) + .await?; + + Ok(true) + } + + async fn exists_document(&self, id: &uuid::Uuid) -> anyhow::Result<bool> { + sqlx::query("SELECT id FROM documents WHERE id = $1") + .bind(id) + .fetch_optional(&self.db) + .await + .map(|r| r.is_some()) + .map_err(std::convert::Into::into) + } + + async fn get_document(&self, id: &str, pid: &str) -> anyhow::Result<Option<Document>> { + sqlx::query_as::<_, DocumentRow>( + r"SELECT documents.id, processes.process_id, documents.created_at, model_version, correlation_message, + transfer_contract, issued, issuer_connector, content_version, recipient_connector, + sender_agent, recipient_agent, payload, payload_type, message_id + FROM documents + LEFT JOIN processes ON processes.id = documents.process_id + WHERE documents.id = $1 AND processes.process_id = $2", + ) + .bind(id) + .bind(pid) + .fetch_optional(&self.db) + .await + .map(|r| r.map(DocumentRow::into)) + .map_err(std::convert::Into::into) + } + + /// Get documents for a process + /// + /// # Lints + /// + /// Disabled `clippy::cast_possible_wrap` because cast is handled + #[allow(clippy::cast_possible_wrap)] + async fn get_documents_for_pid( + &self, + pid: &str, + page: u64, + size: u64, + sort: &SortingOrder, + (date_from, date_to): (&chrono::NaiveDateTime, &chrono::NaiveDateTime), + ) -> anyhow::Result<Vec<Document>> { + let sort_order = match sort { + SortingOrder::Ascending => "ASC", + SortingOrder::Descending => "DESC", + }; + + sqlx::query_as::<_, DocumentRow>( + format!( + r"SELECT documents.id, processes.process_id, documents.created_at, model_version, correlation_message, + transfer_contract, issued, issuer_connector, content_version, recipient_connector, + sender_agent, recipient_agent, payload, payload_type, message_id + FROM documents + LEFT JOIN processes ON processes.id = documents.process_id + WHERE processes.process_id = $1 AND documents.created_at BETWEEN $2 AND $3 + ORDER BY created_at {sort_order} + LIMIT $4 OFFSET $5") + .as_str(), + ) + .bind(pid) + .bind(date_from) + .bind(date_to) + .bind(cast_i64(size)?) + .bind(cast_i64((page - 1) * size)?)
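+ // Pagination here is plain LIMIT/OFFSET, e.g. page = 3 with size = 50
+ // binds LIMIT 50 and OFFSET (3 - 1) * 50 = 100. `page` is treated as
+ // 1-based; a page of 0 would make `page - 1` underflow the u64.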
+ .fetch_all(&self.db) + .await + .map(|r| r.into_iter().map(DocumentRow::into).collect()) + .map_err(std::convert::Into::into) + } +} + +/// Cast u64 to i64 with out-of-range check +fn cast_i64(value: u64) -> anyhow::Result<i64> { + if value > i64::MAX as u64 { + Err(anyhow::anyhow!("size out-of-range")) + } else { + #[allow(clippy::cast_possible_wrap)] + Ok(value as i64) + } +} + +#[derive(sqlx::FromRow)] +struct DocumentRow { + id: uuid::Uuid, + process_id: String, + created_at: chrono::NaiveDateTime, + model_version: String, + correlation_message: Option<String>, + transfer_contract: Option<String>, + issued: sqlx::types::Json<InfoModelDateTime>, + issuer_connector: sqlx::types::Json<InfoModelId>, + content_version: Option<String>, + recipient_connector: Option<sqlx::types::Json<Vec<InfoModelId>>>, + sender_agent: String, + recipient_agent: Option<sqlx::types::Json<Vec<String>>>, + payload: Option<Vec<u8>>, + payload_type: Option<String>, + message_id: Option<String>, +} + +impl From<Document> for DocumentRow { + fn from(value: Document) -> Self { + Self { + id: value.id, + process_id: value.pid, + created_at: value.ts.naive_utc(), + model_version: value.content.model_version, + correlation_message: value.content.correlation_message, + transfer_contract: value.content.transfer_contract, + issued: sqlx::types::Json(value.content.issued), + issuer_connector: sqlx::types::Json(value.content.issuer_connector), + content_version: value.content.content_version, + recipient_connector: value.content.recipient_connector.map(sqlx::types::Json), + sender_agent: value.content.sender_agent, + recipient_agent: value.content.recipient_agent.map(sqlx::types::Json), + payload: value.content.payload.map(|s| s.as_bytes().to_owned()), + payload_type: value.content.payload_type, + message_id: value.content.id, + } + } +} + +impl From<DocumentRow> for Document { + fn from(value: DocumentRow) -> Self { + use chrono::TimeZone; + + Self { + id: value.id, + pid: value.process_id, + ts: chrono::Local.from_utc_datetime(&value.created_at), + content: crate::model::ids::message::IdsMessage { + model_version: value.model_version, + correlation_message: value.correlation_message, + transfer_contract: value.transfer_contract, + issued: value.issued.0, + issuer_connector: value.issuer_connector.0, + content_version: value.content_version, + recipient_connector: value.recipient_connector.map(|s| s.0), + sender_agent: value.sender_agent, + recipient_agent: value.recipient_agent.map(|s| s.0), + payload: value + .payload + .map(|s| String::from_utf8_lossy(s.as_ref()).to_string()), + payload_type: value.payload_type, + id: value.message_id, + ..Default::default() + }, + } + } +} diff --git a/clearing-house-app/src/db/postgres_process_store.rs b/clearing-house-app/src/db/postgres_process_store.rs new file mode 100644 index 00000000..3c3edf5b --- /dev/null +++ b/clearing-house-app/src/db/postgres_process_store.rs @@ -0,0 +1,136 @@ +use crate::model::process::Process; +use sqlx::Row; + +pub(crate) struct PostgresProcessStore { + db: sqlx::PgPool, +} + +impl PostgresProcessStore { + pub(crate) async fn new(db: sqlx::PgPool, clear_db: bool) -> Self { + if clear_db { + info!("Clearing database 'process_owners', 'clients' and 'processes'"); + sqlx::query("TRUNCATE process_owners, clients, processes CASCADE") + .execute(&db) + .await + .expect("Clearing databases 'process_owners', 'clients' and 'processes' failed."); + } + + Self { db } + } +}
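+// Schema sketch inferred from the queries below (the migrations are not part
+// of this file, so the exact column types are assumptions):
+//
+//   processes      (id SERIAL PRIMARY KEY, process_id TEXT UNIQUE, created_at TIMESTAMP)
+//   clients        (id SERIAL PRIMARY KEY, client_id TEXT UNIQUE)
+//   process_owners (process_id INT REFERENCES processes(id),
+//                   client_id  INT REFERENCES clients(id))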
+impl super::ProcessStore for PostgresProcessStore { + async fn get_processes(&self) -> anyhow::Result<Vec<Process>> { + sqlx::query_as::<_, ProcessRow>( + r"SELECT p.process_id, p.created_at, ARRAY_AGG(c.client_id) AS owners FROM processes p + LEFT JOIN process_owners po ON p.id = po.process_id + LEFT JOIN clients c ON po.client_id = c.id + GROUP BY p.process_id, p.created_at", + ) + .fetch_all(&self.db) + .await + .map(|r| r.into_iter().map(std::convert::Into::into).collect()) + .map_err(std::convert::Into::into) + } + + async fn delete_process(&self, pid: &str) -> anyhow::Result<bool> { + // DELETE has no CASCADE clause in Postgres; cleaning up owner rows is + // assumed to be handled by ON DELETE CASCADE on the foreign keys. + sqlx::query("DELETE FROM processes WHERE process_id = $1") + .bind(pid) + .execute(&self.db) + .await + .map(|r| r.rows_affected() == 1) + .map_err(std::convert::Into::into) + } + + async fn exists_process(&self, pid: &str) -> anyhow::Result<bool> { + sqlx::query("SELECT process_id FROM processes WHERE process_id = $1") + .bind(pid) + .fetch_optional(&self.db) + .await + .map(|r| r.is_some()) + .map_err(std::convert::Into::into) + } + + async fn get_process(&self, pid: &str) -> anyhow::Result<Option<Process>> { + sqlx::query_as::<_, ProcessRow>( + r"SELECT p.process_id, p.created_at, ARRAY_AGG(c.client_id) AS owners FROM processes p + LEFT JOIN process_owners po ON p.id = po.process_id + LEFT JOIN clients c ON po.client_id = c.id + WHERE p.process_id = $1 + GROUP BY p.process_id, p.created_at", + ) + .bind(pid) + .fetch_optional(&self.db) + .await + .map(|r| r.map(std::convert::Into::into)) + .map_err(std::convert::Into::into) + } + + async fn store_process(&self, process: Process) -> anyhow::Result<()> { + let process = ProcessRow::from(process); + let mut tx = self.db.begin().await?; + + // Create a process + let process_row = + sqlx::query(r"INSERT INTO processes (process_id) VALUES ($1) RETURNING id") + .bind(&process.process_id) + .fetch_one(&mut *tx) + .await?; + + let pid = process_row.get::<i32, _>("id"); + + for o in process.owners { + // Check if client exists + let client_row = sqlx::query(r"SELECT id FROM clients WHERE client_id = $1") + .bind(&o) + .fetch_optional(&mut *tx) + .await?; + + // If not, create it + let client_row = match client_row { + Some(crow) => crow, + None => { + sqlx::query(r"INSERT INTO clients (client_id) VALUES ($1) RETURNING id") + .bind(&o) + .fetch_one(&mut *tx) + .await?
+ } + }; + + // Get id of client + let client_id = client_row.get::("id"); + + // Create process owner + sqlx::query(r"INSERT INTO process_owners (process_id, client_id) VALUES ($1, $2)") + .bind(pid) + .bind(client_id) + .execute(&mut *tx) + .await?; + } + + tx.commit().await?; + + Ok(()) + } +} + +#[derive(sqlx::FromRow, Debug)] +struct ProcessRow { + pub process_id: String, + pub owners: Vec, +} + +impl From for ProcessRow { + fn from(p: Process) -> Self { + Self { + process_id: p.id, + owners: p.owners, + } + } +} + +impl From for Process { + fn from(value: ProcessRow) -> Self { + Self::new(value.process_id, value.owners) + } +} diff --git a/clearing-house-app/src/lib.rs b/clearing-house-app/src/lib.rs new file mode 100644 index 00000000..62b648c8 --- /dev/null +++ b/clearing-house-app/src/lib.rs @@ -0,0 +1,124 @@ +#![forbid(unsafe_code)] +#![warn(clippy::all, clippy::pedantic, clippy::unwrap_used)] +#![allow(clippy::module_name_repetitions)] + +#[macro_use] +extern crate tracing; + +use crate::model::constants::ENV_LOGGING_SERVICE_ID; +use crate::util::ServiceConfig; +use std::sync::Arc; + +mod config; +mod db; +pub mod model; +mod ports; +mod services; +pub mod util; + +#[cfg(feature = "postgres")] +type PostgresLoggingService = services::logging_service::LoggingService< + db::postgres_process_store::PostgresProcessStore, + db::postgres_document_store::PostgresDocumentStore, +>; +#[cfg(feature = "mongodb")] +type MongoLoggingService = services::logging_service::LoggingService< + db::mongo_process_store::MongoProcessStore, + db::mongo_doc_store::MongoDocumentStore, +>; + +/// Contains the application state +#[derive(Clone)] +pub(crate) struct AppState { + #[cfg(feature = "postgres")] + pub logging_service: Arc, + #[cfg(feature = "mongodb")] + pub logging_service: Arc, + pub service_config: Arc, + pub signing_key_path: String, +} + +impl AppState { + + /// Connect to the database and execute database migrations + async fn setup_postgres(conf: &config::CHConfig) -> anyhow::Result { + info!("Connecting to database"); + let pool = sqlx::PgPool::connect(&conf.database_url).await?; + + info!("Migrating database"); + sqlx::migrate!() + .run(&pool) + .await + .expect("Failed to migrate database!"); + + Ok(pool) + } + + /// Initialize the application state from config + async fn init(conf: &config::CHConfig) -> anyhow::Result { + #[cfg(feature = "postgres")] + let pool = Self::setup_postgres(conf).await?; + + trace!("Initializing Process store"); + #[cfg(feature = "mongodb")] + let process_store = db::mongo_process_store::MongoProcessStore::init_process_store( + &conf.database_url, + conf.clear_db, + ) + .await + .expect("Failure to initialize process store! Exiting..."); + #[cfg(feature = "postgres")] + let process_store = + db::postgres_process_store::PostgresProcessStore::new(pool.clone(), conf.clear_db) + .await; + + trace!("Initializing Document store"); + #[cfg(feature = "mongodb")] + let doc_store = db::mongo_doc_store::MongoDocumentStore::init_datastore( + &conf.database_url, + conf.clear_db, + ) + .await + .expect("Failure to initialize document store! 
Exiting..."); + #[cfg(feature = "postgres")] + let doc_store = + db::postgres_document_store::PostgresDocumentStore::new(pool, conf.clear_db).await; + + trace!("Initializing services"); + let doc_service = Arc::new(services::document_service::DocumentService::new(doc_store)); + let logging_service = Arc::new(services::logging_service::LoggingService::new( + process_store, + doc_service.clone(), + )); + + let service_config = Arc::new(util::init_service_config( + ENV_LOGGING_SERVICE_ID, + )?); + let signing_key = util::init_signing_key(conf.signing_key.as_deref())?; + + Ok(Self { + signing_key_path: signing_key, + service_config, + logging_service, + }) + } +} + +/// Initialize the application +/// +/// # Errors +/// +/// Throws an error if the `AppState` cannot be initialized +pub async fn app() -> anyhow::Result { + // Read configuration + let conf = config::read_config(None); + config::configure_logging(&conf); + + tracing::info!("Config read successfully! Initializing application ..."); + + // Initialize application state + let app_state = AppState::init(&conf).await?; + + // Setup router + Ok(ports::router().with_state(app_state)) +} diff --git a/clearing-house-app/src/main.rs b/clearing-house-app/src/main.rs new file mode 100644 index 00000000..d3024906 --- /dev/null +++ b/clearing-house-app/src/main.rs @@ -0,0 +1,24 @@ +#![forbid(unsafe_code)] +#![warn(clippy::all, clippy::pedantic, clippy::unwrap_used)] + +use tokio::net::TcpListener; + +/// Main function: Reading config, initializing application state, starting server +#[tokio::main] +async fn main() -> Result<(), anyhow::Error> { + #[cfg(feature = "sentry")] + let _guard = sentry::init(("https://347cc3aa30aa0c07d437da8c780838d3@o4506146399322112.ingest.sentry.io/4506155710480384", sentry::ClientOptions { + release: sentry::release_name!(), + ..Default::default() + })); + + // Setup router + let app = clearing_house_app::app().await?; + + // Bind port and start server + let listener = TcpListener::bind("0.0.0.0:8000").await?; + tracing::info!("Starting server: Listening on 0.0.0.0:8000"); + Ok(axum::serve(listener, app.into_make_service()) + .with_graceful_shutdown(clearing_house_app::util::shutdown_signal()) + .await?) 
+} diff --git a/clearing-house-app/src/model/claims.rs b/clearing-house-app/src/model/claims.rs new file mode 100644 index 00000000..49a7b667 --- /dev/null +++ b/clearing-house-app/src/model/claims.rs @@ -0,0 +1,248 @@ +use crate::model::constants::{ENV_SHARED_SECRET, SERVICE_HEADER}; +use crate::AppState; +use anyhow::Context; +use axum::extract::FromRef; +use axum::response::IntoResponse; +use num_bigint::BigUint; +use std::env; + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct ChClaims { + pub client_id: String, +} + +impl ChClaims { + #[must_use] + pub fn new(client_id: &str) -> Self { + Self { + client_id: client_id.to_string(), + } + } +} + +impl std::fmt::Display for ChClaims { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "<{}>", self.client_id) + } +} + +pub struct ExtractChClaims(pub ChClaims); + +#[async_trait::async_trait] +impl axum::extract::FromRequestParts for ExtractChClaims + where + S: Send + Sync, + AppState: FromRef, +{ + type Rejection = axum::response::Response; + + async fn from_request_parts( + parts: &mut axum::http::request::Parts, + state: &S, + ) -> Result { + let axum::extract::State(app_state) = + axum::extract::State::::from_request_parts(parts, state) + .await + .map_err(axum::response::IntoResponse::into_response)?; + if let Some(token) = parts.headers.get(SERVICE_HEADER) { + let token = token.to_str().map_err(|_| { + ( + axum::http::StatusCode::BAD_REQUEST, + format!("Invalid token in {SERVICE_HEADER}"), + ) + .into_response() + })?; + debug!("...received service header: {:?}", token); + + match decode_token::(token, app_state.service_config.service_id.as_str()) { + Ok(claims) => { + debug!("...retrieved claims and succeed"); + Ok(ExtractChClaims(claims)) + } + Err(e) => { + error!("...failed to retrieve and validate claims: {}", e); + Err((axum::http::StatusCode::BAD_REQUEST, "Invalid token").into_response()) + } + } + } else { + Err((axum::http::StatusCode::BAD_REQUEST, "Missing token").into_response()) + } + } +} + +/// Returns the `JWKSet` for the RSA keypair at `key_path` +/// +/// # Panics +/// +/// Panics if the key at `key_path` is not a valid RSA keypair or does not exist. +#[must_use] +pub fn get_jwks(key_path: &str) -> Option> { + let keypair = biscuit::jws::Secret::rsa_keypair_from_file(key_path) + .unwrap_or_else(|_| panic!("Failed to load keyfile from path {key_path}")); + + if let biscuit::jws::Secret::RsaKeyPair(a) = keypair { + use ring::signature::KeyPair; + let pk_modulus = + BigUint::from_bytes_be(a.public_key().modulus().big_endian_without_leading_zero()); + let pk_e = BigUint::from_bytes_be( + a.as_ref() + .public_key() + .exponent() + .big_endian_without_leading_zero(), + ); + + let params = biscuit::jwk::RSAKeyParameters { + n: pk_modulus, + e: pk_e, + ..Default::default() + }; + + let common = biscuit::jwk::CommonParameters { + key_id: get_fingerprint(key_path), + ..Default::default() + }; + + let jwk = biscuit::jwk::JWK:: { + common, + algorithm: biscuit::jwk::AlgorithmParameters::RSA(params), + additional: biscuit::Empty::default(), + }; + + let jwks = biscuit::jwk::JWKSet:: { keys: vec![jwk] }; + return Some(jwks); + } + None +} + +/// Returns the fingerprint of the RSA keypair at `key_path` +/// +/// # Panics +/// +/// Panics if the key at `key_path` is not a valid RSA keypair or does not exist. 
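+///
+/// # Example
+///
+/// A minimal sketch, reusing the keypair referenced by the unit test at the
+/// bottom of this file:
+///
+/// ```ignore
+/// let kid = get_fingerprint("keys/private_key.der")
+///     .expect("valid RSA keypair");
+/// // The fingerprint doubles as the JWKS key id (see `get_jwks` above).
+/// ```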
+#[must_use] +pub fn get_fingerprint(key_path: &str) -> Option { + use ring::signature::KeyPair; + let keypair = biscuit::jws::Secret::rsa_keypair_from_file(key_path) + .unwrap_or_else(|_| panic!("File exists at '{key_path}' and is a valid RSA keypair")); + if let biscuit::jws::Secret::RsaKeyPair(a) = keypair { + let pk_modulus = a + .as_ref() + .public_key() + .modulus() + .big_endian_without_leading_zero() + .to_vec(); + let pk_e = a + .as_ref() + .public_key() + .exponent() + .big_endian_without_leading_zero() + .to_vec(); + + let pk = openssh_keys::PublicKey::from_rsa(pk_e, pk_modulus); + Some(pk.fingerprint()) + } else { + None + } +} + +/// Creates a JWT token with the given `issuer`, `audience` and `private_claims` +/// +/// # Panics +/// +/// Panics if the `ENV_SHARED_SECRET` is not set +pub fn create_token< + T: std::fmt::Display + Clone + serde::Serialize + for<'de> serde::Deserialize<'de>, +>( + issuer: &str, + audience: &str, + private_claims: &T, +) -> String { + let secret = env::var(ENV_SHARED_SECRET).unwrap_or_else(|_| panic!("Shared Secret not configured. Please configure environment variable {ENV_SHARED_SECRET}")); + let signing_secret = biscuit::jws::Secret::Bytes(secret.to_string().into_bytes()); + let expiration_date = chrono::Utc::now() + chrono::Duration::minutes(5); + + let claims = biscuit::ClaimsSet:: { + registered: biscuit::RegisteredClaims { + issuer: Some(issuer.to_string()), + issued_at: Some(biscuit::Timestamp::from(chrono::Utc::now())), + audience: Some(biscuit::SingleOrMultiple::Single(audience.to_string())), + expiry: Some(biscuit::Timestamp::from(expiration_date)), + ..Default::default() + }, + private: private_claims.clone(), + }; + + // Construct the JWT + let jwt = biscuit::jws::Compact::new_decoded( + From::from(biscuit::jws::RegisteredHeader { + algorithm: biscuit::jwa::SignatureAlgorithm::HS256, + ..Default::default() + }), + claims, + ); + + jwt.into_encoded(&signing_secret) + .expect("Encoded JWT with the signing secret") + .unwrap_encoded() + .to_string() +} + +/// Decodes the given `token` and validates it against the given `audience` +/// +/// # Errors +/// +/// Returns an error if the token is invalid or the audience is not as expected. +pub fn decode_token serde::Deserialize<'de>>( + token: &str, + audience: &str, +) -> anyhow::Result { + use biscuit::Presence::Required; + use biscuit::Validation::Validate; + let signing_secret = match env::var(ENV_SHARED_SECRET) { + Ok(secret) => biscuit::jws::Secret::Bytes(secret.to_string().into_bytes()), + Err(e) => { + error!( + "Shared Secret not configured. Please configure environment variable {}", + ENV_SHARED_SECRET + ); + return Err(e.into()); + } + }; + let jwt: biscuit::jws::Compact, biscuit::Empty> = + biscuit::JWT::<_, biscuit::Empty>::new_encoded(token); + let decoded_jwt = match jwt.decode(&signing_secret, biscuit::jwa::SignatureAlgorithm::HS256) { + Ok(x) => Ok(x), + Err(e) => { + error!("Failed to decode token {}", e); + Err(e) + } + }?; + let claim_presence_options = biscuit::ClaimPresenceOptions { + issuer: Required, + audience: Required, + issued_at: Required, + expiry: Required, + ..Default::default() + }; + let val_options = biscuit::ValidationOptions { + claim_presence_options, + // Issuer is not validated. 
Wouldn't make much of a difference if we did + // issued_at: Validate(Duration::minutes(5)), + audience: Validate(audience.to_string()), + ..Default::default() + }; + + decoded_jwt + .validate(val_options) + .with_context(|| "Failed validating JWT")?; + Ok(decoded_jwt.payload()?.private.clone()) +} + +#[cfg(test)] +mod test { + #[test] + fn get_fingerprint() { + let fingerprint = super::get_fingerprint("keys/private_key.der").expect("Fingerprint can be generated"); + assert_eq!(fingerprint, "Qra//29Frxbj5hh5Azef+G36SeiOm9q7s8+w8uGLD28"); + } +} diff --git a/clearing-house-app/src/model/constants.rs b/clearing-house-app/src/model/constants.rs new file mode 100644 index 00000000..ff66bc46 --- /dev/null +++ b/clearing-house-app/src/model/constants.rs @@ -0,0 +1,102 @@ +#![allow(dead_code)] + +pub const CONTENT_TYPE: &str = "Content-Type"; +pub const APPLICATION_JSON: &str = "application/json"; +pub const SIGNING_KEY: &str = "signing_key"; + +pub const CLEARING_HOUSE_URL: &str = "clearing_house_url"; +pub const ROCKET_CLEARING_HOUSE_BASE_API: &str = "/messages"; +pub const ROCKET_PK_API: &str = "/"; +pub const ROCKET_QUERY_API: &str = "/query"; +pub const ROCKET_LOG_API: &str = "/log"; +pub const ROCKET_BLOCKCHAIN_BASE_API: &str = "/blockchain"; + +// From core_lib + +// definition of daps constants +pub const DAPS_AUD: &str = "idsc:IDS_CONNECTORS_ALL"; +pub const DAPS_JWKS: &str = ".well-known/jwks.json"; +pub const DAPS_KID: &str = "default"; +pub const DAPS_AUTHHEADER: &str = "Authorization"; +pub const DAPS_AUTHBEARER: &str = "Bearer"; +pub const DAPS_CERTIFICATES: &str = "certs"; + +// definition of custom headers +pub const SERVICE_HEADER: &str = "CH-SERVICE"; + +// definition of config parameters (in config files) +pub const DATABASE_URL: &str = "database_url"; +pub const DOCUMENT_API_URL: &str = "document_api_url"; +pub const KEYRING_API_URL: &str = "keyring_api_url"; +pub const DAPS_API_URL: &str = "daps_api_url"; +pub const CLEAR_DB: &str = "clear_db"; + +// define here the config options from environment variables +pub const ENV_API_LOG_LEVEL: &str = "API_LOG_LEVEL"; +pub const ENV_SHARED_SECRET: &str = "SHARED_SECRET"; +pub const ENV_DOCUMENT_SERVICE_ID: &str = "SERVICE_ID_DOC"; +pub const ENV_KEYRING_SERVICE_ID: &str = "SERVICE_ID_KEY"; +pub const ENV_LOGGING_SERVICE_ID: &str = "SERVICE_ID_LOG"; + +// definition of rocket mount points +pub const ROCKET_DOC_API: &str = "/doc"; +pub const ROCKET_DOC_TYPE_API: &str = "/doctype"; +pub const ROCKET_POLICY_API: &str = "/policy"; +pub const ROCKET_STATISTICS: &str = "/statistics"; +pub const ROCKET_PROCESS_API: &str = "/process"; +pub const ROCKET_KEYRING_API: &str = "/keyring"; +pub const ROCKET_USER_API: &str = "/users"; + +// definition of service names +pub const DOCUMENT_DB_CLIENT: &str = "document-api"; +pub const KEYRING_DB_CLIENT: &str = "keyring-api"; +pub const PROCESS_DB_CLIENT: &str = "logging-service"; + +// definition of table names +pub const MONGO_DB: &str = "ch_ids"; +pub const DOCUMENT_DB: &str = "document"; +pub const KEYRING_DB: &str = "keyring"; +pub const PROCESS_DB: &str = "process"; +pub const MONGO_COLL_DOCUMENTS: &str = "documents"; +pub const MONGO_COLL_DOCUMENT_BUCKET: &str = "document_bucket"; +pub const MONGO_COLL_DOC_TYPES: &str = "doc_types"; +pub const MONGO_COLL_DOC_PARTS: &str = "parts"; +pub const MONGO_COLL_PROCESSES: &str = "processes"; +pub const MONGO_COLL_TRANSACTIONS: &str = "transactions"; +pub const MONGO_COLL_MASTER_KEY: &str = "keys"; + +// definition of database fields +pub const MONGO_ID: 
&str = "id"; +pub const MONGO_MKEY: &str = "msk"; +pub const MONGO_PID: &str = "pid"; +pub const MONGO_DT_ID: &str = "dt_id"; +pub const MONGO_NAME: &str = "name"; +pub const MONGO_OWNER: &str = "owner"; +pub const MONGO_TS: &str = "ts"; +pub const MONGO_TC: &str = "tc"; + +pub const MONGO_DOC_ARRAY: &str = "documents"; +pub const MONGO_COUNTER: &str = "counter"; +pub const MONGO_FROM_TS: &str = "from_ts"; +pub const MONGO_TO_TS: &str = "to_ts"; + +// definition of default database values +pub const DEFAULT_PROCESS_ID: &str = "default"; +pub const MAX_NUM_RESPONSE_ENTRIES: u64 = 1000; +pub const DEFAULT_NUM_RESPONSE_ENTRIES: u64 = 100; + +pub const DEFAULT_DOC_TYPE: &str = "IDS_MESSAGE"; + +// split string symbols for vec_to_string and string_to_vec +pub const SPLIT_QUOTE: &str = "'"; +pub const SPLIT_SIGN: &str = "~"; +pub const SPLIT_CT: &str = "::"; + +// definition of file names and folders +pub const FOLDER_DB: &str = "db_init"; +pub const FOLDER_DATA: &str = "data"; +pub const FILE_DOC: &str = "document.json"; +pub const FILE_DEFAULT_DOC_TYPE: &str = "init_db/default_doc_type.json"; + +// definition of special document parts +pub const PAYLOAD_PART: &str = "payload"; diff --git a/clearing-house-app/src/model/document.rs b/clearing-house-app/src/model/document.rs new file mode 100644 index 00000000..0c655b13 --- /dev/null +++ b/clearing-house-app/src/model/document.rs @@ -0,0 +1,29 @@ +use chrono::Local; + +use crate::model::ids::message::IdsMessage; + +#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)] +pub struct Document { + /// Document id + pub id: uuid::Uuid, + /// Process ID + pub pid: String, + /// timestamp: unix timestamp + pub ts: chrono::DateTime, + /// Content of the document + pub content: IdsMessage, +} + +/// Documents should have a globally unique id, setting the id manually is discouraged. +impl Document { + + #[must_use] + pub fn new(pid: String, content: IdsMessage) -> Self { + Self { + id: uuid::Uuid::new_v4(), + pid, + ts: Local::now(), + content, + } + } +} diff --git a/clearing-house-app/src/model/ids/message.rs b/clearing-house-app/src/model/ids/message.rs new file mode 100644 index 00000000..8b8e16c2 --- /dev/null +++ b/clearing-house-app/src/model/ids/message.rs @@ -0,0 +1,194 @@ +use crate::model::document::Document; +use crate::model::ids::{InfoModelDateTime, InfoModelId, MessageType, SecurityToken}; +use std::collections::HashMap; + +/// Metadata describing payload exchanged by interacting Connectors. +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct IdsMessage { + //IDS name + #[serde(rename = "@context")] + // random id without context + pub context: Option>, + //IDS name + #[serde(rename = "@type")] + // random id without context + pub type_message: MessageType, + //IDS id name + #[serde(rename = "@id", alias = "id", skip_serializing_if = "Option::is_none")] + // random id without context + pub id: Option, + //skip for IDS + #[serde(skip)] + // process id + pub pid: Option, + /// Version of the Information Model against which the Message should be interpreted + #[serde(rename = "ids:modelVersion", alias = "modelVersion")] + pub model_version: String, + /// Correlated message, e.g., response to a previous message. 
Value: URI of the correlatedMessage + #[serde( + rename = "ids:correlationMessage", + alias = "correlationMessage", + skip_serializing_if = "Option::is_none" + )] + pub correlation_message: Option, + /// Date of issuing the Message + #[serde(rename = "ids:issued", alias = "issued")] + pub issued: InfoModelDateTime, + #[serde(rename = "ids:issuerConnector", alias = "issuerConnector")] + /// Origin Connector of the message. Value: URI of origin Connector + pub issuer_connector: InfoModelId, + /// Agent, which initiated the message. Value: URI of an instance of ids:Agent. + #[serde(rename = "ids:senderAgent", alias = "senderAgent")] + pub sender_agent: String, + /// Target Connector. Value: URI of target Connector. Can have multiple values at the same time. + #[serde( + rename = "ids:recipientConnector", + alias = "recipientConnector", + skip_serializing_if = "Option::is_none" + )] + pub recipient_connector: Option>, + /// Agent, for which the message is intended. Value: URI of an instance of ids:Agent. Can have multiple values at the same time + #[serde( + rename = "ids:recipientAgent", + alias = "recipientAgent", + skip_serializing_if = "Option::is_none" + )] + pub recipient_agent: Option>, + /// Contract which is (or will be) the legal basis of the data transfer. Value: Instance of class ids:Contract. + #[serde( + rename = "ids:transferContract", + alias = "transferContract", + skip_serializing_if = "Option::is_none" + )] + pub transfer_contract: Option, + /// Value describing the version of the content. Value: Version number of the content. + #[serde( + rename = "ids:contentVersion", + alias = "contentVersion", + skip_serializing_if = "Option::is_none" + )] + pub content_version: Option, + /// Token representing a claim, that the sender supports a certain security profile. Value: Instance of ids:DynamicAttributeToken. + #[serde( + rename = "ids:securityToken", + alias = "securityToken", + skip_serializing + )] + pub security_token: Option, + /// An authorization token. The token can be issued from the Connector of the Data Provider (A) to the Connector of the + /// Data Consumer (B). Can be used to avoid full authentication via DAPS, if Connector B wants to access the data of + /// Connector A. 
Value: Instance of ids:Token + #[serde( + rename = "ids:authorizationToken", + alias = "authorizationToken", + skip_serializing_if = "Option::is_none" + )] + pub authorization_token: Option, + //IDS name + #[serde(skip_serializing_if = "Option::is_none")] + // Authorization + pub payload: Option, + //IDS name + #[serde(skip_serializing_if = "Option::is_none")] + // Authorization + pub payload_type: Option, +} + +impl Default for IdsMessage { + fn default() -> Self { + IdsMessage { + context: Some(std::collections::HashMap::from([ + ("ids".to_string(), "https://w3id.org/idsa/core/".to_string()), + ( + "idsc".to_string(), + "https://w3id.org/idsa/code/".to_string(), + ), + ])), + type_message: MessageType::Message, + id: Some(autogen("MessageProcessedNotification")), + pid: None, + model_version: String::new(), + correlation_message: None, + issued: InfoModelDateTime::default(), + issuer_connector: InfoModelId::new(String::new()), + sender_agent: "https://w3id.org/idsa/core/ClearingHouse".to_string(), + recipient_connector: None, + recipient_agent: None, + transfer_contract: None, + content_version: None, + security_token: None, + authorization_token: None, + payload: None, + payload_type: None, + } + } +} + +/// Conversion from `Document` to `IdsMessage` +/// +/// note: Documents are converted into `LogMessage`'s. The `LogMessage` contains +/// the `payload` and `payload_type`, which is the data that was stored previously. +/// All other fields of the `LogMessage` are `metadata` about the logging, e.g. +/// when the message was logged, etc. +/// +/// metadata that we also need to store +/// - `message_id` +/// - `pid` +/// - `model_version` +/// - `correlation_message` +/// - `issued` +/// - `issuer_connector` +/// - `sender_agent` +/// - `transfer_contract` +/// - `content_version` +/// - `security_token` +/// - `authorization_token` +/// - `payload` +/// - `payload_type` +impl From for IdsMessage { + fn from(doc: Document) -> Self { + doc.content.clone() + } +} + +/// Conversion from `IdsMessage` to `Document` +/// +/// most important part to store: +/// `payload` and `payload_type` +/// +/// metadata that we also need to store +/// - `message_id` +/// - `pid` +/// - `model_version` +/// - `correlation_message` +/// - `issued` +/// - `issuer_connector` +/// - `sender_agent` +/// - `transfer_contract` +/// - `content_version` +/// - `security_token` +/// - `authorization_token` +/// - `payload` +/// - `payload_type` +impl From for Document { + fn from(value: IdsMessage) -> Self { + let mut m = value.clone(); + + m.id = Some(m.id.unwrap_or_else(|| autogen("Message"))); + + // Remove security tokens to protect against impersonation of other owners of the same process + m.security_token = None; + m.authorization_token = None; + + Document::new(m.pid.clone().expect("Missing pid"), m) + } +} + +#[inline] +fn autogen(message: &str) -> String { + format!( + "https://w3id.org/idsa/autogen/{}/{}", + message, + uuid::Uuid::new_v4() + ) +} diff --git a/clearing-house-app/src/model/ids/mod.rs b/clearing-house-app/src/model/ids/mod.rs new file mode 100644 index 00000000..0cb81623 --- /dev/null +++ b/clearing-house-app/src/model/ids/mod.rs @@ -0,0 +1,399 @@ +use crate::model::ids::message::IdsMessage; + +pub mod message; +pub mod request; + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)] +pub struct InfoModelComplexId { + /// IDS name + #[serde(rename = "@id", alias = "id", skip_serializing_if = "Option::is_none")] + /// Correlated message, e.g. 
a response to a previous request + pub id: Option, +} + +impl std::fmt::Display for InfoModelComplexId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use serde::ser::Error; + + match &self.id { + Some(id) => write!( + f, + "{}", + serde_json::to_string(id).map_err(|e| std::fmt::Error::custom(format!( + "JSON serialization failed: {e}" + )))? + ), + None => write!(f, ""), + } + } +} + +impl InfoModelComplexId { + #[must_use] + pub fn new(id: String) -> InfoModelComplexId { + InfoModelComplexId { id: Some(id) } + } +} + +impl From for InfoModelComplexId { + fn from(id: String) -> InfoModelComplexId { + InfoModelComplexId::new(id) + } +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)] +#[serde(untagged)] +pub enum InfoModelId { + SimpleId(String), + ComplexId(InfoModelComplexId), +} + +impl InfoModelId { + #[must_use] + pub fn new(id: String) -> InfoModelId { + InfoModelId::SimpleId(id) + } +} + +impl std::fmt::Display for InfoModelId { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + InfoModelId::SimpleId(id) => fmt.write_str(id)?, + InfoModelId::ComplexId(id) => fmt.write_str(&id.to_string())?, + } + Ok(()) + } +} + +impl From for InfoModelId { + fn from(id: String) -> InfoModelId { + InfoModelId::SimpleId(id) + } +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)] +#[serde(untagged)] +pub enum InfoModelDateTime { + ComplexTime(InfoModelTimeStamp), + Time(chrono::DateTime), +} + +impl Default for InfoModelDateTime { + fn default() -> InfoModelDateTime { + InfoModelDateTime::Time(chrono::Local::now()) + } +} + +impl std::fmt::Display for InfoModelDateTime { + fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + InfoModelDateTime::Time(value) => fmt.write_str(&value.to_string())?, + InfoModelDateTime::ComplexTime(value) => fmt.write_str(&value.to_string())?, + } + Ok(()) + } +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)] +pub struct InfoModelTimeStamp { + //IDS name + #[serde( + rename = "@type", + alias = "type", + skip_serializing_if = "Option::is_none" + )] + pub format: Option, + //IDS name + #[serde(rename = "@value", alias = "value")] + pub value: chrono::DateTime, +} + +impl Default for InfoModelTimeStamp { + fn default() -> Self { + InfoModelTimeStamp { + format: Some("http://www.w3.org/2001/XMLSchema#dateTimeStamp".to_string()), + value: chrono::Local::now(), + } + } +} + +impl std::fmt::Display for InfoModelTimeStamp { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match serde_json::to_string(&self) { + Ok(result) => write!(f, "{result}"), + Err(e) => { + error!("could not convert DateTimeStamp to json: {}", e); + write!(f, "") + } + } + } +} + +/** +There are three Subclasses of the abstract ids:Message class. Namely the ids:RequestMessage, ids:ResponseMessage +and ids:NotificationMessage. Each subclass itself has subclasses that fulfill a specific purpose in the communication process. + +For communication in the IDS, usually the more specific subclasses of the three mentioned ones are used. +The message classes relevant for the Connector to Connector communication are listed below. The entire Collection of Messages +available in the Information Model can be found here. 
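+As a quick sketch of what this means on the wire (relying only on the serde
+renames declared below), a unit variant serializes to its IDS type string:
+
+```ignore
+assert_eq!(
+    serde_json::to_string(&MessageType::LogMessage).unwrap(),
+    r#""ids:LogMessage""#
+);
+```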
+ +Based on [v4.2.0](https://github.com/International-Data-Spaces-Association/InformationModel/blob/v4.2.0/taxonomies/Message.ttl) + */ +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)] +pub enum MessageType { + #[serde(rename = "ids:Message")] + Message, + + /// ## Basic Message Types: Request, Response, Notification + /// Client-generated message initiating a communication, motivated by a certain reason and with an answer expected. + #[serde(rename = "ids:RequestMessage")] + RequestMessage, + /// Response messages hold information about the reaction of a recipient to a formerly sent command or event. They must be correlated to this message. + #[serde(rename = "ids:ResponseMessage")] + ResponseMessage, + /// Event messages are informative and no response is expected by the sender. + #[serde(rename = "ids:NotificationMessage")] + NotificationMessage, + + /// ## Core IDS Messages + /// Command messages are usually sent when a response is expected by the sender. Changes state on the recipient side. Therefore, commands are not 'safe' in the sense of REST. + #[serde(rename = "ids:CommandMessage")] + CommandMessage, + /// Result messages are intended to annotate the results of a query command. + #[serde(rename = "ids:ResultMessage")] + ResultMessage, + /// Rejection messages are specialized response messages that notify the sender of a message that processing of this message has failed. + #[serde(rename = "ids:RejectionMessage")] + RejectionMessage, + + /// ## Self-description + /// Message requesting metadata. If no URI is supplied via the ids:requestedElement field, this message is treated like a self-description request and the recipient should return its self-description via an ids:DescriptionResponseMessage. However, if a URI is supplied, the Connector should either return metadata about the requested element via an ids:DescriptionResponseMessage, or send an ids:RejectionMessage, e.g., because the element was not found. + #[serde(rename = "ids:DescriptionRequestMessage")] + DescriptionRequestMessage, + /// Message containing the metadata, which a Connector previously requested via the ids:DescriptionRequestMessage, in its payload. + #[serde(rename = "ids:DescriptionResponseMessage")] + DescriptionResponseMessage, + + /// ## Connector-related Messages + /// Superclass of all messages, indicating a change of a connector's conditions. + #[serde(rename = "ids:ConnectorNotificationMessage")] + ConnectorNotificationMessage, + /// Event notifying the recipient(s) about the availability and current configuration of a connector. The payload of the message must contain the updated connector's self-description. + #[serde(rename = "ids:ConnectorUpdateMessage")] + ConnectorUpdateMessage, + /// Event notifying the recipient(s) that a connector will be unavailable. The same connector may be available again in the future. + #[serde(rename = "ids:ConnectorUnavailableMessage")] + ConnectorUnavailableMessage, + /// Whenever a Connector has been successfully certified by the Certification Body, the Identity Provider can use this message to notify Infrastructure Components. + #[serde(rename = "ids:ConnectorCertificateGrantedMessage")] + ConnectorCertificateGrantedMessage, + /// Indicates that a (previously certified) Connector is no longer certified. This could happen, for instance, if the Certification Body revokes a granted certificate or if the certificate just expires. + #[serde(rename = "ids:ConnectorCertificateRevokedMessage")] + ConnectorCertificateRevokedMessage, + + /// ## Participant-related Messages + /// Superclass of all messages, indicating a change of a participant's conditions. + #[serde(rename = "ids:ParticipantNotificationMessage")] + ParticipantNotificationMessage, + /// Event notifying the recipient(s) about the availability and current description of a participant. The payload of the message must contain the participant's self-description. + #[serde(rename = "ids:ParticipantUpdateMessage")] + ParticipantUpdateMessage, + /// Event notifying the recipient(s) that a participant will be unavailable. The same participant may be available again in the future. + #[serde(rename = "ids:ParticipantUnavailableMessage")] + ParticipantUnavailableMessage, + /// Whenever a Participant has been successfully certified by the Certification Body, the Identity Provider can use this message to notify Infrastructure Components. + #[serde(rename = "ids:ParticipantCertificateGrantedMessage")] + ParticipantCertificateGrantedMessage, + /// Indicates that a (previously certified) Participant is no longer certified. This could happen, for instance, if the Certification Body revokes a granted certificate or if the certificate just expires. + #[serde(rename = "ids:ParticipantCertificateRevokedMessage")] + ParticipantCertificateRevokedMessage, + + /// ## Query related Messages + /// Query message intended to be consumed by a component. + #[serde(rename = "ids:QueryMessage")] + QueryMessage, + /// Class of query languages in which query strings may be formalized. + #[serde(rename = "ids:QueryLanguage")] + QueryLanguage, + /// Class of recipients of a query message, e.g., BROKER, APPSTORE, ANY. + #[serde(rename = "ids:QueryTarget")] + QueryTarget, + + /// ## Contract Negotiation related Messages + /// Message containing a suggested content contract (as offered by the data consumer to the data provider) in the associated payload (which is an instance of ids:ContractRequest). + #[serde(rename = "ids:ContractRequestMessage")] + ContractRequestMessage, + /// Message containing a response to a contract request (of a data consumer) in form of a counter-proposal of a contract in the associated payload (which is an instance of ids:ContractOffer). + #[serde(rename = "ids:ContractResponseMessage")] + ContractResponseMessage, + /// Message containing an offered content contract (as offered by a data provider to the data consumer) in the associated payload (which is an instance of ids:ContractOffer). In contrast to the ids:ContractResponseMessage, the ids:ContractOfferMessage is not related to a previous contract. + #[serde(rename = "ids:ContractOfferMessage")] + ContractOfferMessage, + /// Message containing a contract, as an instance of ids:ContractAgreement, with resource access modalities on which two parties have agreed in the payload. + #[serde(rename = "ids:ContractAgreementMessage")] + ContractAgreementMessage, + /// Message indicating rejection of a contract. + #[serde(rename = "ids:ContractRejectionMessage")] + ContractRejectionMessage, + /// Message containing supplemental information to access resources of a contract (e.g., resource access tokens). + #[serde(rename = "ids:ContractSupplementMessage")] + ContractSupplementMessage, + + /// ## Security-related Messages + /// Message requesting an access token. This is intended for point-to-point communication with, e.g., Brokers.
+ #[serde(rename = "ids:AccessTokenRequestMessage")] + AccessTokenRequestMessage, + /// Response to an access token request, intended for point-to-point communication. + #[serde(rename = "ids:AccessTokenResponseMessage")] + AccessTokenResponseMessage, + + /// ## Resource related messages + /// Superclass of all messages, indicating a change of a resource. + #[serde(rename = "ids:ResourceNotificationMessage")] + ResourceNotificationMessage, + /// Message indicating the availability and current description of a specific resource. The resource must be present in the payload of this message. + #[serde(rename = "ids:ResourceUpdateMessage")] + ResourceUpdateMessage, + /// Message indicating that a specific resource is unavailable. The same resource may be available again in the future. + #[serde(rename = "ids:ResourceUnavailableMessage")] + ResourceUnavailableMessage, + /// Message requesting the recipient to invoke a specific operation. + #[serde(rename = "ids:OperationInvokeMessage")] + OperationInvokeMessage, + /// Notification that a request has been accepted and is being processed. + #[serde(rename = "ids:RequestInProcessMessage")] + RequestInProcessMessage, + /// Notification that a message has been successfully processed (i.e. not ignored or rejected). + #[serde(rename = "ids:MessageProcessedNotificationMessage")] + MessageProcessedNotificationMessage, + /// Message indicating that the result of a former InvokeOperation message is available. May transfer the result data in its associated payload section. + #[serde(rename = "ids:OperationResultMessage")] + OperationResultMessage, + + /// ## Artifact-related Messages + /// Message asking for retrieving the specified Artifact as the payload of an ArtifactResponse message. + #[serde(rename = "ids:ArtifactRequestMessage")] + ArtifactRequestMessage, + /// Message that follows up a RetrieveArtifact Message and contains the Artifact's data in the payload section. + #[serde(rename = "ids:ArtifactResponseMessage")] + ArtifactResponseMessage, + + /// ## Upload Messages + /// Message used to upload a data to a recipient. Payload contains data. + #[serde(rename = "ids:UploadMessage")] + UploadMessage, + /// Message that follows up a UploadMessage and contains the upload confirmation. + #[serde(rename = "ids:UploadResponseMessage")] + UploadResponseMessage, + + /// ## ParIS Messages + /// This class is deprecated. Use ids:DescriptionRequestMessage instead. Message asking for retrieving the specified Participants information as the payload of an ids:ParticipantResponse message. + #[serde(rename = "ids:ParticipantRequestMessage")] + ParticipantRequestMessage, + /// This class is deprecated. Use ids:DescriptionResponseMessage instead. ParticipantResponseMessage follows up a ParticipantRequestMessage and contains the Participant's information in the payload section. + #[serde(rename = "ids:ParticipantResponseMessage")] + ParticipantResponseMessage, + + /// ## Log messaging + /// Log Message which can be used to transfer logs e.g., to the clearing house. + #[serde(rename = "ids:LogMessage")] + LogMessage, + + /// ## App-related Messages + /// Message that asks for registration or update of a data app to the App Store. Payload contains app-related metadata (instance of class ids:AppResource). Message header may contain an app identifier parameter of a prior registered data app. If the app identifier is supplied, the message should be interpreted as a registration for an app update. Otherwise this message is used to register a new app. 
+ #[serde(rename = "ids:AppRegistrationRequestMessage")] + AppRegistrationRequestMessage, + /// Message that follows up an AppRegistrationRequestMessage and contains the app registration confirmation. + #[serde(rename = "ids:AppRegistrationResponseMessage")] + AppRegistrationResponseMessage, + /// Message that usually follows a AppRegistrationResponseMessage and is used to upload a data app to the app store. Payload contains data app. Note that the message must refer to the prior sent, corresponding AppResource instance. The IRI of the ids:appArtifactReference must must match the IRI of the artifact which is the value for the ids:instance property. The ids:instance is specific for each representation. Therefore, if someone wants to upload multiple representations for an app, he has to state them using multiple ids:instance properties inside the AppRepresentation (and therefore inside the AppResource). Otherwise no mapping between payload and app metadata can be achieved. + #[serde(rename = "ids:AppUploadMessage")] + AppUploadMessage, + /// Message that follows up an AppUploadMessage and contains the app upload confimation. + #[serde(rename = "ids:AppUploadResponseMessage")] + AppUploadResponseMessage, + /// Superclass of all messages, indicating a change of a DataApp. Unlike Resource-related Messages, AppNotificationMessages should lead to a state change for an app at the recipient, the AppStore. + #[serde(rename = "ids:AppNotificationMessage")] + AppNotificationMessage, + /// Message indicating that a specific App should be available (again) in the AppStore. + #[serde(rename = "ids:AppAvailableMessage")] + AppAvailableMessage, + /// Message indicating that a specific App should be unavailable in the AppStore. + #[serde(rename = "ids:AppUnavailableMessage")] + AppUnavailableMessage, + /// Message indicating that an App should be deleted from the AppStore. 
+ #[serde(rename = "ids:AppDeleteMessage")] + AppDeleteMessage, + + /// TODO: Not existent in the IDS Information Model + #[serde(rename = "ids:DynamicAttributeToken")] + DAPSToken, + /* + #[serde(rename = "ids:Query")] + Query, + //otherwise + Other, + */ +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct SecurityToken { + //IDS name + #[serde(rename = "@type")] + // random id without context + pub type_message: MessageType, + //IDS name + #[serde(rename = "@id", alias = "id", skip_serializing_if = "Option::is_none")] + pub id: Option, + //IDS name + #[serde(rename = "ids:tokenFormat", alias = "tokenFormat")] + pub token_format: Option, + //IDS name + #[serde(rename = "ids:tokenValue", alias = "tokenValue")] + pub token_value: String, +} + +#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)] +pub struct IdsQueryResult { + pub date_from: String, + pub date_to: String, + pub page: i32, + pub size: i32, + pub order: String, + pub documents: Vec, +} + +impl IdsQueryResult { + /// Create a new `IdsQueryResult` + /// + /// # Panics + /// + /// Panics if the `date_from` or `date_to` seconds are out of reach for `chrono::NaiveDateTime::from_timestamp_opt` + #[must_use] + pub fn new( + date_from: i64, + date_to: i64, + page: Option, + size: Option, + order: String, + documents: Vec, + ) -> IdsQueryResult { + let date_from = chrono::NaiveDateTime::from_timestamp_opt(date_from, 0) + .expect("Invalid date_from seconds") + .format("%Y-%m-%d %H:%M:%S") + .to_string(); + let date_to = chrono::NaiveDateTime::from_timestamp_opt(date_to, 0) + .expect("Invalid date_to seconds") + .format("%Y-%m-%d %H:%M:%S") + .to_string(); + + IdsQueryResult { + date_from, + date_to, + page: page.unwrap_or(-1), + size: size.unwrap_or(-1), + order, + documents, + } + } +} diff --git a/clearing-house-app/src/model/ids/request.rs b/clearing-house-app/src/model/ids/request.rs new file mode 100644 index 00000000..88fa8cfc --- /dev/null +++ b/clearing-house-app/src/model/ids/request.rs @@ -0,0 +1,10 @@ +use crate::model::ids::message::IdsMessage; + +/// IDS Multipart message represented as a JSON struct +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct ClearingHouseMessage { + pub header: IdsMessage, + pub payload: Option, + #[serde(rename = "payloadType")] + pub payload_type: Option, +} diff --git a/clearing-house-app/src/model/mod.rs b/clearing-house-app/src/model/mod.rs new file mode 100644 index 00000000..49428087 --- /dev/null +++ b/clearing-house-app/src/model/mod.rs @@ -0,0 +1,169 @@ +use std::ops::Add; + +pub mod claims; +pub mod constants; +pub(crate) mod document; +pub mod ids; +pub mod process; + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub enum SortingOrder { + #[serde(rename = "asc")] + Ascending, + #[serde(rename = "desc")] + Descending, +} + +/// Time 00:00:00; idiomatic way to create a `chrono::NaiveTime` object +const fn start_of_day() -> chrono::NaiveTime { + if let Some(time) = chrono::NaiveTime::from_hms_opt(0, 0, 0) { + time + } else { + panic!("00:00:00 is a valid time") + } +} + +/// Time 23:59:59; idiomatic way to create a `chrono::NaiveTime` object +const fn end_of_day() -> chrono::NaiveTime { + if let Some(time) = chrono::NaiveTime::from_hms_opt(23, 59, 59) { + time + } else { + panic!("23:59:59 is a valid time") + } +} + +/// Parses a date string into a `chrono::NaiveDateTime` object. If `to_date` is true, the time will be set to 23:59:59, otherwise it is 00:00:00. 
+pub fn parse_date(date: Option, to_date: bool) -> Option { + // If it is a to_date, we want to set the time to 23:59:59, otherwise it is 00:00:00 + let time: chrono::NaiveTime = if to_date { + end_of_day() + } else { + start_of_day() + }; + + match date { + Some(d) => { + debug!("Parsing date: {}", &d); + match chrono::NaiveDate::parse_from_str(&d, "%Y-%m-%d") { + Ok(date) => Some(date.and_time(time)), + Err(e) => { + error!("Parsing date '{d}' failed: {:#?}", e); + None + } + } + } + None => None, + } +} + +/// Validates the provided dates. `date_now` is optional and defaults to `chrono::Local::now().naive_local()`. +/// +/// # Errors +/// +/// Throws an error if `date_from` is `Option::None` and `date_to` is `Option::Some()`. +pub fn validate_and_sanitize_dates( + date_from: Option, + date_to: Option, + date_now: Option, +) -> anyhow::Result<(chrono::NaiveDateTime, chrono::NaiveDateTime)> { + let now = date_now.unwrap_or(chrono::Local::now().naive_local()); + debug!( + "... validating dates: now: {:#?} , from: {:#?} , to: {:#?}", + &now, &date_from, &date_to + ); + + let default_to_date = now.add(chrono::Duration::seconds(1)); + let default_from_date = default_to_date + .date() + .and_time(start_of_day()) + - chrono::Duration::weeks(2); + + match (date_from, date_to) { + (Some(from), None) if from < now => Ok((from, default_to_date)), + (Some(from), Some(to)) if from < now && to <= now && from < to => Ok((from, to)), + (None, None) => Ok((default_from_date, default_to_date)), + _ => Err(anyhow::anyhow!("Invalid date parameters")), + } +} + +#[cfg(test)] +mod test { + use std::ops::Add; + use crate::model::{end_of_day, start_of_day}; + + #[test] + fn validate_and_sanitize_dates() { + // Setup dates for testing + let date_now = chrono::Local::now().naive_local(); + let date_now_midnight = date_now + .date() + .and_time(start_of_day()); + let date_from = date_now_midnight - chrono::Duration::weeks(2); + let date_to = date_now_midnight - chrono::Duration::weeks(1); + + // # Good cases + assert_eq!( + (date_from, date_now.add(chrono::Duration::seconds(1))), + super::validate_and_sanitize_dates(None, None, Some(date_now)) + .expect("Should be valid") + ); + assert_eq!( + (date_from, date_now.add(chrono::Duration::seconds(1))), + super::validate_and_sanitize_dates(Some(date_from), None, Some(date_now)) + .expect("Should be valid") + ); + assert_eq!( + (date_from, date_to), + super::validate_and_sanitize_dates(Some(date_from), Some(date_to), Some(date_now)) + .expect("Should be valid") + ); + assert_eq!( + (date_from, date_to), + super::validate_and_sanitize_dates(Some(date_from), Some(date_to), Some(date_to)) + .expect("Should be valid") + ); + + // # Bad cases + // no to without from not satisfied + assert!(super::validate_and_sanitize_dates(None, Some(date_to), Some(date_now)).is_err()); + // from < now not satisfied + assert!(super::validate_and_sanitize_dates(Some(date_now), None, Some(date_to)).is_err()); + // from < to not satisfied + assert!( + super::validate_and_sanitize_dates(Some(date_to), Some(date_from), Some(date_now)) + .is_err() + ); + // from < to not satisfied + assert!( + super::validate_and_sanitize_dates(Some(date_to), Some(date_to), Some(date_now)) + .is_err() + ); + // to < now not satisfied + assert!( + super::validate_and_sanitize_dates(Some(date_from), Some(date_now), Some(date_to)) + .is_err() + ); + // from < now && to < now not satisfied + assert!( + super::validate_and_sanitize_dates(Some(date_to), Some(date_now), Some(date_from)) + .is_err() + ); + } + + #[test] 
+    fn parse_date() {
+        let wrong_date = Some("2020-13-01".to_string());
+        let valid_date = Some("2020-01-01".to_string());
+        let valid_date_parsed = chrono::NaiveDate::from_ymd_opt(2020, 1, 1).expect("This is valid");
+
+        assert!(super::parse_date(wrong_date, false).is_none());
+        assert_eq!(
+            super::parse_date(valid_date.clone(), false),
+            Some(valid_date_parsed.and_time(start_of_day()))
+        );
+        assert_eq!(
+            super::parse_date(valid_date, true),
+            Some(valid_date_parsed.and_time(end_of_day()))
+        );
+    }
+}
diff --git a/clearing-house-app/src/model/process.rs b/clearing-house-app/src/model/process.rs
new file mode 100644
index 00000000..1f936cb0
--- /dev/null
+++ b/clearing-house-app/src/model/process.rs
@@ -0,0 +1,72 @@
+#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)]
+pub struct Process {
+    pub id: String,
+    pub owners: Vec<String>,
+}
+
+impl Process {
+    #[must_use]
+    pub fn new(id: String, owners: Vec<String>) -> Self {
+        Self { id, owners }
+    }
+
+    #[must_use]
+    pub fn is_authorized(&self, owner: &str) -> bool {
+        self.owners.contains(&owner.to_string())
+    }
+}
+
+#[derive(serde::Serialize, serde::Deserialize)]
+pub struct TransactionCounter {
+    pub tc: i64,
+}
+
+#[derive(serde::Serialize, serde::Deserialize)]
+pub struct OwnerList {
+    pub owners: Vec<String>,
+}
+
+#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)]
+pub struct Receipt {
+    pub data: biscuit::jws::Compact<DataTransaction, biscuit::Empty>,
+}
+
+#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)]
+pub struct DataTransaction {
+    pub timestamp: i64,
+    pub process_id: String,
+    pub document_id: String,
+    pub payload: String,
+    pub client_id: String,
+    pub clearing_house_version: String,
+}
+
+impl biscuit::CompactJson for DataTransaction {}
+
+impl DataTransaction {
+    /// Signs a `DataTransaction` with a given key on the `key_path` and returns a `Receipt`.
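+    ///
+    /// A minimal usage sketch (illustrative; assumes a DER-encoded RSA keypair such as the
+    /// `keys/private_key.der` used by the integration tests):
+    ///
+    /// ```ignore
+    /// let receipt = transaction.sign("keys/private_key.der");
+    /// ```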
+    ///
+    /// # Panics
+    ///
+    /// Panics if the key at `key_path` is not a valid RSA keypair or does not exist
+    pub fn sign(&self, key_path: &str) -> Receipt {
+        let jws = biscuit::jws::Compact::new_decoded(
+            biscuit::jws::Header::from_registered_header(biscuit::jws::RegisteredHeader {
+                algorithm: biscuit::jwa::SignatureAlgorithm::PS512,
+                media_type: None,
+                key_id: crate::model::claims::get_fingerprint(key_path),
+                ..Default::default()
+            }),
+            self.clone(),
+        );
+
+        let keypair = biscuit::jws::Secret::rsa_keypair_from_file(key_path)
+            .unwrap_or_else(|_| panic!("Expected an existing, valid RSA keypair at '{key_path}'"));
+        debug!("decoded JWS:{:#?}", &jws);
+        Receipt {
+            data: jws
+                .into_encoded(&keypair)
+                .expect("Encoded JWS with keypair"),
+        }
+    }
+}
diff --git a/clearing-house-app/src/ports/logging_api.rs b/clearing-house-app/src/ports/logging_api.rs
new file mode 100644
index 00000000..cb60f9a8
--- /dev/null
+++ b/clearing-house-app/src/ports/logging_api.rs
@@ -0,0 +1,135 @@
+use crate::model::claims::ExtractChClaims;
+use crate::{model::claims::get_jwks, model::SortingOrder, AppState};
+use axum::http::StatusCode;
+use biscuit::jwk::JWKSet;
+
+use crate::model::ids::message::IdsMessage;
+use crate::model::ids::request::ClearingHouseMessage;
+use crate::model::ids::IdsQueryResult;
+use crate::model::process::Receipt;
+use crate::services::logging_service::LoggingServiceError;
+
+type LoggingApiResult<T> = super::ApiResult<T, LoggingServiceError>;
+
+async fn log(
+    ExtractChClaims(ch_claims): ExtractChClaims,
+    axum::extract::State(state): axum::extract::State<AppState>,
+    axum::extract::Path(pid): axum::extract::Path<String>,
+    axum::extract::Json(message): axum::extract::Json<ClearingHouseMessage>,
+) -> LoggingApiResult<Receipt> {
+    match state
+        .logging_service
+        .log(ch_claims, state.signing_key_path.as_str(), message, pid)
+        .await
+    {
+        Ok(id) => Ok((StatusCode::CREATED, axum::Json(id))),
+        Err(e) => {
+            error!("Error while logging: {:?}", e);
+            Err(e)
+        }
+    }
+}
+
+#[derive(serde::Serialize)]
+struct CreateProcessResponse {
+    pub pid: String,
+}
+
+async fn create_process(
+    ExtractChClaims(ch_claims): ExtractChClaims,
+    axum::extract::State(state): axum::extract::State<AppState>,
+    axum::extract::Path(pid): axum::extract::Path<String>,
+    axum::extract::Json(message): axum::extract::Json<ClearingHouseMessage>,
+) -> LoggingApiResult<CreateProcessResponse> {
+    match state
+        .logging_service
+        .create_process(ch_claims, message, pid)
+        .await
+    {
+        Ok(id) => Ok((
+            StatusCode::CREATED,
+            axum::Json(CreateProcessResponse { pid: id }),
+        )),
+        Err(e) => {
+            error!("Error while creating process: {:?}", e);
+            Err(e)
+        }
+    }
+}
+
+#[derive(serde::Deserialize)]
+struct QueryParams {
+    pub page: Option<u64>,
+    pub size: Option<u64>,
+    pub sort: Option<SortingOrder>,
+    pub date_to: Option<String>,
+    pub date_from: Option<String>,
+}
+
+async fn query_pid(
+    ExtractChClaims(ch_claims): ExtractChClaims,
+    axum::extract::State(state): axum::extract::State<AppState>,
+    axum::extract::Query(params): axum::extract::Query<QueryParams>,
+    axum::extract::Path(pid): axum::extract::Path<String>,
+    axum::extract::Json(_): axum::extract::Json<ClearingHouseMessage>,
+) -> LoggingApiResult<IdsQueryResult> {
+    match state
+        .logging_service
+        .query_pid(
+            ch_claims,
+            params.page,
+            params.size,
+            params.sort,
+            (params.date_to, params.date_from),
+            pid,
+        )
+        .await
+    {
+        Ok(result) => Ok((StatusCode::OK, axum::Json(result))),
+        Err(e) => {
+            error!("Error while querying: {:?}", e);
+            Err(e)
+        }
+    }
+}
+
+async fn query_id(
+    ExtractChClaims(ch_claims): ExtractChClaims,
+    axum::extract::State(state): axum::extract::State<AppState>,
+    axum::extract::Path((pid, id)): axum::extract::Path<(String, String)>,
+    axum::extract::Json(message): axum::extract::Json<ClearingHouseMessage>,
+) -> LoggingApiResult<IdsMessage> {
+    match state
+        .logging_service
+        .query_id(ch_claims, pid, id, message)
+        .await
+    {
+        Ok(result) => Ok((StatusCode::OK, axum::Json(result))),
+        Err(e) => {
+            error!("Error while querying: {:?}", e);
+            Err(e)
+        }
+    }
+}
+
+async fn get_public_sign_key(
+    axum::extract::State(state): axum::extract::State<AppState>,
+) -> super::ApiResult<JWKSet<biscuit::Empty>, &'static str> {
+    match get_jwks(state.signing_key_path.as_str()) {
+        Some(jwks) => Ok((StatusCode::OK, axum::Json(jwks))),
+        None => Err("Error reading signing key"),
+    }
+}
+
+pub(crate) fn router() -> axum::routing::Router<AppState> {
+    axum::Router::new()
+        .route("/messages/log/:pid", axum::routing::post(log))
+        .route("/process/:pid", axum::routing::post(create_process))
+        .route("/messages/query/:pid", axum::routing::post(query_pid))
+        .route("/messages/query/:pid/:id", axum::routing::post(query_id))
+        .route(
+            "/.well-known/jwks.json",
+            axum::routing::get(get_public_sign_key),
+        )
+}
diff --git a/clearing-house-app/src/ports/mod.rs b/clearing-house-app/src/ports/mod.rs
new file mode 100644
index 00000000..5ebb30bd
--- /dev/null
+++ b/clearing-house-app/src/ports/mod.rs
@@ -0,0 +1,16 @@
+//! # Ports
+//!
+//! This module contains the ports of the logging service. Ports are used to communicate with other
+//! services. In this case, the logging service implements REST API endpoints that expose the
+//! logging service.
+use crate::AppState;
+
+pub(crate) mod logging_api;
+
+/// Router for the logging service
+pub(crate) fn router() -> axum::routing::Router<AppState> {
+    axum::Router::new().merge(logging_api::router())
+}
+
+/// Result type alias for the API
+pub(crate) type ApiResult<T, E> = Result<(axum::http::StatusCode, axum::response::Json<T>), E>;
diff --git a/clearing-house-app/src/services/document_service.rs b/clearing-house-app/src/services/document_service.rs
new file mode 100644
index 00000000..0fc72fc6
--- /dev/null
+++ b/clearing-house-app/src/services/document_service.rs
@@ -0,0 +1,238 @@
+use crate::db::DocumentStore;
+use crate::model::claims::ChClaims;
+use crate::model::constants::{DEFAULT_NUM_RESPONSE_ENTRIES, MAX_NUM_RESPONSE_ENTRIES};
+use crate::model::document::Document;
+use crate::model::{parse_date, validate_and_sanitize_dates, SortingOrder};
+use crate::services::{DocumentReceipt, QueryResult};
+use std::convert::TryFrom;
+
+/// Error type for `DocumentService`
+#[derive(thiserror::Error, Debug)]
+pub enum DocumentServiceError {
+    #[error("Document already exists!")]
+    DocumentAlreadyExists,
+    #[error("Document contains no payload!")]
+    MissingPayload,
+    #[error("Error during database operation: {description}: {source}")]
+    DatabaseError {
+        source: anyhow::Error,
+        description: String,
+    },
+    #[error("Invalid dates in query!")]
+    InvalidDates,
+    #[error("Document not found!")]
+    NotFound,
+}
+
+impl axum::response::IntoResponse for DocumentServiceError {
+    fn into_response(self) -> axum::response::Response {
+        use axum::http::StatusCode;
+        match self {
+            Self::DocumentAlreadyExists | Self::MissingPayload | Self::InvalidDates => {
+                (StatusCode::BAD_REQUEST, self.to_string()).into_response()
+            }
+            Self::DatabaseError {
+                source,
+                description,
+            } => (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                format!("{description}: {source}"),
+            )
+                .into_response(),
+            Self::NotFound => (StatusCode::NOT_FOUND, self.to_string()).into_response(),
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct DocumentService<T> {
+    db: T,
+}
+
+impl<T: DocumentStore> DocumentService<T> {
+    pub fn new(db: T) -> Self {
+        Self { db }
+    }
+
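+    // Construction sketch (illustrative): the service wraps any `DocumentStore`
+    // implementation, e.g.
+    //     let service = DocumentService::new(store);
+    //     let receipt = service.create_enc_document(claims, doc).await?;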
+    #[tracing::instrument(skip_all)]
+    pub(crate) async fn create_enc_document(
+        &self,
+        ch_claims: ChClaims,
+        doc: Document,
+    ) -> Result<DocumentReceipt, DocumentServiceError> {
+        trace!("...user '{:?}'", &ch_claims.client_id);
+        // data validation
+        if doc.content.payload.is_none() {
+            return Err(DocumentServiceError::MissingPayload);
+        }
+
+        // check if doc id already exists
+        if let Ok(true) = self.db.exists_document(&doc.id).await {
+            warn!("Document exists already!");
+            Err(DocumentServiceError::DocumentAlreadyExists)
+        } else {
+            // prepare the success result message
+            let receipt = DocumentReceipt::new(doc.ts, &doc.pid, &doc.id.to_string());
+
+            trace!("storing document ....");
+            // store document
+            match self.db.add_document(doc).await {
+                Ok(_b) => Ok(receipt),
+                Err(e) => {
+                    error!("Error while adding: {:?}", e);
+                    Err(DocumentServiceError::DatabaseError {
+                        source: e,
+                        description: "Error while adding document".to_string(),
+                    })
+                }
+            }
+        }
+    }
+
+    #[tracing::instrument(skip_all)]
+    pub(crate) async fn get_enc_documents_for_pid(
+        &self,
+        ch_claims: ChClaims,
+        page: Option<u64>,
+        size: Option<u64>,
+        sort: Option<SortingOrder>,
+        (date_from, date_to): (Option<String>, Option<String>),
+        pid: String,
+    ) -> Result<QueryResult, DocumentServiceError> {
+        debug!("Trying to retrieve documents for pid '{pid}'...");
+        trace!("...user '{:?}'", &ch_claims.client_id);
+        debug!("...page: {page:?}, size:{size:?} and sort:{sort:?}");
+
+        let sanitized_page = Self::sanitize_page(page);
+        let sanitized_size = Self::sanitize_size(size);
+
+        // Sorting order is already validated and defaults to descending
+        let sanitized_sort = sort.unwrap_or(SortingOrder::Descending);
+
+        // Parsing the dates for duration queries
+        let parsed_date_from = parse_date(date_from, false);
+        let parsed_date_to = parse_date(date_to, true);
+
+        // Validation of dates with various checks. If none are given, the dates default to
+        // date_now (for date_to) and date_now - 2 weeks (for date_from)
+        let Ok((sanitized_date_from, sanitized_date_to)) =
+            validate_and_sanitize_dates(parsed_date_from, parsed_date_to, None)
+        else {
+            debug!("date validation failed!");
+            return Err(DocumentServiceError::InvalidDates);
+        };
+
+        // New behavior: if pages are "invalid", return {}. Do not adjust the page.
+        // Either call db with type filter or without to get cts
+        debug!(
+            "... using pagination with page: {}, size:{} and sort:{:#?}",
+            sanitized_page, sanitized_size, &sanitized_sort
+        );
+
+        let docs = match self
+            .db
+            .get_documents_for_pid(
+                &pid,
+                sanitized_page,
+                sanitized_size,
+                &sanitized_sort,
+                (&sanitized_date_from, &sanitized_date_to),
+            )
+            .await
+        {
+            Ok(docs) => docs,
+            Err(e) => {
+                error!("Error while retrieving document: {:?}", e);
+                return Err(DocumentServiceError::DatabaseError {
+                    source: e,
+                    description: "Error while retrieving document".to_string(),
+                });
+            }
+        };
+
+        let result_size = i32::try_from(sanitized_size).ok();
+        let result_page = i32::try_from(sanitized_page).ok();
+        let result_sort = match sanitized_sort {
+            SortingOrder::Ascending => String::from("asc"),
+            SortingOrder::Descending => String::from("desc"),
+        };
+
+        let mut result = QueryResult::new(
+            sanitized_date_from.timestamp(),
+            sanitized_date_to.timestamp(),
+            result_page,
+            result_size,
+            result_sort,
+            vec![],
+        );
+
+        // The db might contain no documents, in which case we get an empty vector
+        if docs.is_empty() {
+            debug!("Queried empty pid: {}", &pid);
+            Ok(result)
+        } else {
+            result.documents = docs;
+            Ok(result)
+        }
+    }
+
+    #[tracing::instrument(skip_all)]
+    pub(crate) async fn get_enc_document(
+        &self,
+        ch_claims: ChClaims,
+        pid: String,
+        id: String,
+        hash: Option<String>,
+    ) -> Result<Document, DocumentServiceError> {
+        trace!("...user '{:?}'", &ch_claims.client_id);
+        trace!("trying to retrieve document with id '{id}' for pid '{pid}'");
+        if let Some(hash) = hash {
+            debug!("integrity check with hash: {}", hash);
+        }
+
+        match self.db.get_document(&id, &pid).await {
+            Ok(Some(ct)) => Ok(ct),
+            Ok(None) => {
+                debug!("Nothing found in db!");
+                Err(DocumentServiceError::NotFound) // NotFound
+            }
+            Err(e) => {
+                error!("Error while retrieving document: {:?}", e);
+                Err(DocumentServiceError::DatabaseError {
+                    source: e,
+                    description: "Error while retrieving document".to_string(),
+                })
+            }
+        }
+    }
+
+    #[inline]
+    fn sanitize_page(page: Option<u64>) -> u64 {
+        // Parameter validation for pagination:
+        // Valid pages start from 1
+        match page {
+            Some(p) => {
+                if p > 0 {
+                    p
+                } else {
+                    warn!("...invalid page requested. Falling back to 1.");
+                    1
+                }
+            }
+            None => 1,
+        }
+    }
+
+    #[inline]
+    fn sanitize_size(size: Option<u64>) -> u64 {
+        // Valid sizes are between 1 and MAX_NUM_RESPONSE_ENTRIES (1000)
+        match size {
+            Some(s) => {
+                if s > 0 && s <= MAX_NUM_RESPONSE_ENTRIES {
+                    s
+                } else {
+                    warn!("...invalid size requested. Falling back to default.");
+                    DEFAULT_NUM_RESPONSE_ENTRIES
+                }
+            }
+            None => DEFAULT_NUM_RESPONSE_ENTRIES,
+        }
+    }
+}
diff --git a/clearing-house-app/src/services/logging_service.rs b/clearing-house-app/src/services/logging_service.rs
new file mode 100644
index 00000000..40fa1b39
--- /dev/null
+++ b/clearing-house-app/src/services/logging_service.rs
@@ -0,0 +1,354 @@
+use crate::model::{
+    claims::ChClaims,
+    constants::{DEFAULT_NUM_RESPONSE_ENTRIES, DEFAULT_PROCESS_ID, MAX_NUM_RESPONSE_ENTRIES},
+    {document::Document, process::Process, SortingOrder},
+};
+use std::sync::Arc;
+
+use crate::db::{DocumentStore, ProcessStore};
+use crate::model::{
+    ids::{message::IdsMessage, request::ClearingHouseMessage, IdsQueryResult},
+    process::{DataTransaction, OwnerList, Receipt},
+};
+use crate::services::document_service::DocumentService;
+
+/// Error type for `LoggingService`
+#[derive(Debug, thiserror::Error)]
+pub enum LoggingServiceError {
+    #[error("Received empty payload, which cannot be logged!")]
+    EmptyPayloadReceived,
+    #[error("Accessing default PID is not allowed!")]
+    AttemptedAccessToDefaultPid,
+    #[error("Error during database operation: {description}: {source}")]
+    DatabaseError {
+        source: anyhow::Error,
+        description: String,
+    },
+    #[error("User not authorized!")]
+    UserNotAuthorized,
+    #[error("Invalid request received!")]
+    InvalidRequest,
+    #[error("Process already exists!")]
+    ProcessAlreadyExists,
+    #[error("Process '{0}' does not exist!")]
+    ProcessDoesNotExist(String),
+    #[error("Parsing error: {0}")]
+    ParsingError(#[from] serde_json::Error),
+    #[error("DocumentService error: {0}")]
+    DocumentServiceError(#[from] crate::services::document_service::DocumentServiceError),
+}
+
+impl axum::response::IntoResponse for LoggingServiceError {
+    fn into_response(self) -> axum::response::Response {
+        use axum::http::StatusCode;
+        match self {
+            Self::EmptyPayloadReceived
+            | Self::AttemptedAccessToDefaultPid
+            | Self::InvalidRequest
+            | Self::ProcessAlreadyExists
+            | Self::ParsingError(_) => {
+                (StatusCode::BAD_REQUEST, self.to_string()).into_response()
+            }
+            Self::DatabaseError {
+                source,
+                description,
+            } => (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                format!("{description}: {source}"),
+            )
+                .into_response(),
+            Self::UserNotAuthorized => (StatusCode::FORBIDDEN, self.to_string()).into_response(),
+            Self::ProcessDoesNotExist(_) => {
+                (StatusCode::NOT_FOUND, self.to_string()).into_response()
+            }
+            Self::DocumentServiceError(e) => e.into_response(),
+        }
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct LoggingService<T, S> {
+    db: T,
+    doc_api: Arc<DocumentService<S>>,
+}
+
+impl<T: ProcessStore, S: DocumentStore> LoggingService<T, S> {
+    pub fn new(db: T, doc_api: Arc<DocumentService<S>>) -> LoggingService<T, S> {
+        LoggingService { db, doc_api }
+    }
+
+    pub async fn log(
+        &self,
+        ch_claims: ChClaims,
+        key_path: &str,
+        msg: ClearingHouseMessage,
+        pid: String,
+    ) -> Result<Receipt, LoggingServiceError> {
+        trace!("...user '{}'", &ch_claims.client_id);
+        let user = &ch_claims.client_id;
+        // Add non-InfoModel information to IdsMessage
+        let mut m = msg.header;
+        m.payload = msg.payload;
+        m.payload_type = msg.payload_type;
+        m.pid = Some(pid.clone());
+
+        // Check for default process id
+        Self::check_for_default_pid(&pid)?;
+
+        // validate that there is a payload
+        let payload = match m.payload.clone() {
+            Some(p) if !p.trim().is_empty() => Ok(p),
+            _ => {
+                error!("Trying to log an empty payload!");
+                Err(LoggingServiceError::EmptyPayloadReceived) // BadRequest
+            }
+        }?;
+
+        // Check if process exists and if the user is authorized to access the process
+        match self.get_process_and_check_authorized(&pid,
user).await + { + Err(LoggingServiceError::ProcessDoesNotExist(_)) => { + // convenience: if process does not exist, we create it but only if no error occurred before + info!("Requested pid '{}' does not exist. Creating...", &pid); + // create a new process + let new_process = Process::new(pid.clone(), vec![user.clone()]); + + if let Err(e) = self.db.store_process(new_process).await { + error!("Error while creating process '{}'", & pid); + return Err(LoggingServiceError::DatabaseError { + source: e, + description: "Creating process failed".to_string(), + }); // InternalError + } + } + Err(e) => { + warn!("Error while checking process: {:?}", e); + return Err(e); + } + Ok(_) => {} + } + + // transform message to document + debug!("transforming message to document..."); + let doc: Document = m.into(); + + debug!("Storing document..."); + match self + .doc_api + .create_enc_document(ChClaims::new(user), doc.clone()) + .await + { + Ok(doc_receipt) => { + debug!("Creating receipt..."); + let transaction = DataTransaction { + timestamp: doc_receipt.timestamp, + process_id: doc_receipt.pid, + document_id: doc_receipt.doc_id, + payload, + client_id: user.to_owned(), + clearing_house_version: env!("CARGO_PKG_VERSION").to_string(), + }; + debug!("...done. Signing receipt..."); + Ok(transaction.sign(key_path)) + } + Err(e) => { + error!("Error while creating document: {:?}", e); + Err(LoggingServiceError::DocumentServiceError(e)) + } + } + } + + pub(crate) async fn create_process( + &self, + ch_claims: ChClaims, + msg: ClearingHouseMessage, + pid: String, + ) -> Result { + let mut m = msg.header; + m.payload = msg.payload; + m.payload_type = msg.payload_type; + + trace!("...user '{:?}'", &ch_claims.client_id); + let user = &ch_claims.client_id; + + // Check for default process id + Self::check_for_default_pid(&pid)?; + + // validate payload + let mut owners = vec![user.clone()]; + match m.payload { + Some(ref payload) if !payload.is_empty() => { + trace!("OwnerList: '{:#?}'", &payload); + match serde_json::from_str::(payload) { + Ok(owner_list) => { + for o in owner_list.owners { + if !owners.contains(&o) { + owners.push(o); + } + } + } + Err(e) => { + error!("Could not parse OwnerList '{payload}' for pid '{pid}': {e}"); + return Err(LoggingServiceError::InvalidRequest); // BadRequest + } + }; + } + _ => {} + }; + + // check if the pid already exists + match self.db.get_process(&pid).await { + Ok(Some(p)) => { + warn!("Requested pid '{}' already exists.", &p.id); + if p.owners.contains(user) { + Err(LoggingServiceError::ProcessAlreadyExists) // BadRequest + } else { + Err(LoggingServiceError::UserNotAuthorized) // Forbidden + } + } + Ok(None) => { + info!("Requested pid '{}' will have {} owners", &pid, owners.len()); + + // create process + info!("Requested pid '{}' does not exist. 
Creating...", &pid);
+                let new_process = Process::new(pid.clone(), owners);
+
+                match self.db.store_process(new_process).await {
+                    Ok(()) => Ok(pid.clone()),
+                    Err(e) => {
+                        error!("Error while creating process '{}': {}", &pid, e);
+                        Err(LoggingServiceError::DatabaseError {
+                            source: e,
+                            description: "Creating process failed".to_string(),
+                        }) // InternalError
+                    }
+                }
+            }
+            Err(e) => Err(LoggingServiceError::DatabaseError {
+                source: e,
+                description: "Error while getting process".to_string(),
+            }),
+        }
+    }
+
+    pub(crate) async fn query_pid(
+        &self,
+        ch_claims: ChClaims,
+        page: Option<u64>,
+        size: Option<u64>,
+        sort: Option<SortingOrder>,
+        (date_to, date_from): (Option<String>, Option<String>),
+        pid: String,
+    ) -> Result<IdsQueryResult, LoggingServiceError> {
+        debug!("page: {:#?}, size:{:#?} and sort:{:#?}", page, size, sort);
+
+        trace!("...user '{}'", &ch_claims.client_id);
+        let user = &ch_claims.client_id;
+
+        // Check if process exists and if the user is authorized to access the process
+        self.get_process_and_check_authorized(&pid, user).await?;
+
+        let sanitized_page = page.unwrap_or(1);
+        let sanitized_size = match size {
+            Some(s) => s.min(MAX_NUM_RESPONSE_ENTRIES),
+            None => DEFAULT_NUM_RESPONSE_ENTRIES,
+        };
+
+        let sanitized_sort = sort.unwrap_or(SortingOrder::Descending);
+
+        match self
+            .doc_api
+            .get_enc_documents_for_pid(
+                ChClaims::new(user),
+                Some(sanitized_page),
+                Some(sanitized_size),
+                Some(sanitized_sort),
+                (date_from, date_to),
+                pid.clone(),
+            )
+            .await
+        {
+            Ok(r) => {
+                let messages: Vec<IdsMessage> = r
+                    .documents
+                    .iter()
+                    .map(|d| IdsMessage::from(d.clone()))
+                    .collect();
+                let result =
+                    IdsQueryResult::new(r.date_from, r.date_to, r.page, r.size, r.order, messages);
+                Ok(result)
+            }
+            Err(e) => {
+                error!("Error while retrieving message: {:?}", e);
+                Err(LoggingServiceError::DocumentServiceError(e))
+            }
+        }
+    }
+
+    /// Query a single message by its `id` and `pid`
+    ///
+    /// `_message` is required because the `ClearingHouseMessage` as request body is required by the route
+    #[allow(clippy::no_effect_underscore_binding)]
+    pub(crate) async fn query_id(
+        &self,
+        ch_claims: ChClaims,
+        pid: String,
+        id: String,
+        _message: ClearingHouseMessage,
+    ) -> Result<IdsMessage, LoggingServiceError> {
+        trace!("...user '{}'", &ch_claims.client_id);
+        let user = &ch_claims.client_id;
+
+        // Check if process exists and if the user is authorized to access the process
+        self.get_process_and_check_authorized(&pid, user).await?;
+
+        match self
+            .doc_api
+            .get_enc_document(ChClaims::new(user), pid.clone(), id.clone(), None)
+            .await
+        {
+            Ok(doc) => {
+                // transform document to IDS message
+                let queried_message = IdsMessage::from(doc);
+                Ok(queried_message)
+            }
+            Err(e) => {
+                error!("Error while retrieving message: {:?}", e);
+                Err(LoggingServiceError::DocumentServiceError(e))
+            }
+        }
+    }
+
+    /// Checks if the given pid is the default pid
+    fn check_for_default_pid(pid: &str) -> Result<(), LoggingServiceError> {
+        // Check for default process id
+        if DEFAULT_PROCESS_ID.eq(pid) {
+            warn!("Log to default pid '{}' not allowed", DEFAULT_PROCESS_ID);
+            Err(LoggingServiceError::AttemptedAccessToDefaultPid)
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Checks if a process exists and the user is authorized to access the process
+    async fn get_process_and_check_authorized(
+        &self,
+        pid: &String,
+        user: &str,
+    ) -> Result<Process, LoggingServiceError> {
+        match self.db.get_process(pid).await {
+            Ok(Some(p)) if !p.is_authorized(user) => {
+                warn!("User is not authorized to read from pid '{}'", &pid);
+                Err(LoggingServiceError::UserNotAuthorized)
+            }
+            Ok(Some(p)) => {
+                info!("User authorized.");
+                Ok(p)
+            }
+            Ok(None) => Err(LoggingServiceError::ProcessDoesNotExist(pid.clone())),
+            Err(e) => {
+                error!("Error while getting process '{}': {}", &pid, e);
+                Err(LoggingServiceError::DatabaseError {
+                    source: e,
+                    description: "Getting process failed".to_string(),
+                })
+            }
+        }
+    }
+}
diff --git a/clearing-house-app/src/services/mod.rs b/clearing-house-app/src/services/mod.rs
new file mode 100644
index 00000000..c5595d56
--- /dev/null
+++ b/clearing-house-app/src/services/mod.rs
@@ -0,0 +1,61 @@
+//! # Services
+//!
+//! This module contains the Application Services that are used by the API Controllers. It is
+//! responsible for the business logic of the application. The services are used by the API
+//! Controllers to handle the requests and responses.
+//!
+use crate::model::document::Document;
+
+pub(crate) mod document_service;
+pub(crate) mod logging_service;
+
+#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)]
+pub struct DocumentReceipt {
+    pub timestamp: i64,
+    pub pid: String,
+    pub doc_id: String,
+}
+
+impl DocumentReceipt {
+    pub fn new(
+        timestamp: chrono::DateTime<chrono::Utc>,
+        pid: &str,
+        doc_id: &str,
+    ) -> DocumentReceipt {
+        DocumentReceipt {
+            timestamp: timestamp.timestamp(),
+            pid: pid.to_string(),
+            doc_id: doc_id.to_string(),
+        }
+    }
+}
+
+#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)]
+pub struct QueryResult {
+    pub date_from: i64,
+    pub date_to: i64,
+    pub page: Option<i32>,
+    pub size: Option<i32>,
+    pub order: String,
+    pub documents: Vec<Document>,
+}
+
+impl QueryResult {
+    pub fn new(
+        date_from: i64,
+        date_to: i64,
+        page: Option<i32>,
+        size: Option<i32>,
+        order: String,
+        documents: Vec<Document>,
+    ) -> QueryResult {
+        QueryResult {
+            date_from,
+            date_to,
+            page,
+            size,
+            order,
+            documents,
+        }
+    }
+}
diff --git a/clearing-house-app/src/util.rs b/clearing-house-app/src/util.rs
new file mode 100644
index 00000000..a43e2ba9
--- /dev/null
+++ b/clearing-house-app/src/util.rs
@@ -0,0 +1,77 @@
+use crate::model::claims::get_fingerprint;
+
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct ServiceConfig {
+    pub service_id: String,
+}
+
+pub(super) fn init_service_config(service_id: &str) -> anyhow::Result<ServiceConfig> {
+    match std::env::var(service_id) {
+        Ok(id) => Ok(ServiceConfig { service_id: id }),
+        Err(_e) => {
+            anyhow::bail!(
+                "Service ID not configured. Please configure environment variable {}",
+                &service_id
+            );
+        }
+    }
+}
+
+pub(super) fn init_signing_key(signing_key_path: Option<&str>) -> anyhow::Result<String> {
+    let private_key_path = signing_key_path.unwrap_or("keys/private_key.der");
+    if std::path::Path::new(&private_key_path).exists()
+        && get_fingerprint(private_key_path).is_some()
+    {
+        Ok(private_key_path.to_string())
+    } else {
+        anyhow::bail!("Signing key not found! Aborting startup! Please configure signing_key!");
+    }
+}
+
+/// Signal handler to catch a Ctrl+C and initiate a graceful shutdown
+///
+/// # Panics
+///
+/// May panic if the signal handler cannot be installed
+pub async fn shutdown_signal() {
+    let ctrl_c = async {
+        tokio::signal::ctrl_c()
+            .await
+            .expect("failed to install Ctrl+C handler");
+    };
+
+    #[cfg(unix)]
+    let terminate = async {
+        tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
+            .expect("failed to install signal handler")
+            .recv()
+            .await;
+    };
+
+    #[cfg(not(unix))]
+    let terminate = std::future::pending::<()>();
+
+    tokio::select! {
+        () = ctrl_c => {},
+        () = terminate => {},
+    }
+
+    info!("signal received, starting graceful shutdown");
+}
+
+/// Returns a new UUID as a string with hyphens.
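+///
+/// Example (mirrors the unit test below):
+///
+/// ```
+/// let id = clearing_house_app::util::new_uuid();
+/// assert_eq!(id.len(), 36);
+/// ```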
+#[must_use]
+pub fn new_uuid() -> String {
+    use uuid::Uuid;
+    Uuid::new_v4().hyphenated().to_string()
+}
+
+#[cfg(test)]
+mod test {
+    #[test]
+    fn test_new_uuid() {
+        let uuid = super::new_uuid();
+        assert_eq!(uuid.len(), 36);
+        assert_eq!(uuid.chars().filter(|&c| c == '-').count(), 4);
+    }
+}
diff --git a/clearing-house-app/tests/README.md b/clearing-house-app/tests/README.md
new file mode 100644
index 00000000..69608ed7
--- /dev/null
+++ b/clearing-house-app/tests/README.md
@@ -0,0 +1,5 @@
+# Integration Tests
+
+Prerequisites:
+
+- Docker daemon running and Docker CLI installed
\ No newline at end of file
diff --git a/clearing-house-app/tests/create_process.rs b/clearing-house-app/tests/create_process.rs
new file mode 100644
index 00000000..d7ee88cd
--- /dev/null
+++ b/clearing-house-app/tests/create_process.rs
@@ -0,0 +1,233 @@
+#![cfg(test)]
+
+use axum::http::{Request, StatusCode};
+use biscuit::jwa::SignatureAlgorithm::PS512;
+use biscuit::jwk::JWKSet;
+use clearing_house_app::model::claims::{get_fingerprint, ChClaims};
+use clearing_house_app::model::ids::message::IdsMessage;
+use clearing_house_app::model::ids::request::ClearingHouseMessage;
+use clearing_house_app::model::ids::{IdsQueryResult, InfoModelId, MessageType};
+use clearing_house_app::model::process::{OwnerList, Receipt};
+use clearing_house_app::model::{claims::create_token, constants::SERVICE_HEADER};
+use clearing_house_app::util::new_uuid;
+use tower::ServiceExt;
+
+#[tokio::test]
+async fn log_message() {
+    const CLIENT_ID: &str = "69:F5:9D:B0:DD:A6:9D:30:5F:58:AA:2D:20:4D:B2:39:F0:54:FC:3B:keyid:4F:66:7D:BD:08:EE:C6:4A:D1:96:D8:7C:6C:A2:32:8A:EC:A6:AD:49";
+
+    // Start testcontainer: Postgres
+    let docker = testcontainers::clients::Cli::default();
+    let postgres_instance = docker.run(testcontainers_modules::postgres::Postgres::default());
+    let connection_string = format!(
+        "postgres://postgres:postgres@127.0.0.1:{}/postgres",
+        postgres_instance.get_host_port_ipv4(5432)
+    );
+
+    std::env::set_var("SERVICE_ID_LOG", "test");
+    std::env::set_var("SHARED_SECRET", "test");
+    std::env::set_var("CH_APP_LOG_LEVEL", "TRACE");
+    std::env::set_var("CH_APP_CLEAR_DB", "false");
+    std::env::set_var("CH_APP_DATABASE_URL", connection_string);
+
+    let app = clearing_house_app::app().await.unwrap();
+
+    // Prerequisite: fetch the JWKS for checking the receipt signature
+    let response = app
+        .clone()
+        .oneshot(
+            Request::builder()
+                .uri("/.well-known/jwks.json")
+                .body(axum::body::Body::empty())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    assert_eq!(response.status(), StatusCode::OK);
+
+    let body = axum::body::to_bytes(response.into_body(), usize::MAX)
+        .await
+        .unwrap();
+    assert!(!body.is_empty());
+    let jwks = serde_json::from_slice::<JWKSet<biscuit::Empty>>(&body).expect("Decoded the JWKSet");
+
+    // ---------------------------------------------------------------------------------------------
+
+    // Create a process
+    let pid = new_uuid();
+    let id = new_uuid();
+
+    let process_owners = OwnerList {
+        owners: vec![CLIENT_ID.to_string()],
+    };
+    let process_owners_payload = serde_json::to_string(&process_owners).expect("Should serialize");
+
+    let msg = ClearingHouseMessage {
+        header: IdsMessage {
+            context: Some(std::collections::HashMap::from([
+                ("ids".to_string(), "https://w3id.org/idsa/core/".to_string()),
+                (
+                    "idsc".to_string(),
+                    "https://w3id.org/idsa/code/".to_string(),
+                ),
+            ])),
+            type_message: MessageType::RequestMessage,
+            id: Some(id.clone()),
+            model_version: "test".to_string(),
+            issuer_connector: InfoModelId::new("test-connector".to_string()),
+            sender_agent: "https://w3id.org/idsa/core/ClearingHouse".to_string(),
+            ..Default::default()
+        },
+        payload: Some(process_owners_payload),
+        payload_type: None,
+    };
+
+    let claims = ChClaims::new(CLIENT_ID);
+
+    // Send create process message
+    let response = app
+        .clone()
+        .oneshot(
+            Request::builder()
+                .uri(format!("/process/{}", pid))
+                .method("POST")
+                .header("Content-Type", "application/json")
+                .header(SERVICE_HEADER, create_token("test", "test", &claims))
+                .body(serde_json::to_string(&msg).unwrap())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    // Check status code
+    assert_eq!(response.status(), StatusCode::CREATED);
+
+    // ---------------------------------------------------------------------------------------------
+
+    // Send authorized log message
+    let log_msg = ClearingHouseMessage {
+        header: IdsMessage {
+            context: Some(std::collections::HashMap::from([
+                ("ids".to_string(), "https://w3id.org/idsa/core/".to_string()),
+                (
+                    "idsc".to_string(),
+                    "https://w3id.org/idsa/code/".to_string(),
+                ),
+            ])),
+            type_message: MessageType::LogMessage,
+            id: Some(id.clone()),
+            model_version: "test".to_string(),
+            issuer_connector: InfoModelId::new("test-connector".to_string()),
+            sender_agent: "https://w3id.org/idsa/core/ClearingHouse".to_string(),
+            ..Default::default()
+        },
+        payload: Some("test".to_string()),
+        payload_type: None,
+    };
+
+    // Send log message
+    let log_response = app
+        .clone()
+        .oneshot(
+            Request::builder()
+                .uri(format!("/messages/log/{}", pid))
+                .method("POST")
+                .header("Content-Type", "application/json")
+                .header(SERVICE_HEADER, create_token("test", "test", &claims))
+                .body(serde_json::to_string(&log_msg).unwrap())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    // Check status code
+    assert_eq!(log_response.status(), StatusCode::CREATED);
+    // get body
+    let body = axum::body::to_bytes(log_response.into_body(), usize::MAX)
+        .await
+        .unwrap();
+    assert!(!body.is_empty());
+
+    // Decode receipt
+    let receipt = serde_json::from_slice::<Receipt>(&body).unwrap();
+    println!("Receipt: {:?}", receipt);
+    let decoded_receipt = receipt
+        .data
+        .decode_with_jwks(&jwks, Some(PS512))
+        .expect("Decoding JWS successful");
+    let decoded_receipt_header = decoded_receipt
+        .header()
+        .expect("Header is now already decoded");
+
+    assert_eq!(
+        decoded_receipt_header.registered.key_id,
+        get_fingerprint("keys/private_key.der")
+    );
+
+    let decoded_receipt_payload = decoded_receipt
+        .payload()
+        .expect("Payload is now already decoded");
+    println!("Decoded Receipt: {:?}", decoded_receipt);
+
+    assert_eq!(decoded_receipt_payload.process_id, pid);
+    assert_eq!(decoded_receipt_payload.payload, "test".to_string());
+
+    // ---------------------------------------------------------------------------------------------
+
+    // Query the pid
+    let query_resp = app
+        .clone()
+        .oneshot(
+            Request::builder()
+                .uri(format!("/messages/query/{}", pid))
+                .method("POST")
+                .header("Content-Type", "application/json")
+                .header(SERVICE_HEADER, create_token("test", "test", &claims))
+                .body(serde_json::to_string(&log_msg).unwrap())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+    assert_eq!(query_resp.status(), StatusCode::OK);
+
+    let body = axum::body::to_bytes(query_resp.into_body(), usize::MAX)
+        .await
+        .unwrap();
+    assert!(!body.is_empty());
+
+    let ids_message = serde_json::from_slice::<IdsQueryResult>(&body).unwrap();
+    println!("IDS Query Result: {:?}", ids_message);
+    let query_docs = ids_message.documents;
+
+    // Check the only document in the result
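+    // For orientation (illustrative values): the IdsQueryResult JSON carries the
+    // effective query window and pagination, e.g.
+    //   {"date_from": ..., "date_to": ..., "page": 1, "size": 100, "order": "desc", "documents": [<IdsMessage>]}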
+    assert_eq!(query_docs.len(), 1);
+    let doc = query_docs
+        .first()
+        .expect("Document is there, just checked")
+        .to_owned();
+    assert_eq!(doc.payload.expect("Payload is there"), "test".to_string());
+    assert_eq!(doc.model_version, "test".to_string());
+
+    // ---------------------------------------------------------------------------------------------
+
+    // Send unauthorized message
+    let unauthorized_claims = ChClaims::new("unauthorized");
+
+    // Send log message
+    let log_response_unauth = app
+        .oneshot(
+            Request::builder()
+                .uri(format!("/messages/log/{}", pid))
+                .method("POST")
+                .header("Content-Type", "application/json")
+                .header(
+                    SERVICE_HEADER,
+                    create_token("test", "test", &unauthorized_claims),
+                )
+                .body(serde_json::to_string(&log_msg).unwrap())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    assert_eq!(log_response_unauth.status(), StatusCode::FORBIDDEN);
+}
\ No newline at end of file
diff --git a/clearing-house-app/tests/log.rs b/clearing-house-app/tests/log.rs
new file mode 100644
index 00000000..564520a0
--- /dev/null
+++ b/clearing-house-app/tests/log.rs
@@ -0,0 +1,161 @@
+#![cfg(test)]
+
+use axum::http::{Request, StatusCode};
+use biscuit::jwa::SignatureAlgorithm::PS512;
+use biscuit::jwk::JWKSet;
+use clearing_house_app::model::claims::{get_fingerprint, ChClaims};
+use clearing_house_app::model::ids::message::IdsMessage;
+use clearing_house_app::model::ids::request::ClearingHouseMessage;
+use clearing_house_app::model::ids::{IdsQueryResult, InfoModelId, MessageType};
+use clearing_house_app::model::process::Receipt;
+use clearing_house_app::model::{claims::create_token, constants::SERVICE_HEADER};
+use clearing_house_app::util::new_uuid;
+use tower::ServiceExt;
+
+#[tokio::test]
+async fn log_message() {
+    // Start testcontainer: Postgres
+    let docker = testcontainers::clients::Cli::default();
+    let postgres_instance = docker.run(testcontainers_modules::postgres::Postgres::default());
+    let connection_string = format!(
+        "postgres://postgres:postgres@127.0.0.1:{}/postgres",
+        postgres_instance.get_host_port_ipv4(5432)
+    );
+
+    std::env::set_var("SERVICE_ID_LOG", "test");
+    std::env::set_var("SHARED_SECRET", "test");
+    std::env::set_var("CH_APP_LOG_LEVEL", "TRACE");
+    std::env::set_var("CH_APP_CLEAR_DB", "false");
+    std::env::set_var("CH_APP_DATABASE_URL", connection_string);
+
+    let app = clearing_house_app::app().await.unwrap();
+
+    // Prerequisite: fetch the JWKS for checking the receipt signature
+    let response = app
+        .clone()
+        .oneshot(
+            Request::builder()
+                .uri("/.well-known/jwks.json")
+                .body(axum::body::Body::empty())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    assert_eq!(response.status(), StatusCode::OK);
+
+    let body = axum::body::to_bytes(response.into_body(), usize::MAX)
+        .await
+        .unwrap();
+    assert!(!body.is_empty());
+    let jwks = serde_json::from_slice::<JWKSet<biscuit::Empty>>(&body).expect("Decoded the JWKSet");
+
+    // ---------------------------------------------------------------------------------------------
+    // Create a message
+    let pid = new_uuid();
+    let id = new_uuid();
+
+    let msg = ClearingHouseMessage {
+        header: IdsMessage {
+            context: Some(std::collections::HashMap::from([
+                ("ids".to_string(), "https://w3id.org/idsa/core/".to_string()),
+                (
+                    "idsc".to_string(),
+                    "https://w3id.org/idsa/code/".to_string(),
+                ),
+            ])),
+            type_message: MessageType::LogMessage,
+            id: Some(id.clone()),
+            model_version: "test".to_string(),
+            issuer_connector: InfoModelId::new("test-connector".to_string()),
+            sender_agent: "https://w3id.org/idsa/core/ClearingHouse".to_string(),
+            ..Default::default()
+        },
+        payload: Some("test".to_string()),
+        payload_type: None,
+    };
+
+    let claims = ChClaims::new("69:F5:9D:B0:DD:A6:9D:30:5F:58:AA:2D:20:4D:B2:39:F0:54:FC:3B:keyid:4F:66:7D:BD:08:EE:C6:4A:D1:96:D8:7C:6C:A2:32:8A:EC:A6:AD:49");
+
+    // Send log message
+    let response = app
+        .clone()
+        .oneshot(
+            Request::builder()
+                .uri(format!("/messages/log/{}", pid))
+                .method("POST")
+                .header("Content-Type", "application/json")
+                .header(SERVICE_HEADER, create_token("test", "test", &claims))
+                .body(serde_json::to_string(&msg).unwrap())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    // Check status code
+    assert_eq!(response.status(), StatusCode::CREATED);
+    // get body
+    let body = axum::body::to_bytes(response.into_body(), usize::MAX)
+        .await
+        .unwrap();
+    assert!(!body.is_empty());
+
+    // Decode receipt
+    let receipt = serde_json::from_slice::<Receipt>(&body).unwrap();
+    println!("Receipt: {:?}", receipt);
+    let decoded_receipt = receipt
+        .data
+        .decode_with_jwks(&jwks, Some(PS512))
+        .expect("Decoding JWS successful");
+    let decoded_receipt_header = decoded_receipt
+        .header()
+        .expect("Header is now already decoded");
+
+    assert_eq!(
+        decoded_receipt_header.registered.key_id,
+        get_fingerprint("keys/private_key.der")
+    );
+
+    let decoded_receipt_payload = decoded_receipt
+        .payload()
+        .expect("Payload is now already decoded");
+    println!("Decoded Receipt: {:?}", decoded_receipt);
+
+    assert_eq!(decoded_receipt_payload.process_id, pid);
+    assert_eq!(decoded_receipt_payload.payload, "test".to_string());
+
+    // ---------------------------------------------------------------------------------------------
+
+    // Query the pid
+    let query_resp = app
+        .oneshot(
+            Request::builder()
+                .uri(format!("/messages/query/{}", pid))
+                .method("POST")
+                .header("Content-Type", "application/json")
+                .header(SERVICE_HEADER, create_token("test", "test", &claims))
+                .body(serde_json::to_string(&msg).unwrap())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+    assert_eq!(query_resp.status(), StatusCode::OK);
+
+    let body = axum::body::to_bytes(query_resp.into_body(), usize::MAX)
+        .await
+        .unwrap();
+    assert!(!body.is_empty());
+
+    let ids_message = serde_json::from_slice::<IdsQueryResult>(&body).unwrap();
+    println!("IDS Query Result: {:?}", ids_message);
+    let query_docs = ids_message.documents;
+
+    // Check the only document in the result
+    assert_eq!(query_docs.len(), 1);
+    let doc = query_docs
+        .first()
+        .expect("Document is there, just checked")
+        .to_owned();
+    assert_eq!(doc.payload.expect("Payload is there"), "test".to_string());
+    assert_eq!(doc.model_version, "test".to_string());
+}
diff --git a/clearing-house-app/tests/public_key.rs b/clearing-house-app/tests/public_key.rs
new file mode 100644
index 00000000..29724f0c
--- /dev/null
+++ b/clearing-house-app/tests/public_key.rs
@@ -0,0 +1,39 @@
+use axum::body::Body;
+use axum::http::{Request, StatusCode};
+use biscuit::jwk::JWKSet;
+use tower::ServiceExt;
+
+#[tokio::test]
+async fn retrieve_public_key() {
+    // Start testcontainer: Postgres
+    let docker = testcontainers::clients::Cli::default();
+    let postgres_instance = docker.run(testcontainers_modules::postgres::Postgres::default());
+    let connection_string = format!(
+        "postgres://postgres:postgres@127.0.0.1:{}/postgres",
+        postgres_instance.get_host_port_ipv4(5432)
+    );
+
+    std::env::set_var("SERVICE_ID_LOG", "test");
+    std::env::set_var("CH_APP_DATABASE_URL", connection_string);
+
+    let app = clearing_house_app::app().await.unwrap();
+
+    let response = app
+        .oneshot(
+            Request::builder()
+                .uri("/.well-known/jwks.json")
+                .body(Body::empty())
+                .unwrap(),
+        )
+        .await
+        .unwrap();
+
+    assert_eq!(response.status(), StatusCode::OK);
+
+    let body = axum::body::to_bytes(response.into_body(), usize::MAX)
+        .await
+        .unwrap();
+    assert!(!body.is_empty());
+    let jwks = serde_json::from_slice::<JWKSet<biscuit::Empty>>(&body).expect("Decoded the JWKSet");
+    println!("JWKS: {:?}", jwks);
+}
diff --git a/clearing-house-edc/.gitignore b/clearing-house-edc/.gitignore
new file mode 100644
index 00000000..e7ae6232
--- /dev/null
+++ b/clearing-house-edc/.gitignore
@@ -0,0 +1,5 @@
+.gradle/
+.idea/
+.vscode/
+**/build/
+**/bin/
diff --git a/clearing-house-edc/README.md b/clearing-house-edc/README.md
new file mode 100644
index 00000000..6862e27f
--- /dev/null
+++ b/clearing-house-edc/README.md
@@ -0,0 +1,44 @@
+## Clearing House EDC
+This repository contains the Clearing House Extension for the Eclipse Dataspace Connector (EDC),
+which enables logging operations.
+
+## Install
+### Configurations
+The following parameters must be configured:
+
+| Parameter name                         | Description                                   | Default value          |
+|----------------------------------------|-----------------------------------------------|------------------------|
+| `truzzt.clearinghouse.jwt.audience`    | Defines the intended recipients of the token  | 1                      |
+| `truzzt.clearinghouse.jwt.issuer`      | Person or entity offering the token           | 1                      |
+| `truzzt.clearinghouse.jwt.sign.secret` | Secret key to encode the token                | 123                    |
+| `truzzt.clearinghouse.jwt.expires.at`  | Time until token expiration (in seconds)      | 30                     |
+| `truzzt.clearinghouse.app.base.url`    | Base URL of the clearing house app            | http://localhost:8000  |
+
+### Build
+To build the project, run the command below:
+
+    ./gradlew build
+
+### Running
+Local execution:
+
+    java -Dedc.fs.config=launchers/connector-local/resources/config.properties -Dedc.keystore=launchers/connector-local/resources/keystore.jks -Dedc.keystore.password=password -Dedc.vault=launchers/connector-local/resources/vault.properties -jar launchers/connector-local/build/libs/clearing-house-edc.jar
+
+## Tests
+
+### Running Tests
+To run the unit tests, execute the following command:
+
+    ./gradlew test
+
+### Test Coverage
+To generate the test coverage report, execute the following command:
+
+    ./gradlew jacocoTestReport
+
+The coverage reports will be available in the following folders:
+
+- [core/build/reports/jacoco/test/html/index.html](./core/build/reports/jacoco/test/html/index.html)
+- [extensions/multipart/build/reports/jacoco/test/html/index.html](./extensions/multipart/build/reports/jacoco/test/html/index.html)
diff --git a/clearing-house-edc/build.gradle.kts b/clearing-house-edc/build.gradle.kts
new file mode 100644
index 00000000..589224c5
--- /dev/null
+++ b/clearing-house-edc/build.gradle.kts
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2023 Microsoft Corporation
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Contributors:
+ *       truzzt GmbH - Initial implementation
+ *
+ */
+
+plugins {
+    `java-base`
+}
+
+val javaVersion: String by project
+
+java {
+    toolchain {
+        languageVersion = JavaLanguageVersion.of(javaVersion)
+    }
+}
diff --git a/clearing-house-edc/core/build.gradle.kts b/clearing-house-edc/core/build.gradle.kts
new file mode 100644
index 00000000..957d08fd
--- /dev/null
+++ b/clearing-house-edc/core/build.gradle.kts
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2023 Microsoft Corporation
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Contributors:
+ *       truzzt GmbH - Initial implementation
+ *
+ */
+
+plugins {
+    `java-library`
+    `java-test-fixtures`
+    jacoco
+}
+
+val auth0JWTVersion: String by project
+
+configurations.all {
+    exclude(group = "de.fraunhofer.iais.eis.ids.infomodel", module = "java")
+}
+
+dependencies {
+    api(edc.spi.core)
+
+    implementation(edc.ids)
+    implementation(edc.ids.jsonld.serdes)
+    implementation(edc.api.management.config)
+    implementation(libs.jersey.multipart)
+    implementation("com.auth0:java-jwt:${auth0JWTVersion}")
+
+    implementation(":infomodel-java-4.1.3")
+    implementation(":infomodel-util-4.0.4")
+
+    testImplementation(libs.junit.jupiter.api)
+    testImplementation(libs.mockito.inline)
+
+    testFixturesImplementation(edc.ids)
+    testFixturesImplementation("com.auth0:java-jwt:${auth0JWTVersion}")
+
+    testRuntimeOnly(libs.junit.jupiter.engine)
+}
+
+tasks.test {
+    useJUnitPlatform()
+}
+
+tasks.jacocoTestReport {
+    reports {
+        xml.required = true
+    }
+    dependsOn(tasks.test)
+    classDirectories.setFrom(
+        files(classDirectories.files.map {
+            fileTree(it) {
+                exclude(
+                    "**/dto/**",
+                    "**/types/clearinghouse/*",
+                    "**/types/ids/*")
+            }
+        })
+    )
+}
diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/AppSender.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/AppSender.java
new file mode 100644
index 00000000..f0584de7
--- /dev/null
+++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/AppSender.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2023 truzzt GmbH
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Contributors:
+ *       truzzt GmbH - Initial implementation
+ *
+ */
+package de.truzzt.clearinghouse.edc.app;
+
+import de.truzzt.clearinghouse.edc.app.delegate.AppSenderDelegate;
+import de.truzzt.clearinghouse.edc.dto.AppSenderRequest;
+import de.truzzt.clearinghouse.edc.types.TypeManagerUtil;
+import okhttp3.MediaType;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+import org.eclipse.edc.spi.EdcException;
+import org.eclipse.edc.spi.http.EdcHttpClient;
+import org.eclipse.edc.spi.monitor.Monitor;
+
+import static java.lang.String.format;
+
+public class AppSender {
+
+    private static final String JSON_CONTENT_TYPE = "application/json";
+
+    private final Monitor monitor;
+    private final EdcHttpClient httpClient;
+    private final TypeManagerUtil typeManagerUtil;
+
+    public AppSender(Monitor monitor,
+                     EdcHttpClient httpClient,
+                     TypeManagerUtil typeManagerUtil) {
+        this.monitor = monitor;
+        this.httpClient = httpClient;
+        this.typeManagerUtil = typeManagerUtil;
+    }
+
+    public <B, P> P send(AppSenderRequest<B> request, AppSenderDelegate<P> appSenderDelegate) {
+
+        var json = typeManagerUtil.toJson(request.getBody());
+        var requestBody = RequestBody.create(json, MediaType.get(JSON_CONTENT_TYPE));
+
+        var httpRequest = new Request.Builder()
+                .url(request.getUrl())
+                .addHeader("Ch-Service", request.getToken())
+                .addHeader("Content-Type", JSON_CONTENT_TYPE)
+                .post(requestBody)
+                .build();
+
+        try (Response response = httpClient.execute(httpRequest)) {
+            monitor.debug("Response received from Clearing House App. Status: " + response.code());
+
+            if (response.isSuccessful()) {
+                try (var body = response.body()) {
+                    if (body == null) {
+                        throw new EdcException("Received an empty response body from Clearing House App");
+                    } else {
+                        return appSenderDelegate.parseResponseBody(body);
+                    }
+                } catch (Exception e) {
+                    throw new EdcException("Error reading Clearing House App response body", e);
+                }
+            } else {
+                throw new EdcException(format("Received an error from Clearing House App. Status: %s, message: %s", response.code(), response.message()));
+            }
+        } catch (java.io.IOException e) {
+            throw new EdcException("Error sending request to Clearing House App", e);
+        }
+    }
+}
diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/AppSenderDelegate.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/AppSenderDelegate.java
new file mode 100644
index 00000000..95ecb314
--- /dev/null
+++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/AppSenderDelegate.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2023 truzzt GmbH
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Contributors:
+ *       truzzt GmbH - Initial implementation
+ *
+ */
+package de.truzzt.clearinghouse.edc.app.delegate;
+
+import okhttp3.ResponseBody;
+
+public interface AppSenderDelegate<B> {
+
+    B parseResponseBody(ResponseBody responseBody);
+}
diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/CreateProcessDelegate.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/CreateProcessDelegate.java
new file mode 100644
index 00000000..2f518157
--- /dev/null
+++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/CreateProcessDelegate.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright (c) 2023 truzzt GmbH
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Contributors:
+ *       truzzt GmbH - Initial implementation
+ *
+ */
+package de.truzzt.clearinghouse.edc.app.delegate;
+
+import de.truzzt.clearinghouse.edc.dto.*;
+import de.truzzt.clearinghouse.edc.types.TypeManagerUtil;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.Context;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.Header;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.SecurityToken;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.TokenFormat;
+import okhttp3.ResponseBody;
+
+public class CreateProcessDelegate implements AppSenderDelegate<CreateProcessResponse> {
+
+    private final TypeManagerUtil typeManagerUtil;
+
+    public CreateProcessDelegate(TypeManagerUtil typeManagerUtil) {
+        this.typeManagerUtil = typeManagerUtil;
+    }
+
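+    // Usage sketch (illustrative; assumes baseUrl, token, handlerRequest and an
+    // AppSender instance are in scope):
+    //
+    //   var delegate = new CreateProcessDelegate(typeManagerUtil);
+    //   var request = AppSenderRequest.Builder.<CreateProcessRequest>newInstance()
+    //           .url(delegate.buildRequestUrl(baseUrl, handlerRequest))
+    //           .token(token)
+    //           .body(delegate.buildRequestBody(handlerRequest))
+    //           .build();
+    //   CreateProcessResponse response = appSender.send(request, delegate);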
+    public String buildRequestUrl(String baseUrl, HandlerRequest handlerRequest) {
+        return baseUrl + "/process/" + handlerRequest.getPid();
+    }
+
+    public CreateProcessRequest buildRequestBody(HandlerRequest handlerRequest) {
+        var header = handlerRequest.getHeader();
+
+        var multipartContext = header.getContext();
+        var context = new Context(multipartContext.getIds(), multipartContext.getIdsc());
+
+        var multipartSecurityToken = header.getSecurityToken();
+        var multipartTokenFormat = multipartSecurityToken.getTokenFormat();
+        var securityToken = SecurityToken.Builder.newInstance()
+                .type(multipartSecurityToken.getType())
+                .id(multipartSecurityToken.getId())
+                .tokenFormat(new TokenFormat(multipartTokenFormat.getId()))
+                .tokenValue(multipartSecurityToken.getTokenValue())
+                .build();
+
+        var requestHeader = Header.Builder.newInstance()
+                .context(context)
+                .id(header.getId())
+                .type(header.getType())
+                .securityToken(securityToken)
+                .issuerConnector(header.getIssuerConnector())
+                .modelVersion(header.getModelVersion())
+                .issued(header.getIssued())
+                .senderAgent(header.getSenderAgent())
+                .build();
+
+        return new CreateProcessRequest(requestHeader, handlerRequest.getPayload());
+    }
+
+    @Override
+    public CreateProcessResponse parseResponseBody(ResponseBody responseBody) {
+        return typeManagerUtil.parse(responseBody.byteStream(), CreateProcessResponse.class);
+    }
+}
diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/LoggingMessageDelegate.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/LoggingMessageDelegate.java
new file mode 100644
index 00000000..bbc9892b
--- /dev/null
+++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/app/delegate/LoggingMessageDelegate.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2023 truzzt GmbH
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Contributors:
+ *       truzzt GmbH - Initial implementation
+ *
+ */
+package de.truzzt.clearinghouse.edc.app.delegate;
+
+import de.truzzt.clearinghouse.edc.dto.HandlerRequest;
+import de.truzzt.clearinghouse.edc.dto.LoggingMessageRequest;
+import de.truzzt.clearinghouse.edc.dto.LoggingMessageResponse;
+import de.truzzt.clearinghouse.edc.types.TypeManagerUtil;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.Context;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.Header;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.SecurityToken;
+import de.truzzt.clearinghouse.edc.types.clearinghouse.TokenFormat;
+import okhttp3.ResponseBody;
+
+public class LoggingMessageDelegate implements AppSenderDelegate<LoggingMessageResponse> {
+
+    private final TypeManagerUtil typeManagerUtil;
+
+    public LoggingMessageDelegate(TypeManagerUtil typeManagerUtil) {
+        this.typeManagerUtil = typeManagerUtil;
+    }
+
+    public String buildRequestUrl(String baseUrl, HandlerRequest handlerRequest) {
+        return baseUrl + "/messages/log/" + handlerRequest.getPid();
+    }
+
+    public LoggingMessageRequest buildRequestBody(HandlerRequest handlerRequest) {
+        var header = handlerRequest.getHeader();
+
+        var multipartContext = header.getContext();
+        var context = new Context(multipartContext.getIds(), multipartContext.getIdsc());
+
+        var multipartSecurityToken = header.getSecurityToken();
+        var multipartTokenFormat = multipartSecurityToken.getTokenFormat();
+        var securityToken = SecurityToken.Builder.newInstance()
+                .type(multipartSecurityToken.getType())
+                .id(multipartSecurityToken.getId())
+                .tokenFormat(new TokenFormat(multipartTokenFormat.getId()))
+                .tokenValue(multipartSecurityToken.getTokenValue())
+                .build();
+
+        var requestHeader = Header.Builder.newInstance()
+                .context(context)
+                .id(header.getId())
+                .type(header.getType())
+                .securityToken(securityToken)
+                .issuerConnector(header.getIssuerConnector())
+                .modelVersion(header.getModelVersion())
+                .issued(header.getIssued())
+                .senderAgent(header.getSenderAgent())
+                .build();
+
+        return new LoggingMessageRequest(requestHeader, handlerRequest.getPayload());
+    }
+
+    @Override
+    public LoggingMessageResponse parseResponseBody(ResponseBody responseBody) {
+        return typeManagerUtil.parse(responseBody.byteStream(), LoggingMessageResponse.class);
+    }
+}
diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/AppSenderRequest.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/AppSenderRequest.java
new file mode 100644
index 00000000..dfd73975
--- /dev/null
+++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/AppSenderRequest.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2023 truzzt GmbH
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Contributors:
+ *       truzzt GmbH - Initial implementation
+ *
+ */
+package de.truzzt.clearinghouse.edc.dto;
+
+import org.jetbrains.annotations.NotNull;
+
+import java.util.Objects;
+
+public class AppSenderRequest<B> {
+
+    private final String url;
+    private final String token;
+    private final B body;
+
+    public AppSenderRequest(@NotNull String url, @NotNull String token, @NotNull B body) {
+        this.url = url;
+        this.token = token;
+        this.body = body;
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    public String getToken() {
+        return token;
+    }
+
+    public B getBody() {
+        return body;
+    }
+
+    public static class Builder<R> {
+
+        private String url;
+        private String token;
+        private R body;
+
+        private Builder() {
+        }
+
+        public static <R> Builder<R> newInstance() {
+            return new Builder<>();
+        }
+
+        public Builder<R> url(@NotNull String url) {
+            this.url = url;
+            return this;
+        }
+
+        public Builder<R> token(@NotNull String token) {
+            this.token = token;
+            return this;
+        }
+
+        public Builder<R> body(@NotNull R body) {
+            this.body = body;
+            return this;
+        }
+
+        public AppSenderRequest<R> build() {
+            Objects.requireNonNull(url, "ClearingHouse request url is null.");
+            Objects.requireNonNull(token, "ClearingHouse request token is null.");
+            Objects.requireNonNull(body, "ClearingHouse request body is null.");
+
+            return new AppSenderRequest<>(url, token, body);
+        }
+    }
+
+}
diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/CreateProcessRequest.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/CreateProcessRequest.java
new file mode 100644
index 00000000..ad3078ac
--- /dev/null
+++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/CreateProcessRequest.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2023 truzzt GmbH
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License, Version 2.0 which is available at
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ *
Contributors: + * truzzt GmbH - Initial implementation + * + */ +package de.truzzt.clearinghouse.edc.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import de.truzzt.clearinghouse.edc.types.clearinghouse.Header; +import org.jetbrains.annotations.NotNull; + +public class CreateProcessRequest { + + @JsonProperty("header") + @NotNull + private final Header header; + + @JsonProperty("payload") + @NotNull + private final String payload; + + public CreateProcessRequest(@NotNull Header header, @NotNull String payload) { + this.header = header; + this.payload = payload; + } +} + diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/CreateProcessResponse.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/CreateProcessResponse.java new file mode 100644 index 00000000..fb591d41 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/CreateProcessResponse.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 truzzt GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - Initial implementation + * + */ +package de.truzzt.clearinghouse.edc.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +public class CreateProcessResponse { + + @JsonProperty("pid") + @NotNull + private String pid; + + public String getPid() { + return pid; + } + +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/HandlerRequest.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/HandlerRequest.java new file mode 100644 index 00000000..090ed378 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/HandlerRequest.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.dto; + +import de.truzzt.clearinghouse.edc.types.ids.Message; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Objects; + +public class HandlerRequest { + + private final String pid; + private final Message header; + private final String payload; + + private HandlerRequest(String pid, @NotNull Message header, @Nullable String payload) { + this.pid = pid; + this.header = header; + this.payload = payload; + } + + public String getPid() { + return pid; + } + + public Message getHeader() { + return header; + } + + public String getPayload() { + return payload; + } + + public static class Builder { + + private String pid; + private Message header; + private String payload; + + private Builder() { + } + + public static Builder newInstance() { + return new Builder(); + } + + public Builder pid(@NotNull String pid) { + this.pid = pid; + return this; + } + + public Builder header(@NotNull Message header) { + this.header = header; + return this; + } + + public Builder payload(@NotNull String payload) { + 
this.payload = payload; + return this; + } + + public HandlerRequest build() { + Objects.requireNonNull(header, "Multipart request header is null."); + + return new HandlerRequest(pid, header, payload); + } + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/HandlerResponse.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/HandlerResponse.java new file mode 100644 index 00000000..e79ae880 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/HandlerResponse.java @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.dto; + +import de.truzzt.clearinghouse.edc.types.ids.Message; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.Objects; + +public class HandlerResponse { + + private final Message header; + private final Object payload; + + private HandlerResponse(@NotNull Message header, @Nullable Object payload) { + this.header = header; + this.payload = payload; + } + + @NotNull + public Message getHeader() { + return header; + } + + @Nullable + public Object getPayload() { + return payload; + } + + public static class Builder { + + private Message header; + private Object payload; + + private Builder() { + } + + public static Builder newInstance() { + return new Builder(); + } + + public Builder header(@NotNull Message header) { + this.header = header; + return this; + } + + public Builder payload(@Nullable Object payload) { + this.payload = payload; + return this; + } + + public HandlerResponse build() { + Objects.requireNonNull(header, "Multipart response header is null."); + return new HandlerResponse(header, payload); + } + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/LoggingMessageRequest.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/LoggingMessageRequest.java new file mode 100644 index 00000000..32c96a67 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/LoggingMessageRequest.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 truzzt GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - Initial implementation + * + */ +package de.truzzt.clearinghouse.edc.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import de.truzzt.clearinghouse.edc.types.clearinghouse.Header; +import org.jetbrains.annotations.NotNull; + +public class LoggingMessageRequest { + + @JsonProperty("header") + @NotNull + private final Header header; + + @JsonProperty("payload") + @NotNull + private final String payload; + + public LoggingMessageRequest(@NotNull Header header, @NotNull String payload) { + this.header = header; + this.payload = payload; + } +} +
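`HandlerRequest` and `HandlerResponse` are the internal pair every handler consumes and returns. A short sketch (not part of the patch) of assembling a request from already-parsed multipart parts:

```java
import de.truzzt.clearinghouse.edc.dto.HandlerRequest;
import de.truzzt.clearinghouse.edc.types.ids.Message;

class HandlerRequestSketch {
    HandlerRequest assemble(String pid, Message idsHeader, String payload) {
        // pid typically comes from the request path, header and payload from the multipart body.
        return HandlerRequest.Builder.newInstance()
                .pid(pid)
                .header(idsHeader)
                .payload(payload)
                .build();
    }
}
```

diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/LoggingMessageResponse.java 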
b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/LoggingMessageResponse.java new file mode 100644 index 00000000..ac90345b --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/dto/LoggingMessageResponse.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2023 truzzt GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - Initial implementation + * + */ +package de.truzzt.clearinghouse.edc.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +public class LoggingMessageResponse { + + @JsonProperty("data") + @NotNull + private String data; + + public String getData() { + return data; + } + +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/Handler.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/Handler.java new file mode 100644 index 00000000..648da132 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/Handler.java @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.handler; + +import com.auth0.jwt.JWT; +import com.auth0.jwt.algorithms.Algorithm; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.types.ids.SecurityToken; +import org.eclipse.edc.spi.EdcException; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.jetbrains.annotations.NotNull; + +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.util.Date; + +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_AUDIENCE_DEFAULT_VALUE; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_AUDIENCE_SETTING; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_EXPIRES_AT_DEFAULT_VALUE; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_EXPIRES_AT_SETTING; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_ISSUER_DEFAULT_VALUE; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_ISSUER_SETTING; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_SIGN_SECRET_DEFAULT_VALUE; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.JWT_SIGN_SECRET_SETTING; + +public interface Handler { + + boolean canHandle(@NotNull HandlerRequest handlerRequest); + + @NotNull HandlerResponse handleRequest(@NotNull HandlerRequest handlerRequest); + + default Date convertLocalDateTime(LocalDateTime localDateTime) { + return Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant()); + } + + default @NotNull String buildJWTToken(@NotNull SecurityToken securityToken, ServiceExtensionContext context) { + + var tokenValue = securityToken.getTokenValue(); + var decodedToken = JWT.decode(tokenValue); + + var 
referringConnector = decodedToken.getClaim("referringConnector").asString(); + if (referringConnector == null) { + throw new EdcException("JWT Token referringConnector is missing"); + } + + var issuedAt = LocalDateTime.now(); + var expiresAt = issuedAt.plusSeconds( + Long.parseLong(context.getSetting(JWT_EXPIRES_AT_SETTING, JWT_EXPIRES_AT_DEFAULT_VALUE))); + + var jwtToken = JWT.create() + .withAudience(context.getSetting(JWT_AUDIENCE_SETTING, JWT_AUDIENCE_DEFAULT_VALUE)) + .withIssuer(context.getSetting(JWT_ISSUER_SETTING, JWT_ISSUER_DEFAULT_VALUE)) + .withClaim("client_id", referringConnector) + .withIssuedAt(convertLocalDateTime(issuedAt)) + .withExpiresAt(convertLocalDateTime(expiresAt)); + + return jwtToken.sign(Algorithm.HMAC256(context.getSetting(JWT_SIGN_SECRET_SETTING, JWT_SIGN_SECRET_DEFAULT_VALUE))); + } +}
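`buildJWTToken` re-mints the incoming token's `referringConnector` claim as a short-lived HMAC-signed token for the Clearing House app. A round-trip sketch (not part of the patch) using the default settings above; the claim value is made up:

```java
import com.auth0.jwt.JWT;
import com.auth0.jwt.algorithms.Algorithm;

class JwtSketch {
    public static void main(String[] args) {
        // Mirrors buildJWTToken with the default audience/issuer/secret settings ("1", "1", "123").
        String token = JWT.create()
                .withAudience("1")
                .withIssuer("1")
                .withClaim("client_id", "https://example.com/connector")
                .sign(Algorithm.HMAC256("123"));

        // The receiving side can verify the token with the same shared secret.
        var decoded = JWT.require(Algorithm.HMAC256("123")).withIssuer("1").build().verify(token);
        System.out.println(decoded.getClaim("client_id").asString()); // https://example.com/connector
    }
}
```

diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/LogMessageHandler.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/LogMessageHandler.java new file mode 100644 index 00000000..26b41046 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/LogMessageHandler.java @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2023 truzzt GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - Initial implementation + * + */ +package de.truzzt.clearinghouse.edc.handler; + +import de.truzzt.clearinghouse.edc.app.AppSender; +import de.truzzt.clearinghouse.edc.app.delegate.LoggingMessageDelegate; +import de.truzzt.clearinghouse.edc.dto.AppSenderRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.jetbrains.annotations.NotNull; + +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.createMultipartResponse; +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.messageProcessedNotification; + +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.APP_BASE_URL_SETTING; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.APP_BASE_URL_DEFAULT_VALUE; + +public class LogMessageHandler implements Handler { + + private final IdsId connectorId; + private final AppSender appSender; + private final LoggingMessageDelegate senderDelegate; + + private final ServiceExtensionContext context; + + public LogMessageHandler(IdsId connectorId, + TypeManagerUtil typeManagerUtil, + AppSender appSender, + ServiceExtensionContext context) { + this.connectorId = connectorId; + this.appSender = appSender; + this.context = context; + + this.senderDelegate = new LoggingMessageDelegate(typeManagerUtil); + } + + @Override + public boolean canHandle(@NotNull HandlerRequest handlerRequest) { + return handlerRequest.getHeader().getType().equals("ids:LogMessage"); + } + + @Override + public @NotNull HandlerResponse handleRequest(@NotNull HandlerRequest handlerRequest) { + var baseUrl = context.getSetting(APP_BASE_URL_SETTING, APP_BASE_URL_DEFAULT_VALUE); + var header = handlerRequest.getHeader(); + + var url = senderDelegate.buildRequestUrl(baseUrl, handlerRequest); + var token = 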
buildJWTToken(header.getSecurityToken(), context); + var body = senderDelegate.buildRequestBody(handlerRequest); + + var request = AppSenderRequest.Builder.newInstance().url(url).token(token).body(body).build(); + + var response = appSender.send(request, senderDelegate); + return createMultipartResponse(messageProcessedNotification(header, connectorId), response); + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/RequestMessageHandler.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/RequestMessageHandler.java new file mode 100644 index 00000000..6a0cd0ef --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/handler/RequestMessageHandler.java @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2023 truzzt GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - Initial implementation + * + */ +package de.truzzt.clearinghouse.edc.handler; + +import de.truzzt.clearinghouse.edc.app.AppSender; +import de.truzzt.clearinghouse.edc.app.delegate.CreateProcessDelegate; +import de.truzzt.clearinghouse.edc.dto.AppSenderRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.jetbrains.annotations.NotNull; + +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.createMultipartResponse; +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.messageProcessedNotification; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.APP_BASE_URL_DEFAULT_VALUE; +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.APP_BASE_URL_SETTING; + +public class RequestMessageHandler implements Handler { + + private final IdsId connectorId; + private final AppSender appSender; + private final CreateProcessDelegate senderDelegate; + + private final ServiceExtensionContext context; + + public RequestMessageHandler(IdsId connectorId, + TypeManagerUtil typeManagerUtil, + AppSender appSender, + ServiceExtensionContext context) { + this.connectorId = connectorId; + this.appSender = appSender; + this.context = context; + + this.senderDelegate = new CreateProcessDelegate(typeManagerUtil); + } + + @Override + public boolean canHandle(@NotNull HandlerRequest handlerRequest) { + return handlerRequest.getHeader().getType().equals("ids:RequestMessage"); + } + + @Override + public @NotNull HandlerResponse handleRequest(@NotNull HandlerRequest handlerRequest) { + var baseUrl = context.getSetting(APP_BASE_URL_SETTING, APP_BASE_URL_DEFAULT_VALUE); + var header = handlerRequest.getHeader(); + + var url = senderDelegate.buildRequestUrl(baseUrl, handlerRequest); + var token = buildJWTToken(header.getSecurityToken(), context); + var body = senderDelegate.buildRequestBody(handlerRequest); + + var request = AppSenderRequest.Builder.newInstance().url(url).token(token).body(body).build(); + + var response = appSender.send(request, senderDelegate); + return createMultipartResponse(messageProcessedNotification(header, connectorId), response); + } +}
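Both handlers implement the same `canHandle`/`handleRequest` contract, so the multipart controller can pick the first matching handler per message type. A dispatch sketch (not part of the patch; wiring of the `handlers` list is assumed to happen in the extension):

```java
import java.util.List;

import de.truzzt.clearinghouse.edc.dto.HandlerRequest;
import de.truzzt.clearinghouse.edc.dto.HandlerResponse;
import de.truzzt.clearinghouse.edc.handler.Handler;

class DispatchSketch {
    HandlerResponse dispatch(List<Handler> handlers, HandlerRequest request) {
        // The first handler that recognizes the ids message type processes it.
        return handlers.stream()
                .filter(handler -> handler.canHandle(request))
                .findFirst()
                .map(handler -> handler.handleRequest(request))
                .orElseThrow(() -> new IllegalStateException("No handler for " + request.getHeader().getType()));
    }
}
```

diff --git 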
a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/TypeManagerUtil.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/TypeManagerUtil.java new file mode 100644 index 00000000..370002f2 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/TypeManagerUtil.java @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.eclipse.edc.spi.EdcException; + +import java.io.IOException; +import java.io.InputStream; + +public class TypeManagerUtil { + + private final ObjectMapper mapper; + + public TypeManagerUtil(ObjectMapper mapper) { + this.mapper = mapper; + } + + public <T> T parse(InputStream inputStream, Class<T> type) { + try { + return mapper.readValue(inputStream, type); + } catch (IOException e) { + throw new EdcException("Error parsing to type " + type, e); + } + } + + public String toJson(Object object) { + try { + return mapper.writeValueAsString(object); + } catch (JsonProcessingException e) { + throw new EdcException("Error converting to JSON", e); + } + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/Context.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/Context.java new file mode 100644 index 00000000..a835cde7 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/Context.java @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.clearinghouse; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +public class Context { + @JsonProperty("ids") + @NotNull + private final String ids; + + @JsonProperty("idsc") + @NotNull + private final String idsc; + + public Context(@NotNull String ids, @NotNull String idsc) { + this.ids = ids; + this.idsc = idsc; + } + + public String getIds() { + return ids; + } + + public String getIdsc() { + return idsc; + } +}
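`TypeManagerUtil` is a thin Jackson wrapper shared by request serialization and response parsing. A round-trip sketch (not part of the patch; the inline JSON is an illustrative Clearing House reply):

```java
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import com.fasterxml.jackson.databind.ObjectMapper;
import de.truzzt.clearinghouse.edc.dto.CreateProcessResponse;
import de.truzzt.clearinghouse.edc.types.TypeManagerUtil;

class TypeManagerUtilSketch {
    public static void main(String[] args) {
        var util = new TypeManagerUtil(new ObjectMapper());

        // Parse a made-up response body into the typed DTO.
        var json = "{\"pid\":\"process-123\"}";
        CreateProcessResponse response = util.parse(
                new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)),
                CreateProcessResponse.class);

        System.out.println(response.getPid()); // process-123
    }
}
```

diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/Header.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/Header.java new file mode 100644 index 00000000..f5e55c72 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/Header.java @@ -0,0 +1,183 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * 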
https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.clearinghouse; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +import javax.xml.datatype.XMLGregorianCalendar; +import java.net.URI; +import java.util.Objects; + +public class Header { + + @JsonProperty("@context") + @NotNull + private final Context context; + + @JsonProperty("@id") + @NotNull + private final String id; + + @JsonProperty("@type") + @NotNull + private final String type; + + @JsonProperty("securityToken") + @NotNull + private final SecurityToken securityToken; + + @JsonProperty("issuerConnector") + @NotNull + private final String issuerConnector; + + @JsonProperty("modelVersion") + @NotNull + private final String modelVersion; + + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSzzz") + @NotNull + @JsonProperty("issued") + private final XMLGregorianCalendar issued; + + @JsonProperty("senderAgent") + @NotNull + private final String senderAgent; + + private Header(@NotNull Context context, + @NotNull String id, + @NotNull String type, + @NotNull SecurityToken securityToken, + @NotNull String issuerConnector, + @NotNull String modelVersion, + @NotNull XMLGregorianCalendar issued, + @NotNull String senderAgent) { + this.context = context; + this.id = id; + this.type = type; + this.securityToken = securityToken; + this.issuerConnector = issuerConnector; + this.modelVersion = modelVersion; + this.issued = issued; + this.senderAgent = senderAgent; + } + + public Context getContext() { + return context; + } + + public String getId() { + return id; + } + + public String getType() { + return type; + } + + public SecurityToken getSecurityToken() { + return securityToken; + } + + public String getIssuerConnector() { + return issuerConnector; + } + + public String getModelVersion() { + return modelVersion; + } + + public XMLGregorianCalendar getIssued() { + return issued; + } + + public String getSenderAgent() { + return senderAgent; + } + + public static class Builder { + + private Context context; + private String id; + private String type; + private SecurityToken securityToken; + private String issuerConnector; + private String modelVersion; + private XMLGregorianCalendar issued; + private String senderAgent; + + private Builder() { + } + + public static Builder newInstance() { + return new Builder(); + } + + public Builder context(@NotNull Context context) { + this.context = context; + return this; + } + + public Builder id(@NotNull URI id) { + this.id = id.toString(); + return this; + } + + public Builder type(@NotNull String type) { + this.type = type; + return this; + } + + public Builder securityToken(@NotNull SecurityToken securityToken) { + this.securityToken = securityToken; + return this; + } + + public Builder issuerConnector(@NotNull URI issuerConnector) { + this.issuerConnector = issuerConnector.toString(); + return this; + } + + public Builder modelVersion(@NotNull String modelVersion) { + this.modelVersion = modelVersion; + return this; + } + + public Builder issued(@NotNull XMLGregorianCalendar issued) { + this.issued = issued; + return this; + } + + public Builder senderAgent(@NotNull URI senderAgent) { + this.senderAgent = senderAgent.toString(); + return this; + } + + public 
Header build() { + Objects.requireNonNull(context, "Logging message request header context is null."); + Objects.requireNonNull(id, "Logging message request header id is null."); + Objects.requireNonNull(type, "Logging message request header type is null."); + Objects.requireNonNull(securityToken, "Logging message request header security token is null."); + + Objects.requireNonNull(issuerConnector, "Logging message request header issuer connector is null."); + Objects.requireNonNull(modelVersion, "Logging message request header model version is null."); + Objects.requireNonNull(issued, "Logging message request header issued is null."); + Objects.requireNonNull(senderAgent, "Logging message request header sender agent is null."); + + return new Header(context, id, type, securityToken, issuerConnector, modelVersion, issued, senderAgent); + } + } +} + diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/SecurityToken.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/SecurityToken.java new file mode 100644 index 00000000..9f341540 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/SecurityToken.java @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.clearinghouse; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; +import java.util.Objects; + +public class SecurityToken { + + @JsonProperty("@type") + @NotNull + private final String type; + + @JsonProperty("@id") + @NotNull + private final String id; + + @JsonProperty("tokenFormat") + @NotNull + private final TokenFormat tokenFormat; + + @JsonProperty("tokenValue") + @NotNull + private final String tokenValue; + + private SecurityToken(@NotNull String type, + @NotNull String id, + @NotNull TokenFormat tokenFormat, + @NotNull String tokenValue) { + this.type = type; + this.id = id; + this.tokenFormat = tokenFormat; + this.tokenValue = tokenValue; + } + + public String getType() { + return type; + } + + public String getId() { + return id; + } + + public TokenFormat getTokenFormat() { + return tokenFormat; + } + + public String getTokenValue() { + return tokenValue; + } + + public static class Builder { + + private String type; + private String id; + private TokenFormat tokenFormat; + private String tokenValue; + + private Builder() { + } + + public static Builder newInstance() { + return new Builder(); + } + + public Builder type(@NotNull String type) { + this.type = type; + return this; + } + + public Builder id(@NotNull URI id) { + this.id = id.toString(); + return this; + } + + public Builder tokenFormat(@NotNull TokenFormat tokenFormat) { + this.tokenFormat = tokenFormat; + return this; + } + + public Builder tokenValue(@NotNull String tokenValue) { + this.tokenValue = tokenValue; + return this; + } + + public SecurityToken build() { + Objects.requireNonNull(type, "Security token type is null."); + Objects.requireNonNull(id, "Security token id is null."); + Objects.requireNonNull(tokenFormat, "Security token 
tokenFormat is null."); + Objects.requireNonNull(tokenValue, "Security token tokenValue is null."); + + return new SecurityToken(type, id, tokenFormat, tokenValue); + } + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/TokenFormat.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/TokenFormat.java new file mode 100644 index 00000000..13fd596d --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/clearinghouse/TokenFormat.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.clearinghouse; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; + +public class TokenFormat { + + @JsonProperty("@id") + @NotNull + private final String id; + + public TokenFormat(@NotNull URI id) { + this.id = id.toString(); + } + + public String getId() { + return id; + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/Context.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/Context.java new file mode 100644 index 00000000..d0bd908a --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/Context.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.ids; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +public class Context { + @JsonProperty("ids") + @NotNull + private String ids; + + @JsonProperty("idsc") + @NotNull + private String idsc; + + public String getIds() { + return ids; + } + + public String getIdsc() { + return idsc; + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/Message.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/Message.java new file mode 100644 index 00000000..c363fbc3 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/Message.java @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.ids; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; 
+ +import javax.xml.datatype.XMLGregorianCalendar; +import java.net.URI; +import java.util.List; + +public class Message { + + @JsonProperty("@context") + @NotNull + private Context context; + + @JsonProperty("@id") + @NotNull + private URI id; + + @JsonProperty("@type") + @NotNull + private String type; + + @JsonProperty("ids:securityToken") + @NotNull + private SecurityToken securityToken; + + @JsonProperty("ids:issuerConnector") + @NotNull + private URI issuerConnector; + + @JsonProperty("ids:recipientConnector") + @NotNull + private List<URI> recipientConnector; + + @JsonProperty("ids:modelVersion") + @NotNull + private String modelVersion; + + @JsonProperty("ids:issued") + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS") + @NotNull + private XMLGregorianCalendar issued; + + @JsonProperty("ids:senderAgent") + @NotNull + private URI senderAgent; + + @JsonProperty("ids:correlationMessage") + private Message correlationMessage; + + public Message() { + } + + public Message(URI id) { + this.id = id; + } + + public URI getId() { + return id; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public URI getIssuerConnector() { + return issuerConnector; + } + + public void setIssuerConnector(URI issuerConnector) { + this.issuerConnector = issuerConnector; + } + + public List<URI> getRecipientConnector() { + return recipientConnector; + } + + public void setRecipientConnector(List<URI> recipientConnector) { + this.recipientConnector = recipientConnector; + } + + public String getModelVersion() { + return modelVersion; + } + + public void setModelVersion(String modelVersion) { + this.modelVersion = modelVersion; + } + + public XMLGregorianCalendar getIssued() { + return issued; + } + + public void setIssued(XMLGregorianCalendar issued) { + this.issued = issued; + } + + public SecurityToken getSecurityToken() { + return securityToken; + } + + public void setSecurityToken(SecurityToken securityToken) { + this.securityToken = securityToken; + } + + public URI getSenderAgent() { + return senderAgent; + } + + public void setSenderAgent(URI senderAgent) { + this.senderAgent = senderAgent; + } + + public Context getContext() { + return context; + } + + public void setContext(Context context) { + this.context = context; + } + + public Message getCorrelationMessage() { + return correlationMessage; + } + + public void setCorrelationMessage(Message correlationMessage) { + this.correlationMessage = correlationMessage; + } +}
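The ids `Message` mirrors the JSON-LD multipart header, which is why it stays mutable with Jackson-friendly setters. A population sketch (not part of the patch; all URIs and the model version are placeholder values):

```java
import java.net.URI;

import de.truzzt.clearinghouse.edc.types.ids.Message;

class MessageSketch {
    Message logMessageHeader() {
        // In production this header is parsed from the incoming multipart body.
        var message = new Message(URI.create("https://w3id.org/idsa/autogen/logMessage/1"));
        message.setType("ids:LogMessage");
        message.setModelVersion("4.1.0");
        message.setIssuerConnector(URI.create("https://example.com/connector"));
        message.setSenderAgent(URI.create("https://example.com/agent"));
        return message;
    }
}
```

diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/RejectionMessage.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/RejectionMessage.java new file mode 100644 index 00000000..f6a77ca8 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/RejectionMessage.java @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.ids; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; + +public class RejectionMessage extends Message 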
{ + + @JsonProperty("ids:rejectionReason") + @NotNull + private RejectionReason rejectionReason; + + public RejectionMessage() { + } + + public RejectionMessage(@NotNull URI id) { + super(id); + } + + public RejectionReason getRejectionReason() { + return rejectionReason; + } + + public void setRejectionReason(@NotNull RejectionReason rejectionReason) { + this.rejectionReason = rejectionReason; + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/RejectionReason.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/RejectionReason.java new file mode 100644 index 00000000..506998bb --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/RejectionReason.java @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.ids; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeName; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; + +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonTypeName("ids:RejectionReason") +public class RejectionReason { + + @JsonProperty("@id") + @NotNull + private URI id; + + public RejectionReason() { + } + + public RejectionReason(@NotNull URI id) { + this.id = id; + } + + public static final RejectionReason BAD_PARAMETERS = + new RejectionReason(URI.create("https://w3id.org/idsa/code/BAD_PARAMETERS")); + + public static final RejectionReason MALFORMED_MESSAGE = + new RejectionReason(URI.create("https://w3id.org/idsa/code/MALFORMED_MESSAGE")); + + public static final RejectionReason MESSAGE_TYPE_NOT_SUPPORTED = + new RejectionReason(URI.create("https://w3id.org/idsa/code/MESSAGE_TYPE_NOT_SUPPORTED")); + + public static final RejectionReason NOT_AUTHENTICATED = + new RejectionReason(URI.create("https://w3id.org/idsa/code/NOT_AUTHENTICATED")); + + public static final RejectionReason INTERNAL_RECIPIENT_ERROR = + new RejectionReason(URI.create("https://w3id.org/idsa/code/INTERNAL_RECIPIENT_ERROR")); + + public static final RejectionReason METHOD_NOT_SUPPORTED = + new RejectionReason(URI.create("https://w3id.org/idsa/code/METHOD_NOT_SUPPORTED")); + + public static final RejectionReason NOT_AUTHORIZED = + new RejectionReason(URI.create("https://w3id.org/idsa/code/NOT_AUTHORIZED")); + + public static final RejectionReason NOT_FOUND = + new RejectionReason(URI.create("https://w3id.org/idsa/code/NOT_FOUND")); + + public static final RejectionReason TEMPORARILY_NOT_AVAILABLE = + new RejectionReason(URI.create("https://w3id.org/idsa/code/TEMPORARILY_NOT_AVAILABLE")); + + public static final RejectionReason TOO_MANY_RESULTS = + new RejectionReason(URI.create("https://w3id.org/idsa/code/TOO_MANY_RESULTS")); + + public static final RejectionReason VERSION_NOT_SUPPORTED = + new RejectionReason(URI.create("https://w3id.org/idsa/code/VERSION_NOT_SUPPORTED")); + + public URI getId() { + return id; + } +}
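When no handler can process a message, the connector answers with one of the `RejectionReason` constants above. A reply sketch (not part of the patch; the message id is supplied by the caller):

```java
import java.net.URI;

import de.truzzt.clearinghouse.edc.types.ids.RejectionMessage;
import de.truzzt.clearinghouse.edc.types.ids.RejectionReason;

class RejectionSketch {
    RejectionMessage unsupported(URI messageId) {
        // Reply used when no handler accepts the incoming ids message type.
        var rejection = new RejectionMessage(messageId);
        rejection.setRejectionReason(RejectionReason.MESSAGE_TYPE_NOT_SUPPORTED);
        return rejection;
    }
}
```

diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/SecurityToken.java 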
b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/SecurityToken.java new file mode 100644 index 00000000..c659df6e --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/SecurityToken.java @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.ids; + +import com.fasterxml.jackson.annotation.JsonProperty; +import de.truzzt.clearinghouse.edc.types.util.VocabUtil; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; + +public class SecurityToken { + + @JsonProperty("@id") + @NotNull + private URI id; + + @JsonProperty("@type") + @NotNull + private String type; + + @JsonProperty("ids:tokenFormat") + @NotNull + private TokenFormat tokenFormat; + + @JsonProperty("ids:tokenValue") + @NotNull + private String tokenValue; + + public SecurityToken() { + id = VocabUtil.createRandomUrl("dynamicAttributeToken"); + } + + public URI getId() { + return id; + } + public void setId(URI id) { + this.id = id; + } + + public String getType() { + return type; + } + public void setType(String type) { + this.type = type; + } + + public TokenFormat getTokenFormat() { + return tokenFormat; + } + public void setTokenFormat(TokenFormat tokenFormat) { + this.tokenFormat = tokenFormat; + } + + public String getTokenValue() { + return tokenValue; + } + public void setTokenValue(String tokenValue) { + this.tokenValue = tokenValue; + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/TokenFormat.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/TokenFormat.java new file mode 100644 index 00000000..1bb06024 --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/ids/TokenFormat.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.ids; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.jetbrains.annotations.NotNull; + +import java.net.URI; + +public class TokenFormat { + + public static final String JWT_TOKEN_FORMAT_IDS = "idsc:JWT"; + public static final String JWT_TOKEN_FORMAT_DSP = "https://w3id.org/idsa/code/JWT"; + + @JsonProperty("@id") + @NotNull + private URI id; + + public URI getId() { + return id; + } + + public static boolean isValid(String id) { + return id.equals(JWT_TOKEN_FORMAT_IDS) || id.equals(JWT_TOKEN_FORMAT_DSP); + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/util/VocabUtil.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/util/VocabUtil.java new file mode 100644 index 00000000..8d187601 --- /dev/null +++ 
b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/types/util/VocabUtil.java @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.types.util; + +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.UUID; + +public class VocabUtil { + public static String randomUrlBase; + + public static URI createRandomUrl(String path) { + try { + if (randomUrlBase != null) { + if (!randomUrlBase.endsWith("/")) { + randomUrlBase = randomUrlBase + "/"; + } + + return (new URL(randomUrlBase + path + "/" + UUID.randomUUID())).toURI(); + } else { + return (new URL("https", "w3id.org", "/idsa/autogen/" + path + "/" + UUID.randomUUID())).toURI(); + } + } catch (URISyntaxException | MalformedURLException var3) { + throw new RuntimeException(var3); + } + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/util/ResponseUtil.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/util/ResponseUtil.java new file mode 100644 index 00000000..0403b3bf --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/util/ResponseUtil.java @@ -0,0 +1,163 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.util; + +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import de.truzzt.clearinghouse.edc.types.ids.Message; +import de.truzzt.clearinghouse.edc.types.ids.RejectionMessage; +import de.truzzt.clearinghouse.edc.types.ids.RejectionReason; +import jakarta.ws.rs.core.MediaType; +import org.eclipse.edc.protocol.ids.spi.domain.IdsConstants; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.eclipse.edc.protocol.ids.spi.types.IdsType; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataMultiPart; + +import javax.xml.datatype.DatatypeConfigurationException; +import javax.xml.datatype.DatatypeFactory; +import javax.xml.datatype.XMLGregorianCalendar; +import java.net.URI; +import java.time.ZonedDateTime; +import java.util.GregorianCalendar; +import java.util.UUID; + +public class ResponseUtil { + + private static final String PROCESSED_NOTIFICATION_TYPE = "ids:MessageProcessedNotificationMessage"; + + public static FormDataMultiPart createFormDataMultiPart(TypeManagerUtil typeManagerUtil, + String headerName, + Message headerValue, + String payloadName, + Object payloadValue) { + var multiPart = createFormDataMultiPart(typeManagerUtil, headerName, headerValue); + + if (payloadValue != null) { + multiPart.bodyPart(new 
FormDataBodyPart(payloadName, typeManagerUtil.toJson(payloadValue), MediaType.APPLICATION_JSON_TYPE)); + } + + return multiPart; + } + + public static FormDataMultiPart createFormDataMultiPart(TypeManagerUtil typeManagerUtil, String headerName, Message headerValue) { + var multiPart = new FormDataMultiPart(); + + if (headerValue != null) { + multiPart.bodyPart(new FormDataBodyPart(headerName, typeManagerUtil.toJson(headerValue), MediaType.APPLICATION_JSON_TYPE)); + } + return multiPart; + } + + public static HandlerResponse createMultipartResponse(@NotNull Message header, @NotNull Object payload) { + return HandlerResponse.Builder.newInstance() + .header(header) + .payload(payload) + .build(); + } + + public static Message messageProcessedNotification(@NotNull Message correlationMessage, + @NotNull IdsId connectorId) { + var messageId = getMessageId(); + + Message message = new Message(messageId); + message.setContext(correlationMessage.getContext()); + message.setType(PROCESSED_NOTIFICATION_TYPE); + message.setSecurityToken(correlationMessage.getSecurityToken()); + message.setIssuerConnector(connectorId.toUri()); + message.setModelVersion(IdsConstants.INFORMATION_MODEL_VERSION); + message.setIssued(gregorianNow()); + message.setSenderAgent(connectorId.toUri()); + + return message; + } + + @NotNull + public static RejectionMessage notAuthenticated(@NotNull Message correlationMessage, + @NotNull IdsId connectorId) { + RejectionMessage rejectionMessage = createRejectionMessage(correlationMessage, connectorId); + rejectionMessage.setRejectionReason(RejectionReason.NOT_AUTHENTICATED); + + return rejectionMessage; + } + + @NotNull + public static RejectionMessage malformedMessage(@Nullable Message correlationMessage, + @NotNull IdsId connectorId) { + RejectionMessage rejectionMessage = createRejectionMessage(correlationMessage, connectorId); + rejectionMessage.setRejectionReason(RejectionReason.MALFORMED_MESSAGE); + + return rejectionMessage; + } + + @NotNull + public static RejectionMessage messageTypeNotSupported(@NotNull Message correlationMessage, + @NotNull IdsId connectorId) { + RejectionMessage rejectionMessage = createRejectionMessage(correlationMessage, connectorId); + rejectionMessage.setRejectionReason(RejectionReason.MESSAGE_TYPE_NOT_SUPPORTED); + + return rejectionMessage; + } + + @NotNull + public static RejectionMessage internalRecipientError(@NotNull Message correlationMessage, + @NotNull IdsId connectorId) { + RejectionMessage rejectionMessage = createRejectionMessage(correlationMessage, connectorId); + rejectionMessage.setRejectionReason(RejectionReason.INTERNAL_RECIPIENT_ERROR); + + return rejectionMessage; + } + + @NotNull + private static RejectionMessage createRejectionMessage(@Nullable Message correlationMessage, + @NotNull IdsId connectorId) { + var messageId = getMessageId(); + + var rejectionMessage = new RejectionMessage(messageId); + rejectionMessage.setModelVersion(IdsConstants.INFORMATION_MODEL_VERSION); + rejectionMessage.setIssued(gregorianNow()); + rejectionMessage.setIssuerConnector(connectorId.toUri()); + rejectionMessage.setSenderAgent(connectorId.toUri()); + rejectionMessage.setCorrelationMessage(correlationMessage); + + return rejectionMessage; + } + + @NotNull + public static RejectionMessage createRejectionMessage(@NotNull RejectionReason reason, + @Nullable Message correlationMessage, + @NotNull IdsId connectorId) { + RejectionMessage rejectionMessage = createRejectionMessage(correlationMessage, connectorId); + 
rejectionMessage.setRejectionReason(reason); + + return rejectionMessage; + } + + private static URI getMessageId() { + return IdsId.Builder.newInstance().value(UUID.randomUUID().toString()).type(IdsType.MESSAGE).build().toUri(); + } + + private static XMLGregorianCalendar gregorianNow() { + try { + GregorianCalendar gregorianCalendar = GregorianCalendar.from(ZonedDateTime.now()); + return DatatypeFactory.newInstance().newXMLGregorianCalendar(gregorianCalendar); + } catch (DatatypeConfigurationException e) { + throw new RuntimeException(e); + } + } +} diff --git a/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/util/SettingsConstants.java b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/util/SettingsConstants.java new file mode 100644 index 00000000..79b75ece --- /dev/null +++ b/clearing-house-edc/core/src/main/java/de/truzzt/clearinghouse/edc/util/SettingsConstants.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2023 truzzt GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - initial implementation + * + */ +package de.truzzt.clearinghouse.edc.util; + +public class SettingsConstants { + + public static final String JWT_AUDIENCE_SETTING = "truzzt.clearinghouse.jwt.audience"; + public static final String JWT_AUDIENCE_DEFAULT_VALUE = "1"; + + public static final String JWT_ISSUER_SETTING = "truzzt.clearinghouse.jwt.issuer"; + public static final String JWT_ISSUER_DEFAULT_VALUE = "1"; + + public static final String JWT_SIGN_SECRET_SETTING = "truzzt.clearinghouse.jwt.sign.secret"; + public static final String JWT_SIGN_SECRET_DEFAULT_VALUE = "123"; + + public static final String JWT_EXPIRES_AT_SETTING = "truzzt.clearinghouse.jwt.expires.at"; + public static final String JWT_EXPIRES_AT_DEFAULT_VALUE = "30"; + + public static final String APP_BASE_URL_SETTING = "truzzt.clearinghouse.app.base.url"; + public static final String APP_BASE_URL_DEFAULT_VALUE = "http://localhost:8000"; + +}
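The `ResponseUtil` helpers and the settings above come together whenever a handler builds its multipart reply. A sketch (not part of the patch; the connector id value is illustrative, and `IdsType.CONNECTOR` is assumed from the EDC ids-spi):

```java
import de.truzzt.clearinghouse.edc.dto.HandlerResponse;
import de.truzzt.clearinghouse.edc.types.ids.Message;
import org.eclipse.edc.protocol.ids.spi.types.IdsId;
import org.eclipse.edc.protocol.ids.spi.types.IdsType;

import static de.truzzt.clearinghouse.edc.util.ResponseUtil.createMultipartResponse;
import static de.truzzt.clearinghouse.edc.util.ResponseUtil.messageProcessedNotification;

class ReplySketch {
    HandlerResponse reply(Message incoming, Object appResponse) {
        // "clearing-house" is a made-up connector id; real deployments derive it from configuration.
        var connectorId = IdsId.Builder.newInstance().type(IdsType.CONNECTOR).value("clearing-house").build();
        return createMultipartResponse(messageProcessedNotification(incoming, connectorId), appResponse);
    }
}
```

diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/app/AppSenderTest.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/app/AppSenderTest.java new file mode 100644 index 00000000..d25e793d --- /dev/null +++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/app/AppSenderTest.java @@ -0,0 +1,105 @@ +package de.truzzt.clearinghouse.edc.app; + +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.tests.TestUtils; +import de.truzzt.clearinghouse.edc.app.delegate.LoggingMessageDelegate; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import okhttp3.Request; +import okhttp3.ResponseBody; +import org.eclipse.edc.spi.EdcException; +import org.eclipse.edc.spi.http.EdcHttpClient; +import org.eclipse.edc.spi.monitor.Monitor; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import java.io.IOException; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; + +public class AppSenderTest { + + private 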
AppSender sender; + @Mock + private Monitor monitor; + @Mock + private TypeManagerUtil typeManagerUtil; + @Mock + private LoggingMessageDelegate senderDelegate; + @Mock + private EdcHttpClient httpClient; + + private final ObjectMapper mapper = new ObjectMapper(); + + @BeforeEach + public void setUp() { + MockitoAnnotations.openMocks(this); + senderDelegate = spy(new LoggingMessageDelegate(typeManagerUtil)); + sender = new AppSender(monitor, httpClient ,typeManagerUtil); + } + + @Test + public void sendSuccessful() throws IOException { + + doReturn(TestUtils.getValidHandlerRequest(mapper).toString()) + .when(typeManagerUtil).toJson(any(Object.class)); + doReturn(TestUtils.getValidResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl())) + .when(httpClient).execute(any(Request.class)); + doReturn(TestUtils.getValidLoggingMessageResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl(), mapper)) + .when(senderDelegate).parseResponseBody(any(ResponseBody.class)); + + var response = sender.send(TestUtils.getValidAppSenderRequest(mapper), senderDelegate); + + assertNotNull(response); + } + + @Test + public void sendWithHttpRequestError() throws IOException { + + doReturn(TestUtils.getValidHandlerRequest(mapper).toString()) + .when(typeManagerUtil).toJson(any(Object.class)); + + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> + sender.send(TestUtils.getInvalidUrlAppSenderRequest(mapper), senderDelegate)); + + assertEquals("Expected URL scheme 'http' or 'https'", exception.getMessage().substring(0,37)); + } + + @Test + public void sendWithUnsuccessfulResponseBodyError() throws IOException { + + doReturn(TestUtils.getValidHandlerRequest(mapper).toString()) + .when(typeManagerUtil).toJson(any(Object.class)); + doReturn(TestUtils.getUnsuccessfulResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl())) + .when(httpClient).execute(any(Request.class)); + doReturn(TestUtils.getValidLoggingMessageResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl(), mapper)) + .when(senderDelegate).parseResponseBody(any(ResponseBody.class)); + + EdcException exception = assertThrows(EdcException.class, () -> + sender.send(TestUtils.getValidAppSenderRequest(mapper), senderDelegate)); + + assertEquals("Received an error from Clearing House App. 
Status: 401, message: Unauthorized", exception.getMessage()); + } + + @Test + public void sendWithNullResponseBodyError() throws IOException { + + doReturn(TestUtils.getValidHandlerRequest(mapper).toString()) + .when(typeManagerUtil).toJson(any(Object.class)); + doReturn(TestUtils.getResponseWithoutBody(TestUtils.getValidAppSenderRequest(mapper).getUrl())) + .when(httpClient).execute(any(Request.class)); + doReturn(TestUtils.getValidLoggingMessageResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl(), mapper)) + .when(senderDelegate).parseResponseBody(any(ResponseBody.class)); + + EdcException exception = assertThrows(EdcException.class, () -> + sender.send(TestUtils.getValidAppSenderRequest(mapper), senderDelegate)); + + assertEquals("Error reading Clearing House App response body", exception.getMessage()); + } +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/app/delegate/LoggingMessageDelegateTest.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/app/delegate/LoggingMessageDelegateTest.java new file mode 100644 index 00000000..9e927111 --- /dev/null +++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/app/delegate/LoggingMessageDelegateTest.java @@ -0,0 +1,68 @@ +package de.truzzt.clearinghouse.edc.app.delegate; + +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.tests.TestUtils; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.LoggingMessageRequest; +import de.truzzt.clearinghouse.edc.dto.LoggingMessageResponse; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import okhttp3.ResponseBody; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; + +class LoggingMessageDelegateTest { + + @Mock + private TypeManagerUtil typeManagerUtil; + @Mock + private LoggingMessageDelegate senderDelegate; + + private final ObjectMapper mapper = new ObjectMapper(); + + @BeforeEach + public void setUp() { + MockitoAnnotations.openMocks(this); + senderDelegate = spy(new LoggingMessageDelegate(typeManagerUtil)); + } + + @Test + public void successfulBuildRequestUrl() { + + HandlerRequest request = TestUtils.getValidHandlerRequest(mapper); + + String response = senderDelegate.buildRequestUrl(TestUtils.TEST_BASE_URL, request); + + assertNotNull(response); + assertEquals(response, "http://localhost:8000/messages/log/" +request.getPid()); + } + + @Test + public void successfulBuildRequestBody() { + + HandlerRequest request = TestUtils.getValidHandlerRequest(mapper); + + LoggingMessageRequest response = senderDelegate.buildRequestBody(request); + + assertNotNull(response); + } + + @Test + public void successfulParseResponseBody() { + + ResponseBody body = TestUtils.getValidResponseBody(); + doReturn(TestUtils.getValidLoggingMessageResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl(), mapper)) + .when(senderDelegate).parseResponseBody(any(ResponseBody.class)); + + LoggingMessageResponse response = senderDelegate.parseResponseBody(body); + + assertNotNull(response); + } +} \ No newline at end of file diff --git 
diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/handler/LogMessageHandlerTest.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/handler/LogMessageHandlerTest.java new file mode 100644 index 00000000..9c71f4ff --- /dev/null +++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/handler/LogMessageHandlerTest.java @@ -0,0 +1,111 @@ +package de.truzzt.clearinghouse.edc.handler; + +import com.auth0.jwt.JWT; +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.tests.TestUtils; +import de.truzzt.clearinghouse.edc.app.AppSender; +import de.truzzt.clearinghouse.edc.app.delegate.LoggingMessageDelegate; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import de.truzzt.clearinghouse.edc.types.ids.SecurityToken; +import okhttp3.ResponseBody; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.eclipse.edc.spi.EdcException; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.APP_BASE_URL_DEFAULT_VALUE; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; + +class LogMessageHandlerTest { + @Mock + private IdsId connectorId; + @Mock + private TypeManagerUtil typeManagerUtil; + @Mock + private AppSender appSender; + @Mock + private ServiceExtensionContext context; + @Mock + private LogMessageHandler logMessageHandler; + @Mock + private LoggingMessageDelegate senderDelegate; + + private final ObjectMapper mapper = new ObjectMapper(); + + @BeforeEach + public void setUp() { + MockitoAnnotations.openMocks(this); + senderDelegate = spy(new LoggingMessageDelegate(typeManagerUtil)); + logMessageHandler = spy(new LogMessageHandler(connectorId, typeManagerUtil, appSender, context)); + } + + @Test + public void successfulCanHandle() { + + HandlerRequest request = TestUtils.getValidHandlerRequest(mapper); + + Boolean response = logMessageHandler.canHandle(request); + + assertNotNull(response); + assertEquals(true, response); + } + + @Test + public void invalidMessageTypeCanHandle() { + + HandlerRequest request = TestUtils.getInvalidHandlerRequest(mapper); + + Boolean response = logMessageHandler.canHandle(request); + + assertNotNull(response); + assertEquals(false, response); + } + + @Test + public void successfulHandleRequest() { + HandlerRequest request = TestUtils.getValidHandlerRequest(mapper); + doReturn(JWT.create().toString()) + .when(logMessageHandler).buildJWTToken(any(SecurityToken.class), any(ServiceExtensionContext.class)); + doReturn(TestUtils.getValidLoggingMessageResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl(), mapper)) + .when(senderDelegate).parseResponseBody(any(ResponseBody.class)); + doReturn(APP_BASE_URL_DEFAULT_VALUE + "/headers/log/" + request.getPid()) + .when(senderDelegate) + .buildRequestUrl(any(String.class), any(HandlerRequest.class)); + doReturn(TestUtils.getValidLoggingMessageRequest(request)) + .when(senderDelegate).buildRequestBody(any(HandlerRequest.class)); + + HandlerResponse response = logMessageHandler.handleRequest(request); +
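+ // The handler is expected to wrap the Clearing House App reply in an ids:MessageProcessedNotificationMessage header, which the assertions below verify.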
+ assertNotNull(response); + assertEquals("ids:MessageProcessedNotificationMessage", response.getHeader().getType()); + } + + @Test + public void missingReferringConnectorBuildJwtToken() { + EdcException exception = assertThrows(EdcException.class, () -> logMessageHandler.buildJWTToken( + TestUtils.getInvalidTokenHandlerRequest(mapper) + .getHeader() + .getSecurityToken(), context)); + + assertEquals("JWT Token referringConnector is missing", exception.getMessage()); + } + @Test + public void successfulBuildJwtToken() { + doReturn("1").when(context).getSetting(anyString(), anyString()); + var response = logMessageHandler.buildJWTToken( + TestUtils.getValidHandlerRequest(mapper) + .getHeader() + .getSecurityToken(), context); + + assertNotNull(response); + } +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/handler/RequestMessageHandlerTest.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/handler/RequestMessageHandlerTest.java new file mode 100644 index 00000000..eac9ab1b --- /dev/null +++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/handler/RequestMessageHandlerTest.java @@ -0,0 +1,114 @@ +package de.truzzt.clearinghouse.edc.handler; + +import com.auth0.jwt.JWT; +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.app.AppSender; +import de.truzzt.clearinghouse.edc.app.delegate.CreateProcessDelegate; +import de.truzzt.clearinghouse.edc.app.delegate.LoggingMessageDelegate; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.tests.TestUtils; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import de.truzzt.clearinghouse.edc.types.ids.SecurityToken; +import okhttp3.ResponseBody; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.eclipse.edc.spi.EdcException; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import static de.truzzt.clearinghouse.edc.util.SettingsConstants.APP_BASE_URL_DEFAULT_VALUE; +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; + +class RequestMessageHandlerTest { + + @Mock + private IdsId connectorId; + @Mock + private TypeManagerUtil typeManagerUtil; + @Mock + private AppSender appSender; + @Mock + private ServiceExtensionContext context; + + @Mock + private CreateProcessDelegate createProcessDelegate; + @Mock + private RequestMessageHandler requestMessageHandler; + + private final ObjectMapper mapper = new ObjectMapper(); + + @BeforeEach + public void setUp() { + MockitoAnnotations.openMocks(this); + requestMessageHandler = spy(new RequestMessageHandler(connectorId, typeManagerUtil, appSender, context)); + } + @Test + void successfulCanHandle() { + HandlerRequest request = TestUtils.getValidHandlerCreateProcessRequest(mapper); + + Boolean response = requestMessageHandler.canHandle(request); + + assertNotNull(response); + assertEquals(true, response); + } + + @Test + public void invalidMessageTypeCanHandle() { + + HandlerRequest request = TestUtils.getInvalidHandlerRequest(mapper); + + Boolean response = requestMessageHandler.canHandle(request); + + assertNotNull(response);
+ assertEquals(false, response); + } + + @Test + public void successfulHandleRequest() { + HandlerRequest request = TestUtils.getValidHandlerRequest(mapper); + doReturn(JWT.create().toString()) + .when(requestMessageHandler).buildJWTToken(any(SecurityToken.class), any(ServiceExtensionContext.class)); + + doReturn(TestUtils.getValidCreateProcessResponse(TestUtils.getValidAppSenderRequest(mapper).getUrl(), mapper)) + .when(createProcessDelegate).parseResponseBody(any(ResponseBody.class)); + + doReturn(APP_BASE_URL_DEFAULT_VALUE + "/process/" + request.getPid()) + .when(createProcessDelegate) + .buildRequestUrl(any(String.class), any(HandlerRequest.class)); + + doReturn(TestUtils.getValidCreateProcessRequest(request)) + .when(createProcessDelegate).buildRequestBody(any(HandlerRequest.class)); + + HandlerResponse response = requestMessageHandler.handleRequest(request); + + assertNotNull(response); + assertEquals("ids:MessageProcessedNotificationMessage", response.getHeader().getType()); + } + + @Test + public void missingReferringConnectorBuildJwtToken() { + EdcException exception = assertThrows(EdcException.class, () -> requestMessageHandler.buildJWTToken( + TestUtils.getInvalidTokenHandlerRequest(mapper) + .getHeader() + .getSecurityToken(), context)); + + assertEquals("JWT Token referringConnector is missing", exception.getMessage()); + } + @Test + public void successfulBuildJwtToken() { + doReturn("1").when(context).getSetting(anyString(), anyString()); + var response = requestMessageHandler.buildJWTToken( + TestUtils.getValidHandlerRequest(mapper) + .getHeader() + .getSecurityToken(), context); + + assertNotNull(response); + } +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/tests/TestUtils.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/tests/TestUtils.java new file mode 100644 index 00000000..44e236d4 --- /dev/null +++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/tests/TestUtils.java @@ -0,0 +1,225 @@ +package de.truzzt.clearinghouse.edc.tests; + +import com.auth0.jwt.JWT; +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.dto.AppSenderRequest; +import de.truzzt.clearinghouse.edc.dto.CreateProcessRequest; +import de.truzzt.clearinghouse.edc.dto.CreateProcessResponse; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.LoggingMessageRequest; +import de.truzzt.clearinghouse.edc.dto.LoggingMessageResponse; +import de.truzzt.clearinghouse.edc.types.clearinghouse.Context; +import de.truzzt.clearinghouse.edc.types.clearinghouse.Header; +import de.truzzt.clearinghouse.edc.types.clearinghouse.SecurityToken; +import de.truzzt.clearinghouse.edc.types.clearinghouse.TokenFormat; +import de.truzzt.clearinghouse.edc.types.ids.Message; +import okhttp3.*; +import org.eclipse.edc.spi.EdcException; + +import java.io.File; +import java.io.IOException; +import java.util.UUID; + +public class TestUtils extends BaseTestUtils { + + public static final String TEST_BASE_URL = "http://localhost:8000"; + + private static final String TEST_PAYLOAD = "Hello World"; + private static final String TEST_CREATE_PROCESS_PAYLOAD = "{ \"owners\": [\"1\", \"2\"]}"; + private static final String VALID_HEADER_JSON = "headers/valid-header.json"; + private static final String VALID_CREATE_PROCESS_HEADER_JSON = "headers/valid-create-process-header.json"; + private static final String INVALID_HEADER_JSON = "headers/invalid-header.json";
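+ // Fixtures under core/src/test/resources/headers: the invalid variants omit the @type field, use an unsupported message type, or carry a token without the referringConnector claim.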
+ private static final String INVALID_TYPE_HEADER_JSON = "headers/invalid-type.json"; + private static final String INVALID_TOKEN_HEADER_JSON = "headers/invalid-token.json"; + + public static Message getValidHeader(ObjectMapper mapper) { + return parseFile(mapper, Message.class, VALID_HEADER_JSON); + } + + public static Message getValidCreateProcessHeader(ObjectMapper mapper) { + return parseFile(mapper, Message.class, VALID_CREATE_PROCESS_HEADER_JSON); + } + + public static Message getInvalidTokenHeader(ObjectMapper mapper) { + return parseFile(mapper, Message.class, INVALID_TOKEN_HEADER_JSON); + } + + public static Message getInvalidTypeHeader(ObjectMapper mapper) { + return parseFile(mapper, Message.class, INVALID_TYPE_HEADER_JSON); + } + + public static Response getValidResponse(String url) { + + Request mockRequest = new Request.Builder() + .url(url) + .build(); + ResponseBody body = getValidResponseBody(); + + Headers headers = new Headers.Builder().add("Test", "Test").build(); + + return new Response(mockRequest, Protocol.HTTP_2, "", 200, null, + headers, body, null, null, + null, 1000L, 1000L, null); + } + + public static Response getResponseWithoutBody(String url) { + + Request mockRequest = new Request.Builder() + .url(url) + .build(); + + Headers headers = new Headers.Builder().add("Test", "Test").build(); + + return new Response(mockRequest, Protocol.HTTP_2, "", 200, null, + headers, null, null, null, + null, 1000L, 1000L, null); + } + + public static Response getUnsuccessfulResponse(String url) { + + Request mockRequest = new Request.Builder() + .url(url) + .build(); + ResponseBody body = getValidResponseBody(); + + Headers headers = new Headers.Builder().add("Test", "Test").build(); + + return new Response(mockRequest, Protocol.HTTP_2, "Unauthorized", 401, null, + headers, body, null, null, + null, 1000L, 1000L, null); + } + + public static LoggingMessageResponse getValidLoggingMessageResponse(String url, ObjectMapper mapper) { + try { + return mapper.readValue(getValidResponse(url).body().byteStream(), LoggingMessageResponse.class); + + } catch (IOException e) { + throw new EdcException("Error parsing response", e); + } + } + + public static CreateProcessResponse getValidCreateProcessResponse(String url, ObjectMapper mapper) { + try { + return mapper.readValue(getValidResponse(url).body().byteStream(), CreateProcessResponse.class); + + } catch (IOException e) { + throw new EdcException("Error parsing response", e); + } + } + + public static LoggingMessageRequest getValidLoggingMessageRequest(HandlerRequest handlerRequest) { + + var header = handlerRequest.getHeader(); + + var multipartContext = header.getContext(); + var context = new Context(multipartContext.getIds(), multipartContext.getIdsc()); + + var multipartSecurityToken = header.getSecurityToken(); + var multipartTokenFormat = multipartSecurityToken.getTokenFormat(); + var securityToken = SecurityToken.Builder.newInstance() + .type(multipartSecurityToken.getType()) + .id(multipartSecurityToken.getId()) + .tokenFormat(new TokenFormat(multipartTokenFormat.getId())) + .tokenValue(multipartSecurityToken.getTokenValue()) + .build(); + + var requestHeader = Header.Builder.newInstance() + .context(context) + .id(header.getId()) + .type(header.getType()) + .securityToken(securityToken) + .issuerConnector(header.getIssuerConnector()) + .modelVersion(header.getModelVersion()) + .issued(header.getIssued()) + .senderAgent(header.getSenderAgent()) + .build(); + + return new LoggingMessageRequest(requestHeader, handlerRequest.getPayload()); + } + + public static CreateProcessRequest getValidCreateProcessRequest(HandlerRequest handlerRequest) { + + var header = handlerRequest.getHeader(); + + var multipartContext = header.getContext(); + var context = new Context(multipartContext.getIds(), multipartContext.getIdsc()); + + var multipartSecurityToken = header.getSecurityToken(); + var multipartTokenFormat = multipartSecurityToken.getTokenFormat(); + var securityToken = SecurityToken.Builder.newInstance() + .type(multipartSecurityToken.getType()) + .id(multipartSecurityToken.getId()) + .tokenFormat(new TokenFormat(multipartTokenFormat.getId())) + .tokenValue(multipartSecurityToken.getTokenValue()) + .build(); + + var requestHeader = Header.Builder.newInstance() + .context(context) + .id(header.getId()) + .type(header.getType()) + .securityToken(securityToken) + .issuerConnector(header.getIssuerConnector()) + .modelVersion(header.getModelVersion()) + .issued(header.getIssued()) + .senderAgent(header.getSenderAgent()) + .build(); + + return new CreateProcessRequest(requestHeader, handlerRequest.getPayload()); + } + + public static ResponseBody getValidResponseBody() { + return ResponseBody.create( + MediaType.get("application/json; charset=utf-8"), + "{}" + ); + } + + public static HandlerRequest getValidHandlerRequest(ObjectMapper mapper) { + return HandlerRequest.Builder.newInstance() + .pid(UUID.randomUUID().toString()) + .header(getValidHeader(mapper)) + .payload(TEST_PAYLOAD).build(); + } + + public static HandlerRequest getValidHandlerCreateProcessRequest(ObjectMapper mapper) { + return HandlerRequest.Builder.newInstance() + .pid(UUID.randomUUID().toString()) + .header(getValidCreateProcessHeader(mapper)) + .payload(TEST_PAYLOAD).build(); + } + + public static HandlerRequest getInvalidTokenHandlerRequest(ObjectMapper mapper) { + return HandlerRequest.Builder.newInstance() + .pid(UUID.randomUUID().toString()) + .header(getInvalidTokenHeader(mapper)) + .payload(TEST_PAYLOAD).build(); + } + + public static HandlerRequest getInvalidHandlerRequest(ObjectMapper mapper) { + return HandlerRequest.Builder.newInstance() + .pid(UUID.randomUUID().toString()) + .header(getInvalidTypeHeader(mapper)) + .payload(TEST_PAYLOAD).build(); + } + + public static AppSenderRequest getValidAppSenderRequest(ObjectMapper mapper) { + return new AppSenderRequest(TEST_BASE_URL + "/headers/log/" + UUID.randomUUID(), + JWT.create().toString(), + getValidHandlerRequest(mapper) + ); + } + + public static AppSenderRequest getInvalidUrlAppSenderRequest(ObjectMapper mapper) { + return new AppSenderRequest(UUID.randomUUID().toString(), + JWT.create().toString(), + getValidHandlerRequest(mapper) + ); + } + + public static File getValidHeaderFile() { + return getFile(VALID_HEADER_JSON).toFile(); + } + public static File getInvalidHeaderFile() { + return getFile(INVALID_HEADER_JSON).toFile(); + } +} diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/types/TypeManagerUtilTest.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/types/TypeManagerUtilTest.java new file mode 100644 index 00000000..7fa48ac5 --- /dev/null
+++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/types/TypeManagerUtilTest.java @@ -0,0 +1,86 @@ +package de.truzzt.clearinghouse.edc.types; + +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.tests.TestUtils; +import de.truzzt.clearinghouse.edc.types.ids.Message; +import org.eclipse.edc.spi.EdcException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import java.io.ByteArrayInputStream; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +class TypeManagerUtilTest { + + @Mock + private TypeManagerUtil typeManagerUtil; + + private final ObjectMapper mapper = new ObjectMapper(); + + @BeforeEach + void setUp() { + MockitoAnnotations.openMocks(this); + typeManagerUtil = new TypeManagerUtil(mapper); + } + + @Test + void successfulParse() throws IOException { + InputStream is = new FileInputStream(TestUtils.getValidHeaderFile()); + + Message msg = typeManagerUtil.parse(is, Message.class); + assertNotNull(msg); + assertEquals("ids:LogMessage", msg.getType()); + } + + @Test + void typeErrorParse() { + EdcException exception = + assertThrows(EdcException.class, + () -> typeManagerUtil.parse( + new FileInputStream(TestUtils.getInvalidHeaderFile()), + Message.class) + ); + assertEquals("Error parsing to type class de.truzzt.clearinghouse.edc.types.ids.Message", exception.getMessage()); + } + + @Test + void successfulToJson() throws IOException { + Message msgBefore = mapper.readValue(TestUtils.getValidHeaderFile(), Message.class); + + var json = typeManagerUtil.toJson(msgBefore); + assertNotNull(json); + + InputStream is = new ByteArrayInputStream(json.getBytes()); + Message msgAfter = typeManagerUtil.parse(is, Message.class); + + assertEquals(msgBefore.getType(), msgAfter.getType()); + } + + @Test + void errorConvertingToJson() throws IOException { + + var mockedMapper = mock(ObjectMapper.class); + doThrow(new EdcException("Error converting to JSON")) + .when(mockedMapper).writeValueAsString(anyString()); + + typeManagerUtil = new TypeManagerUtil(mockedMapper); + + EdcException exception = + assertThrows(EdcException.class, + () -> typeManagerUtil.toJson("fadsfsdafd") + ); + + assertEquals("Error converting to JSON",exception.getMessage() ); + } +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/types/ids/util/VocabUtilTest.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/types/ids/util/VocabUtilTest.java new file mode 100644 index 00000000..6e6063ae --- /dev/null +++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/types/ids/util/VocabUtilTest.java @@ -0,0 +1,47 @@ +package de.truzzt.clearinghouse.edc.types.ids.util; + +import de.truzzt.clearinghouse.edc.types.util.VocabUtil; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static 
org.junit.jupiter.api.Assertions.assertThrows; + +class VocabUtilTest { + @Mock + private VocabUtil vocabUtil; + + @BeforeEach + void setUp() { + MockitoAnnotations.openMocks(this); + } + + @Test + void successfulCreateRandomUrl() { + vocabUtil.randomUrlBase = "http://www.test"; + var response = vocabUtil.createRandomUrl("test-successful"); + assertNotNull(response); + assertEquals("http://www.test/test-successful/", response.toString().substring(0,32)); + } + + @Test + void errorInvalidUrlCreateRandomUrl() { + vocabUtil.randomUrlBase = "htt://....."; + RuntimeException exception = assertThrows(RuntimeException.class, () -> + vocabUtil.createRandomUrl("test-successful")); + + assertNotNull(exception); + assertEquals("java.net.MalformedURLException: unknown protocol: htt", exception.getMessage()); + } + + @Test + void successfulNullRandomUrlCreateRandomUrl() { + vocabUtil.randomUrlBase = null; + var response = vocabUtil.createRandomUrl("test-successful"); + assertNotNull(response); + assertEquals("https://w3id.org/idsa/autogen/test-successful/", response.toString().substring(0,46)); + } +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/util/ResponseUtilTest.java b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/util/ResponseUtilTest.java new file mode 100644 index 00000000..8adabf0d --- /dev/null +++ b/clearing-house-edc/core/src/test/java/de/truzzt/clearinghouse/edc/util/ResponseUtilTest.java @@ -0,0 +1,94 @@ +package de.truzzt.clearinghouse.edc.util; + +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.tests.TestUtils; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +class ResponseUtilTest { + + @Mock + private TypeManagerUtil typeManagerUtil; + @Mock + private IdsId connectorId; + + private final ObjectMapper mapper = new ObjectMapper(); + + @BeforeEach + public void setUp() { + MockitoAnnotations.openMocks(this); + typeManagerUtil = new TypeManagerUtil(new ObjectMapper()); + } + + @Test + public void createFormDataMultiPart() { + + var response = ResponseUtil.createFormDataMultiPart(typeManagerUtil, + "Header Name", + TestUtils.getValidHeader(new ObjectMapper()), + "Payload", + "Payload Value" + ); + + assertNotNull(response); + } + + @Test + public void testCreateFormDataMultiPart() { + var response = ResponseUtil.createFormDataMultiPart(typeManagerUtil, + "Header Name", + TestUtils.getValidHeader(mapper) + ); + + assertNotNull(response); + } + + @Test + public void createMultipartResponse() { + var response = ResponseUtil.createMultipartResponse(TestUtils.getValidHeader(mapper), + "Payload Value"); + + assertNotNull(response); + } + + @Test + public void messageProcessedNotification() { + var response = ResponseUtil.messageProcessedNotification(TestUtils.getValidHeader(mapper), connectorId); + + assertNotNull(response); + } + + @Test + public void notAuthenticated() { + var response = ResponseUtil.notAuthenticated(TestUtils.getValidHeader(mapper), connectorId); + + assertNotNull(response); + } + + @Test + public void malformedMessage() { + var response = ResponseUtil.malformedMessage(TestUtils.getValidHeader(mapper), connectorId); + + assertNotNull(response); + } + + @Test + public void 
messageTypeNotSupported() { + var response = ResponseUtil.messageTypeNotSupported(TestUtils.getValidHeader(mapper), connectorId); + + assertNotNull(response); + } + + @Test + public void internalRecipientError() { + var response = ResponseUtil.internalRecipientError(TestUtils.getValidHeader(mapper), connectorId); + + assertNotNull(response); + } +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/resources/headers/invalid-header.json b/clearing-house-edc/core/src/test/resources/headers/invalid-header.json new file mode 100644 index 00000000..58522c01 --- /dev/null +++ b/clearing-house-edc/core/src/test/resources/headers/invalid-header.json @@ -0,0 +1,19 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "senderAgent":"http://example.org", + "modelVersion":"4.1.0", + "issued" : "2021-06-23T17:27:23.566+02:00", + "issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/resources/headers/invalid-token.json b/clearing-house-edc/core/src/test/resources/headers/invalid-token.json new file mode 100644 index 00000000..c3542b3b --- /dev/null +++ b/clearing-house-edc/core/src/test/resources/headers/invalid-token.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:LogMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJAdHlwZSI6ImlkczpEYXRQYXlsb2FkIiwiQGNvbnRleHQiOiJodHRwczovL3czaWQub3JnL2lkc2EvY29udGV4dHMvY29udGV4dC5qc29ubGQiLCJ0cmFuc3BvcnRDZXJ0c1NoYTI1NiI6Ijk3NGU2MzI0ZjEyZjEwOTE2ZjQ2YmZkZWIxOGI4ZGQ2ZGE3OGNjNmE2YTA1NjYwMzFmYTVmMWEzOWVjOGU2MDAifQ.bz-XdCsjNwk8ce-9oHFta2wyojw7m4yplSGUoX1yAWY", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/resources/headers/invalid-type.json b/clearing-house-edc/core/src/test/resources/headers/invalid-type.json new file mode 100644 index 00000000..e96fc8d2 --- /dev/null +++ b/clearing-house-edc/core/src/test/resources/headers/invalid-type.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:otherMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/test/resources/headers/valid-create-process-header.json b/clearing-house-edc/core/src/test/resources/headers/valid-create-process-header.json new file mode 100644 index 00000000..a1cd612f --- /dev/null +++ 
b/clearing-house-edc/core/src/test/resources/headers/valid-create-process-header.json @@ -0,0 +1,21 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:RequestMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} + diff --git a/clearing-house-edc/core/src/test/resources/headers/valid-header.json b/clearing-house-edc/core/src/test/resources/headers/valid-header.json new file mode 100644 index 00000000..85057bbb --- /dev/null +++ b/clearing-house-edc/core/src/test/resources/headers/valid-header.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:LogMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : 
"eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/core/src/testFixtures/java/de/truzzt/clearinghouse/edc/tests/BaseTestUtils.java b/clearing-house-edc/core/src/testFixtures/java/de/truzzt/clearinghouse/edc/tests/BaseTestUtils.java new file mode 100644 index 00000000..d4ac0554 --- /dev/null +++ b/clearing-house-edc/core/src/testFixtures/java/de/truzzt/clearinghouse/edc/tests/BaseTestUtils.java @@ -0,0 +1,80 @@ +package de.truzzt.clearinghouse.edc.tests; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.eclipse.edc.spi.EdcException; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Path; + +public class BaseTestUtils { + + protected static T parseFile(ObjectMapper mapper, Class type, String path) { + + ClassLoader classLoader = BaseTestUtils.class.getClassLoader(); + var jsonResource = classLoader.getResource(path); + + if (jsonResource == null) { + throw new EdcException("Header json file not found: " + path); + } + + URI jsonUrl; + try { + jsonUrl = jsonResource.toURI(); + } catch (URISyntaxException e) { + throw new EdcException("Error finding json file on classpath", e); + } + + Path filePath = Path.of(jsonUrl); + if (!Files.exists(filePath)) { + throw new EdcException("Header json file not found: " + path); + } + + T object; + try { + var jsonContents = Files.readAllBytes(filePath); + object = mapper.readValue(jsonContents, type); + + } catch (IOException e){ + throw new EdcException("Error parsing json file", e); + } + + return object; + } + + protected static Path getFile(String path) { + + ClassLoader classLoader = BaseTestUtils.class.getClassLoader(); + var jsonResource = classLoader.getResource(path); + + if (jsonResource == null) { + throw new EdcException("Header json file not found: " + path); + } + + URI jsonUrl; + try { + jsonUrl = jsonResource.toURI(); + } catch (URISyntaxException e) { + throw new EdcException("Error finding json file on classpath", e); + } + + Path 
+ + Path filePath = Path.of(jsonUrl); + if (!Files.exists(filePath)) { + throw new EdcException("Header json file not found: " + path); + } + + return filePath; + } + + protected static String readFile(String path) { + var file = getFile(path); + + try { + return Files.readString(file); + } catch (IOException e) { + throw new EdcException("Error reading file contents", e); + } + } +} diff --git a/clearing-house-edc/extensions/multipart/build.gradle.kts b/clearing-house-edc/extensions/multipart/build.gradle.kts new file mode 100644 index 00000000..68ed15c9 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/build.gradle.kts @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - Initial implementation + * + */ + +plugins { + `java-library` + `java-test-fixtures` + jacoco +} + +configurations.all { + exclude(group = "de.fraunhofer.iais.eis.ids.infomodel", module = "java") +} + +dependencies { + api(edc.spi.core) + + implementation(project(":core")) + + implementation(edc.ids) + implementation(edc.ids.jsonld.serdes) + implementation(edc.api.management.config) + implementation(libs.jakarta.rsApi) + implementation(libs.jersey.multipart) + + implementation(":infomodel-java-4.1.3") + implementation(":infomodel-util-4.0.4") + + testImplementation(libs.junit.jupiter.api) + testImplementation(libs.mockito.inline) + + testImplementation(testFixtures(project(":core"))) + + testRuntimeOnly(libs.junit.jupiter.engine) +} + +tasks.test { + useJUnitPlatform() +} +tasks.jacocoTestReport { + dependsOn(tasks.test) + reports { + xml.required = true + } +} diff --git a/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/MultipartController.java b/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/MultipartController.java new file mode 100644 index 00000000..d1532c27 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/MultipartController.java @@ -0,0 +1,284 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.multipart; + +import de.fraunhofer.iais.eis.DynamicAttributeTokenBuilder; +import de.truzzt.clearinghouse.edc.handler.Handler; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.multipart.dto.RequestValidationResponse; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import de.truzzt.clearinghouse.edc.types.ids.Message; + +import de.truzzt.clearinghouse.edc.types.ids.RejectionMessage; +import de.truzzt.clearinghouse.edc.types.ids.TokenFormat; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; +
+import jakarta.ws.rs.core.Response; +import org.eclipse.edc.protocol.ids.spi.service.DynamicAttributeTokenService; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.util.string.StringUtils; +import org.glassfish.jersey.media.multipart.FormDataParam; +import org.jetbrains.annotations.NotNull; + +import java.io.InputStream; +import java.util.List; + +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.createFormDataMultiPart; +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.createRejectionMessage; +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.internalRecipientError; +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.malformedMessage; +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.messageTypeNotSupported; +import static de.truzzt.clearinghouse.edc.util.ResponseUtil.notAuthenticated; +import static java.lang.String.format; + +@Consumes({MediaType.MULTIPART_FORM_DATA}) +@Produces({MediaType.MULTIPART_FORM_DATA}) +@Path("/") +public class MultipartController { + + private static final String HEADER = "header"; + private static final String PAYLOAD = "payload"; + private static final String PID = "pid"; + private static final String LOG_ID = "MultipartController"; + + private final Monitor monitor; + private final IdsId connectorId; + private final TypeManagerUtil typeManagerUtil; + private final DynamicAttributeTokenService tokenService; + private final String idsWebhookAddress; + private final List<Handler> multipartHandlers; + + public MultipartController(@NotNull Monitor monitor, + @NotNull IdsId connectorId, + @NotNull TypeManagerUtil typeManagerUtil, + @NotNull DynamicAttributeTokenService tokenService, + @NotNull String idsWebhookAddress, + @NotNull List<Handler> multipartHandlers) { + this.monitor = monitor; + this.connectorId = connectorId; + this.typeManagerUtil = typeManagerUtil; + this.tokenService = tokenService; + this.idsWebhookAddress = idsWebhookAddress; + this.multipartHandlers = multipartHandlers; + } + + @POST + @Path("messages/log/{pid}") + public Response logMessage(@PathParam(PID) String pid, + @FormDataParam(HEADER) InputStream headerInputStream, + @FormDataParam(PAYLOAD) String payload) { + var response = validaRequest(pid, headerInputStream); + if (response.fail()) + return response.getError(); + + // Check if payload is missing + if (payload == null) { + monitor.severe(LOG_ID + ": Payload is missing"); + return Response.status(Response.Status.BAD_REQUEST) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, malformedMessage(null, connectorId))) + .build(); + } + + return processRequest(pid, response.getHeader(), payload); + } + + @POST + @Path("process/{pid}") + public Response createProcess(@PathParam(PID) String pid, + @FormDataParam(HEADER) InputStream headerInputStream, + @FormDataParam(PAYLOAD) String payload) { + var response = validaRequest(pid, headerInputStream); + if (response.fail()) + return response.getError(); + + return processRequest(pid, response.getHeader(), payload); + } + + RequestValidationResponse validaRequest(String pid, InputStream headerInputStream) { + // Check if pid is missing + if (pid == null) { + monitor.severe(LOG_ID + ": PID is missing"); + return new RequestValidationResponse(Response.status(Response.Status.BAD_REQUEST) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, malformedMessage(null, connectorId))) + .build()); + } + + // Check if header is missing + if (headerInputStream == null) {
monitor.severe(LOG_ID + ": Header is missing"); + return new RequestValidationResponse(Response.status(Response.Status.BAD_REQUEST) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, malformedMessage(null, connectorId))) + .build()); + } + + // Convert header to message + Message header; + try { + header = typeManagerUtil.parse(headerInputStream, Message.class); + } catch (Exception e) { + monitor.severe(format(LOG_ID + ": Header parsing failed: %s", e.getMessage())); + return new RequestValidationResponse(Response.status(Response.Status.BAD_REQUEST) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, malformedMessage(null, connectorId))) + .build()); + } + + // Check if any required header field missing + if (header.getId() == null + || (header.getId() != null && StringUtils.isNullOrBlank(header.getId().toString())) + || StringUtils.isNullOrBlank(header.getType()) + || StringUtils.isNullOrBlank(header.getModelVersion()) + || header.getIssued() == null + || header.getIssuerConnector() == null + || (header.getIssuerConnector() != null && StringUtils.isNullOrBlank(header.getIssuerConnector().toString())) + || header.getSenderAgent() == null + || (header.getSenderAgent() != null && StringUtils.isNullOrBlank(header.getSenderAgent().toString())) + ) { + return new RequestValidationResponse(Response.status(Response.Status.BAD_REQUEST) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, malformedMessage(header, connectorId))) + .build()); + } + + // Check if security token is present + var securityToken = header.getSecurityToken(); + if (securityToken == null || securityToken.getTokenValue() == null) { + monitor.severe(LOG_ID + ": Token is missing in header"); + return new RequestValidationResponse(Response.status(Response.Status.BAD_REQUEST) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, notAuthenticated(header, connectorId))) + .build()); + } + + // Check the security token type + var tokenFormat = securityToken.getTokenFormat().getId().toString(); + if (!TokenFormat.isValid(tokenFormat)) { + monitor.severe(LOG_ID + ": Invalid security token type: " + tokenFormat); + return new RequestValidationResponse(Response.status(Response.Status.BAD_REQUEST) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, malformedMessage(null, connectorId))) + .build()); + } + + // Validate DAT + if (!validateToken(header)) { + return new RequestValidationResponse(Response.status(Response.Status.FORBIDDEN) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, notAuthenticated(header, connectorId))) + .build()); + } + + return new RequestValidationResponse(header); + } + + Response processRequest(String pid, Message header, String payload){ + + // Build the multipart request + var multipartRequest = HandlerRequest.Builder.newInstance() + .pid(pid) + .header(header) + .payload(payload) + .build(); + + // Send to handler processing + HandlerResponse handlerResponse; + try { + handlerResponse = multipartHandlers.stream() + .filter(h -> h.canHandle(multipartRequest)) + .findFirst() + .map(it -> it.handleRequest(multipartRequest)) + .orElseGet(() -> HandlerResponse.Builder.newInstance() + .header(messageTypeNotSupported(header, connectorId)) + .build()); + } catch (Exception e) { + monitor.severe(LOG_ID + ": Error in message handler processing", e); + return Response.status(Response.Status.INTERNAL_SERVER_ERROR) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, internalRecipientError(header, connectorId))) + .build(); + } + + // Get the response token + if 
(!getResponseToken(header, handlerResponse)) { + return Response.status(Response.Status.INTERNAL_SERVER_ERROR) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, internalRecipientError(header, connectorId))) + .build(); + } + + // Build the response + if (handlerResponse.getHeader() instanceof RejectionMessage) { + var rejectionMessage = (RejectionMessage) handlerResponse.getHeader(); + + return Response.status(Response.Status.INTERNAL_SERVER_ERROR) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, + createRejectionMessage(rejectionMessage.getRejectionReason(), header, connectorId)) + ).build(); + } + else { + return Response.status(Response.Status.CREATED) + .entity(createFormDataMultiPart(typeManagerUtil, HEADER, handlerResponse.getHeader(), PAYLOAD, handlerResponse.getPayload())) + .build(); + } + } + + private boolean validateToken(Message header) { + + var dynamicAttributeToken = new DynamicAttributeTokenBuilder(). + _tokenValue_(header.getSecurityToken().getTokenValue()). + _tokenFormat_(de.fraunhofer.iais.eis.TokenFormat.JWT) + .build(); + + var verificationResult = tokenService + .verifyDynamicAttributeToken(dynamicAttributeToken, header.getIssuerConnector(), idsWebhookAddress); + + if (verificationResult.failed()) { + monitor.warning(format("MultipartController: Token validation failed %s", verificationResult.getFailure().getMessages())); + return false; + } else { + return true; + } + } + + private boolean getResponseToken(Message header, HandlerResponse handlerResponse) { + + handlerResponse.getHeader().setSecurityToken(header.getSecurityToken()); + return true; + + /*if ((header.getRecipientConnector() == null) || (header.getRecipientConnector().isEmpty())) { + monitor.severe(LOG_ID + ": Recipient connector is missing"); + return false; + } + + var recipient = header.getRecipientConnector().get(0); + var tokenResult = tokenService.obtainDynamicAttributeToken(recipient.toString()); + + if (tokenResult.succeeded()) { + var responseToken = tokenResult.getContent(); + SecurityToken securityToken = new SecurityToken(); + securityToken.setType(header.getSecurityToken().getType()); + securityToken.setTokenFormat(header.getSecurityToken().getTokenFormat()); + securityToken.setTokenValue(responseToken.getTokenValue()); + + handlerResponse.getHeader().setSecurityToken(securityToken); + return true; + + } else { + monitor.severe(LOG_ID + ": Failed to get response token: " + tokenResult.getFailureDetail()); + return false; + }*/ + } + +} diff --git a/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/MultipartExtension.java b/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/MultipartExtension.java new file mode 100644 index 00000000..a36a7334 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/MultipartExtension.java @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ +package de.truzzt.clearinghouse.edc.multipart; + +import de.truzzt.clearinghouse.edc.handler.Handler; +import de.truzzt.clearinghouse.edc.handler.LogMessageHandler; +import 
de.truzzt.clearinghouse.edc.app.AppSender; +import de.truzzt.clearinghouse.edc.handler.RequestMessageHandler; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import org.eclipse.edc.connector.api.management.configuration.ManagementApiConfiguration; +import org.eclipse.edc.protocol.ids.api.configuration.IdsApiConfiguration; +import org.eclipse.edc.protocol.ids.jsonld.JsonLd; +import org.eclipse.edc.protocol.ids.spi.service.DynamicAttributeTokenService; +import org.eclipse.edc.runtime.metamodel.annotation.Extension; +import org.eclipse.edc.runtime.metamodel.annotation.Inject; +import org.eclipse.edc.runtime.metamodel.annotation.Requires; +import org.eclipse.edc.spi.http.EdcHttpClient; +import org.eclipse.edc.spi.system.ServiceExtension; +import org.eclipse.edc.spi.system.ServiceExtensionContext; +import org.eclipse.edc.web.spi.WebService; + +import java.util.LinkedList; + +import static org.eclipse.edc.protocol.ids.util.ConnectorIdUtil.resolveConnectorId; + +@Extension(value = MultipartExtension.NAME) +@Requires(value = { + WebService.class, + ManagementApiConfiguration.class, + EdcHttpClient.class +}) +public class MultipartExtension implements ServiceExtension { + + public static final String NAME = "Clearing House Multipart Extension"; + + @Inject + private WebService webService; + + @Inject + private ManagementApiConfiguration managementApiConfig; + + @Inject + private EdcHttpClient httpClient; + + @Inject + private DynamicAttributeTokenService tokenService; + + @Inject + private IdsApiConfiguration idsApiConfiguration; + + @Override + public String name() { + return NAME; + } + + @Override + public void initialize(ServiceExtensionContext context) { + var monitor = context.getMonitor(); + var connectorId = resolveConnectorId(context); + var typeManagerUtil = new TypeManagerUtil(JsonLd.getObjectMapper()); + + var clearingHouseAppSender = new AppSender(monitor, httpClient, typeManagerUtil); + + var handlers = new LinkedList<Handler>(); + handlers.add(new RequestMessageHandler(connectorId, typeManagerUtil, clearingHouseAppSender, context)); + handlers.add(new LogMessageHandler(connectorId, typeManagerUtil, clearingHouseAppSender, context)); + + var multipartController = new MultipartController(monitor, + connectorId, + typeManagerUtil, + tokenService, + idsApiConfiguration.getIdsWebhookAddress(), + handlers); + webService.registerResource(managementApiConfig.getContextAlias(), multipartController); + } +} diff --git a/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/dto/RequestValidationResponse.java b/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/dto/RequestValidationResponse.java new file mode 100644 index 00000000..ba89ccb6 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/main/java/de/truzzt/clearinghouse/edc/multipart/dto/RequestValidationResponse.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 truzzt GmbH + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * truzzt GmbH - Initial implementation + * + */ +package de.truzzt.clearinghouse.edc.multipart.dto; + +import de.truzzt.clearinghouse.edc.types.ids.Message; +import jakarta.ws.rs.core.Response; +import org.jetbrains.annotations.NotNull; + +public class RequestValidationResponse { + + private Response error; + private 
Message header; + + public RequestValidationResponse(@NotNull Response error) { + this.error = error; + } + public RequestValidationResponse(@NotNull Message header) { + this.header = header; + } + + public Response getError() { + return error; + } + + public Message getHeader() { + return header; + } + + public Boolean fail() { + return (error != null); + } + +} diff --git a/clearing-house-edc/extensions/multipart/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension b/clearing-house-edc/extensions/multipart/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension new file mode 100644 index 00000000..21d508b1 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/main/resources/META-INF/services/org.eclipse.edc.spi.system.ServiceExtension @@ -0,0 +1,15 @@ +# +# Copyright (c) 2023 Microsoft Corporation +# +# This program and the accompanying materials are made available under the +# terms of the Apache License, Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# +# Contributors: +# Microsoft Corporation - initial implementation +# truzzt GmbH - EDC extension implementation +# + +de.truzzt.clearinghouse.edc.multipart.MultipartExtension \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/java/de/truzzt/clearinghouse/edc/multipart/MultipartControllerTest.java b/clearing-house-edc/extensions/multipart/src/test/java/de/truzzt/clearinghouse/edc/multipart/MultipartControllerTest.java new file mode 100644 index 00000000..ef2d40b8 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/java/de/truzzt/clearinghouse/edc/multipart/MultipartControllerTest.java @@ -0,0 +1,296 @@ +package de.truzzt.clearinghouse.edc.multipart; + +import com.fasterxml.jackson.databind.ObjectMapper; +import de.fraunhofer.iais.eis.DynamicAttributeToken; +import de.truzzt.clearinghouse.edc.dto.HandlerRequest; +import de.truzzt.clearinghouse.edc.dto.HandlerResponse; +import de.truzzt.clearinghouse.edc.dto.LoggingMessageResponse; +import de.truzzt.clearinghouse.edc.handler.Handler; +import de.truzzt.clearinghouse.edc.handler.LogMessageHandler; +import de.truzzt.clearinghouse.edc.handler.RequestMessageHandler; +import de.truzzt.clearinghouse.edc.multipart.dto.RequestValidationResponse; +import de.truzzt.clearinghouse.edc.multipart.tests.TestUtils; +import de.truzzt.clearinghouse.edc.types.TypeManagerUtil; +import de.truzzt.clearinghouse.edc.types.ids.Message; +import de.truzzt.clearinghouse.edc.types.ids.RejectionMessage; +import de.truzzt.clearinghouse.edc.types.ids.RejectionReason; +import jakarta.ws.rs.core.Response; +import org.eclipse.edc.protocol.ids.spi.service.DynamicAttributeTokenService; +import org.eclipse.edc.protocol.ids.spi.types.IdsId; +import org.eclipse.edc.protocol.ids.spi.types.IdsType; +import org.eclipse.edc.spi.monitor.Monitor; +import org.eclipse.edc.spi.result.Result; +import org.glassfish.jersey.media.multipart.FormDataMultiPart; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; + +import java.io.*; +import java.net.URI; +import java.util.List; +import java.util.UUID; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static 
org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; + +public class MultipartControllerTest { + + private static final String IDS_WEBHOOK_ADDRESS = "http://localhost/callback"; + private static final String PAYLOAD = "Hello World"; + private static final String CREATE_PROCESS_PAYLOAD = "{ \"owners\": [\"1\", \"2\"]}"; + + private MultipartController controller; + + private IdsId connectorId; + private TypeManagerUtil typeManagerUtil; + + @Mock + private Monitor monitor; + @Mock + private DynamicAttributeTokenService tokenService; + @Mock + private LogMessageHandler logMessageHandler; + + @Mock + private RequestMessageHandler requestMessageHandler; + + private final ObjectMapper mapper = new ObjectMapper(); + + @BeforeEach + public void setUp() { + MockitoAnnotations.openMocks(this); + + connectorId = IdsId.Builder.newInstance().type(IdsType.CONNECTOR).value("http://test.connector").build(); + typeManagerUtil = new TypeManagerUtil(new ObjectMapper()); + + List<Handler> multipartHandlers = List.of(logMessageHandler, requestMessageHandler); + controller = new MultipartController(monitor, connectorId, typeManagerUtil, tokenService, IDS_WEBHOOK_ADDRESS, multipartHandlers); + } + + // Parses the "header" form part of the multipart response back into the given IDS type + private <T> T extractHeader(Response response, Class<T> type) { + + assertInstanceOf(FormDataMultiPart.class, response.getEntity()); + FormDataMultiPart multiPartResponse = (FormDataMultiPart) response.getEntity(); + + var header = multiPartResponse.getField("header"); + assertNotNull(header); + + assertInstanceOf(String.class, header.getEntity()); + var entity = (String) header.getEntity(); + return typeManagerUtil.parse(new ByteArrayInputStream(entity.getBytes()), type); + } + + // Parses the "payload" form part of the multipart response back into the given type + private <T> T extractPayload(Response response, Class<T> type) { + + assertInstanceOf(FormDataMultiPart.class, response.getEntity()); + FormDataMultiPart multiPartResponse = (FormDataMultiPart) response.getEntity(); + + var payload = multiPartResponse.getField("payload"); + assertNotNull(payload); + + assertInstanceOf(String.class, payload.getEntity()); + var entity = (String) payload.getEntity(); + return typeManagerUtil.parse(new ByteArrayInputStream(entity.getBytes()), type); + } + + @Test + public void logMessageSuccess() { + var responseHeader = TestUtils.getValidResponseHeader(mapper); + var responsePayload = TestUtils.getValidResponsePayload(mapper); + + doReturn(Result.success()) + .when(tokenService).verifyDynamicAttributeToken(any(DynamicAttributeToken.class), any(URI.class), any(String.class)); + doReturn(true) + .when(logMessageHandler).canHandle(any(HandlerRequest.class)); + doReturn(HandlerResponse.Builder.newInstance().header(responseHeader).payload(responsePayload).build()) + .when(logMessageHandler).handleRequest(any(HandlerRequest.class)); + + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getHeaderInputStream(TestUtils.VALID_HEADER_JSON); + + var response = controller.logMessage(pid, header, PAYLOAD); + + assertNotNull(response); + assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus()); + + var message = extractHeader(response, Message.class); + assertEquals("ids:LogMessage", message.getType()); + + var payload = extractPayload(response, LoggingMessageResponse.class); + assertNotNull(payload.getData()); + } + + @Test + public void createProcessSuccess() { + var responseHeader
= TestUtils.getResponseHeader(mapper, TestUtils.VALID_CREATE_PROCESS_HEADER_JSON); + var responsePayload = TestUtils.getValidResponsePayload(mapper); + + doReturn(Result.success()) + .when(tokenService).verifyDynamicAttributeToken(any(DynamicAttributeToken.class), any(URI.class), any(String.class)); + doReturn(true) + .when(requestMessageHandler).canHandle(any(HandlerRequest.class)); + doReturn(HandlerResponse.Builder.newInstance().header(responseHeader).payload(responsePayload).build()) + .when(requestMessageHandler).handleRequest(any(HandlerRequest.class)); + + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getHeaderInputStream(TestUtils.VALID_CREATE_PROCESS_HEADER_JSON); + + var response = controller.createProcess(pid, header, CREATE_PROCESS_PAYLOAD); + + assertNotNull(response); + assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatus()); + + var message = extractHeader(response, Message.class); + assertEquals("ids:RequestMessage", message.getType()); + + var payload = extractPayload(response, LoggingMessageResponse.class); + assertNotNull(payload.getData()); + } + + @Test + public void missingPIDError() { + var header = TestUtils.getHeaderInputStream(TestUtils.VALID_HEADER_JSON); + + var response = controller.validaRequest(null, header); + + assertNotNull(response); + assertTrue(response.fail()); + + var message = extractHeader(response.getError(), RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.MALFORMED_MESSAGE.getId(), message.getRejectionReason().getId()); + } + + @Test + public void missingHeaderError() { + var pid = UUID.randomUUID().toString(); + + var response = controller.validaRequest(pid, null); + + assertNotNull(response); + assertTrue(response.fail()); + + var message = extractHeader(response.getError(), RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.MALFORMED_MESSAGE.getId(), message.getRejectionReason().getId()); + } + + @Test + public void invalidHeaderError() { + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getHeaderInputStream(TestUtils.INVALID_HEADER_JSON); + + var response = controller.validaRequest(pid, header); + + assertNotNull(response); + assertTrue(response.fail()); + + var message = extractHeader(response.getError(), RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.MALFORMED_MESSAGE.getId(), message.getRejectionReason().getId()); + } + + @Test + public void missingHeaderFieldsError() { + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getHeaderInputStream(TestUtils.MISSING_FIELDS_HEADER_JSON); + + var response = controller.validaRequest(pid, header); + + assertNotNull(response); + assertTrue(response.fail()); + + var message = extractHeader(response.getError(), RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.MALFORMED_MESSAGE.getId(), message.getRejectionReason().getId()); + } + + @Test + public void invalidSecurityTokenError() { + doReturn(Result.failure("Invalid token")) + .when(tokenService).verifyDynamicAttributeToken(any(DynamicAttributeToken.class), any(URI.class), any(String.class)); + + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getHeaderInputStream(TestUtils.INVALID_TOKEN_HEADER_JSON); + + var response = controller.validaRequest(pid, header); + + assertNotNull(response); + assertTrue(response.fail()); + + var message = 
extractHeader(response.getError(), RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.NOT_AUTHENTICATED.getId(), message.getRejectionReason().getId()); + } + + @Test + public void missingSecurityTokenError() { + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getHeaderInputStream(TestUtils.MISSING_TOKEN_HEADER_JSON); + + var response = controller.validaRequest(pid, header); + + assertNotNull(response); + assertTrue(response.fail()); + + var message = extractHeader(response.getError(), RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.NOT_AUTHENTICATED.getId(), message.getRejectionReason().getId()); + } + + @Test + public void missingPayloadError() { + doReturn(Result.success()) + .when(tokenService).verifyDynamicAttributeToken(any(DynamicAttributeToken.class), any(URI.class), any(String.class)); + + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getHeaderInputStream(TestUtils.VALID_HEADER_JSON); + + var response = controller.logMessage(pid, header, null); + + assertNotNull(response); + assertEquals(Response.Status.BAD_REQUEST.getStatusCode(), response.getStatus()); + + var message = extractHeader(response, RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.MALFORMED_MESSAGE.getId(), message.getRejectionReason().getId()); + } + + @Test + public void invalidMessageTypeError() { + doReturn(Result.success()) + .when(tokenService).verifyDynamicAttributeToken(any(DynamicAttributeToken.class), any(URI.class), any(String.class)); + doReturn(false) + .when(logMessageHandler).canHandle(any(HandlerRequest.class)); + + var pid = UUID.randomUUID().toString(); + var header = TestUtils.getResponseHeader(new ObjectMapper(), TestUtils.INVALID_TYPE_HEADER_JSON); + + var response = controller.processRequest(pid, header, PAYLOAD); + + assertNotNull(response); + assertEquals(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), response.getStatus()); + + var message = extractHeader(response, RejectionMessage.class); + + assertNotNull(message.getRejectionReason()); + assertEquals(RejectionReason.MESSAGE_TYPE_NOT_SUPPORTED.getId(), message.getRejectionReason().getId()); + } + +} diff --git a/clearing-house-edc/extensions/multipart/src/test/java/de/truzzt/clearinghouse/edc/multipart/tests/TestUtils.java b/clearing-house-edc/extensions/multipart/src/test/java/de/truzzt/clearinghouse/edc/multipart/tests/TestUtils.java new file mode 100644 index 00000000..00e6d817 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/java/de/truzzt/clearinghouse/edc/multipart/tests/TestUtils.java @@ -0,0 +1,40 @@ +package de.truzzt.clearinghouse.edc.multipart.tests; + +import com.fasterxml.jackson.databind.ObjectMapper; +import de.truzzt.clearinghouse.edc.dto.LoggingMessageResponse; +import de.truzzt.clearinghouse.edc.tests.BaseTestUtils; +import de.truzzt.clearinghouse.edc.types.ids.Message; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; + +public class TestUtils extends BaseTestUtils { + + public static final String VALID_HEADER_JSON = "headers/valid-header.json"; + public static final String VALID_CREATE_PROCESS_HEADER_JSON = "headers/valid-create-process-header.json"; + public static final String INVALID_HEADER_JSON = "headers/invalid-header.json"; + public static final String INVALID_TYPE_HEADER_JSON = "headers/invalid-type.json"; + public static final String INVALID_TOKEN_HEADER_JSON = 
"headers/invalid-token.json"; + public static final String MISSING_FIELDS_HEADER_JSON = "headers/missing-fields.json"; + public static final String MISSING_TOKEN_HEADER_JSON = "headers/missing-token.json"; + public static final String VALID_RESPONSE_HEADER_JSON = "headers/valid-response.json"; + + public static final String VALID_RESPONSE_PAYLOAD_JSON = "payloads/valid-response.json"; + + public static InputStream getHeaderInputStream(String path) { + var json = TestUtils.readFile(path); + return new ByteArrayInputStream(json.getBytes()); + } + + public static Message getResponseHeader(ObjectMapper mapper, String path) { + return parseFile(mapper, Message.class, path); + } + + public static Message getValidResponseHeader(ObjectMapper mapper) { + return parseFile(mapper, Message.class, VALID_RESPONSE_HEADER_JSON); + } + + public static LoggingMessageResponse getValidResponsePayload(ObjectMapper mapper) { + return parseFile(mapper, LoggingMessageResponse.class, VALID_RESPONSE_PAYLOAD_JSON); + } +} diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-header.json b/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-header.json new file mode 100644 index 00000000..58522c01 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-header.json @@ -0,0 +1,19 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "senderAgent":"http://example.org", + "modelVersion":"4.1.0", + "issued" : "2021-06-23T17:27:23.566+02:00", + "issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-token.json b/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-token.json new file mode 100644 index 00000000..ea2bbf79 --- /dev/null +++ 
b/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-token.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:LogMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCJ9.hekZoPDjEWaXreQl3l0PUIjBOPQhAl0w2mH4_PdNWuA", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-type.json b/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-type.json new file mode 100644 index 00000000..e96fc8d2 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/headers/invalid-type.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:otherMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : 
"eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/missing-fields.json b/clearing-house-edc/extensions/multipart/src/test/resources/headers/missing-fields.json new file mode 100644 index 00000000..d1ef397f --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/headers/missing-fields.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:LogMessage", + "@id":"", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + 
"ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/missing-token.json b/clearing-house-edc/extensions/multipart/src/test/resources/headers/missing-token.json new file mode 100644 index 00000000..8c609c94 --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/headers/missing-token.json @@ -0,0 +1,12 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:LogMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-create-process-header.json b/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-create-process-header.json new file mode 100644 index 00000000..a1cd612f --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-create-process-header.json @@ -0,0 +1,21 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:RequestMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} + diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-header.json 
b/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-header.json new file mode 100644 index 00000000..85057bbb --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-header.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:LogMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-response.json b/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-response.json new file mode 100644 index 00000000..85057bbb --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/headers/valid-response.json @@ -0,0 +1,20 @@ +{ + "@context":{ + "ids" : "https://w3id.org/idsa/core/", + "idsc" : "https://w3id.org/idsa/code/" + }, + "@type":"ids:LogMessage", + "@id":"https://w3id.org/idsa/autogen/logMessage/9fdba4ad-f750-4bbc-a7f0-f648ac853508", + "ids:securityToken": { + "@type" : "ids:DynamicAttributeToken", + "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/7bbbd2c1-2d75-4e3d-bd10-c52d0381cab0", + "ids:tokenValue" : 
"eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZXMiOlsiaWRzYzpJRFNfQ09OTkVDVE9SX0FUVFJJQlVURVNfQUxMIl0sImF1ZCI6Imlkc2M6SURTX0NPTk5FQ1RPUlNfQUxMIiwiaXNzIjoiaHR0cHM6Ly9kYXBzLmFpc2VjLmZyYXVuaG9mZXIuZGUiLCJuYmYiOjE2MzQ2NTA3MzksImlhdCI6MTYzNDY1MDczOSwianRpIjoiTVRneE9EUXdPVFF6TXpZd05qWXlOVFExTUE9PSIsImV4cCI6MTYzNDY1NDMzOSwic2VjdXJpdHlQcm9maWxlIjoiaWRzYzpCQVNFX1NFQ1VSSVRZX1BST0ZJTEUiLCJyZWZlcnJpbmdDb25uZWN0b3IiOiJodHRwOi8vYnJva2VyLmlkcy5pc3N0LmZyYXVuaG9mZXIuZGUuZGVtbyIsIkB0eXBlIjoiaWRzOkRhdFBheWxvYWQiLCJAY29udGV4dCI6Imh0dHBzOi8vdzNpZC5vcmcvaWRzYS9jb250ZXh0cy9jb250ZXh0Lmpzb25sZCIsInRyYW5zcG9ydENlcnRzU2hhMjU2IjoiOTc0ZTYzMjRmMTJmMTA5MTZmNDZiZmRlYjE4YjhkZDZkYTc4Y2M2YTZhMDU2NjAzMWZhNWYxYTM5ZWM4ZTYwMCIsInN1YiI6IjkyOjE0OkU3OkFDOjEwOjIyOkYyOkNDOjA1OjZFOjJBOjJCOjhEOkRCOjEwOkQ2OjREOkEwOkExOjUzOmtleWlkOkNCOjhDOkM3OkI2Ojg1Ojc5OkE4OjIzOkE2OkNCOjE1OkFCOjE3OjUwOjJGOkU2OjY1OjQzOjVEOkU4In0.Qw3gWMgwnKQyVatbsozcin6qtQbLyXlk6QdaLajGaDmxSYqCKEcAje4kiDp5Fqj04WPmVyF0k8c1BJA3KGnaW3Qcikv4MNxqqoenvKIrSTokXsA7-osqBCfxLhV-s2lSXVTAtV_Q7f71eSoR5j-7nPPX8_nf4Xup4_VzfnwRmnuAbLfHfWThbupxFazC34r3waXCltOTFVa_XDlwEDMpPY7vEPeaqIt2t6ofVGo_HF86UB19liL-UZvp0uSE9z2fhloyxOrx9B_xavGS7pP6oRaumSJEN_x9dfdeDS98HQ_oBSSGBzaI4fM7ik35Yg42KQwmkZesD6P_YSEzVLcJDg", + "ids:tokenFormat" : { + "@id" : "idsc:JWT" + } + }, + "ids:senderAgent":"http://example.org", + "ids:modelVersion":"4.1.0", + "ids:issued" : "2021-06-23T17:27:23.566+02:00", + "ids:issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" +} \ No newline at end of file diff --git a/clearing-house-edc/extensions/multipart/src/test/resources/payloads/valid-response.json b/clearing-house-edc/extensions/multipart/src/test/resources/payloads/valid-response.json new file mode 100644 index 00000000..ecf63afb --- /dev/null +++ b/clearing-house-edc/extensions/multipart/src/test/resources/payloads/valid-response.json @@ -0,0 +1,3 @@ +{ + "data": "eyJhbGciOiJQUzUxMiIsImtpZCI6IlFyYS8vMjlGcnhiajVoaDVBemVmK0czNlNlaU9tOXE3czgrdzh1R0xEMjgifQ.eyJ0cmFuc2FjdGlvbl9pZCI6IjAwMDAwMDAwIiwidGltZXN0YW1wIjoxNjk3NzUxNzUzLCJwcm9jZXNzX2lkIjoiODcyIiwiZG9jdW1lbnRfaWQiOiIyYmMxMGVmNC03NjFjLTQ5NGYtYmQ1YS0xMWVmYTJmMzNmM2EiLCJwYXlsb2FkIjoiSGVsbG8gV29ybGQiLCJjaGFpbl9oYXNoIjoiMCIsImNsaWVudF9pZCI6IjY5OkY1OjlEOkIwOkREOkE2OjlEOjMwOjVGOjU4OkFBOjJEOjIwOjREOkIyOjM5OkYwOjU0OkZDOjNCOmtleWlkOjRGOjY2OjdEOkJEOjA4OkVFOkM2OjRBOkQxOjk2OkQ4OjdDOjZDOkEyOjMyOjhBOkVDOkE2OkFEOjQ5IiwiY2xlYXJpbmdfaG91c2VfdmVyc2lvbiI6IjAuMTAuMCJ9.NhMDSBTRiJJP04NEjBlB1Rt4LlvwDHrOEvNm0qbYRWqe8Vfdza1SSy-OLDCwMnC14hxHmwD5GpWOCbC5iswmuEeWspSMCGcGnGKZr_ra23jr4HV60YKnCAbBhOi5dmiPb6R64DSSJBH9Dw1Cni9zFNLBgUGr8pGEbm_AdijomUfl88fXUiyBWdrP0S-VVtlcygYROZtTusqBz95E_WKSyFU57hf4vOjkFRjfHHkuu92MUrJJwVXwf55YuVa-uLC8Exr2pScqeo2JI-1Y2JBCInOtBtskXmFfocav8ReIZhvL255O1-vHi5ZFsbQppEtstcz2txjP34EHoPCu8NO9s7G-BqJ8hKw5QTMKIV8-N1yrtGb2sK4qXUQCJpCKfJoMPG_BLQo9vHifWJ6gO1z4NZvOvqOXyIWGd89C1wsCWV8cSJcbye-BgAo4SUAdN5KQXTqiyWRc4wrNXC7S7Ajy639xW6k7epXEuya5qIdkP2qh-ZrL0WndA20jExLFzgYmvXVR15WcFsiprgxutFevQ1a-EWOZDsnnTSPhTt5KPFwziKepTzKq73X3cs-IRxAc_4qkEi0-zEy_YIfoWNMxWdkh4EiBj_wpgiN7msskGCGPzq2wslz64n2-AKsQiXqFEPMNv2ihRNhJHxL0PAJKMWYXStauafOffUazfZag0p8" +} \ No newline at end of file diff --git a/clearing-house-edc/gradle.properties b/clearing-house-edc/gradle.properties new file mode 100644 index 00000000..46d914ae --- /dev/null +++ b/clearing-house-edc/gradle.properties @@ -0,0 +1,2 @@ +javaVersion=17 +auth0JWTVersion=4.2.2 \ No newline at end of file diff --git a/clearing-house-edc/gradle/wrapper/gradle-wrapper.jar 
b/clearing-house-edc/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000..033e24c4 Binary files /dev/null and b/clearing-house-edc/gradle/wrapper/gradle-wrapper.jar differ diff --git a/clearing-house-processors/gradle/wrapper/gradle-wrapper.properties b/clearing-house-edc/gradle/wrapper/gradle-wrapper.properties similarity index 74% rename from clearing-house-processors/gradle/wrapper/gradle-wrapper.properties rename to clearing-house-edc/gradle/wrapper/gradle-wrapper.properties index ae04661e..62f495df 100644 --- a/clearing-house-processors/gradle/wrapper/gradle-wrapper.properties +++ b/clearing-house-edc/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.2-bin.zip +networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/clearing-house-processors/gradlew b/clearing-house-edc/gradlew similarity index 92% rename from clearing-house-processors/gradlew rename to clearing-house-edc/gradlew index a69d9cb6..fcb6fca1 100755 --- a/clearing-house-processors/gradlew +++ b/clearing-house-edc/gradlew @@ -55,7 +55,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -80,13 +80,10 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -133,22 +130,29 @@ location of your Java installation." fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." + fi fi # Increase the maximum file descriptors if we can. if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -193,6 +197,10 @@ if "$cygwin" || "$msys" ; then done fi + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
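+# For example (hypothetical values): GRADLE_OPTS='"-Xmx512m"' ./gradlew build appends user-supplied JVM options after these defaults when the java command line is assembled.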
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + # Collect all arguments for the java command; # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of # shell script including quotes and variable substitutions, so put them in diff --git a/clearing-house-processors/gradlew.bat b/clearing-house-edc/gradlew.bat similarity index 96% rename from clearing-house-processors/gradlew.bat rename to clearing-house-edc/gradlew.bat index 53a6b238..93e3f59f 100644 --- a/clearing-house-processors/gradlew.bat +++ b/clearing-house-edc/gradlew.bat @@ -1,91 +1,92 @@ -@rem -@rem Copyright 2015 the original author or authors. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. -@rem You may obtain a copy of the License at -@rem -@rem https://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. -@rem - -@if "%DEBUG%"=="" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%"=="" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Resolve any "." and ".." in APP_HOME to make it shorter. -for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if %ERRORLEVEL% equ 0 goto execute - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto execute - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* - -:end -@rem End local scope for the variables with windows NT shell -if %ERRORLEVEL% equ 0 goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -set EXIT_CODE=%ERRORLEVEL% -if %EXIT_CODE% equ 0 set EXIT_CODE=1 -if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% -exit /b %EXIT_CODE% - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega +@rem +@rem Copyright 2015 the original author or authors. 
+@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
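+rem For example (hypothetical usage): run "set GRADLE_EXIT_CONSOLE=1" before invoking gradlew.bat so the script itself returns the build's exit code.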
+set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/clearing-house-edc/launchers/connector-local/build.gradle.kts b/clearing-house-edc/launchers/connector-local/build.gradle.kts new file mode 100644 index 00000000..f1ebdcbd --- /dev/null +++ b/clearing-house-edc/launchers/connector-local/build.gradle.kts @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ + +plugins { + `java-library` + id("application") + id("com.github.johnrengelman.shadow") version "7.1.2" +} + +configurations.all { + exclude(group = "de.fraunhofer.iais.eis.ids.infomodel", module = "java") +} + +dependencies { + runtimeOnly(project(":extensions:multipart")) + + runtimeOnly(edc.bundles.connector) + runtimeOnly(edc.config.filesystem) + runtimeOnly(edc.vault.filesystem) + runtimeOnly(edc.oauth2.core) + + runtimeOnly(":infomodel-java-4.1.3") + runtimeOnly(":infomodel-util-4.0.4") +} + +application { + mainClass.set("org.eclipse.edc.boot.system.runtime.BaseRuntime") +} + +tasks.withType<com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar> { + mergeServiceFiles() + archiveFileName.set("clearing-house-edc.jar") +} + diff --git a/clearing-house-edc/launchers/connector-prod/Dockerfile b/clearing-house-edc/launchers/connector-prod/Dockerfile new file mode 100644 index 00000000..66284fea --- /dev/null +++ b/clearing-house-edc/launchers/connector-prod/Dockerfile @@ -0,0 +1,16 @@ +FROM gradle:7-jdk17 AS build + +COPY --chown=gradle:gradle . 
/home/gradle/project/ +WORKDIR /home/gradle/project/ +RUN ./gradlew clean build + +FROM openjdk:17-slim-buster + +WORKDIR /app + +COPY --from=build /home/gradle/project/launchers/connector-prod/build/libs/clearing-house-edc.jar /app + +ENV WEB_HTTP_PORT="8181" +ENV WEB_HTTP_PATH="/api" + +# ENV_JVM_ARGS can be supplied at container start to pass additional JVM flags to the service +ENTRYPOINT [ "sh", "-c", "exec java $ENV_JVM_ARGS -jar clearing-house-edc.jar"] diff --git a/clearing-house-edc/launchers/connector-prod/build.gradle.kts b/clearing-house-edc/launchers/connector-prod/build.gradle.kts new file mode 100644 index 00000000..52265b91 --- /dev/null +++ b/clearing-house-edc/launchers/connector-prod/build.gradle.kts @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ + +plugins { + `java-library` + id("application") + id("com.github.johnrengelman.shadow") version "7.1.2" +} + +configurations.all { + exclude(group = "de.fraunhofer.iais.eis.ids.infomodel", module = "java") +} + +dependencies { + runtimeOnly(project(":extensions:multipart")) + + runtimeOnly(edc.bundles.connector) + runtimeOnly(edc.oauth2.core) + runtimeOnly(edc.vault.filesystem) + + runtimeOnly(":infomodel-java-4.1.3") + runtimeOnly(":infomodel-util-4.0.4") +} + +application { + mainClass.set("org.eclipse.edc.boot.system.runtime.BaseRuntime") +} + +tasks.withType<com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar> { + mergeServiceFiles() + archiveFileName.set("clearing-house-edc.jar") +} diff --git a/clearing-house-edc/libs/fraunhofer/infomodel-java-4.1.3.jar b/clearing-house-edc/libs/fraunhofer/infomodel-java-4.1.3.jar new file mode 100644 index 00000000..fad447d3 Binary files /dev/null and b/clearing-house-edc/libs/fraunhofer/infomodel-java-4.1.3.jar differ diff --git a/clearing-house-edc/libs/fraunhofer/infomodel-util-4.0.4.jar b/clearing-house-edc/libs/fraunhofer/infomodel-util-4.0.4.jar new file mode 100644 index 00000000..60a3775f Binary files /dev/null and b/clearing-house-edc/libs/fraunhofer/infomodel-util-4.0.4.jar differ diff --git a/clearing-house-edc/settings.gradle.kts b/clearing-house-edc/settings.gradle.kts new file mode 100644 index 00000000..23350a56 --- /dev/null +++ b/clearing-house-edc/settings.gradle.kts @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2023 Microsoft Corporation + * + * This program and the accompanying materials are made available under the + * terms of the Apache License, Version 2.0 which is available at + * https://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + * + * Contributors: + * Microsoft Corporation - Initial implementation + * truzzt GmbH - EDC extension implementation + * + */ + +pluginManagement { + repositories { + maven { + url = uri("https://oss.sonatype.org/content/repositories/snapshots/") + } + mavenCentral() + gradlePluginPortal() + } +} + +dependencyResolutionManagement { + repositories { + maven { + url = uri("https://oss.sonatype.org/content/repositories/snapshots/") + } + flatDir { + dirs("libs/fraunhofer") + } + mavenCentral() + mavenLocal() + } + versionCatalogs { + create("libs") { + from("org.eclipse.edc:edc-versions:0.0.1-milestone-8") + } + create("edc") { + version("edc", "0.0.1-milestone-8") + library("spi-catalog", "org.eclipse.edc", "catalog-spi").versionRef("edc") + library("spi-core", 
"org.eclipse.edc", "core-spi").versionRef("edc") + library("spi-web", "org.eclipse.edc", "web-spi").versionRef("edc") + library("util", "org.eclipse.edc", "util").versionRef("edc") + library("boot", "org.eclipse.edc", "boot").versionRef("edc") + library("config-filesystem", "org.eclipse.edc", "configuration-filesystem").versionRef("edc") + library("core-controlplane", "org.eclipse.edc", "control-plane-core").versionRef("edc") + library("core-connector", "org.eclipse.edc", "connector-core").versionRef("edc") + library("core-jetty", "org.eclipse.edc", "jetty-core").versionRef("edc") + library("core-jersey", "org.eclipse.edc", "jersey-core").versionRef("edc") + library("junit", "org.eclipse.edc", "junit").versionRef("edc") + library("api-management-config", "org.eclipse.edc", "management-api-configuration").versionRef("edc") + library("api-management", "org.eclipse.edc", "management-api").versionRef("edc") + library("api-observability", "org.eclipse.edc", "api-observability").versionRef("edc") + library("ext-http", "org.eclipse.edc", "http").versionRef("edc") + library("spi-ids", "org.eclipse.edc", "ids-spi").versionRef("edc") + library("ids", "org.eclipse.edc", "ids").versionRef("edc") + library("ids-jsonld-serdes", "org.eclipse.edc", "ids-jsonld-serdes").versionRef("edc") + library("oauth2-core", "org.eclipse.edc", "oauth2-core").versionRef("edc") + library("vault-filesystem", "org.eclipse.edc", "vault-filesystem").versionRef("edc") + + bundle( + "connector", + listOf("boot", "core-connector", "core-jersey", "core-controlplane", "api-observability") + ) + } + } +} + +include(":core") +include(":extensions:multipart") +include(":launchers:connector-local") +include(":launchers:connector-prod") diff --git a/clearing-house-processors/.editorconfig b/clearing-house-processors/.editorconfig deleted file mode 100644 index 017e5dbb..00000000 --- a/clearing-house-processors/.editorconfig +++ /dev/null @@ -1,105 +0,0 @@ -root = true - -[*] -charset = utf-8 -end_of_line = lf -indent_size = 4 -indent_style = space -insert_final_newline = false -max_line_length = 120 -tab_width = 4 -ij_continuation_indent_size = 8 -ij_formatter_off_tag = @formatter:off -ij_formatter_on_tag = @formatter:on -ij_formatter_tags_enabled = false -ij_smart_tabs = false -ij_visual_guides = none -ij_wrap_on_typing = false - -[{*.kt,*.kts}] -ij_continuation_indent_size = 4 -ij_kotlin_align_in_columns_case_branch = false -ij_kotlin_align_multiline_binary_operation = false -ij_kotlin_align_multiline_extends_list = false -ij_kotlin_align_multiline_method_parentheses = false -ij_kotlin_align_multiline_parameters = false -ij_kotlin_align_multiline_parameters_in_calls = false -ij_kotlin_allow_trailing_comma = false -ij_kotlin_allow_trailing_comma_on_call_site = false -ij_kotlin_assignment_wrap = normal -ij_kotlin_blank_lines_after_class_header = 0 -ij_kotlin_blank_lines_around_block_when_branches = 0 -ij_kotlin_blank_lines_before_declaration_with_comment_or_annotation_on_separate_line = 1 -ij_kotlin_block_comment_add_space = false -ij_kotlin_block_comment_at_first_column = true -ij_kotlin_call_parameters_new_line_after_left_paren = true -ij_kotlin_call_parameters_right_paren_on_new_line = true -ij_kotlin_call_parameters_wrap = on_every_item -ij_kotlin_catch_on_new_line = false -ij_kotlin_class_annotation_wrap = split_into_lines -ij_kotlin_continuation_indent_for_chained_calls = false -ij_kotlin_continuation_indent_for_expression_bodies = false -ij_kotlin_continuation_indent_in_argument_lists = false 
-ij_kotlin_continuation_indent_in_elvis = false -ij_kotlin_continuation_indent_in_if_conditions = false -ij_kotlin_continuation_indent_in_parameter_lists = false -ij_kotlin_continuation_indent_in_supertype_lists = false -ij_kotlin_else_on_new_line = false -ij_kotlin_enum_constants_wrap = off -ij_kotlin_extends_list_wrap = normal -ij_kotlin_field_annotation_wrap = split_into_lines -ij_kotlin_finally_on_new_line = false -ij_kotlin_if_rparen_on_new_line = true -ij_kotlin_import_nested_classes = false -ij_kotlin_imports_layout = *,java.**,javax.**,kotlin.**,^ -ij_kotlin_insert_whitespaces_in_simple_one_line_method = true -ij_kotlin_keep_blank_lines_before_right_brace = 0 -ij_kotlin_keep_blank_lines_in_code = 1 -ij_kotlin_keep_blank_lines_in_declarations = 1 -ij_kotlin_keep_first_column_comment = true -ij_kotlin_keep_indents_on_empty_lines = false -ij_kotlin_keep_line_breaks = true -ij_kotlin_lbrace_on_next_line = false -ij_kotlin_line_break_after_multiline_when_entry = true -ij_kotlin_line_comment_add_space = true -ij_kotlin_line_comment_add_space_on_reformat = false -ij_kotlin_line_comment_at_first_column = false -ij_kotlin_method_annotation_wrap = split_into_lines -ij_kotlin_method_call_chain_wrap = normal -ij_kotlin_method_parameters_new_line_after_left_paren = true -ij_kotlin_method_parameters_right_paren_on_new_line = true -ij_kotlin_method_parameters_wrap = on_every_item -ij_kotlin_name_count_to_use_star_import = 2147483647 -ij_kotlin_name_count_to_use_star_import_for_members = 2147483647 -ij_kotlin_packages_to_use_import_on_demand = kotlinx.android.synthetic.** -ij_kotlin_parameter_annotation_wrap = off -ij_kotlin_space_after_comma = true -ij_kotlin_space_after_extend_colon = true -ij_kotlin_space_after_type_colon = true -ij_kotlin_space_before_catch_parentheses = true -ij_kotlin_space_before_comma = false -ij_kotlin_space_before_extend_colon = true -ij_kotlin_space_before_for_parentheses = true -ij_kotlin_space_before_if_parentheses = true -ij_kotlin_space_before_lambda_arrow = true -ij_kotlin_space_before_type_colon = false -ij_kotlin_space_before_when_parentheses = true -ij_kotlin_space_before_while_parentheses = true -ij_kotlin_spaces_around_additive_operators = true -ij_kotlin_spaces_around_assignment_operators = true -ij_kotlin_spaces_around_equality_operators = true -ij_kotlin_spaces_around_function_type_arrow = true -ij_kotlin_spaces_around_logical_operators = true -ij_kotlin_spaces_around_multiplicative_operators = true -ij_kotlin_spaces_around_range = false -ij_kotlin_spaces_around_relational_operators = true -ij_kotlin_spaces_around_unary_operator = false -ij_kotlin_spaces_around_when_arrow = true -ij_kotlin_variable_annotation_wrap = off -ij_kotlin_while_on_new_line = false -ij_kotlin_wrap_elvis_expressions = 1 -ij_kotlin_wrap_expression_body_functions = 1 -ij_kotlin_wrap_first_method_in_call_chain = false -insert_final_newline = true -ktlint_code_style = official -ktlint_ignore_back_ticked_identifier = false \ No newline at end of file diff --git a/clearing-house-processors/.gitignore b/clearing-house-processors/.gitignore deleted file mode 100644 index 20e5396f..00000000 --- a/clearing-house-processors/.gitignore +++ /dev/null @@ -1,18 +0,0 @@ -# Gradle -/.gradle - -# Eclipse -.metadata -.project -.settings -.classpath -*.launch - -# IDEA -*.iml -/.idea - -# Binary -/target -/build -/out \ No newline at end of file diff --git a/clearing-house-processors/README.md b/clearing-house-processors/README.md deleted file mode 100644 index 43e16971..00000000 --- 
a/clearing-house-processors/README.md +++ /dev/null @@ -1,34 +0,0 @@ -# Clearing House Processors - -## Building from Source -The Clearing House Processors are written in Java and require Java 17 and can be build using gradle (version 7.5+): - -``` -cd clearing-house-processors -./gradlew build -``` - -## Camel Routes -The Clearing House Processors include a file that contains the [routes](src/routes/clearing-house-routes.xml) used by [Apache Camel](https://camel.apache.org) (used in the Trusted Connector) to provide the endpoints of the Clearing House Service. The routes also contain some important steps that transform and forward data to the services of the Clearing House. - -The routes define TLS endpoints and require access to the `keystore` and `truststore` used by the Trusted Connector. Currently, the passwords for both need to be configured in the routes file. - -The routes also expect the `Logging Service` to be accessible via the docker-url `logging-service`. If this is not the case in your deployment, you will need to change this in the routes file. - -## Testing -All tests are integration tests and will try to establish a TLS connection to an instance of the Clearing House. -The tests will only run successfully if they can authenticate the peer (i.e. the Clearing House). -To set up a local test environment, the docker container running the Trusted Connector for the Clearing House -needs to be named `provider-core` and use the `provider-keystore.p12` as keystore. - -The host running the test must include the line -``` -127.0.0.1 provider-core -``` - -in its `/etc/hosts` file. Remote setups for testing will need to adapt the settings accordingly. - -To run the tests use -``` -./gradlew integrationTest -``` diff --git a/clearing-house-processors/bin/.gitignore b/clearing-house-processors/bin/.gitignore deleted file mode 100644 index d97dd0c1..00000000 --- a/clearing-house-processors/bin/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/main/ -/default/ diff --git a/clearing-house-processors/bnd.bnd b/clearing-house-processors/bnd.bnd deleted file mode 100644 index 61c34cb8..00000000 --- a/clearing-house-processors/bnd.bnd +++ /dev/null @@ -1,7 +0,0 @@ -Bundle-Name: IDS :: Clearing House Processors -Bundle-Description: Support Processors required by Clearing House -Export-Package: \ - de.fhg.aisec.ids.clearinghouse -Import-Package: \ - !org.checkerframework.checker*,\ - * diff --git a/clearing-house-processors/build.gradle.kts b/clearing-house-processors/build.gradle.kts deleted file mode 100644 index 4c0ad8ae..00000000 --- a/clearing-house-processors/build.gradle.kts +++ /dev/null @@ -1,145 +0,0 @@ -import org.jetbrains.kotlin.gradle.tasks.KotlinCompile -import java.io.FileInputStream -import java.util.* - -plugins { - java - alias(libs.plugins.kotlin.jvm) - alias(libs.plugins.kotlin.serialization) - alias(libs.plugins.spring.dependencyManagement) - `maven-publish` -} - -group = "de.fhg.aisec.ids.clearinghouse" - -val fis = FileInputStream("../clearing-house-app/logging-service/Cargo.toml") -val props = Properties() -props.load(fis) -version = props.getProperty("version").removeSurrounding("\"") - -sourceSets{ - create("intTest"){ - } -} - -val intTestImplementation: Configuration by configurations.getting { - extendsFrom(configurations.testImplementation.get()) -} - -configurations["intTestRuntimeOnly"].extendsFrom(configurations.runtimeOnly.get()) - -val integrationTest = task("integrationTest") { - // set to true for debugging - testLogging.showStandardStreams = false - 
useJUnitPlatform() - - description = "Runs integration tests." - group = "verification" - - testClassesDirs = sourceSets["intTest"].output.classesDirs - classpath = sourceSets["intTest"].runtimeClasspath - shouldRunAfter("test") -} - -tasks.register("printChVersion") { - - doFirst { - println(version) - } -} - -buildscript { - repositories { - mavenCentral() - - fun findProperty(s: String) = project.findProperty(s) as String? - - maven { - name = "GitHubPackages" - - url = uri("https://maven.pkg.github.com/Fraunhofer-AISEC/ids-clearing-house-service") - credentials(HttpHeaderCredentials::class) { - name = findProperty("github.username") - value = findProperty("github.token") - } - authentication { - create<HttpHeaderAuthentication>("header") - } - } - } -} - -publishing { - fun findProperty(s: String) = project.findProperty(s) as String? - - publications { - create<MavenPublication>("binary") { - artifact(tasks["jar"]) - } - } - repositories { - maven { - name = "GitHubPackages" - - url = uri("https://maven.pkg.github.com/Fraunhofer-AISEC/ids-clearing-house-service") - credentials(HttpHeaderCredentials::class) { - name = findProperty("github.username") - value = findProperty("github.token") - } - authentication { - create<HttpHeaderAuthentication>("header") - } - } - } -} - -repositories { - mavenCentral() - // References IAIS repository that contains the infomodel artifacts - maven("https://maven.iais.fraunhofer.de/artifactory/eis-ids-public/") -} - -dependencies { - // Imported from IDS feature in TC at runtime - implementation(libs.infomodel.model) - implementation(libs.infomodel.serializer) - - implementation(libs.camel.idscp2) - implementation(libs.camel.core) - implementation(libs.camel.api) - implementation(libs.camel.jetty) - - implementation(libs.apacheHttp.core) - implementation(libs.apacheHttp.client) - implementation(libs.apacheHttp.mime) - implementation(libs.commons.fileupload) - implementation(libs.ktor.auth) - implementation(libs.ktor.auth.jwt) - compileOnly(libs.spring.context) - - testApi(libs.slf4j.simple) - testImplementation(libs.idscp2.core) - testImplementation(libs.junit5) - testImplementation(libs.okhttp3) - testImplementation(kotlin("test")) - testImplementation(libs.kotlin.serialization.json) -} - -tasks.withType<KotlinCompile> { - kotlinOptions { - jvmTarget = "17" - } -} - -tasks.withType<JavaCompile> { - options.encoding = "UTF-8" - sourceCompatibility = JavaVersion.VERSION_17.toString() - targetCompatibility = JavaVersion.VERSION_17.toString() -} - -tasks.jar { - manifest { - attributes(mapOf(Pair("Bundle-Vendor", "Fraunhofer AISEC"), - Pair("-noee", true))) - } -} diff --git a/clearing-house-processors/gradle/libs.versions.toml b/clearing-house-processors/gradle/libs.versions.toml deleted file mode 100644 index 91cbd1ed..00000000 --- a/clearing-house-processors/gradle/libs.versions.toml +++ /dev/null @@ -1,71 +0,0 @@ -[versions] -idscp2 = "0.17.0" -ktlint = "0.48.2" - -# Kotlin library/compiler version -kotlin = "1.8.0" -kotlinxCoroutines = "1.6.4" -kotlinxSerialization = "1.4.0" - -# HTTP client -ktor = "2.2.3" -okhttp = "4.9.1" - -# The used version of the infomodel from IESE -infomodel = "4.1.3" - -camel = "3.18.5" -slf4j = "2.0.0" -junit5 = "5.9.2" -mockito = "5.1.1" -httpcore = "4.4.15" -httpclient = "4.5.14" - -# Needed for camel multipart processor -commonsFileUpload = "1.4" - -springBoot = "3.0.2" -springframework = "6.0.4" - -[libraries] -# common libraries -slf4j-api = { group = "org.slf4j", name = "slf4j-api", version.ref = "slf4j" } -slf4j-simple = { group = "org.slf4j", name = "slf4j-simple", version.ref = "slf4j" } -camel-core = { group = 
"org.apache.camel", name = "camel-core", version.ref = "camel" } -camel-api = { group = "org.apache.camel", name = "camel-api", version.ref = "camel" } -okhttp3 = { group = "com.squareup.okhttp3", name = "okhttp", version.ref = "okhttp" } -ktor-auth = { group = "io.ktor", name = "ktor-server-auth", version.ref = "ktor" } -ktor-auth-jwt = { group = "io.ktor", name = "ktor-server-auth-jwt", version.ref = "ktor" } -spring-context = { group = "org.springframework", name = "spring-context", version.ref = "springframework"} - -# common test libraries -mockito = { group = "org.mockito", name = "mockito-core", version.ref = "mockito" } -camel-test = { group = "org.apache.camel", name = "camel-test", version.ref = "camel" } -junit5 = { group = "org.junit.jupiter", name = "junit-jupiter", version.ref = "junit5" } -kotlin-serialization-json = { group = "org.jetbrains.kotlinx", name = "kotlinx-serialization-json", version.ref = "kotlinxSerialization" } - -# camel-multipart-processor -camel-jetty = { group = "org.apache.camel", name = "camel-jetty", version.ref = "camel" } -camel-http = { group = "org.apache.camel", name = "camel-http", version.ref = "camel" } -apacheHttp-core = { group = "org.apache.httpcomponents", name = "httpcore", version.ref = "httpcore" } -apacheHttp-client = { group = "org.apache.httpcomponents", name = "httpclient", version.ref = "httpclient" } -apacheHttp-mime = { group = "org.apache.httpcomponents", name = "httpmime", version.ref = "httpclient" } -commons-fileupload = { group = "commons-fileupload", name = "commons-fileupload", version.ref = "commonsFileUpload" } - -# camel-processors -camel-idscp2 = { group = "de.fhg.aisec.ids", name = "camel-idscp2", version.ref = "idscp2" } -infomodel-model = { group = "de.fraunhofer.iais.eis.ids.infomodel", name = "java", version.ref = "infomodel" } -infomodel-serializer = { group = "de.fraunhofer.iais.eis.ids", name = "infomodel-serializer", version.ref = "infomodel" } - -# for tests -idscp2-core = { group = "de.fhg.aisec.ids", name = "idscp2-core", version.ref = "idscp2" } - -[bundles] -test5 = ["junit5", "mockito"] - -[plugins] -springboot = { id = "org.springframework.boot", version.ref = "springBoot" } -spring-dependencyManagement = { id = "io.spring.dependency-management", version = "1.0.13.RELEASE" } -kotlin-jvm = { id = "org.jetbrains.kotlin.jvm", version.ref = "kotlin" } -kotlin-plugin-spring = { id = "org.jetbrains.kotlin.plugin.spring", version.ref = "kotlin" } -kotlin-serialization = { id = "org.jetbrains.kotlin.plugin.serialization", version.ref = "kotlin" } diff --git a/clearing-house-processors/gradle/wrapper/gradle-wrapper.jar b/clearing-house-processors/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 249e5832..00000000 Binary files a/clearing-house-processors/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/clearing-house-processors/settings.gradle.kts b/clearing-house-processors/settings.gradle.kts deleted file mode 100644 index 9bc9f2f5..00000000 --- a/clearing-house-processors/settings.gradle.kts +++ /dev/null @@ -1,2 +0,0 @@ -rootProject.name = "clearing-house-processors" - diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/Utility.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/Utility.kt deleted file mode 100644 index 3a4854b4..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/Utility.kt +++ /dev/null @@ -1,217 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse - -import 
de.fhg.aisec.ids.clearinghouse.multipart.MultipartEndpointTest -import de.fhg.aisec.ids.idscp2.daps.aisecdaps.AisecDapsDriver -import de.fhg.aisec.ids.idscp2.daps.aisecdaps.AisecDapsDriverConfig -import de.fhg.aisec.ids.idscp2.keystores.KeyStoreUtil.loadKeyStore -import de.fraunhofer.iais.eis.DynamicAttributeToken -import de.fraunhofer.iais.eis.DynamicAttributeTokenBuilder -import de.fraunhofer.iais.eis.LogMessageBuilder -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.QueryMessageBuilder -import de.fraunhofer.iais.eis.RequestMessageBuilder -import de.fraunhofer.iais.eis.TokenFormat -import de.fraunhofer.iais.eis.ids.jsonld.Serializer -import kotlinx.serialization.Serializable -import kotlinx.serialization.decodeFromString -import kotlinx.serialization.json.Json -import kotlinx.serialization.json.int -import kotlinx.serialization.json.jsonArray -import kotlinx.serialization.json.jsonObject -import kotlinx.serialization.json.jsonPrimitive -import okhttp3.Headers -import okhttp3.MultipartReader -import java.net.URI -import java.nio.charset.Charset -import java.nio.charset.StandardCharsets -import java.nio.file.Path -import java.nio.file.Paths -import java.time.LocalDateTime -import java.util.Base64 -import java.util.Objects -import javax.xml.datatype.DatatypeFactory - -@Serializable -data class ChJwt(val transaction_id: String, - val timestamp: Int, - val process_id: String, - val document_id: String, - val payload: String, - val chain_hash: String, - val client_id: String, - val clearing_house_version: String) - -@Serializable -private data class ChReceipt(val data: String) - -@Serializable -data class QueryResult(val date_from: String, - val date_to: String, - val page: Int, - val size: Int, - val order: String, - val documents: List<String>) - -@Serializable -data class OwnerList(val owners: List<String>) - - -enum class MessageType{ - LOG, PID, QUERY -} - -class Utility { - companion object{ - - val CONNECTOR_1 = "D2:70:FE:7F:32:BB:37:BF:DF:F4:08:36:6B:F1:9E:7A:EB:A4:2D:2A:keyid:CB:8C:C7:B6:85:79:A8:23:A6:CB:15:AB:17:50:2F:E6:65:43:5D:E8" - val CONNECTOR_2 = "13:09:2E:1C:50:9B:8B:77:DE:01:1F:3B:B5:E0:D2:CC:1B:C5:88:9E:keyid:CB:8C:C7:B6:85:79:A8:23:A6:CB:15:AB:17:50:2F:E6:65:43:5D:E8" - - val STATUS_400 = "Bad Request" - val STATUS_401 = "Unauthorized" - val STATUS_403 = "Forbidden" - val STATUS_404 = "Not Found" - val STATUS_500 = "Internal Server Error" - - private val TEST_RUN_ID = (0..2147483647).random() - - private val SERIALIZER = Serializer() - - val keyStorePath: Path = Paths.get( - Objects.requireNonNull( - MultipartEndpointTest::class.java.classLoader - .getResource("ssl/client-keystore.p12") - ).path - ) - - val keyStorePathOtherClient: Path = Paths.get( - Objects.requireNonNull( - MultipartEndpointTest::class.java.classLoader - .getResource("ssl/server-keystore.p12") - ).path - ) - - val trustStorePath: Path = Paths.get( - Objects.requireNonNull( - MultipartEndpointTest::class.java.classLoader - .getResource("ssl/truststore.p12") - ).path - ) - - val password = "password".toCharArray() - - // Load certificates from local KeyStore - val ks = loadKeyStore(keyStorePath, password) - val ksOtherClient = loadKeyStore(keyStorePathOtherClient, password) - - val dapsDriver = AisecDapsDriver( - AisecDapsDriverConfig.Builder() - .setKeyStorePath(keyStorePath) - .setKeyStorePassword(password) - .setKeyPassword(password) - .setKeyAlias("1") - .setTrustStorePath(trustStorePath) - .setTrustStorePassword(password) - .setDapsUrl("https://daps-dev.aisec.fraunhofer.de/v4") - 
.loadTransportCertsFromKeystore(ks) - .build() - ) - - val dapsDriverOtherClient = AisecDapsDriver( - AisecDapsDriverConfig.Builder() - .setKeyStorePath(keyStorePathOtherClient) - .setKeyStorePassword(password) - .setKeyPassword(password) - .setKeyAlias("1") - .setTrustStorePath(trustStorePath) - .setTrustStorePassword(password) - .setDapsUrl("https://daps-dev.aisec.fraunhofer.de/v4") - .loadTransportCertsFromKeystore(ksOtherClient) - .build() - ) - - fun formatId(id: String): String{ - return "${id}_${TEST_RUN_ID}" - } - - fun getDapsToken(token: ByteArray = dapsDriver.token): DynamicAttributeToken{ - return DynamicAttributeTokenBuilder() - ._tokenFormat_(TokenFormat.JWT) - ._tokenValue_(String(token, StandardCharsets.UTF_8)) - .build() - } - - fun checkIdsMessage(m: String, c: Class<*>){ - SERIALIZER.deserialize(m, c) - } - - private fun getPart(headers: Headers): String{ - val partName = headers["Content-Disposition"]!!.split(";")[1].split("=")[1] - return partName.substring(1, partName.length-1) - } - - fun getParts(reader: MultipartReader): Pair<String, String>{ - var header = "" - var payload = "" - reader.use { - while (true) { - val part = reader.nextPart() ?: break - when (getPart(part.headers)){ - "header" -> { - header = part.body.readString(Charset.forName("utf-8")) - } - "payload" -> { - payload = part.body.readString(Charset.forName("utf-8")) - } - } - } - } - return Pair(header, payload) - } - - fun parseJwt(receipt: String): ChJwt{ - val data = Json.decodeFromString<ChReceipt>(receipt) - val chunks: List<String> = data.data.split(".") - val decoder: Base64.Decoder = Base64.getUrlDecoder() - val payload = String(decoder.decode(chunks[1])) - return Json.decodeFromString(payload) - } - - fun parseQueryResult(body: String): QueryResult{ - val json = Json.parseToJsonElement(body).jsonObject - return QueryResult( - json["date_from"]!!.jsonPrimitive.content, - json["date_to"]!!.jsonPrimitive.content, - json["page"]!!.jsonPrimitive.int, - json["size"]!!.jsonPrimitive.int, - json["order"]!!.jsonPrimitive.content, - json["documents"]!!.jsonArray.map { it.toString() } - ) - } - - fun getMessage(type: MessageType, token: DynamicAttributeToken): Message{ - when (type) { - MessageType.LOG -> return LogMessageBuilder() - ._securityToken_(token) - ._issuerConnector_(URI.create("http://ch-ids.aisec.fraunhofer.de/idscp-client")) - ._issued_(DatatypeFactory.newInstance().newXMLGregorianCalendar(LocalDateTime.now().toString())) - ._senderAgent_(URI.create("http://ch-ids.aisec.fraunhofer.de/idscp-client")) - ._modelVersion_("4.0") - .build() - MessageType.QUERY -> return QueryMessageBuilder() - ._securityToken_(token) - ._issuerConnector_(URI.create("http://ch-ids.aisec.fraunhofer.de/idscp-client")) - ._issued_(DatatypeFactory.newInstance().newXMLGregorianCalendar(LocalDateTime.now().toString())) - ._senderAgent_(URI.create("http://ch-ids.aisec.fraunhofer.de/idscp-client")) - ._modelVersion_("4.0") - .build() - MessageType.PID -> return RequestMessageBuilder() - ._securityToken_(token) - ._issuerConnector_(URI.create("http://ch-ids.aisec.fraunhofer.de/idscp-client")) - ._issued_(DatatypeFactory.newInstance().newXMLGregorianCalendar(LocalDateTime.now().toString())) - ._senderAgent_(URI.create("http://ch-ids.aisec.fraunhofer.de/idscp-client")) - ._modelVersion_("4.0") - .build() - } - } - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/CreatePidTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/CreatePidTests.kt deleted file mode 100644 index 
a17279f1..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/CreatePidTests.kt +++ /dev/null @@ -1,124 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.idscp2 - -import de.fhg.aisec.ids.clearinghouse.OwnerList -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.STATUS_400 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.STATUS_403 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.MessageProcessedNotificationMessage -import de.fraunhofer.iais.eis.RejectionMessage -import kotlinx.serialization.encodeToString -import kotlinx.serialization.json.Json -import org.junit.Assert -import org.junit.jupiter.api.Test - -class CreatePidTests { - - @Test - fun createPid1(){ - val pid = formatId("idscp-pid1") - val owners = null - - // Test: createPid with no extra owners - succCreatePid(pid, owners) - } - - @Test - fun createPid2(){ - val pid = formatId("idscp-pid2") - val owners = listOf(Utility.CONNECTOR_2) - - // Test: createPid with an extra owner - succCreatePid(pid, owners) - } - - @Test - fun createPid3(){ - val pid = formatId("idscp-pid3") - val owners = listOf(Utility.CONNECTOR_1, Utility.CONNECTOR_2) - - // Test: createPid with duplicate self in owner list - succCreatePid(pid, owners) - } - - @Test - fun createPid4(){ - val pid = formatId("idscp-pid4") - val owners = listOf(Utility.CONNECTOR_2, Utility.CONNECTOR_2) - - // Test: createPid with duplicate other owner in owner list - succCreatePid(pid, owners) - } - - @Test - fun createPid5(){ - val pid = formatId("idscp-pid5") - val owners = null - - // Preparation: create PID - succCreatePid(pid, owners) - - // Test: Try to create existing PID (to which user has access) - failCreatePid(pid, owners, STATUS_400) - } - - @Test - fun createPid6(){ - val pid = formatId("idscp-pid6") - val owners = null - - // Preparation: create PID - succCreatePid(pid, owners, client = 2) - - // Test: Try to create existing PID (to which user has no access) - failCreatePid(pid, owners, STATUS_403) - } - - @Test - fun createPid7(){ - val pid = formatId("idscp-pid7") - val owners = "{\"owners\": [\"${Utility.CONNECTOR_2}\",]}" - - // Test: createPid with invalid owner list - val (resultMessage, resultPayload, _) = Idscp2EndpointTest.pidMessage(pid, owners) - - // check IDS message type - Assert.assertTrue(resultMessage is RejectionMessage) - // payload = http status code message - val p = String(resultPayload!!) - Assert.assertEquals("Unexpected status message", STATUS_400, p) - } - - companion object{ - - fun succCreatePid(pid: String, owners: List<String>?, client: Int = 1){ - val (resultMessage, resultPayload, _) = callCreatePid(pid, owners, client) - - // check IDS message type - Assert.assertTrue(resultMessage is MessageProcessedNotificationMessage) - // createPid returns the created PID, but in quotes - val p = String(resultPayload!!) - val createdPid = p.substring(1, p.length-1) - Assert.assertEquals("Returned PID does not match given PID!", pid, createdPid) - } - - fun failCreatePid(pid: String, owners: List<String>?, em: String){ - val (resultMessage, resultPayload, _) = callCreatePid(pid, owners) - // check IDS message type - Assert.assertTrue(resultMessage is RejectionMessage) - // payload = http status code message - val p = String(resultPayload!!) 
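- // em is the expected HTTP status phrase (e.g. STATUS_400 = "Bad Request") that the Clearing House returns as the rejection payload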
- Assert.assertEquals("Unexpected status code message", em, p) - } - - private fun callCreatePid(pid: String, owners: List?, c: Int = 1): Triple?> { - var list = "" - if (owners != null) { - list = Json.encodeToString(OwnerList(owners)) - } - return Idscp2EndpointTest.pidMessage(pid, list, client=c) - } - } - -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/Idscp2Client.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/Idscp2Client.kt deleted file mode 100644 index af4e85d6..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/Idscp2Client.kt +++ /dev/null @@ -1,107 +0,0 @@ -/*- - * ========================LICENSE_START================================= - * idscp2-examples - * %% - * Copyright (C) 2021 Fraunhofer AISEC - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * =========================LICENSE_END================================== - */ -package de.fhg.aisec.ids.clearinghouse.idscp2 - -import de.fhg.aisec.ids.idscp2.api.FastLatch -import de.fhg.aisec.ids.idscp2.api.configuration.Idscp2Configuration -import de.fhg.aisec.ids.idscp2.api.connection.Idscp2ConnectionAdapter -import de.fhg.aisec.ids.idscp2.api.raregistry.RaProverDriverRegistry -import de.fhg.aisec.ids.idscp2.api.raregistry.RaVerifierDriverRegistry -import de.fhg.aisec.ids.idscp2.applayer.AppLayerConnection -import de.fhg.aisec.ids.idscp2.defaultdrivers.remoteattestation.dummy.RaProverDummy2 -import de.fhg.aisec.ids.idscp2.defaultdrivers.remoteattestation.dummy.RaVerifierDummy2 -import de.fhg.aisec.ids.idscp2.defaultdrivers.securechannel.tls13.NativeTLSDriver -import de.fhg.aisec.ids.idscp2.defaultdrivers.securechannel.tls13.NativeTlsConfiguration -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.ids.jsonld.Serializer -import org.slf4j.LoggerFactory -import java.nio.charset.StandardCharsets - -class Idscp2Client constructor( - private val configuration: Idscp2Configuration, - private val nativeTlsConfiguration: NativeTlsConfiguration -) { - - init{ - // register ra drivers - RaProverDriverRegistry.registerDriver( - RaProverDummy2.RA_PROVER_DUMMY2_ID, ::RaProverDummy2, null - ) - - RaVerifierDriverRegistry.registerDriver( - RaVerifierDummy2.RA_VERIFIER_DUMMY2_ID, ::RaVerifierDummy2, null - ) - } - - fun send(message: Message, headers: Map?, payload: ByteArray?): Triple?>{ - var resultMessage: Message? = null - var resultPayload: ByteArray? = null - var resultHeaders: Map? 
= null - - // Use this latch for waiting - val latch = FastLatch() - - val secureChannelDriver = NativeTLSDriver() - val connectionFuture = secureChannelDriver.connect(::AppLayerConnection, configuration, nativeTlsConfiguration) - connectionFuture.thenAccept { connection: AppLayerConnection -> - LOG.info("Client: New connection with id " + connection.id) - connection.addConnectionListener(object : Idscp2ConnectionAdapter() { - override fun onError(t: Throwable) { - LOG.error("Client connection error occurred", t) - } - - override fun onClose() { - LOG.info("Client: Connection with id " + connection.id + " has been closed") - latch.unlock() - } - }) - - connection.addIdsMessageListener { c: AppLayerConnection, m: Message?, data: ByteArray?, headers: Map<String, String> -> - resultMessage = m - resultHeaders = headers - resultPayload = data - headers.forEach { (name, value) -> - LOG.debug("Found header '{}':'{}'", name, value) - } - LOG.debug("All headers logged!") - LOG.info("Received IDS message: " + Serializer().serialize(m)) - LOG.info("with payload: " + String(data!!, StandardCharsets.UTF_8)) - c.close() - } - - connection.unlockMessaging() - LOG.info("Send Message ...") - connection.sendIdsMessage(message, payload, headers) - LOG.info("Local DAT: " + String(connection.localDat, StandardCharsets.UTF_8)) - }.exceptionally { t: Throwable? -> - LOG.error("Client endpoint error occurred", t) - latch.unlock() - null - } - - // Wait until error or connection close - latch.await() - return Triple(resultMessage, resultPayload, resultHeaders) - } - - companion object { - private val LOG = LoggerFactory.getLogger(Idscp2Client::class.java) - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/Idscp2EndpointTest.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/Idscp2EndpointTest.kt deleted file mode 100644 index 65ddd82b..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/Idscp2EndpointTest.kt +++ /dev/null @@ -1,108 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.idscp2 - -import de.fhg.aisec.ids.clearinghouse.MessageType -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.idscp2.api.configuration.AttestationConfig -import de.fhg.aisec.ids.idscp2.api.configuration.Idscp2Configuration -import de.fhg.aisec.ids.idscp2.defaultdrivers.remoteattestation.dummy.RaProverDummy2 -import de.fhg.aisec.ids.idscp2.defaultdrivers.remoteattestation.dummy.RaVerifierDummy2 -import de.fhg.aisec.ids.idscp2.defaultdrivers.securechannel.tls13.NativeTlsConfiguration -import de.fraunhofer.iais.eis.DynamicAttributeTokenBuilder -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.TokenFormat - -class Idscp2EndpointTest { - - companion object { - - private val localAttestationConfig = AttestationConfig.Builder() - .setSupportedRaSuite(arrayOf(RaProverDummy2.RA_PROVER_DUMMY2_ID)) - .setExpectedRaSuite(arrayOf(RaVerifierDummy2.RA_VERIFIER_DUMMY2_ID)) - .setRaTimeoutDelay(300 * 1000L) // 300 seconds - .build() - - // create idscp2 config - private val settings = Idscp2Configuration.Builder() - .setAckTimeoutDelay(500) // 500 ms - .setHandshakeTimeoutDelay(5 * 1000L) // 5 seconds - .setAttestationConfig(localAttestationConfig) - .setDapsDriver(Utility.dapsDriver) - .build() - - private val settingsOtherClient = Idscp2Configuration.Builder() - .setAckTimeoutDelay(500) // 500 ms - .setHandshakeTimeoutDelay(5 * 1000L) // 5 seconds - .setAttestationConfig(localAttestationConfig) - 
.setDapsDriver(Utility.dapsDriverOtherClient) - .build() - - // create secureChannel config - - private val nativeTlsConfiguration = NativeTlsConfiguration.Builder() - .setKeyStorePath(Utility.keyStorePath) - .setKeyPassword(Utility.password) - .setKeyStorePassword(Utility.password) - .setTrustStorePath(Utility.trustStorePath) - .setTrustStorePassword(Utility.password) - .setCertificateAlias("1") - .setHost("tc-core-server") - .build() - - val client = Idscp2Client(settings, nativeTlsConfiguration) - - fun getMessage(type: MessageType, client: Int = 1): Message{ - return when (client){ - 2 -> Utility.getMessage(type, - Utility.getDapsToken(Utility.dapsDriverOtherClient.token) - ) - else -> Utility.getMessage(type, Utility.getDapsToken()) - } - } - - fun getInvalidMessage(type: MessageType): Message{ - val invToken = DynamicAttributeTokenBuilder() - ._tokenFormat_(TokenFormat.JWT) - ._tokenValue_("This is not a valid token!") - .build() - return Utility.getMessage(type, invToken) - } - - fun logMessage(pid: String, payload: String, authenticated: Boolean = true, client: Int = 1): Triple<Message?, ByteArray?, Map<String, String>?> { - val m = if (authenticated){ - getMessage(MessageType.LOG, client) - } else{ - getInvalidMessage(MessageType.LOG) - } - val header = mapOf("ch-ids-pid" to pid) - val p = payload.toByteArray() - return Idscp2EndpointTest.client.send(m, header, p) - } - - fun pidMessage(pid: String, payload: String, authenticated: Boolean = true, client: Int = 1): Triple<Message?, ByteArray?, Map<String, String>?> { - val m = if (authenticated){ - getMessage(MessageType.PID, client) - } else{ - getInvalidMessage(MessageType.PID) - } - val header = mapOf("ch-ids-pid" to pid, "Content-Type" to "application/json" ) - val p = payload.toByteArray() - return Idscp2EndpointTest.client.send(m, header, p) - } - - fun queryMessage(pid: String, id: String?, payload: String, authenticated: Boolean = true, client: Int = 1, page: Int = 1, size: Int = 100, sort: String = "desc"): Triple<Message?, ByteArray?, Map<String, String>?> { - val m = if (authenticated){ - getMessage(MessageType.QUERY, client) - } else{ - getInvalidMessage(MessageType.QUERY) - } - val header = if (id != null){ - mapOf("ch-ids-pid" to pid, "ch-ids-id" to id, "Content-Type" to "application/json" ) - } - else{ - mapOf("ch-ids-pid" to pid, "ch-ids-page" to page.toString(), "ch-ids-size" to size.toString(), "ch-ids-sort" to sort, "Content-Type" to "application/json" ) - } - val p = payload.toByteArray() - return Idscp2EndpointTest.client.send(m, header, p) - } - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/LogMessageTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/LogMessageTests.kt deleted file mode 100644 index 9b7483ba..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/LogMessageTests.kt +++ /dev/null @@ -1,77 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.idscp2 - -import de.fhg.aisec.ids.clearinghouse.ChJwt -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.STATUS_400 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.STATUS_403 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fhg.aisec.ids.clearinghouse.idscp2.CreatePidTests.Companion.succCreatePid -import de.fraunhofer.iais.eis.* -import org.junit.Assert -import org.junit.jupiter.api.Test - -class LogMessageTests { - @Test - fun logMessage1(){ - val pid = formatId("idscp-log1") - val payload = "This message is logged" - - // create Pid - succCreatePid(pid, null) - - 
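// succCreatePid registers this client as the owner of the PID, so the log call below is authorized -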
// test: Logging to existing Pid - succLogMessage(pid, payload) - } - - @Test - fun logMessage2() { - val pid = formatId("idscp-log2") - val payload = "This message is logged" - - // test: Logging to non-existing Pid - succLogMessage(pid, payload) - } - - @Test - fun logMessage3(){ - val pid = formatId("idscp-log3") - val payload = "" - - // test: Logging an empty payload - failLogMessage(pid, payload, STATUS_400) - } - - @Test - fun logMessage4(){ - val pid = formatId("idscp-log4") - val payload = "This message is logged" - - // create Pid - succCreatePid(pid, null, client = 2) - - // test: Logging to existing Pid - failLogMessage(pid, payload, STATUS_403) - } - - companion object{ - - fun failLogMessage(pid: String, payload: String, em: String) { - val (resultMessage, resultPayload, _) = Idscp2EndpointTest.logMessage(pid, payload) - // check IDS message type - Assert.assertTrue(resultMessage is RejectionMessage) - // payload = http status code message - val p = String(resultPayload!!) - Assert.assertEquals("Unexpected status code message", em, p) - } - - fun succLogMessage(pid: String, payload: String, c: Int = 1): ChJwt { - val (resultMessage, resultPayload, resultHeaders) = Idscp2EndpointTest.logMessage(pid, payload, client = c) - // check IDS message type - Assert.assertTrue(resultMessage is MessageProcessedNotificationMessage) - // check the pid from receipt in the payload. Does pid match with the given pid? - val receipt = Utility.parseJwt(String(resultPayload!!)) - Assert.assertEquals("Returned PID does not match given PID!", pid, receipt.process_id) - return receipt - } - } - -} \ No newline at end of file diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/QueryIdTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/QueryIdTests.kt deleted file mode 100644 index ffeb75d8..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/QueryIdTests.kt +++ /dev/null @@ -1,71 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.idscp2 - -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.STATUS_404 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fhg.aisec.ids.clearinghouse.idscp2.LogMessageTests.Companion.succLogMessage -import de.fraunhofer.iais.eis.RejectionMessage -import de.fraunhofer.iais.eis.ResultMessage -import org.junit.Assert -import org.junit.jupiter.api.Test - -class QueryIdTests { - - @Test - fun queryId1(){ - val pid = formatId("idscp-qid1") - - // create Pid with one document - val message = "This is the first message" - val receipt = LogMessageTests.succLogMessage(pid, message) - - // Test: query existing document - succQueryId(pid, receipt.document_id) - } - - @Test - fun queryId2(){ - val pid = formatId("idscp-qid2") - - // create Pid with one document - val message = "This is the first message" - succLogMessage(pid, message) - - // Test: query non-existing document - failQueryId(pid, "unknown-id", STATUS_404) - } - - @Test - fun queryId3(){ - val pid1 = formatId("idscp-qid2_with_doc") - val pid2 = formatId("idscp-qid2_without_doc") - - // create one Pid with one document and another with no documents - val message = "This is the first message" - val receipt = succLogMessage(pid1, message) - CreatePidTests.succCreatePid(pid2, null) - - // Test: query existing document in wrong pid - failQueryId(pid2, receipt.document_id, STATUS_404) - } - - companion object{ - - fun failQueryId(pid: String, id: String?, em: String) { - val (resultMessage, resultPayload, resultHeaders) = 
Idscp2EndpointTest.queryMessage(pid, id, "") - // check IDS message type - Assert.assertTrue(resultMessage is RejectionMessage) - // payload = http status code message - val p = String(resultPayload!!) - Assert.assertEquals("Unexpected status code message", em, p) - } - - fun succQueryId(pid: String, id: String): String { - val (resultMessage, resultPayload, resultHeaders) = Idscp2EndpointTest.queryMessage(pid, id, "") - // check IDS message type - Assert.assertTrue(resultMessage is ResultMessage) - //TODO: can't serialize json. array is of type "message + payload + payload type" - val p = String(resultPayload!!) - return p - } - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/QueryPidTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/QueryPidTests.kt deleted file mode 100644 index 50430bad..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/idscp2/QueryPidTests.kt +++ /dev/null @@ -1,109 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.idscp2 - -import de.fhg.aisec.ids.clearinghouse.QueryResult -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.STATUS_403 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.STATUS_404 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.parseQueryResult -import de.fhg.aisec.ids.clearinghouse.idscp2.CreatePidTests.Companion.succCreatePid -import de.fhg.aisec.ids.clearinghouse.idscp2.LogMessageTests.Companion.succLogMessage -import de.fhg.aisec.ids.clearinghouse.idscp2.QueryIdTests.Companion.failQueryId -import de.fraunhofer.iais.eis.ResultMessage -import org.junit.Assert -import org.junit.jupiter.api.Test - -class QueryPidTests { - @Test - fun queryPid1(){ - val pid = formatId("idscp-qpid1") - - // create Pid - succCreatePid(pid, null) - - // Test: query existing Pid with no documents - val result = succQueryPid(pid) - Assert.assertEquals("Should receive empty array!", 0, result.documents.size) - } - - @Test - fun queryPid2(){ - val pid = formatId("idscp-qpid2") - - // create Pid with three messages - val messages = listOf("This is the first message", "This is the second message", "This is the third message") - messages.forEach{ - succLogMessage(pid, it) - } - - // Test: query existing Pid with three documents - val result = succQueryPid(pid) - Assert.assertEquals("Should receive array of size three!", 3, result.documents.size) - } - - @Test - fun queryPid3(){ - val pid = formatId("idscp-qpid3") - val owners = listOf(Utility.CONNECTOR_1) - - // create Pid with other user, but user 1 is also authorized - succCreatePid(pid, owners, client = 2) - - // add three messages - val messages = listOf("This is the first message", "This is the second message", "This is the third message") - messages.forEach{ - succLogMessage(pid, it, c = 2) - } - - // Test: query existing Pid with user (who did not create pid, but is authorized) - val result = succQueryPid(pid) - Assert.assertEquals("Should receive array of size three!", 3, result.documents.size) - } - - @Test - fun queryPid4(){ - val pid = formatId("idscp-qpid4") - - // Test: query non-existing Pid - failQueryPid(pid, STATUS_404) - } - - @Test - fun queryPid5(){ - val pid = formatId("idscp-qpid5") - - // create Pid with other user - succCreatePid(pid, null, client = 2) - - // Test: query existing Pid with user (for which he is not authorized) - failQueryPid(pid, 
STATUS_403) - } - - @Test - fun queryPid6(){ - val pid = formatId("idscp-qpid6") - - // create Pid - succLogMessage(pid, "This is the log message!") - - // Test: query non-existing page results in empty array - val result = succQueryPid(pid, 2) - Assert.assertEquals("Should receive empty array!", 0, result.documents.size) - } - - companion object{ - - fun failQueryPid(pid: String, em: String) { - return failQueryId(pid, null, em) - } - - fun succQueryPid(pid: String, page: Int = 1, size: Int = 100, sort: String = "desc"): QueryResult { - val (resultMessage, resultPayload, resultHeaders) = Idscp2EndpointTest.queryMessage(pid, null, "", page=page, size=size, sort=sort) - // check IDS message type - Assert.assertTrue(resultMessage is ResultMessage) - // check the pid from receipt in the payload. Does pid match with the given pid? - return parseQueryResult(String(resultPayload!!)) - } - } - -} \ No newline at end of file diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/CreatePidTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/CreatePidTests.kt deleted file mode 100644 index a0e4c17f..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/CreatePidTests.kt +++ /dev/null @@ -1,154 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.multipart - -import de.fhg.aisec.ids.clearinghouse.OwnerList -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fhg.aisec.ids.clearinghouse.multipart.MultipartEndpointTest.Companion.client -import de.fhg.aisec.ids.clearinghouse.multipart.MultipartEndpointTest.Companion.otherClient -import de.fraunhofer.iais.eis.MessageProcessedNotificationMessage -import de.fraunhofer.iais.eis.RejectionMessage -import kotlinx.serialization.encodeToString -import kotlinx.serialization.json.Json -import okhttp3.MultipartReader -import okhttp3.Response -import org.junit.Assert -import org.junit.jupiter.api.Test - -class CreatePidTests { - - @Test - fun createPid1(){ - val pid = formatId("mp-pid1") - val owners = null - - // Test: createPid with no extra owners - succCreatePid(pid, owners) - } - - @Test - fun createPid2(){ - val pid = formatId("mp-pid2") - val owners = listOf(Utility.CONNECTOR_2) - - // Test: createPid with an extra owner - succCreatePid(pid, owners) - } - - @Test - fun createPid3(){ - val pid = formatId("mp-pid3") - val owners = listOf(Utility.CONNECTOR_1, Utility.CONNECTOR_2) - - // Test: createPid with duplicate self in owner list - succCreatePid(pid, owners) - } - - @Test - fun createPid4(){ - val pid = formatId("mp-pid4") - val owners = listOf(Utility.CONNECTOR_2, Utility.CONNECTOR_2) - - // Test: createPid with duplicate other owner in owner list - succCreatePid(pid, owners) - } - - @Test - fun createPid5(){ - val pid = formatId("mp-pid5") - val owners = null - - // Preparation: create PID - succCreatePid(pid, owners) - - // Test: Try to create existing PID (to which user has access) - failCreatePid(pid, owners, 400) - } - - @Test - fun createPid6(){ - val pid = formatId("mp-pid6") - val owners = null - - // Preparation: create PID - succCreatePid(pid, owners, client=2) - - // Test: Try to create existing PID (to which user has no access) - failCreatePid(pid, owners, 403) - } - - @Test - fun createPid7(){ - val pid = formatId("mp-pid7") - val owners = "{\"owners\": [\"${Utility.CONNECTOR_2}\",]}" - - // Test: createPid with invalid owner list - val call = 
client.newCall(MultipartClient.pidMessage(pid, owners)) - val response = call.execute() - - // check http status code - Assert.assertEquals("Unexpected http status code!", 400, response.code) - // check IDS message type - val parts = Utility.getParts(MultipartReader(response.body!!)) - Utility.checkIdsMessage(parts.first, RejectionMessage::class.java) - response.close() - } - - @Test - fun createPid8(){ - val pid = formatId("mp-pid8") - - // Test: Create Pid without matching aki:ski in certificate - failEarlyCreatePid(pid, null, 401) - } - - - companion object{ - - fun succCreatePid(pid: String, owners: List<String>?, client: Int = 1){ - val response = callCreatePid(pid, owners, client) - val parts = Utility.getParts(MultipartReader(response.body!!)) - // check http status code - Assert.assertEquals("Unexpected http status code!", 201, response.code) - // check IDS message type - Utility.checkIdsMessage(parts.first, MessageProcessedNotificationMessage::class.java) - // createPid returns the created PID, but in quotes - val createdPid = parts.second.substring(1, parts.second.length-1) - Assert.assertEquals("Returned PID does not match given PID!", pid, createdPid) - response.close() - } - - fun failCreatePid(pid: String, owners: List<String>?, code: Int){ - val response = callCreatePid(pid, owners) - val parts = Utility.getParts(MultipartReader(response.body!!)) - // check http status code - Assert.assertEquals("Unexpected http status code!", code, response.code) - // check IDS message type - Utility.checkIdsMessage(parts.first, RejectionMessage::class.java) - response.close() - } - - private fun callCreatePid(pid: String, owners: List<String>?, c: Int = 1): Response { - var list = "" - if (owners != null) { - list = Json.encodeToString(OwnerList(owners)) - } - val call = when (c) { - 1 -> client.newCall(MultipartClient.pidMessage(pid, list, client=c)) - else -> otherClient.newCall(MultipartClient.pidMessage(pid, list, client=c)) - } - return call.execute() - } - - fun failEarlyCreatePid(pid: String, owners: List<String>?, code: Int){ - var list = "" - if (owners != null) { - list = Json.encodeToString(OwnerList(owners)) - } - val call = client.newCall(MultipartClient.pidMessage(pid, list, client=2)) - val response = call.execute() - // check http status code and message - Assert.assertEquals("Unexpected http status code!", code, response.code) - Assert.assertEquals("Unexpected message", "Unauthorized", response.message) - } - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/LogMessageTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/LogMessageTests.kt deleted file mode 100644 index 6f0d8006..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/LogMessageTests.kt +++ /dev/null @@ -1,107 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.multipart - -import de.fhg.aisec.ids.clearinghouse.ChJwt -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fhg.aisec.ids.clearinghouse.multipart.CreatePidTests.Companion.succCreatePid -import de.fhg.aisec.ids.clearinghouse.multipart.MultipartEndpointTest.Companion.client -import de.fhg.aisec.ids.clearinghouse.multipart.MultipartEndpointTest.Companion.otherClient -import de.fraunhofer.iais.eis.MessageProcessedNotificationMessage -import de.fraunhofer.iais.eis.RejectionMessage -import okhttp3.MultipartReader -import org.junit.Assert -import org.junit.jupiter.api.Test - -class 
LogMessageTests { - @Test - fun logMessage1(){ - val pid = formatId("mp-log1") - val payload = "This message is logged" - - // create Pid - succCreatePid(pid, null) - - // test: Logging to existing Pid - succLogMessage(pid, payload) - } - - @Test - fun logMessage2(){ - val pid = formatId("mp-log2") - val payload = "This message is logged" - - // test: Logging to non-existing Pid - succLogMessage(pid, payload) - } - - @Test - fun logMessage3(){ - val pid = formatId("mp-log3") - val payload = "" - - // test: Logging an empty payload - failLogMessage(pid, payload, 400) - } - - @Test - fun logMessage4(){ - val pid = formatId("mp-log4") - val payload = "This message is logged" - - // create Pid - succCreatePid(pid, null, client=2) - - // test: Logging to existing Pid - failLogMessage(pid, payload, 403) - } - - @Test - fun logMessage5(){ - val pid = formatId("mp-log5") - val payload = "This message is logged" - - // Test: Logging without matching aki:ski in certificate - failEarlyLogMessage(pid, payload, 401) - } - - companion object{ - - fun failEarlyLogMessage(pid: String, payload: String, code: Int){ - val call = client.newCall(MultipartClient.logMessage(pid, payload, client=2)) - val response = call.execute() - // check http status code and message - Assert.assertEquals("Unexpected http status code!", code, response.code) - Assert.assertEquals("Unexpected message", "Unauthorized", response.message) - } - - fun failLogMessage(pid: String, payload: String, code: Int){ - val call = client.newCall(MultipartClient.logMessage(pid, payload)) - val response = call.execute() - // check http status code - Assert.assertEquals("Unexpected http status code!", code, response.code) - // check IDS message type - val parts = Utility.getParts(MultipartReader(response.body!!)) - Utility.checkIdsMessage(parts.first, RejectionMessage::class.java) - } - - fun succLogMessage(pid: String, payload: String, c: Int = 1): ChJwt { - val call = when (c) { - 1 -> client.newCall(MultipartClient.logMessage(pid, payload, client=c)) - else -> otherClient.newCall(MultipartClient.logMessage(pid, payload, client=c)) - } - val response = call.execute() - // check http status code - Assert.assertEquals("Unexpected http status code!", 201, response.code) - // check IDS message type - val parts = Utility.getParts(MultipartReader(response.body!!)) - Utility.checkIdsMessage(parts.first, MessageProcessedNotificationMessage::class.java) - // check the pid from receipt in the payload. Does pid match with the given pid? 
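- // Utility.parseJwt only base64-decodes the claims segment of the receipt JWT (no signature verification), which is enough to compare the logged process_id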
- val receipt = Utility.parseJwt(parts.second) - Assert.assertEquals("Returned PID does not match given PID!", pid, receipt.process_id) - response.close() - return receipt - } - - } - -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/MultipartClient.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/MultipartClient.kt deleted file mode 100644 index f98a2ed4..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/MultipartClient.kt +++ /dev/null @@ -1,80 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.multipart - -import de.fhg.aisec.ids.clearinghouse.MessageType -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.ids.jsonld.Serializer -import okhttp3.Headers -import okhttp3.MediaType.Companion.toMediaTypeOrNull -import okhttp3.MultipartBody -import okhttp3.Request -import okhttp3.RequestBody.Companion.toRequestBody - -class MultipartClient { - - companion object{ - private val SERIALIZER = Serializer() - private var JSON = "application/json; charset=utf-8".toMediaTypeOrNull()!! - - private val BASE_URL = "https://tc-core-server:9999/" - private val LOG_URL = "messages/log/" - private val QUERY_URL = "messages/query/" - private val PROCESS_URL = "process/" - - private fun makePart(name: String, payload: String, ctJson: Boolean): MultipartBody.Part{ - var headers = Headers.Builder().add("Content-Disposition", "form-data; name=\"$name\"") - val body = if (ctJson){ - payload.toRequestBody(JSON) - } - else{ - payload.toRequestBody() - } - - return MultipartBody.Part.create(headers.build(), body) - } - - private fun makeRequest(url: String, m: Message, payload: String, ctJson: Boolean): Request{ - val requestBody = MultipartBody.Builder() - .setType(MultipartBody.ALTERNATIVE) - .addPart(makePart("header", SERIALIZER.serialize(m), ctJson)) - .addPart(makePart("payload", payload, ctJson)) - .build() - - return Request.Builder() - .header("Authorization", "Bearer " + m.securityToken) - .url(url) - .post(requestBody) - .build() - } - - fun logMessage(pid: String, payload: String, authenticated: Boolean = true, client: Int = 1): Request{ - val m = if (authenticated){ - MultipartEndpointTest.getMessage(MessageType.LOG, client) - } else{ - MultipartEndpointTest.getInvalidMessage(MessageType.LOG) - } - val url = "$BASE_URL$LOG_URL$pid" - return makeRequest(url, m, payload, false) - } - - fun queryMessage(pid: String, id: String?, payload: String, authenticated: Boolean = true, client: Int = 1, page: Int = 1, size: Int = 100, sort: String = "desc"): Request{ - val m = if (authenticated){ - MultipartEndpointTest.getMessage(MessageType.QUERY, client) - } else{ - MultipartEndpointTest.getInvalidMessage(MessageType.QUERY) - } - val url = if (id == null) "$BASE_URL$QUERY_URL$pid?page=$page&size=$size&sort=$sort" else "$BASE_URL$QUERY_URL$pid/$id" - return makeRequest(url, m, payload, false) - } - - fun pidMessage(pid: String, payload: String, ctJson: Boolean = true, authenticated: Boolean = true, client: Int = 1): Request{ - val m = if (authenticated){ - MultipartEndpointTest.getMessage(MessageType.PID, client) - } else{ - MultipartEndpointTest.getInvalidMessage(MessageType.PID) - } - val url = "$BASE_URL$PROCESS_URL$pid" - return makeRequest(url, m, payload, ctJson) - } - - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/MultipartEndpointTest.kt 
b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/MultipartEndpointTest.kt deleted file mode 100644 index 2e7b895c..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/MultipartEndpointTest.kt +++ /dev/null @@ -1,63 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.multipart - -import de.fhg.aisec.ids.clearinghouse.MessageType -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.idscp2.keystores.PreConfiguration -import de.fraunhofer.iais.eis.DynamicAttributeTokenBuilder -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.TokenFormat -import okhttp3.OkHttpClient -import javax.net.ssl.SSLContext - -class MultipartEndpointTest { - - companion object { - private val trustManager = PreConfiguration.getX509ExtTrustManager( - Utility.trustStorePath, - "password".toCharArray() - ) - - private val keyManagers = PreConfiguration.getX509ExtKeyManager( - "password".toCharArray(), - Utility.keyStorePath, - "password".toCharArray(), - ) - - private val keyManagersOtherClient = PreConfiguration.getX509ExtKeyManager( - "password".toCharArray(), - Utility.keyStorePathOtherClient, - "password".toCharArray(), - ) - - private val sslContext = SSLContext.getInstance("TLS").apply { - init(keyManagers, arrayOf(trustManager), null) - } - - private val sslContextOtherClient = SSLContext.getInstance("TLS").apply { - init(keyManagersOtherClient, arrayOf(trustManager), null) - } - - val client = OkHttpClient.Builder() - .sslSocketFactory(sslContext.socketFactory, trustManager) - .build() - - val otherClient = OkHttpClient.Builder() - .sslSocketFactory(sslContextOtherClient.socketFactory, trustManager) - .build() - - fun getMessage(type: MessageType, client: Int = 1): Message { - return when (client) { - 2 -> Utility.getMessage(type, Utility.getDapsToken(Utility.dapsDriverOtherClient.token)) - else -> Utility.getMessage(type, Utility.getDapsToken()) - } - } - - fun getInvalidMessage(type: MessageType): Message { - val invToken = DynamicAttributeTokenBuilder() - ._tokenFormat_(TokenFormat.JWT) - ._tokenValue_("This is not a valid token!") - .build() - return Utility.getMessage(type, invToken) - } - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/QueryIdTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/QueryIdTests.kt deleted file mode 100644 index 816247d0..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/QueryIdTests.kt +++ /dev/null @@ -1,100 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.multipart - -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fhg.aisec.ids.clearinghouse.multipart.CreatePidTests.Companion.succCreatePid -import de.fhg.aisec.ids.clearinghouse.multipart.LogMessageTests.Companion.succLogMessage -import de.fhg.aisec.ids.clearinghouse.multipart.MultipartEndpointTest.Companion.client -import de.fraunhofer.iais.eis.RejectionMessage -import de.fraunhofer.iais.eis.ResultMessage -import okhttp3.MultipartReader -import org.junit.Assert -import org.junit.jupiter.api.Test -import java.net.ProtocolException - -class QueryIdTests { - - @Test - fun queryId1(){ - val pid = formatId("mp-qid1") - - // create Pid with one document - val message = "This is the first message" - val receipt = succLogMessage(pid, message) - - // Test: query existing document - succQueryId(pid, 
receipt.document_id) - } - - @Test - fun queryId2(){ - val pid = formatId("mp-qid2") - - // create Pid with one document - val message = "This is the first message" - succLogMessage(pid, message) - - // Test: query non-existing document - failQueryId(pid, "unknown-id", 404) - } - - @Test - fun queryId3(){ - val pid1 = formatId("mp-qid2_with_doc") - val pid2 = formatId("mp-qid2_without_doc") - - // create one Pid with one document and another with no documents - val message = "This is the first message" - val receipt = succLogMessage(pid1, message) - succCreatePid(pid2, null) - - // Test: query existing document in wrong pid - failQueryId(pid2, receipt.document_id, 404) - } - - @Test - fun queryId4(){ - val pid = formatId("mp-qid4") - - // create Pid with one document - val message = "This is the first message" - val receipt = succLogMessage(pid, message) - - // Test: query existing document without matching aki:ski in certificate - failEarlyQueryPid(pid, receipt.document_id, 401) - } - - companion object{ - fun failEarlyQueryPid(pid: String, id: String, code: Int){ - val call = client.newCall(MultipartClient.queryMessage(pid, id, "", client=2)) - val response = call.execute() - // check http status code and message - Assert.assertEquals("Unexpected http status code!", code, response.code) - Assert.assertEquals("Unexpected message", "Unauthorized", response.message) - } - - fun failQueryId(pid: String, id: String, code: Int){ - val call = client.newCall(MultipartClient.queryMessage(pid, id, "")) - val response = call.execute() - // check http status code - Assert.assertEquals("Unexpected http status code!", code, response.code) - // check IDS message type - val parts = Utility.getParts(MultipartReader(response.body!!)) - Utility.checkIdsMessage(parts.first, RejectionMessage::class.java) - response.close() - } - - fun succQueryId(pid: String, id: String): String{ - val call = client.newCall(MultipartClient.queryMessage(pid, id, "")) - val response = call.execute() - // check http status code - Assert.assertEquals("Unexpected http status code!", 200, response.code) - // check IDS message type - val parts = Utility.getParts(MultipartReader(response.body!!)) - Utility.checkIdsMessage(parts.first, ResultMessage::class.java) - //TODO: can't serialize. 
json array is of type "message + payload + payload type" - response.close() - return parts.second - } - } -} diff --git a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/QueryPidTests.kt b/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/QueryPidTests.kt deleted file mode 100644 index 1b474997..00000000 --- a/clearing-house-processors/src/intTest/java/de/fhg/aisec/ids/clearinghouse/multipart/QueryPidTests.kt +++ /dev/null @@ -1,141 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse.multipart - -import de.fhg.aisec.ids.clearinghouse.QueryResult -import de.fhg.aisec.ids.clearinghouse.Utility -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.CONNECTOR_1 -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.formatId -import de.fhg.aisec.ids.clearinghouse.Utility.Companion.parseQueryResult -import de.fhg.aisec.ids.clearinghouse.multipart.CreatePidTests.Companion.succCreatePid -import de.fhg.aisec.ids.clearinghouse.multipart.LogMessageTests.Companion.succLogMessage -import de.fhg.aisec.ids.clearinghouse.multipart.MultipartEndpointTest.Companion.client -import de.fraunhofer.iais.eis.RejectionMessage -import de.fraunhofer.iais.eis.ResultMessage -import okhttp3.MultipartReader -import org.junit.Assert -import org.junit.jupiter.api.Test -import java.net.ProtocolException - -class QueryPidTests { - @Test - fun queryPid1(){ - val pid = formatId("mp-qpid1") - - // create Pid - succCreatePid(pid, null) - - // Test: query existing Pid with no documents - val result = succQueryPid(pid) - Assert.assertEquals("Should receive empty array!", 0, result.documents.size) - } - - @Test - fun queryPid2(){ - val pid = formatId("mp-qpid2") - - // create Pid with three messages - val messages = listOf("This is the first message", "This is the second message", "This is the third message") - messages.forEach{ - succLogMessage(pid, it) - } - - // Test: query existing Pid with three documents - val result = succQueryPid(pid) - Assert.assertEquals("Should receive array of size three!", 3, result.documents.size) - } - - @Test - fun queryPid3(){ - val pid = formatId("mp-qpid3") - val owners = listOf(CONNECTOR_1) - - // create Pid with other user, but user 1 is also authorized - succCreatePid(pid, owners, client=2) - - // add three messages - val messages = listOf("This is the first message", "This is the second message", "This is the third message") - messages.forEach{ - succLogMessage(pid, it, c=2) - } - - // Test: query existing Pid with user (who did not create pid, but is authorized) - val result = succQueryPid(pid) - Assert.assertEquals("Should receive array of size three!", 3, result.documents.size) - } - - @Test - fun queryPid4(){ - val pid = formatId("mp-qpid4") - - // Test: query non-existing Pid - failQueryPid(pid, 404) - } - - @Test - fun queryPid5(){ - val pid = formatId("mp-qpid5") - - // create Pid with other user - succCreatePid(pid, null, client=2) - - // Test: query existing Pid with user (for which he is not authorized) - failQueryPid(pid, 403) - } - - @Test - fun queryPid6(){ - val pid = formatId("mp-qpid6") - - // create Pid - succLogMessage(pid, "This is the log message!") - - // Test: query non-existing page results in empty array - val result = succQueryPid(pid, 2) - Assert.assertEquals("Should receive empty array!", 0, result.documents.size) - } - - @Test - fun queryPid7(){ - val pid = formatId("mp-qpid7") - - // create Pid - succLogMessage(pid, "This is the log message!") - - // Test: Query pid without matching aki:ski in certificate - 
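// "early" here means the request is rejected with a plain 401 during authentication, before any multipart IDS response is built -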
failEarlyQueryPid(pid, 401) - } - - companion object{ - - fun failEarlyQueryPid(pid: String, code: Int){ - val call = client.newCall(MultipartClient.queryMessage(pid, null, "", client=2)) - val response = call.execute() - // check http status code and message - Assert.assertEquals("Unexpected http status code!", code, response.code) - Assert.assertEquals("Unexpected message", "Unauthorized", response.message) - } - - fun failQueryPid(pid: String, code: Int){ - val call = client.newCall(MultipartClient.queryMessage(pid, null, "")) - val response = call.execute() - // check http status code - Assert.assertEquals("Unexpected http status code!", code, response.code) - // check IDS message type - val parts = Utility.getParts(MultipartReader(response.body!!)) - Utility.checkIdsMessage(parts.first, RejectionMessage::class.java) - response.close() - } - - fun succQueryPid(pid: String, page: Int = 1, size: Int = 100, sort: String = "desc"): QueryResult{ - val call = client.newCall(MultipartClient.queryMessage(pid, null, "", page=page, size=size, sort=sort)) - val response = call.execute() - // check http status code - Assert.assertEquals("Unexpected http status code!", 200, response.code) - // check IDS message type - val parts = Utility.getParts(MultipartReader(response.body!!)) - Utility.checkIdsMessage(parts.first, ResultMessage::class.java) - val result = parseQueryResult(parts.second) - response.close() - return result - } - } -} diff --git a/clearing-house-processors/src/intTest/resources/simplelogger.properties b/clearing-house-processors/src/intTest/resources/simplelogger.properties deleted file mode 100644 index eafa2b0f..00000000 --- a/clearing-house-processors/src/intTest/resources/simplelogger.properties +++ /dev/null @@ -1 +0,0 @@ -org.slf4j.simpleLogger.defaultLogLevel=debug \ No newline at end of file diff --git a/clearing-house-processors/src/intTest/resources/ssl/client-keystore.p12 b/clearing-house-processors/src/intTest/resources/ssl/client-keystore.p12 deleted file mode 100644 index 3a2ac465..00000000 Binary files a/clearing-house-processors/src/intTest/resources/ssl/client-keystore.p12 and /dev/null differ diff --git a/clearing-house-processors/src/intTest/resources/ssl/consumer-keystore.p12 b/clearing-house-processors/src/intTest/resources/ssl/consumer-keystore.p12 deleted file mode 100644 index f6c7368b..00000000 Binary files a/clearing-house-processors/src/intTest/resources/ssl/consumer-keystore.p12 and /dev/null differ diff --git a/clearing-house-processors/src/intTest/resources/ssl/provider-keystore.p12 b/clearing-house-processors/src/intTest/resources/ssl/provider-keystore.p12 deleted file mode 100644 index 7c45bd30..00000000 Binary files a/clearing-house-processors/src/intTest/resources/ssl/provider-keystore.p12 and /dev/null differ diff --git a/clearing-house-processors/src/intTest/resources/ssl/server-keystore.p12 b/clearing-house-processors/src/intTest/resources/ssl/server-keystore.p12 deleted file mode 100644 index 6c361eff..00000000 Binary files a/clearing-house-processors/src/intTest/resources/ssl/server-keystore.p12 and /dev/null differ diff --git a/clearing-house-processors/src/intTest/resources/ssl/truststore.p12 b/clearing-house-processors/src/intTest/resources/ssl/truststore.p12 deleted file mode 100644 index 80d7234a..00000000 Binary files a/clearing-house-processors/src/intTest/resources/ssl/truststore.p12 and /dev/null differ diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ChTrustManager.java 
b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ChTrustManager.java deleted file mode 100644 index 5d024e53..00000000 --- a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ChTrustManager.java +++ /dev/null @@ -1,56 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse; - -import javax.net.ssl.*; -import java.net.Socket; -import java.security.*; -import java.security.cert.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ChTrustManager extends X509ExtendedTrustManager { - - final static Logger LOG = LoggerFactory.getLogger(ChTrustManager.class); - - ChTrustManager() throws Exception { - LOG.info("received no keystore"); - } - - ChTrustManager(KeyStore keystore) throws Exception { - LOG.info("received keystore: {}", keystore.aliases()); - } - - @Override - public void checkClientTrusted(X509Certificate[] chain, String authType, Socket socket) throws CertificateException { - } - - @Override - public void checkServerTrusted(X509Certificate[] chain, String authType, Socket socket) throws CertificateException { - - } - - @Override - public void checkClientTrusted(X509Certificate[] chain, String authType, SSLEngine engine) throws CertificateException { - - } - - @Override - public void checkServerTrusted(X509Certificate[] chain, String authType, SSLEngine engine) throws CertificateException { - - } - - @Override - public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { - - } - - @Override - public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { - - } - - @Override - public X509Certificate[] getAcceptedIssuers() { - return new X509Certificate[0]; - } -} diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseConstants.java b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseConstants.java deleted file mode 100644 index 6d28e874..00000000 --- a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseConstants.java +++ /dev/null @@ -1,48 +0,0 @@ -/*- - * ========================LICENSE_START================================= - * camel-multipart-processor - * %% - * Copyright (C) 2019 Fraunhofer AISEC - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * =========================LICENSE_END================================== - */ -package de.fhg.aisec.ids.clearinghouse; - -final class ClearingHouseConstants { - - private ClearingHouseConstants() {} - - static final String MULTIPART_HEADER = "header"; - static final String MULTIPART_PAYLOAD = "payload"; - static final String IDS_HEADER = "ids-header"; - static final String CAMEL_HTTP_STATUS_CODE_HEADER = "CamelHttpResponseCode"; - static final String CAMEL_HTTP_PATH = "CamelHttpPath"; - static final String IDS_PROTOCOL = "ids-protocol"; - static final String PROTO_IDSCP2 = "idscp2"; - static final String PROTO_MULTIPART = "idsMultipart"; - static final String PID_HEADER = "pid"; - static final String IDSCP_PID_HEADER = "ch-ids-pid"; - static final String IDSCP_ID_HEADER = "ch-ids-id"; - static final String IDSCP_PAGE_HEADER = "ch-ids-page"; - static final String IDSCP_SIZE_HEADER = "ch-ids-size"; - static final String IDSCP_SORT_HEADER = "ch-ids-sort"; - static final String TYPE_HEADER = "Content-Type"; - static final String SERVICE_HEADER = "CH-SERVICE"; - static final String SERVICE_CLAIM = "client_id"; - static final String IDSCP2_IDS_HEADER = "idscp2-header"; - static final String AUTH_HEADER = "Authorization"; - static final String TYPE_JSON = "application/json"; - static final String SERVER = "Server"; - static final String BEARER = "Bearer "; -} diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseExceptionProcessor.kt b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseExceptionProcessor.kt deleted file mode 100644 index 3ba68f6c..00000000 --- a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseExceptionProcessor.kt +++ /dev/null @@ -1,75 +0,0 @@ -/*- - * ========================LICENSE_START================================= - * camel-multipart-processor - * %% - * Copyright (C) 2019 Fraunhofer AISEC - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * =========================LICENSE_END================================== - */ -package de.fhg.aisec.ids.clearinghouse - -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDSCP_ID_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDSCP_PID_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDS_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDS_PROTOCOL -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.PROTO_IDSCP2 -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.TYPE_HEADER -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.RejectionMessageBuilder -import org.apache.camel.Exchange -import org.apache.camel.Processor -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import org.springframework.stereotype.Component - -@Component("chExceptionProcessor") -class ClearingHouseExceptionProcessor : Processor { - override fun process(exchange: Exchange) { - val egetIn = exchange.getIn() - val headers = egetIn.headers - - if (LOG.isTraceEnabled) { - LOG.trace("[ERR] ${ClearingHouseExceptionProcessor::class.java.simpleName}") - for (header in headers.keys) { - LOG.trace("Found header '{}':'{}'", header, headers[header]) - } - } - - val originalRequest = exchange.message.getHeader(IDS_HEADER) as Message - - val message = RejectionMessageBuilder() - ._correlationMessage_(originalRequest.id) - ._recipientAgent_(listOf(originalRequest.senderAgent)) - ._recipientConnector_(listOf(originalRequest.issuerConnector)) - - val caused = exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Throwable::class.java) - - exchange.getIn().body = caused.message - - // set the IDS header - when (headers[IDS_PROTOCOL] as String) { - PROTO_IDSCP2 -> egetIn.setHeader(IDS_HEADER, message) - } - - // clean up headers - egetIn.removeHeader(IDS_PROTOCOL) - egetIn.removeHeader(IDSCP_ID_HEADER) - egetIn.removeHeader(IDSCP_PID_HEADER) - egetIn.removeHeader(TYPE_HEADER) - } - - companion object { - private val LOG: Logger = LoggerFactory.getLogger(ClearingHouseExceptionProcessor::class.java) - } -} diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseInputValidationProcessor.kt b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseInputValidationProcessor.kt deleted file mode 100644 index 38230e42..00000000 --- a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseInputValidationProcessor.kt +++ /dev/null @@ -1,116 +0,0 @@ -/*- - * ========================LICENSE_START================================= - * camel-multipart-processor - * %% - * Copyright (C) 2019 Fraunhofer AISEC - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * =========================LICENSE_END================================== - */ -package de.fhg.aisec.ids.clearinghouse - -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.CAMEL_HTTP_PATH -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDSCP_ID_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDSCP_PAGE_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDSCP_PID_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDSCP_SIZE_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDSCP_SORT_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDS_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDS_PROTOCOL -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.PROTO_IDSCP2 -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.TYPE_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.TYPE_JSON -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.QueryMessage -import de.fraunhofer.iais.eis.RequestMessage -import org.apache.camel.Exchange -import org.apache.camel.Processor -import org.apache.http.entity.ContentType -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import org.springframework.stereotype.Component - -@Component("chInputValidationProcessor") -class ClearingHouseInputValidationProcessor : Processor { - override fun process(exchange: Exchange) { - val egetIn = exchange.getIn() - val headers = egetIn.headers - val body = exchange.message.getBody(ByteArray::class.java) - - if (LOG.isTraceEnabled) { - LOG.trace("[IN] ${ClearingHouseInputValidationProcessor::class.java.simpleName}") - for (header in headers.keys) { - LOG.trace("Found header '{}':'{}'", header, headers[header]) - } - } - - // Prepare compound message for Clearing House Service API - val idsHeader = exchange.message.getHeader(IDS_HEADER) as Message - val contentTypeHeader = (headers[TYPE_HEADER] as String?) 
- val chMessage = ClearingHouseMessage(idsHeader, contentTypeHeader, body) - - LOG.info("idsmessage: {}", idsHeader.id) - - // Input validation: check that payload type of create pid message is application/json - if (chMessage.header is RequestMessage && idsHeader !is QueryMessage) { - val expectedContentType = ContentType.create("application/json") - if (expectedContentType.mimeType != chMessage.payloadType) { - LOG.warn("Expected application/json, got {}", chMessage.payloadType) - throw IllegalArgumentException("Expected content-type application/json") - } - } - - // Input validation: construct url from headers for IDSCP2 - if (headers[IDS_PROTOCOL] == PROTO_IDSCP2) { - if (chMessage.header is QueryMessage) { - val queryPath = if (headers.contains(IDSCP_ID_HEADER)) { - (headers[CAMEL_HTTP_PATH] as String) + "/" + (headers[IDSCP_ID_HEADER] as String) - } else { - var paginationPath = if (headers.contains(IDSCP_PAGE_HEADER)) { - (headers[CAMEL_HTTP_PATH] as String) + "?page=" + exchange.message.getHeader(IDSCP_PAGE_HEADER) - } else { - (headers[CAMEL_HTTP_PATH] as String) + "?page=1" - } - - if (headers.contains(IDSCP_SIZE_HEADER)) { - paginationPath = paginationPath + "&size=" + exchange.message.getHeader(IDSCP_SIZE_HEADER) - } - if (headers.contains(IDSCP_SORT_HEADER)) { - paginationPath = "$paginationPath?sort=desc" - } - paginationPath - } - exchange.getIn().setHeader(CAMEL_HTTP_PATH, queryPath) - } - } - - if (LOG.isTraceEnabled) { - LOG.trace("Received payload: {}", chMessage.payload) - } - - // store ids header for response processor and clean up idscp2 specific header - exchange.getIn().removeHeader(IDSCP_ID_HEADER) - exchange.getIn().removeHeader(IDSCP_PID_HEADER) - - // Remove current Content-Type header before setting the new one - exchange.getIn().removeHeader(TYPE_HEADER) - - // Copy Content-Type from payload part populate body with new payload - exchange.getIn().setHeader(TYPE_HEADER, TYPE_JSON) - exchange.getIn().body = chMessage.toJson() - } - - companion object { - private val LOG: Logger = LoggerFactory.getLogger(ClearingHouseInputValidationProcessor::class.java) - } -} diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseMessage.kt b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseMessage.kt deleted file mode 100644 index 27ab517d..00000000 --- a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseMessage.kt +++ /dev/null @@ -1,62 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse - -import com.fasterxml.jackson.annotation.JsonInclude -import com.fasterxml.jackson.databind.ObjectMapper -import de.fraunhofer.iais.eis.Message -import org.slf4j.LoggerFactory -import java.nio.charset.Charset -import javax.xml.bind.DatatypeConverter - -class ClearingHouseMessage (var header: Message? = null, var payloadType: String? = null, var payload: String? 
= null){ - private var charset: String = Charset.defaultCharset().toString() - - fun toJson(): String { - val objectMapper = ObjectMapper() - objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) - return objectMapper.writeValueAsString(this) - } - - constructor(idsHeader: Message, contentTypeHeader: String?, payload: ByteArray) : this() { - this.header = idsHeader - parseContentType(contentTypeHeader) - when (this.payloadType){ - "text/plain", "application/json", "application/ld+json" -> { - this.payload = String(payload, Charset.forName(charset)) - } - else -> { - this.payloadType = "application/octet-stream" - this.payload = DatatypeConverter.printBase64Binary(payload) - } - } - } - - private fun parseContentType(contentTypeHeader: String?) { - // Parsing Content-Type and Charset - if (contentTypeHeader != null) { - val parts = contentTypeHeader.split(";") - when (parts.size){ - 1 -> { - this.payloadType = parts[0] - } - 2 -> { - this.payloadType = parts[0] - val charsetInput = parts[1].split("=") - if (charsetInput.size == 2){ - this.charset = charsetInput[1] - LOG.debug("Using Charset from Content-Type header: {}", charset) - } - } - else -> { - this.payloadType = "text/plain" - } - } - } - else{ - this.payloadType = "application/octet-stream" - } - } - - companion object { - private val LOG = LoggerFactory.getLogger(ClearingHouseMessage::class.java) - } -} diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseOutputProcessor.kt b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseOutputProcessor.kt deleted file mode 100644 index 9375a35f..00000000 --- a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/ClearingHouseOutputProcessor.kt +++ /dev/null @@ -1,108 +0,0 @@ -/*- - * ========================LICENSE_START================================= - * camel-multipart-processor - * %% - * Copyright (C) 2019 Fraunhofer AISEC - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * =========================LICENSE_END================================== - */ -package de.fhg.aisec.ids.clearinghouse - -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.CAMEL_HTTP_STATUS_CODE_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDS_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDS_PROTOCOL -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.PROTO_IDSCP2 -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.PROTO_MULTIPART -import de.fraunhofer.iais.eis.Message -import de.fraunhofer.iais.eis.MessageProcessedNotificationMessageBuilder -import de.fraunhofer.iais.eis.RejectionMessageBuilder -import de.fraunhofer.iais.eis.ResultMessageBuilder -import org.apache.camel.Exchange -import org.apache.camel.Processor -import org.slf4j.LoggerFactory -import org.springframework.stereotype.Component - -@Component("chOutputProcessor") -class ClearingHouseOutputProcessor : Processor { - - override fun process(exchange: Exchange) { - val egetIn = exchange.getIn() - val headers = egetIn.headers - if (LOG.isTraceEnabled) { - LOG.trace("[IN] ${ClearingHouseOutputProcessor::class.java.simpleName}") - for (header in headers.keys) { - LOG.trace("Found header '{}':'{}'", header, headers[header]) - } - } - - // If this property is null, the routes are not defined correctly! - val originalRequest = exchange.message.getHeader(IDS_HEADER) as Message - - val statusCode = (headers[CAMEL_HTTP_STATUS_CODE_HEADER] as Int?)!!.toInt() - // creating IDS header for the response - val responseMessage = when (statusCode) { - 200 -> ResultMessageBuilder() - ._correlationMessage_(originalRequest.id) - ._recipientAgent_(listOf(originalRequest.senderAgent)) - ._recipientConnector_(listOf(originalRequest.issuerConnector)) - 201 -> MessageProcessedNotificationMessageBuilder() - ._correlationMessage_(originalRequest.id) - ._recipientAgent_(listOf(originalRequest.senderAgent)) - ._recipientConnector_(listOf(originalRequest.issuerConnector)) - else -> RejectionMessageBuilder() - ._correlationMessage_(originalRequest.id) - ._recipientAgent_(listOf(originalRequest.senderAgent)) - ._recipientConnector_(listOf(originalRequest.issuerConnector)) - } - - // set the IDS header - egetIn.setHeader(IDS_HEADER, responseMessage) - - // idscp2 set status code - when (headers[IDS_PROTOCOL] as String){ - PROTO_IDSCP2 -> { - when(statusCode){ - 400 -> egetIn.body = "Bad Request" - 401 -> egetIn.body = "Unauthorized" - 403 -> egetIn.body = "Forbidden" - 404 -> egetIn.body = "Not Found" - 500 -> egetIn.body = "Internal Server Error" - } - } - PROTO_MULTIPART -> { - when(statusCode){ - 200, 201 -> - if (LOG.isTraceEnabled) { - LOG.trace("[OUT] ${ClearingHouseOutputProcessor::class.java.simpleName}") - LOG.trace("Message successfully processed.") - } - else -> { - egetIn.body = "" - } - } - } - } - - // Clean up the headers - egetIn.removeHeader(ClearingHouseConstants.AUTH_HEADER) - egetIn.removeHeader(ClearingHouseConstants.PID_HEADER) - egetIn.removeHeader(ClearingHouseConstants.SERVER) - egetIn.removeHeader(ClearingHouseConstants.TYPE_HEADER) - egetIn.removeHeader(IDS_PROTOCOL) - } - - companion object { - private val LOG = LoggerFactory.getLogger(ClearingHouseOutputProcessor::class.java) - } -} diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/Configuration.kt b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/Configuration.kt deleted file mode 100644 index 54d3d059..00000000 --- 
a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/Configuration.kt +++ /dev/null @@ -1,28 +0,0 @@ -package de.fhg.aisec.ids.clearinghouse - -import org.slf4j.LoggerFactory - -internal object Configuration { - private val LOG = LoggerFactory.getLogger(Configuration::class.java) - private const val LOGGING_SERVICE_ID = "SERVICE_ID_LOG" - private const val TC_SERVICE_ID = "SERVICE_ID_TC" - private const val SERVICE_SHARED_SECRET = "SERVICE_SHARED_SECRET" - - val serviceIdTc: String - get() = getEnvVariable(TC_SERVICE_ID) - val serviceIdLog: String - get() = getEnvVariable(LOGGING_SERVICE_ID) - val serviceSecret: String - get() = getEnvVariable(SERVICE_SHARED_SECRET) - - - private fun getEnvVariable(envVariable: String): String { - val value = System.getenv(envVariable) - return if (value == null) { - LOG.error("Configuration invalid: Missing {}", envVariable) - "" - } else { - value - } - } -} diff --git a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/SharedSecretProcessor.kt b/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/SharedSecretProcessor.kt deleted file mode 100644 index b6d8c372..00000000 --- a/clearing-house-processors/src/main/java/de/fhg/aisec/ids/clearinghouse/SharedSecretProcessor.kt +++ /dev/null @@ -1,72 +0,0 @@ -/*- - * ========================LICENSE_START================================= - * camel-multipart-processor - * %% - * Copyright (C) 2019 Fraunhofer AISEC - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * =========================LICENSE_END================================== - */ -package de.fhg.aisec.ids.clearinghouse - -import com.auth0.jwt.JWT -import com.auth0.jwt.algorithms.Algorithm -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.IDS_HEADER -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.SERVICE_CLAIM -import de.fhg.aisec.ids.clearinghouse.ClearingHouseConstants.SERVICE_HEADER -import de.fraunhofer.iais.eis.Message -import org.apache.camel.Exchange -import org.apache.camel.Processor -import org.slf4j.Logger -import org.slf4j.LoggerFactory -import org.springframework.stereotype.Component -import java.util.Date - -/** - * This processor validates the JWT token in the IDS header - */ -@Component("chSharedSecretProcessor") -class SharedSecretProcessor : Processor { - override fun process(exchange: Exchange) { - val eIn = exchange.getIn() - val headers = eIn.headers - -// if (LOG.isDebugEnabled) { - LOG.debug("[IN] ${SharedSecretProcessor::class.java.simpleName}") - for (header in headers.keys) { - LOG.debug("Found header '{}':'{}'", header, headers[header]) - } -// } - - val idsHeader = exchange.message.getHeader(IDS_HEADER) as Message - //val idsHeader = exchange.getProperty(IDS_HEADER, Message::class.java) - // ?: throw RuntimeException("No IDS header provided!") - val dat = idsHeader.securityToken?.tokenValue ?: throw RuntimeException("No DAT provided!") - - val decodedDat = JWT.decode(dat) - val claimedClientId = decodedDat.subject - val now = System.currentTimeMillis() - val serviceToken = JWT.create() - .withAudience(Configuration.serviceIdLog) - .withIssuer(Configuration.serviceIdTc) - .withClaim(SERVICE_CLAIM, claimedClientId) - .withIssuedAt(Date(now)) - .withExpiresAt(Date(now + 60000)) - .sign(Algorithm.HMAC256(Configuration.serviceSecret)) - exchange.getIn().setHeader(SERVICE_HEADER, serviceToken) - } - - companion object { - val LOG: Logger = LoggerFactory.getLogger(SharedSecretProcessor::class.java) - } -} diff --git a/clearing-house-processors/src/routes/clearing-house-routes.xml b/clearing-house-processors/src/routes/clearing-house-routes.xml deleted file mode 100644 index a26975db..00000000 --- a/clearing-house-processors/src/routes/clearing-house-routes.xml +++ /dev/null @@ -1,170 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - java.io.IOException - java.lang.SecurityException - java.lang.IllegalArgumentException - - true - - ${exception.message} - - - - - - - - - ${exception.class} == 'java.lang.SecurityException' || ${exception.class} == 'java.lang.IllegalArgumentException' - 401 - - - ${exception.class} == 'java.io.IOException' || ${exception.class} == 'java.lang.RuntimeException' - 400 - - - 500 - Internal Server Error - - - - - idsMultipart - - - - - - - - - - java.io.IOException - java.lang.RuntimeException - java.lang.SecurityException - java.lang.IllegalArgumentException - - true - - - - - - - - - - idscp2 - - - ${exchangeProperty.ids-type} == 'RequestMessage' - - POST - /process/${headers.ch-ids-pid} - - - - - - - ${exchangeProperty.ids-type} == 'QueryMessage' - - POST - /messages/query/${headers.ch-ids-pid} - - - - - - - ${exchangeProperty.ids-type} == 'LogMessage' - - POST - /messages/log/${headers.ch-ids-pid} - - - - - - - - ${null} - - - - - - - - diff --git a/clearing-house-processors/version b/clearing-house-processors/version deleted file mode 100644 index e69de29b..00000000 diff --git a/docker/application.yml 
b/docker/application.yml deleted file mode 100644 index 786732c1..00000000 --- a/docker/application.yml +++ /dev/null @@ -1,14 +0,0 @@ -logging: - level: - root: info - -ids-multipart: - daps-bean-name: rootDaps - -connector: - # Used as default for IDSCP2 DAPS instances which have not been explicitly configured. - daps-url: ${TC_DAPS_URL:} - # Used for IDS Messages issuerConnector field. - connector-url: ${TC_CH_ISSUER_CONNECTOR:} - # Used for IDS Messages senderAgent field. - sender-agent: ${TC_CH_AGENT:} diff --git a/docker/daps_cachain.crt b/docker/daps_cachain.crt deleted file mode 100644 index 1611ceeb..00000000 --- a/docker/daps_cachain.crt +++ /dev/null @@ -1,112 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIJALIB7y7FZtiHMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNV -BAYTAkRFMRMwEQYDVQQKDApGcmF1bmhvZmVyMR4wHAYDVQQDDBVJRFMgUm9vdCBU -ZXN0IENBIDIwMTgwHhcNMTgxMDMxMTYxMjAyWhcNMzgxMDI2MTYxMjAyWjBCMQsw -CQYDVQQGEwJERTETMBEGA1UECgwKRnJhdW5ob2ZlcjEeMBwGA1UEAwwVSURTIFJv -b3QgVGVzdCBDQSAyMDE4MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA -sh2FI432kI1BYWCs1CBPgJakTBsQt/BP7DQJ2lULlBKhtJpJM85tD1dXgIhLoBFi -5cMQPCX8kEy7TQLEOB4pPvqvUk+Tc48bNA39F4p3nOLZzlYREaTHui9r+FDvHy7j -TJGuRuDNKbP8qOAAd8Ci63e9iabSTL1wXLuZFnkEEHKraCwUQhzck5Dk7ll5Vncw -StishI+zk47uOiM3v0yMO4TSRyxFvwKfHB09hKLbLg6fr+62IqLEQWU/r0qo9h8e -4N16HdXLojyTam36JPDfExB2yHwMAe5fqtzwknWy0VfkrI2+KjN004EmjhsD9tL0 -dKS+MSRARDQYuHenSpG6XQIDAQABo2AwXjAdBgNVHQ4EFgQUl+fsV0kEi4xmZ+NX -uXYPdqDwY5AwHwYDVR0jBBgwFoAUl+fsV0kEi4xmZ+NXuXYPdqDwY5AwDwYDVR0T -AQH/BAUwAwEB/zALBgNVHQ8EBAMCAUYwDQYJKoZIhvcNAQEFBQADggEBAIgDI5Zq -xvrI0ihxtgdnr/p7Nx71imoEIquGDWR+W9smzUGcY5zkAX9V2zm/vydNhRrA3iNn -/+bmsM/xUYk4J6Oju8czYE1EQ/43OUB3iODzdzdAUspaa4MQORi0qaHehLrOzf28 -RVDPzLhhy64Z7muvhiKlWlwlLMqJSKIrJpeCSYTQZJFLLvu2atktoFJok477z1cX -ZpV2P1ODUyVETlA2WpOkb1g+BeaboDcODV3lwHA0kToDnjCeVo1rzs9ghjIpxvmg -gWMVSvLUgHTqUq1ruyqg9dJ7kN4rxpmTYBhVzYueVCfaRNQ+kKkUOyNEIAH9/c2g -lmTm5QRelsvLnbk= ------END CERTIFICATE----- -Certificate: - Data: - Version: 3 (0x2) - Serial Number: 1 (0x1) - Signature Algorithm: sha256WithRSAEncryption - Issuer: C=DE, O=Fraunhofer, CN=IDS Root Test CA 2018 - Validity - Not Before: Oct 31 16:12:04 2018 GMT - Not After : Oct 29 16:12:04 2026 GMT - Subject: C=DE, O=Fraunhofer, CN=IDS Test SubCA 2018 - Subject Public Key Info: - Public Key Algorithm: rsaEncryption - Public-Key: (2048 bit) - Modulus: - 00:ab:24:8c:56:4f:da:26:8a:74:d1:c1:da:e6:fe: - c6:2e:1d:cc:98:c9:43:c7:a0:7a:59:cf:59:e7:7c: - 4e:c0:5e:2a:6b:dd:b4:e6:b0:ae:20:c0:20:d6:aa: - 37:25:5f:22:98:62:ba:bc:d4:76:fc:c8:72:fc:a7: - 64:48:36:b7:35:33:58:69:46:19:b7:51:70:34:3c: - b0:82:58:75:33:29:56:6f:c9:e7:f5:1e:22:fc:14: - db:cf:78:9c:a9:22:9d:a8:5b:f7:52:56:82:4d:51: - 89:9e:26:ef:14:8f:30:5b:f8:58:87:5b:83:e1:80: - b2:e2:0f:6c:7a:76:a1:b8:92:2e:e6:53:50:c3:c1: - 20:41:66:0d:0d:f9:c2:f8:d3:34:76:ef:d8:5e:45: - 15:39:88:20:c9:fb:34:88:61:c4:66:a0:c3:10:58: - c0:ad:86:6b:1f:8f:4b:ca:0a:47:6f:44:84:c3:59: - 92:aa:66:15:21:14:79:25:01:a7:3d:16:9e:59:01: - c3:69:eb:24:47:7a:84:ed:81:8d:c4:f9:4b:4b:75: - 8b:09:0a:54:11:77:81:db:12:be:50:bd:6b:86:95: - fa:66:6f:b4:3f:45:4e:0b:af:1a:0f:51:57:1a:2b: - ef:bc:ec:5c:d6:a2:37:f1:95:9f:ca:4a:73:30:b6: - ed:eb - Exponent: 65537 (0x10001) - X509v3 extensions: - X509v3 Subject Key Identifier: - CB:8C:C7:B6:85:79:A8:23:A6:CB:15:AB:17:50:2F:E6:65:43:5D:E8 - X509v3 Authority Key Identifier: - keyid:97:E7:EC:57:49:04:8B:8C:66:67:E3:57:B9:76:0F:76:A0:F0:63:90 - - X509v3 Basic Constraints: critical - CA:TRUE - X509v3 Key Usage: - Digital Signature, Certificate Sign, CRL Sign - 
X509v3 CRL Distribution Points: - - Full Name: - URI:http://crl.aisec.fraunhofer.de/ids.crl - - Authority Information Access: - CA Issuers - URI:http://downloads.aisec.fraunhofer.de/rootcacert2016.cert - - Signature Algorithm: sha256WithRSAEncryption - 11:95:1a:ed:ec:b4:2e:3d:fd:7b:fd:a0:fd:7c:68:08:42:6b: - 5f:ad:84:cd:1a:53:b7:90:22:09:fe:d5:26:68:d6:0a:6e:b4: - 05:f1:85:8b:d5:6b:26:52:d3:75:00:6b:06:fa:d0:43:14:7d: - 19:53:e9:09:0a:97:14:c6:fe:14:12:c8:7c:d0:c7:50:8d:67: - db:63:de:aa:49:ce:6b:6d:07:8a:ae:9e:39:c5:91:ef:af:85: - 06:fb:51:70:d2:c4:f6:b4:07:26:b1:da:e3:ac:b5:38:9e:61: - cf:bf:4e:47:9b:1d:51:6a:4c:6f:b9:a0:b8:a6:a8:b0:da:e4: - 60:e3:29:85:5c:ad:2d:65:29:60:7b:e5:16:a0:1f:7c:c6:62: - ed:fe:48:04:81:3f:9f:3b:e3:9b:d8:8b:78:1b:5a:8b:f1:46: - 5e:39:f3:bf:e2:8e:68:62:cd:ec:fa:17:98:80:5b:8a:5d:89: - 61:2e:f2:bb:68:7b:9c:ec:7e:e5:c1:6f:b1:03:0c:81:fe:45: - 6f:32:01:31:0a:dc:25:83:90:11:96:a3:ba:e4:8a:d9:58:20: - f4:85:21:e9:7b:00:d2:11:df:bc:e6:8c:bb:5e:f8:31:18:60: - 40:9d:66:9f:af:6a:99:8b:42:8a:9f:f3:6b:c4:f4:be:a5:01: - 24:b8:f4:26 ------BEGIN CERTIFICATE----- -MIID7jCCAtagAwIBAgIBATANBgkqhkiG9w0BAQsFADBCMQswCQYDVQQGEwJERTET -MBEGA1UECgwKRnJhdW5ob2ZlcjEeMBwGA1UEAwwVSURTIFJvb3QgVGVzdCBDQSAy -MDE4MB4XDTE4MTAzMTE2MTIwNFoXDTI2MTAyOTE2MTIwNFowQDELMAkGA1UEBhMC -REUxEzARBgNVBAoMCkZyYXVuaG9mZXIxHDAaBgNVBAMME0lEUyBUZXN0IFN1YkNB -IDIwMTgwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrJIxWT9ominTR -wdrm/sYuHcyYyUPHoHpZz1nnfE7AXipr3bTmsK4gwCDWqjclXyKYYrq81Hb8yHL8 -p2RINrc1M1hpRhm3UXA0PLCCWHUzKVZvyef1HiL8FNvPeJypIp2oW/dSVoJNUYme -Ju8UjzBb+FiHW4PhgLLiD2x6dqG4ki7mU1DDwSBBZg0N+cL40zR279heRRU5iCDJ -+zSIYcRmoMMQWMCthmsfj0vKCkdvRITDWZKqZhUhFHklAac9Fp5ZAcNp6yRHeoTt -gY3E+UtLdYsJClQRd4HbEr5QvWuGlfpmb7Q/RU4LrxoPUVcaK++87FzWojfxlZ/K -SnMwtu3rAgMBAAGjgfAwge0wHQYDVR0OBBYEFMuMx7aFeagjpssVqxdQL+ZlQ13o -MB8GA1UdIwQYMBaAFJfn7FdJBIuMZmfjV7l2D3ag8GOQMA8GA1UdEwEB/wQFMAMB -Af8wCwYDVR0PBAQDAgGGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly9jcmwuYWlz -ZWMuZnJhdW5ob2Zlci5kZS9pZHMuY3JsMFQGCCsGAQUFBwEBBEgwRjBEBggrBgEF -BQcwAoY4aHR0cDovL2Rvd25sb2Fkcy5haXNlYy5mcmF1bmhvZmVyLmRlL3Jvb3Rj -YWNlcnQyMDE2LmNlcnQwDQYJKoZIhvcNAQELBQADggEBABGVGu3stC49/Xv9oP18 -aAhCa1+thM0aU7eQIgn+1SZo1gputAXxhYvVayZS03UAawb60EMUfRlT6QkKlxTG -/hQSyHzQx1CNZ9tj3qpJzmttB4qunjnFke+vhQb7UXDSxPa0Byax2uOstTieYc+/ -TkebHVFqTG+5oLimqLDa5GDjKYVcrS1lKWB75RagH3zGYu3+SASBP58745vYi3gb -WovxRl4587/ijmhizez6F5iAW4pdiWEu8rtoe5zsfuXBb7EDDIH+RW8yATEK3CWD -kBGWo7rkitlYIPSFIel7ANIR37zmjLte+DEYYECdZp+vapmLQoqf82vE9L6lASS4 -9CY= ------END CERTIFICATE----- diff --git a/docker/document-api-multistage.Dockerfile b/docker/document-api-multistage.Dockerfile deleted file mode 100644 index 9de74c6c..00000000 --- a/docker/document-api-multistage.Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -FROM rust as builder -WORKDIR app -COPY LICENSE clearing-house-app ./ -RUN cargo build --release - -FROM ubuntu:20.04 - -RUN apt-get update \ -&& echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections \ -&& apt-get --no-install-recommends install -y -q ca-certificates gnupg2 libssl1.1 libc6 - -# trust the DAPS certificate -COPY docker/daps_cachain.crt /usr/local/share/ca-certificates/daps_cachain.crt -RUN update-ca-certificates - -RUN mkdir /server -WORKDIR /server - -COPY --from=builder /app/target/release/document-api . -COPY docker/entrypoint.sh . 
- -ENTRYPOINT ["/server/entrypoint.sh"] -CMD ["/server/document-api"] \ No newline at end of file diff --git a/docker/document-api.Dockerfile b/docker/document-api.Dockerfile deleted file mode 100644 index 73eabf3f..00000000 --- a/docker/document-api.Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -######################################################################################### -# -# Builds minimal runtime environment for the document-api -# Copyright 2019 Fraunhofer AISEC -# -######################################################################################### -FROM ubuntu:20.04 - -RUN apt-get update \ -&& echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections \ -&& apt-get --no-install-recommends install -y -q ca-certificates gnupg2 libssl1.1 libc6 - -# trust the DAPS certificate -COPY docker/daps_cachain.crt /usr/local/share/ca-certificates/daps_cachain.crt -RUN update-ca-certificates - -RUN mkdir /server -WORKDIR /server - -COPY clearing-house-app/target/release/document-api . -COPY docker/entrypoint.sh . - -ENTRYPOINT ["/server/entrypoint.sh"] -CMD ["/server/document-api"] diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh deleted file mode 100755 index a6453106..00000000 --- a/docker/entrypoint.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -set -e - -exec "$@" diff --git a/docker/keyring-api-multistage.Dockerfile b/docker/keyring-api-multistage.Dockerfile deleted file mode 100644 index 572d7f24..00000000 --- a/docker/keyring-api-multistage.Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -FROM rust as builder -WORKDIR app -COPY LICENSE clearing-house-app ./ -RUN cargo build --release - -FROM ubuntu:20.04 - -RUN apt-get update \ -&& echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections \ -&& apt-get --no-install-recommends install -y -q ca-certificates gnupg2 libssl1.1 libc6 - -# trust the DAPS certificate -COPY docker/daps_cachain.crt /usr/local/share/ca-certificates/daps_cachain.crt -RUN update-ca-certificates - -RUN mkdir /server -WORKDIR /server - -COPY --from=builder /app/target/release/keyring-api . -COPY docker/entrypoint.sh . - -ENTRYPOINT ["/server/entrypoint.sh"] -CMD ["/server/keyring-api"] \ No newline at end of file diff --git a/docker/keyring-api.Dockerfile b/docker/keyring-api.Dockerfile deleted file mode 100644 index ad0146a0..00000000 --- a/docker/keyring-api.Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -######################################################################################### -# -# Builds minimal runtime environment for the keyring-api -# Copyright 2019 Fraunhofer AISEC -# -######################################################################################### -FROM ubuntu:20.04 - -RUN apt-get update \ -&& echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections \ -&& apt-get --no-install-recommends install -y -q ca-certificates gnupg2 libssl1.1 libc6 - -# trust the DAPS certificate -COPY docker/daps_cachain.crt /usr/local/share/ca-certificates/daps_cachain.crt -RUN update-ca-certificates - -RUN mkdir /server -WORKDIR /server - -COPY clearing-house-app/target/release/keyring-api . -COPY docker/entrypoint.sh . 
- -ENTRYPOINT ["/server/entrypoint.sh"] -CMD ["/server/keyring-api"] diff --git a/docker/logging-service-multistage.Dockerfile b/docker/logging-service-multistage.Dockerfile deleted file mode 100644 index 82d7f821..00000000 --- a/docker/logging-service-multistage.Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -FROM rust as builder -WORKDIR app -COPY LICENSE clearing-house-app ./ -RUN cargo build --release - -FROM ubuntu:20.04 - -RUN apt-get update \ -&& echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections \ -&& apt-get --no-install-recommends install -y -q ca-certificates gnupg2 libssl1.1 libc6 - -# trust the DAPS certificate -COPY docker/daps_cachain.crt /usr/local/share/ca-certificates/daps_cachain.crt -RUN update-ca-certificates - -RUN mkdir /server -WORKDIR /server - -COPY --from=builder /app/target/release/logging-service . -COPY docker/entrypoint.sh . - -ENTRYPOINT ["/server/entrypoint.sh"] -CMD ["/server/logging-service"] \ No newline at end of file diff --git a/docker/logging-service.Dockerfile b/docker/logging-service.Dockerfile deleted file mode 100644 index 72040c03..00000000 --- a/docker/logging-service.Dockerfile +++ /dev/null @@ -1,24 +0,0 @@ -######################################################################################### -# -# Builds minimal runtime environment for the Trackchain API -# Copyright 2019 Fraunhofer AISEC -# -######################################################################################### -FROM ubuntu:20.04 - -RUN apt-get update \ -&& echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections \ -&& apt-get --no-install-recommends install -y -q ca-certificates gnupg2 libssl1.1 libc6 - -# trust the DAPS certificate -COPY docker/daps_cachain.crt /usr/local/share/ca-certificates/daps_cachain.crt -RUN update-ca-certificates - -RUN mkdir /server -WORKDIR /server - -COPY clearing-house-app/target/release/logging-service . -COPY docker/entrypoint.sh . - -ENTRYPOINT ["/server/entrypoint.sh"] -CMD ["/server/logging-service"] diff --git a/docs/Clearinghouse.md b/docs/Clearinghouse.md new file mode 100644 index 00000000..163c6cca --- /dev/null +++ b/docs/Clearinghouse.md @@ -0,0 +1,30 @@ +# Clearinghouse: How it works + +The Clearingouse consist of two services. The Clearinghouse-edc and the Clearinghouse-App. The Clearinghouse-edc is used to terminate IDS connections and map the requests to the API of the +Clearinghouse-App. The Clearinghouse-App is the brain of the the Clearinghouse and uses complex algorithms to encrypt and store log messages in the clearinghouse and provide mechanisms to query for log messages. + +## Clearinghouse-edc + +## Clearinghouse-App + +First of all the clearinghouse-app consisted before out of three separate microservices - logging, keyring, document - which were merged into one service. The reason for this is that the services were too tightly coupled and there was no benefit in having them separated. The new service is called just "clearinghouse-app". + +### Functionality + +The Clearinghouse-App provides the following functionality: logging and querying of log messages. Adding and changing of DocumentTypes. + +#### Log Message + +The logging service (as an entity inside the remaining clearinghouse-app) is responsible for orchestrating the flow between document service and keyring service: + +When logging a message, the message consists of two parts, originating from the IDS communication structure. There is a `header` and a `payload`. 
+
+The logging service creates a process id (if it does not exist yet) and checks the authorization.
+
+After all prerequisites are checked and completed, it fetches the transaction counter and assigns it to the Document.
+
+### API
+
+The API is described in the [OpenAPI specification]().
+
+####
\ No newline at end of file
diff --git a/docs/Proposal.md b/docs/Proposal.md
new file mode 100644
index 00000000..4dd34dd4
--- /dev/null
+++ b/docs/Proposal.md
@@ -0,0 +1,46 @@
+# Communication Proposal
+
+For the current operation of the MDS we would build on the Clearinghouse specification of IDS RAM 4.0.
+The existing Clearinghouse can be adapted and improved through the following points:
+
+- Replacing the Trusted Connector with the EDC
+- Merging the microservices into the CH-APP
+- Replacing the Rocket web server with Axum
+- Maintenance and optimizations
+- Stability through Mutex
+- Updating the dependencies
+
+As a result, the Clearinghouse is IDS RAM 4.0 compliant and backwards compatible with EDC MS8.
+
+### Open decisions:
+
+- Blockchain
+- Masterkey
+
+### Future
+
+In the DSP there will no longer be a Clearinghouse as specified in IDS RAM 4.0.
+The DSP merely regards the Clearinghouse as a participant.
+The logs of the connectors will be kept decentrally, residing only in the respective connector.
+A Clearinghouse for logging could therefore conclude a contract with all connectors in order to request these logs.
+
+### Clearinghouse and DAPS
+With regard to the upcoming migration to did:web, the Clearinghouse offers a sensible replacement for the DAPS.
+The Clearinghouse could issue Verifiable Credentials as soon as participants have entered into the contract with it and fulfil the basic requirements for participating in the dataspace. Each participant may only interact with members of the dataspace that can present this Verifiable Credential.
+This ensures that all participants in the dataspace accept the Clearinghouse.
+
+## Current implementation
+The endpoint ```POST /messages/log/:PID``` is called with a randomly generated PID. This has several drawbacks:
+- A new process is created for every transaction.
+- Transactions cannot be grouped (assigned to a contract).
+- Transactions from other connectors cannot be filtered to the same transaction.
+
+## Optimized approach
+Before a transaction takes place, a contract is concluded. In this step, the process could already be created in the Clearinghouse. It is also possible to specify several connector IDs here, to define who has read and write permissions.
+- The created PID must be shared with all connectors.
+- The connectors can log to the same PID in order to group the transactions by contract.
+- The MDS can set its own connector as a default in order to gain access to all transactions.
+
+A sketch of such a pre-creation request is shown below.
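+In this sketch, the `POST /process/:PID` endpoint is the one from the current implementation; the `owners` payload listing the authorized connector IDs is a hypothetical illustration, not a fixed API:
+
+```
+POST /process/contract-1234 HTTP/1.1
+Host: clearing-house.example.com
+Content-Type: application/json
+
+{
+  "owners": ["connector-A-id", "connector-B-id"]
+}
+```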
+
+## Flow
+### CreateLogMessage
+![](./images/CreateLogMessage.png)
+
+### CreatePID
+![](./images/CreatePid.png)
\ No newline at end of file
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md
new file mode 100644
index 00000000..6d521456
--- /dev/null
+++ b/docs/SUMMARY.md
@@ -0,0 +1,21 @@
+# Summary
+
+# Admin Guide
+- [Quick Start](content/admin-guide/quick_start.md)
+- [Installation]()
+  - [Clearinghouse-App](content/admin-guide/ch-app_installation.md)
+  - [Clearinghouse-EDC](content/admin-guide/ch-edc_installation.md)
+- [Tests](content/admin-guide/tests.md)
+- [Maintenance](content/admin-guide/maintenance.md)
+
+# References
+- [API](content/references/API.md)
+
+# Internals
+- [Architecture](content/internals/architecture.md)
+  - [Communication](content/internals/communication.md)
+  - [Functionality](content/internals/functionality.md)
+- [Proposal](Proposal.md)
+- [oldReadme](old_README.md)
+
+
diff --git a/docs/content/admin-guide/ch-app_installation.md b/docs/content/admin-guide/ch-app_installation.md
new file mode 100644
index 00000000..bf7683f2
--- /dev/null
+++ b/docs/content/admin-guide/ch-app_installation.md
@@ -0,0 +1,57 @@
+# Clearinghouse App Installation
+
+The Clearinghouse App (`ch-app`) comes pre-packaged in a Docker container.
+
+## Releases
+
+For existing releases visit [ids-basecamp-clearinghouse/ch-app Releases](https://github.com/truzzt/ids-basecamp-clearinghouse/pkgs/container/ids-basecamp-clearing/ch-app).
+
+## Usage
+
+To start the `ch-app` with Docker alone, use the following command and adapt it to your needs:
+
+```sh
+docker run -d \
+    -p 8000:8000 \
+    -v ${PRIVATE_KEY_PATH}:/app/keys/private_key.der:ro \
+    -e CH_APP_PROCESS_DATABASE_URL='mongodb://mongohost:27017' \
+    -e CH_APP_KEYRING_DATABASE_URL='mongodb://mongohost:27017' \
+    -e CH_APP_DOCUMENT_DATABASE_URL='mongodb://mongohost:27017' \
+    -e CH_APP_CLEAR_DB='false' \
+    -e CH_APP_LOG_LEVEL='INFO' \
+    -e SERVICE_ID_LOG='1' \
+    -e SHARED_SECRET='123' \
+    --name ch-app \
+    ghcr.io/truzzt/ids-basecamp-clearing/ch-app:${TAG}
+```
+
+The following example starts the `ch-app` together with a `mongodb` that also runs in Docker (good for local development):
+
+```sh
+# Create a docker network
+docker network create testch
+# Start mongodb
+docker run -d -p 27017:27017 --net=testch --name mongohost mongo
+# Start ch-app
+docker run -d \
+    -p 8000:8000 --net=testch \
+    -v ${PRIVATE_KEY_PATH}:/app/keys/private_key.der:ro \
+    -e CH_APP_PROCESS_DATABASE_URL='mongodb://mongohost:27017' \
+    -e CH_APP_KEYRING_DATABASE_URL='mongodb://mongohost:27017' \
+    -e CH_APP_DOCUMENT_DATABASE_URL='mongodb://mongohost:27017' \
+    -e CH_APP_CLEAR_DB='false' \
+    -e CH_APP_LOG_LEVEL='INFO' \
+    -e SERVICE_ID_LOG='1' \
+    -e SHARED_SECRET='123' \
+    --name ch-app \
+    ghcr.io/truzzt/ids-basecamp-clearing/ch-app:${TAG}
+
+# ---
+# Cleanup
+docker rm -f mongohost ch-app
+docker network rm testch
+```
+
+## Build
+
+To build the ch-app yourself, change into the `/clearing-house-app` directory and run `docker build -t ch-app:latest .`.
\ No newline at end of file
diff --git a/docs/content/admin-guide/ch-edc_installation.md b/docs/content/admin-guide/ch-edc_installation.md
new file mode 100644
index 00000000..62ba90b8
--- /dev/null
+++ b/docs/content/admin-guide/ch-edc_installation.md
@@ -0,0 +1,27 @@
+# Installation
+
+## Clearinghouse-EDC
+This module contains the Clearing House Extension that works with the Eclipse Dataspace Connector,
+allowing logging operations.
+
+### Configurations
+The following parameters must be configured (see the sketch after the table for a local example):
+
+| Parameter name                          | Description                                   | Default value          |
+|-----------------------------------------|-----------------------------------------------|------------------------|
+| `truzzt.clearinghouse.jwt.audience`     | Defines the intended recipients of the token  | 1                      |
+| `truzzt.clearinghouse.jwt.issuer`       | Person or entity offering the token           | 1                      |
+| `truzzt.clearinghouse.jwt.sign.secret`  | Secret key to encode the token                | 123                    |
+| `truzzt.clearinghouse.jwt.expires.at`   | Time until token expiration (in seconds)      | 30                     |
+| `truzzt.clearinghouse.app.base.url`     | Base URL of the clearing house app            | http://localhost:8000  |
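+For local execution (see *Running* below), these parameters can be set in the `config.properties` file passed via `-Dedc.fs.config`. A minimal sketch using the default values from the table above (placeholders, not production settings):
+
+```properties
+truzzt.clearinghouse.jwt.audience=1
+truzzt.clearinghouse.jwt.issuer=1
+truzzt.clearinghouse.jwt.sign.secret=123
+truzzt.clearinghouse.jwt.expires.at=30
+truzzt.clearinghouse.app.base.url=http://localhost:8000
+```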
+
+### Build
+To build the project run the command below:
+
+    ./gradlew build
+
+
+### Running
+Local execution:
+
+    java -Dedc.fs.config=launchers/connector-local/resources/config.properties -Dedc.keystore=launchers/connector-local/resources/keystore.jks -Dedc.keystore.password=password -Dedc.vault=launchers/connector-local/resources/vault.properties -jar launchers/connector-local/build/libs/clearing-house-edc.jar
diff --git a/docs/content/admin-guide/maintenance.md b/docs/content/admin-guide/maintenance.md
new file mode 100644
index 00000000..dfec7ebc
--- /dev/null
+++ b/docs/content/admin-guide/maintenance.md
@@ -0,0 +1 @@
+# Maintenance
diff --git a/docs/content/admin-guide/quick_start.md b/docs/content/admin-guide/quick_start.md
new file mode 100644
index 00000000..daae246d
--- /dev/null
+++ b/docs/content/admin-guide/quick_start.md
@@ -0,0 +1,98 @@
+# Quick Start
+
+## Prerequisites
+To run the quick start example, please ensure you have a working DAPS.
+
+### Private Key
+You will need the private key in the following formats:
+* .jks
+* .der
+
+The .jks keystore should be generated from the MDS Portal.
+
+To generate the .der key, run
+```sh
+openssl genpkey -algorithm RSA \
+  -pkeyopt rsa_keygen_bits:4096 \
+  -outform der \
+  -out private_key.der
+```
+
+### Environment
+```.env
+VERSION=1.0.0-beta.1
+SERVICE_ID=1
+SHARED_SECRET=changethis
+KEY_PASSWORD=password
+DAPS_URL=
+DAPS_JWKS_URL=
+API_KEY=changethis
+CLIENT_ID=
+```
+
+## docker-compose.yml
+```sh
+docker compose up
+```
+
+```docker-compose.yml
+version: "3.8"
+
+services:
+  ch-app:
+    image: ghcr.io/truzzt/ids-basecamp-clearing/ch-app:$VERSION
+    environment:
+      CH_APP_PROCESS_DATABASE_URL: mongodb://mongodb:27017
+      CH_APP_KEYRING_DATABASE_URL: mongodb://mongodb:27017
+      CH_APP_DOCUMENT_DATABASE_URL: mongodb://mongodb:27017
+      CH_APP_CLEAR_DB: false
+      CH_APP_LOG_LEVEL: INFO
+      SERVICE_ID_LOG: $SERVICE_ID
+      SHARED_SECRET: $SHARED_SECRET
+    volumes:
+      - ./YOUR_PRIVATE_KEY.der:/app/keys/private_key.der:ro
+
+  ch-edc:
+    image: ghcr.io/truzzt/ids-basecamp-clearing/ch-edc:$VERSION
+    environment:
+      WEB_HTTP_MANAGEMENT_PORT: 11001
+      WEB_HTTP_MANAGEMENT_PATH: /
+      WEB_HTTP_DATA_PORT: 11002
+      WEB_HTTP_DATA_PATH: /api/v1/data
+      WEB_HTTP_IDS_PORT: 11003
+      WEB_HTTP_IDS_PATH: /api/v1/ids
+      EDC_IDS_ID: urn:connector:example-connector
+      EDC_IDS_TITLE: 'truzzt Test EDC Connector'
+      EDC_IDS_DESCRIPTION: 'Minimally configured Open Source EDC built by truzzt.'
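+      # The settings below configure the IDS endpoints, CORS, DAPS (OAuth)
+      # access and the shared secret towards the ch-app; the $VARIABLES are
+      # taken from the .env file above.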
+      EDC_IDS_ENDPOINT: http://ch-edc:11003/api/v1/ids
+      IDS_WEBHOOK_ADDRESS: http://ch-edc:11003
+      EDC_IDS_CURATOR: https://truzzt.com
+      EDC_IDS_MAINTAINER: https://truzzt.com
+      EDC_CONNECTOR_NAME: truzzt-example-connector
+      EDC_HOSTNAME: ch-edc
+      EDC_API_AUTH_KEY: $API_KEY
+      EDC_WEB_REST_CORS_ENABLED: 'true'
+      EDC_WEB_REST_CORS_HEADERS: 'origin,content-type,accept,authorization,x-api-key'
+      EDC_WEB_REST_CORS_ORIGINS: '*'
+      EDC_VAULT: /resources/vault/edc/vault.properties
+      EDC_OAUTH_TOKEN_URL: $DAPS_URL
+      EDC_OAUTH_PROVIDER_JWKS_URL: $DAPS_JWKS_URL
+      EDC_OAUTH_ENDPOINT_AUDIENCE: idsc:IDS_CONNECTORS_ALL
+      EDC_OAUTH_CLIENT_ID: $CLIENT_ID
+      EDC_KEYSTORE: /resources/vault/edc/keystore.jks
+      EDC_KEYSTORE_PASSWORD: $KEY_PASSWORD
+      EDC_OAUTH_CERTIFICATE_ALIAS: 1
+      EDC_OAUTH_PRIVATE_KEY_ALIAS: 1
+      TRUZZT_CLEARINGHOUSE_JWT_AUDIENCE: $SERVICE_ID
+      TRUZZT_CLEARINGHOUSE_JWT_ISSUER: ch-edc
+      TRUZZT_CLEARINGHOUSE_JWT_SIGN_SECRET: $SHARED_SECRET
+      TRUZZT_CLEARINGHOUSE_JWT_EXPIRES_AT: 30
+      TRUZZT_CLEARINGHOUSE_APP_BASE_URL: http://ch-app:8000
+    volumes:
+      - ./YOUR_PRIVATE_KEY.jks:/resources/vault/edc/keystore.jks
+      - ./vault.properties:/resources/vault/edc/vault.properties
+
+  mongodb:
+    image: mongo
+```
+
diff --git a/docs/content/admin-guide/tests.md b/docs/content/admin-guide/tests.md
new file mode 100644
index 00000000..e5ae585b
--- /dev/null
+++ b/docs/content/admin-guide/tests.md
@@ -0,0 +1,22 @@
+# Tests
+
+
+## Clearinghouse-EDC
+
+The clearinghouse-edc tests use JUnit 5, with JaCoCo for coverage.
+
+### Running Tests
+To run the unit tests, execute the following command:
+
+    ./gradlew test
+
+
+### Test Coverage
+To generate the test coverage reports, execute the following command:
+
+    ./gradlew jacocoTestReport
+
+The coverage reports will be available in the following folders:
+
+- [core/build/reports/jacoco/test/html/index.html](./core/build/reports/jacoco/test/html/index.html)
+- [extensions/multipart/build/reports/jacoco/test/html/index.html](./extensions/multipart/build/reports/jacoco/test/html/index.html)
\ No newline at end of file
diff --git a/docs/content/internals/architecture.md b/docs/content/internals/architecture.md
new file mode 100644
index 00000000..e73014d5
--- /dev/null
+++ b/docs/content/internals/architecture.md
@@ -0,0 +1,23 @@
+# Architecture
+
+The Clearinghouse consists of two services: the Clearinghouse-EDC and the Clearinghouse-App. The Clearinghouse-EDC terminates IDS connections and maps the requests to the API of the Clearinghouse-App. The Clearinghouse-App is the brain of the Clearinghouse: it encrypts and stores log messages in the MongoDB and provides mechanisms to query for log messages.
+
+```d2
+direction: right
+ch: Clearinghouse {
+  cha: Clearinghouse-App
+  che: Clearinghouse-EDC
+  m: MongoDB {
+    shape: cylinder
+  }
+
+  che -> cha: REST
+  cha -> m
+}
+c: Connector
+c -> ch: IDS Multipart
+```
+
+> **Short history lesson**
+>
+> The clearinghouse-app previously consisted of three separate microservices - logging, keyring, document - which were merged into one service, because they were too tightly coupled and there was no benefit in keeping them separate. The new service is called just "clearinghouse-app".
\ No newline at end of file
diff --git a/docs/content/internals/communication.md b/docs/content/internals/communication.md
new file mode 100644
index 00000000..eee461d9
--- /dev/null
+++ b/docs/content/internals/communication.md
@@ -0,0 +1,162 @@
+# Communication
+
+The APIs are documented in the following descriptions:
+- Connector to Clearinghouse: [IDS-G](https://github.com/International-Data-Spaces-Association/IDS-G/tree/main/Communication/protocols/multipart)
+- Clearinghouse to Clearinghouse-App: [OpenAPI](https://github.com/truzzt/ids-basecamp-clearinghouse-postman/blob/main/index.yaml)
+
+The following section contains examples of the communication between the components.
+
+## Connector to Clearinghouse-EDC
+
+The Clearinghouse-EDC receives IDS multipart messages with a message of type `ids:LogMessage` in the *header* part and an arbitrary *payload*. The following shows an example of a multipart message:
+
+```
+POST /messages/log/1 HTTP/1.1
+Host: ch-ids.aisec.fraunhofer.de
+Content-Type: multipart/form-data; boundary=X-TEST-REQUEST-BOUNDARY
+Accept: */*
+
+--X-TEST-REQUEST-BOUNDARY
+Content-Disposition: form-data; name="header"
+Content-Type: application/json
+{
+  "@context" : {
+    "ids" : "https://w3id.org/idsa/core/",
+    "idsc" : "https://w3id.org/idsa/code/"
+  },
+  "@type" : "ids:LogMessage",
+  "@id" : "https://w3id.org/idsa/autogen/logMessage/c6c15a90-7799-4aa1-ac21-9323b87a7xv9",
+  "ids:securityToken" : {
+    "@type" : "ids:DynamicAttributeToken",
+    "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/6378asd9-480d-80df-c5cb02e4e260",
+    "ids:tokenFormat" : {
+      "@id" : "idsc:JWT"
+    },
+    "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoi....."
+  },
+  "ids:senderAgent" : "http://example.org",
+  "ids:modelVersion" : "4.1.0",
+  "ids:issued" : {
+    "@value" : "2020-12-14T08:57:57.057+01:00",
+    "@type" : "http://www.w3.org/2001/XMLSchema#dateTimeStamp"
+  },
+  "ids:issuerConnector" : {
+    "@id" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b"
+  }
+}
+--X-TEST-REQUEST-BOUNDARY
+Content-Disposition: form-data; name="payload"
+Content-Type: application/json
+{
+  "@context" : "https://w3id.org/idsa/contexts/context.jsonld",
+  "@type" : "ids:ConnectorUpdateMessage",
+  "id" : "http://industrialdataspace.org/connectorAvailableMessage/34d761cf-5ca4-4a77-a7f4-b14d8f75636a",
+  "issued" : "2019-12-02T08:25:08.245Z",
+  "modelVersion" : "4.1.0",
+  "issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b",
+  "securityToken" : {
+    "@type" : "ids:DynamicAttributeToken",
+    "tokenFormat" : "https://w3id.org/idsa/code/tokenformat/JWT",
+    "tokenValue" : "eyJhbGciOiJSUzI1NiIsInR5cCI..."
+  }
+}
+--X-TEST-REQUEST-BOUNDARY--
+```
+
+## Clearinghouse-EDC to Clearinghouse-App
+
+The Clearinghouse-EDC extracts the *header* and *payload* parts and forwards them to the Clearinghouse-App via REST. The message looks like this:
+
+```json
+{
+  "header": {
+    "@context" : {
+      "ids" : "https://w3id.org/idsa/core/",
+      "idsc" : "https://w3id.org/idsa/code/"
+    },
+    "@type" : "ids:LogMessage",
+    "@id" : "https://w3id.org/idsa/autogen/logMessage/c6c15a90-7799-4aa1-ac21-9323b87a7xv9",
+    "ids:securityToken" : {
+      "@type" : "ids:DynamicAttributeToken",
+      "@id" : "https://w3id.org/idsa/autogen/dynamicAttributeToken/6378asd9-480d-80df-c5cb02e4e260",
+      "ids:tokenFormat" : {
+        "@id" : "idsc:JWT"
+      },
+      "ids:tokenValue" : "eyJ0eXAiOiJKV1QiLCJraWQiOiJkZWZhdWx0IiwiYWxnIjoi....."
+ }, + "ids:senderAgent" : "http://example.org", + "ids:modelVersion" : "4.1.0", + "ids:issued" : { + "@value" : "2020-12-14T08:57:57.057+01:00", + "@type" : "http://www.w3.org/2001/XMLSchema#dateTimeStamp" + }, + "ids:issuerConnector" : { + "@id" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b" + } + }, + "payload": { + "@context" : "https://w3id.org/idsa/contexts/context.jsonld", + "@type" : "ids:ConnectorUpdateMessage", + "id" : "http://industrialdataspace.org/connectorAvailableMessage/34d761cf-5ca4-4a77-a7f4-b14d8f75636a", + "issued" : "2019-12-02T08:25:08.245Z", + "modelVersion" : "4.1.0", + "issuerConnector" : "https://companyA.com/connector/59a68243-dd96-4c8d-88a9-0f0e03e13b1b", + "securityToken" : { + "@type" : "ids:DynamicAttributeToken", + "tokenFormat" : "https://w3id.org/idsa/code/tokenformat/JWT", + "tokenValue" : "eyJhbGciOiJSUzI1NiIsInR5cCI..." + } + } +} +``` + +## Clearinghouse-App to Clearinghouse-EDC + +```json +{ + "data": "eyJhbGciOiJQUzUxMiIsImtpZCI6IlFyYS8vMjlGcnhiajVoaDVBemVmK0czNlNlaU9tOXE3czgrdzh1R0xEMjgifQ.eyJ0cmFuc2FjdGlvbl9pZCI6IjAwMDAwMDAwIiwidGltZXN0YW1wIjoxNjk2NDExMTM2LCJwcm9jZXNzX2lkIjoiMSIsImRvY3VtZW50X2lkIjoiNmNkNDQwNjQtZWFjNi00NmQzLWFhZTUtODcxYjgwYjU4OWMxIiwicGF5bG9hZCI6Int9IiwiY2hhaW5faGFzaCI6IjAiLCJjbGllbnRfaWQiOiJGNjoyNTo1ODpDNTo2MTo2ODo3QToyMTpGMTo0MDo5Rjo0RTpGQjo5NTpEQjo5OTo4ODpDOTpBNzoxQTpDNTpGODpCRjo0Qzo1NToxODo1NjozNTozNTo0MzpDNTpEQzo5NDpCNTpFQjo0NTozMDpGNTpBRjpDRSIsImNsZWFyaW5nX2hvdXNlX3ZlcnNpb24iOiIwLjEwLjAifQ.eo1KoF9gAZLF7CuhuQ-Sd9WSjw6dvDsrmM8w-A-FdTl4cOaPqp75k9O0tKxY8_ZNBsWmOzBzAfGng6YdvpDHIw9xFZTA7N_UMjTrrPuc8ehrVO2rwltTKb8N2bK4bQ4_Uq22Kd8mSFI6IyOZ7KeTkZ_iN30PXlYFAdt2GQHoT7xNERyQbHNEkJmOgGnaraMv0xEbl2zJktQqkTH9Kk4ZF2T_GbxKInhVxUhOsJ707ZeQ2Nxk4H6yO2RXwG5yKXFkwBDOMLg1f0Dnrgz_H1f-fQ7gPOrAL_4G4L7M9o7EVkMJlMpJR1xNBCeYbT_IvfL1CB5gi1NF-VNzt-8Zg5Yj-vNNR9j38yZTe6vH2dMkGl20B99KrEKTjkyVkCUIKnlb3oEKldse0E4ouw9v6WnIWq33-KnGV0ajwZrs13bQLZyLWvdNCBmYA5NujzbqOGkDROXloAB6MXBm5KiGTU8FxrqS6s_J7OW1CLTlAlTFF_U2Tr1xSvcusnpOGrU22IrCuqVuGCNNGCrPYjKJmMc05wIG0cmdxTdRnoe8R-vOVg2Zd07jdrBLX5l5tZtF60LC8DZKw4k2JaCu37W_dXdWHLSXEnpR9MGgnqC8MbOAMIIzSXpWKFdXcS-86SkgTvDA16geN_Bj7Ac6xcuUnEhM3_9tVnpjNMgPcStyO0KiP3c" +} +``` + +## Clearinghouse-EDC to Connector + +``` +--Boundary_1_377557244_1696411137008 +Content-Type: application/json +Content-Disposition: form-data; name="header" + +{ + "@context": { + "ids": "https://w3id.org/idsa/core/", + "idsc": "https://w3id.org/idsa/code/" + }, + "@id": "urn:message:92a2da5a-b5de-4709-bda9-c16a0ae293f6", + "@type": "ids:MessageProcessedNotificationMessage", + "ids:securityToken": { + "@id": "https://w3id.org/idsa/autogen/dynamicAttributeToken/6378asd9-480d-80df-c5cb02e4e260", + "@type": "ids:DynamicAttributeToken", + "ids:tokenFormat": { + "@id": "idsc:JWT" + }, + "ids:tokenValue": 
"eyJ0eXAiOiJhdCtqd3QiLCJraWQiOiJkNzRlYzU1MGY0MzkxYTAwZGIwODA5Mzg5MjdjOGU4YWQ0NjE3NmM4NGQ3MzhkZGMwODM1ODMzYzM5YWJkMzRhIiwiYWxnIjoiUlMyNTYifQ.eyJzY29wZSI6Imlkc2M6SURTX0NPTk5FQ1RPUl9BVFRSSUJVVEVTX0FMTCIsImF1ZCI6WyJpZHNjOklEU19DT05ORUNUT1JTX0FMTCJdLCJpc3MiOiJkYXBzLmRlbW8udHJ1enp0cG9ydC5jb20iLCJzdWIiOiJGNjoyNTo1ODpDNTo2MTo2ODo3QToyMTpGMTo0MDo5Rjo0RTpGQjo5NTpEQjo5OTo4ODpDOTpBNzoxQTpDNTpGODpCRjo0Qzo1NToxODo1NjozNTozNTo0MzpDNTpEQzo5NDpCNTpFQjo0NTozMDpGNTpBRjpDRSIsIm5iZiI6MTY5NjQxMTAxNiwiaWF0IjoxNjk2NDExMDE2LCJqdGkiOiI0MjY2OTY0NC01MzgzLTQ2NDYtYmMxMC0zMzJlMzRkMjdmNGMiLCJleHAiOjE2OTY0MTQ2MTYsImNsaWVudF9pZCI6IkY2OjI1OjU4OkM1OjYxOjY4OjdBOjIxOkYxOjQwOjlGOjRFOkZCOjk1OkRCOjk5Ojg4OkM5OkE3OjFBOkM1OkY4OkJGOjRDOjU1OjE4OjU2OjM1OjM1OjQzOkM1OkRDOjk0OkI1OkVCOjQ1OjMwOkY1OkFGOkNFIn0.sa2zCMCwap7KjqV6RkzQ4jeR-nMPXo546oqxSzyZSPamhfkPc35LfldZTkuX_gxy6P1Ra2ltrannQTH7467FC8H00giF3mamZ_LuyUHMRUZzab0UvNJaGqt1mJZaMiOnupixP1cUhsXszfmCRKXWvatbwvlc0nhw5gdO2lH_njWBrXUy5Bt2MIIFp892ijf_rP5KC7yfa0cW9lwTFuWZYMMRBeOfY_g1Mx_YVkQXy9mFI0x3zC6rms8jq8OWRompNfkQ7mZsiFPAafls2f0iP8M2HKWA8JeOG5rkAIw0ESWSVT7iB-oV50LlX7L7zAYVLGdDyM3s_khDNxrbvlW_bQ" + }, + "ids:issuerConnector": { + "@id": "urn:connector:example-connector" + }, + "ids:modelVersion": "4.1.3", + "ids:issued": { + "@value": "2023-10-04T09:18:56.998Z", + "@type": "http://www.w3.org/2001/XMLSchema#dateTimeStamp" + }, + "ids:senderAgent": { + "@id": "urn:connector:example-connector" + } +} +--Boundary_1_377557244_1696411137008 +Content-Type: application/json +Content-Disposition: form-data; name="payload" + +{ + "data": "eyJhbGciOiJQUzUxMiIsImtpZCI6IlFyYS8vMjlGcnhiajVoaDVBemVmK0czNlNlaU9tOXE3czgrdzh1R0xEMjgifQ.eyJ0cmFuc2FjdGlvbl9pZCI6IjAwMDAwMDAwIiwidGltZXN0YW1wIjoxNjk2NDExMTM2LCJwcm9jZXNzX2lkIjoiMSIsImRvY3VtZW50X2lkIjoiNmNkNDQwNjQtZWFjNi00NmQzLWFhZTUtODcxYjgwYjU4OWMxIiwicGF5bG9hZCI6Int9IiwiY2hhaW5faGFzaCI6IjAiLCJjbGllbnRfaWQiOiJGNjoyNTo1ODpDNTo2MTo2ODo3QToyMTpGMTo0MDo5Rjo0RTpGQjo5NTpEQjo5OTo4ODpDOTpBNzoxQTpDNTpGODpCRjo0Qzo1NToxODo1NjozNTozNTo0MzpDNTpEQzo5NDpCNTpFQjo0NTozMDpGNTpBRjpDRSIsImNsZWFyaW5nX2hvdXNlX3ZlcnNpb24iOiIwLjEwLjAifQ.eo1KoF9gAZLF7CuhuQ-Sd9WSjw6dvDsrmM8w-A-FdTl4cOaPqp75k9O0tKxY8_ZNBsWmOzBzAfGng6YdvpDHIw9xFZTA7N_UMjTrrPuc8ehrVO2rwltTKb8N2bK4bQ4_Uq22Kd8mSFI6IyOZ7KeTkZ_iN30PXlYFAdt2GQHoT7xNERyQbHNEkJmOgGnaraMv0xEbl2zJktQqkTH9Kk4ZF2T_GbxKInhVxUhOsJ707ZeQ2Nxk4H6yO2RXwG5yKXFkwBDOMLg1f0Dnrgz_H1f-fQ7gPOrAL_4G4L7M9o7EVkMJlMpJR1xNBCeYbT_IvfL1CB5gi1NF-VNzt-8Zg5Yj-vNNR9j38yZTe6vH2dMkGl20B99KrEKTjkyVkCUIKnlb3oEKldse0E4ouw9v6WnIWq33-KnGV0ajwZrs13bQLZyLWvdNCBmYA5NujzbqOGkDROXloAB6MXBm5KiGTU8FxrqS6s_J7OW1CLTlAlTFF_U2Tr1xSvcusnpOGrU22IrCuqVuGCNNGCrPYjKJmMc05wIG0cmdxTdRnoe8R-vOVg2Zd07jdrBLX5l5tZtF60LC8DZKw4k2JaCu37W_dXdWHLSXEnpR9MGgnqC8MbOAMIIzSXpWKFdXcS-86SkgTvDA16geN_Bj7Ac6xcuUnEhM3_9tVnpjNMgPcStyO0KiP3c" +} +--Boundary_1_377557244_1696411137008-- +``` \ No newline at end of file diff --git a/docs/content/internals/functionality.md b/docs/content/internals/functionality.md new file mode 100644 index 00000000..d60396b0 --- /dev/null +++ b/docs/content/internals/functionality.md @@ -0,0 +1,91 @@ +# Functionality + + +## Logging a message + +The logging service (as an entity inside the remaining clearinghouse-app) is responsible for orchestrating the flow between document service and keyring service: + +When logging a message, the message consists of two parts, originating from the IDS communication structure. There is a `header` and a `payload`. + +The logging service creates a process id (if not exists) and checks the authorization. 
+
+After all prerequisites are checked and completed, the logging service merges `header` and `payload` into a Document, retrieves the transaction counter, and assigns it to the Document.
+
+Now the document service comes into play: it first checks whether the document already exists, then requests the keyring service to generate a key map for the document. The key map is then used to encrypt the document (back in the document service), and the encrypted document is stored in the database.
+
+Finally, the transaction counter is incremented and a receipt is signed and sent back to the Clearinghouse-EDC.
+
+### Encryption
+
+There is a randomly generated Master Key stored in the database.
+
+Each document has a number of fields. For each document, a random secret is generated. Multiple secrets are derived from this original secret with the HKDF algorithm. These derived secrets are used to encrypt the fields of the document with AES-256-GCM-SIV.
+
+The original secret itself is encrypted with AES-256-GCM-SIV as well, using a key derived from the Master Key, and stored in the database alongside the Document.
+
+### Detailed internal diagram
+
+```d2
+log: fn log {
+  gp: fn db.get_process
+  ia: fn db.is_authorized
+  sp: fn db.store_process
+  de: process exists? {
+    shape: diamond
+  }
+
+  gp -> de
+  de -> sp: No
+  de -> ia: Yes
+  sp -> ia
+}
+
+lm: fn log_message {
+
+  gt: fn db.get_transaction_counter
+  df: Document::from(message)
+  ced: fn doc_api.create_encrypted_document {
+    ed: fn db.exists_document
+    gk: fn key_api.generate_keys {
+      gm: fn db.get_master_key
+      gdt: fn db.get_document_type
+      gkm: fn generate_key_map
+
+      gm -> gdt
+      gdt -> gkm
+
+    }
+    de: fn doc.encrypt
+    pt: fn db.get_document_with_previous_transaction_counter
+    ad: fn db.add_document
+
+    ed -> gk
+    gk -> de
+    de -> pt
+    pt -> ad
+  }
+  itc: fn db.increment_transaction_counter
+
+  df -> gt
+  gt -> ced
+  ced -> itc
+}
+
+log -> lm
+
+lm.ced.gk.gkm -> gkm
+
+gkm: fn generate_key_map {
+  ik: fn initialize_kdf
+  dk: fn derive_key_map
+  rk: fn restore_kdf
+  ke: fn kdf.expand
+  es: fn encrypt_secret
+
+  ik -> dk
+  dk -> rk
+  rk -> ke
+  ke -> es
+
+}
+```
\ No newline at end of file
diff --git a/docs/content/references/API.md b/docs/content/references/API.md
new file mode 100644
index 00000000..549af365
--- /dev/null
+++ b/docs/content/references/API.md
@@ -0,0 +1,3 @@
+# API Docs
+
+The Swagger docs and Postman collection can be found [here](https://github.com/truzzt/ids-basecamp-clearinghouse-postman)
\ No newline at end of file
diff --git a/docs/images/CreateLogMessage.png b/docs/images/CreateLogMessage.png
new file mode 100644
index 00000000..4829d7ee
Binary files /dev/null and b/docs/images/CreateLogMessage.png differ
diff --git a/docs/images/CreatePid.png b/docs/images/CreatePid.png
new file mode 100644
index 00000000..6405cb84
Binary files /dev/null and b/docs/images/CreatePid.png differ
diff --git a/doc/images/LogMessage.drawio b/docs/images/LogMessage.drawio
similarity index 100%
rename from doc/images/LogMessage.drawio
rename to docs/images/LogMessage.drawio
diff --git a/doc/images/LogMessage.jpg b/docs/images/LogMessage.jpg
similarity index 100%
rename from doc/images/LogMessage.jpg
rename to docs/images/LogMessage.jpg
diff --git a/doc/images/LogMessage.png b/docs/images/LogMessage.png
similarity index 100%
rename from doc/images/LogMessage.png
rename to docs/images/LogMessage.png
diff --git a/doc/images/QueryMessage.drawio b/docs/images/QueryMessage.drawio
similarity index 100%
rename from doc/images/QueryMessage.drawio
rename to docs/images/QueryMessage.drawio
diff --git a/doc/images/QueryMessage.png b/docs/images/QueryMessage.png
similarity index 100%
rename from doc/images/QueryMessage.png
rename to docs/images/QueryMessage.png
diff --git a/doc/images/ch_container_dependencies.png b/docs/images/ch_container_dependencies.png
similarity index 100%
rename from doc/images/ch_container_dependencies.png
rename to docs/images/ch_container_dependencies.png
diff --git a/doc/images/ch_container_dependencies.puml b/docs/images/ch_container_dependencies.puml
similarity index 100%
rename from doc/images/ch_container_dependencies.puml
rename to docs/images/ch_container_dependencies.puml
diff --git a/docs/old_README.md b/docs/old_README.md
new file mode 100644
index 00000000..6fa2b14d
--- /dev/null
+++ b/docs/old_README.md
@@ -0,0 +1,101 @@
+# IDS Clearing House
+The IDS Clearing House Service is a prototype implementation of the [Clearing House](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md) component of the [Industrial Data Space](https://internationaldataspaces.org/).
+
+Data in the Clearing House is stored encrypted and practically immutable. There are multiple ways in which the Clearing House enforces data immutability:
+- Using the `Logging Service`, there is no way to update an already existing log entry in the database.
+- Log entries in the database include a hash value of the previous log entry, chaining together all log entries. Any change to a previous log entry would require rehashing all following log entries (see the hash-chain sketch below).
+- A connector that logs information in the Clearing House receives a signed receipt from the Clearing House that includes, among other things, a timestamp and the current chain hash. A single valid receipt in possession of any connector is enough to detect any change to data up to the time indicated in the receipt.
+
+## Architecture
+The IDS Clearing House Service currently implements the [`Logging Service`](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md). Other services that comprise the [Clearing House](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md) may follow. The Clearing House Service consists of two parts:
+
+1. [`Clearing House App`](clearing-house-app)
+2. [`Clearing House Processors`](clearing-house-processors)
+
+The `Clearing House App` is a REST API written in [Rust](https://www.rust-lang.org) that implements the business logic of the Clearing House. The `Clearing House Processors` is a library written in Java that integrates the `Clearing House App` into the [Trusted Connector](https://github.com/industrial-data-space/trusted-connector). The `Clearing House Processors` provide the `multipart` and `idscp2` endpoints described in the [IDS-G](https://github.com/International-Data-Spaces-Association/IDS-G/tree/main). These are used by the IDS connectors to interact with the Clearing House. Both `Clearing House App` and `Clearing House Processors` are needed to provide the `Clearing House Service`.
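+
+To illustrate the hash chaining described in the immutability notes above: each log entry's chain hash covers the previous chain hash plus the new entry, so altering any stored entry invalidates every chain hash after it. The following Rust sketch is illustrative only; SHA-256, the hex encoding and the genesis value `"0"` are assumptions for this example, not necessarily the exact scheme used internally.
+
+```rust
+// Assumed Cargo dependencies: sha2 = "0.10", hex = "0.4"
+use sha2::{Digest, Sha256};
+
+// Chain hash of a new log entry: hash(previous chain hash || entry bytes).
+fn chain_hash(previous_chain_hash: &str, entry: &[u8]) -> String {
+    let mut hasher = Sha256::new();
+    hasher.update(previous_chain_hash.as_bytes());
+    hasher.update(entry);
+    hex::encode(hasher.finalize())
+}
+
+fn main() {
+    let h1 = chain_hash("0", b"first log entry"); // "0" as genesis value is an assumption
+    let h2 = chain_hash(&h1, b"second log entry");
+    // Tampering with the first entry breaks every later chain hash.
+    let forged = chain_hash(&chain_hash("0", b"forged entry"), b"second log entry");
+    assert_ne!(forged, h2);
+}
+```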
+
+## Requirements
+- [OpenSSL](https://www.openssl.org)
+- [MongoDB](https://www.mongodb.com)
+- ([Docker](https://www.docker.com))
+- [Trusted Connector](https://github.com/industrial-data-space/trusted-connector)
+
+## Trusted Connector
+The Clearing House Service API requires a [Trusted Connector](https://github.com/industrial-data-space/trusted-connector) (Version 7.1.0+) for deployment. The process of setting up a Trusted Connector is described [here](https://industrial-data-space.github.io/trusted-connector-documentation/docs/getting_started/). Using a docker image of the Trusted Connector should be sufficient for most deployments:
+
+`docker pull fraunhoferaisec/trusted-connector-core:7.2.0`
+
+The Clearing House Processors are written in Java for use in the Camel Component of the Trusted Connector. To configure the Trusted Connector for the Clearing House Service API, it needs access to the following files inside the docker container (e.g. mounted as a volume):
+- `clearing-house-processors.jar`: The Clearing House Processors need to be placed in the `/root/jars` folder of the Trusted Connector. The jar file needs to be [built](clearing-house-processors#building-from-source) from the Clearing House Processors using `gradle`.
+- [`clearing-house-routes.xml`](clearing-house-processors/src/routes/clearing-house-routes.xml): The camel routes required by the Clearing House need to be placed in the `/root/deploy` folder of the Trusted Connector.
+- [`application.yml`](docker/application.yml): This is a new configuration file of Trusted Connector 7.0.0+. The file version in this repository enables the use of some of the environment variables documented in the next section.
+
+Besides those files that are specific to the configuration of the Clearing House Service API, the Trusted Connector requires other files for its configuration, e.g. a truststore and a keystore with appropriate key material. Please refer to the [Documentation](https://industrial-data-space.github.io/trusted-connector-documentation/) of the Trusted Connector for more information. Also, please check the [Examples](https://github.com/industrial-data-space/trusted-connector/tree/master/examples) as they contain up-to-date configurations for the Trusted Connector.
+
+#### Environment Variables
+The Clearing House Processors can override some standard configuration settings of the Trusted Connector using environment variables. If these variables are not set, the Clearing House Processors will use the standard values provided by the Trusted Connector. Some of the variables are mandatory and have to be set:
+- `TC_DAPS_URL`: The URL of the DAPS used by the Clearing House. The Trusted Connector uses `https://daps.aisec.fraunhofer.de/v3` as the default DAPS URL.
+- `TC_KEYSTORE_PW`: The password of the key store mounted in the Trusted Connector. Defaults to `password`.
+- `TC_TRUSTSTORE_PW`: The password of the trust store mounted in the Trusted Connector. Defaults to `password`.
+- `TC_CH_ISSUER_CONNECTOR` (mandatory): Issuer connector needed for IDS Messages as specified by the [InfoModel](https://github.com/International-Data-Spaces-Association/InformationModel)
+- `TC_CH_AGENT` (mandatory): Server agent needed for IDS Messages as specified by the [InfoModel](https://github.com/International-Data-Spaces-Association/InformationModel)
+- `SERVICE_SHARED_SECRET` (mandatory): Shared secret, see the Shared Secret section below
+- `SERVICE_ID_TC` (mandatory): Internal ID of the `Trusted Connector` that is used by the `Logging Service` to identify the `Trusted Connector`.
+- `SERVICE_ID_LOG` (mandatory): Internal ID of the `Logging Service`.
+
+
+#### Example Configuration (docker-compose)
+```
+tc-core:
+    container_name: "tc-core"
+    image: fraunhoferaisec/trusted-connector-core:7.1.0
+    tty: true
+    stdin_open: true
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+      - ./data/trusted-connector/application.yml:/root/etc/application.yml
+      - ./data/trusted-connector/allow-all-flows.pl:/root/deploy/allow-all-flows.pl
+      - ./data/trusted-connector/ch-ids.p12:/root/etc/keystore.p12
+      - ./data/trusted-connector/truststore.p12:/root/etc/truststore.p12
+      - ./data/trusted-connector/clearing-house-processors-0.10.0.jar:/root/jars/clearing-house-processors.jar
+      - ./data/trusted-connector/routes/clearing-house-routes.xml:/root/deploy/clearing-house-routes.xml
+    environment:
+      TC_DAPS_URL: https://
+      SERVICE_SHARED_SECRET:
+      SERVICE_ID_TC:
+      SERVICE_ID_LOG:
+
+    ports:
+      - "8443:8443"
+      - "9999:9999"
+      - "29292:29292"
+```
+
+## Docker Containers
+The dockerfiles located [here](docker/) can be used to create containers for the services of the [`Clearing House App`](clearing-house-app). There are two types of dockerfiles:
+1. Simple builds (e.g. [dockerfile](docker/keyring-api.Dockerfile)) that require you to build the Service APIs yourself using [Rust](https://www.rust-lang.org)
+2. Multistage builds (e.g. [dockerfile](docker/keyring-api-multistage.Dockerfile)) that have a stage for building the Rust code
+
+To build the containers, check out the repository and execute in the main directory:
+
+`docker build -f docker/ . -t `
+
+### Container Dependencies
+![Container Dependencies](doc/images/ch_container_dependencies.png)
+
+### Configuration
+Please read the configuration section of the service ([`Logging Service`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#logging-service), [`Document API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#document-api), [`Keyring API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#keyring-api)) you are trying to run before using `docker run` or `docker-compose`. All containers built with the provided dockerfiles require at least one volume:
+1. The configuration file `Rocket.toml` is expected at `/server/Rocket.toml`
+
+Containers of the Keyring API require an additional volume:
+
+2. `/server/init_db` needs to contain the `default_doc_type.json`
+
+Containers of the Logging Service require an additional volume:
+
+3. The folder containing the signing key needs to match the path configured for the signing key in `Rocket.toml`, e.g. `/server/keys`
+
+## Shared Secret
+The Clearing House services use signed JWTs with HMAC and a shared secret to ensure a minimum level of integrity for the requests received.
The `Trusted Connector` as well as the services ([`Logging Service`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#logging-service), [`Document API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#document-api), [`Keyring API`](https://github.com/Fraunhofer-AISEC/ids-clearing-house-service/tree/architecture-revamp/clearing-house-app#keyring-api)) need to have access to the shared secret.
+
+For production use, please consider additional protection measures.
diff --git a/generateJWT.js b/generateJWT.js
new file mode 100644
index 00000000..9f34a689
--- /dev/null
+++ b/generateJWT.js
@@ -0,0 +1,14 @@
+const jwt = require('jsonwebtoken')
+
+// Claims of the shared-secret JWT: client_id and iss carry a connector
+// fingerprint; "aud" is presumably an internal service id (SERVICE_ID_*).
+const payload = {
+    "client_id": "69:F5:9D:B0:DD:A6:9D:30:5F:58:AA:2D:20:4D:B2:39:F0:54:FC:3B:keyid:4F:66:7D:BD:08:EE:C6:4A:D1:96:D8:7C:6C:A2:32:8A:EC:A6:AD:49",
+    "iss": "69:F5:9D:B0:DD:A6:9D:30:5F:58:AA:2D:20:4D:B2:39:F0:54:FC:3B:keyid:4F:66:7D:BD:08:EE:C6:4A:D1:96:D8:7C:6C:A2:32:8A:EC:A6:AD:49",
+    "iat": Math.floor(Date.now() / 1000),
+    "nbf": Math.floor(Date.now() / 1000),
+    "exp": Math.floor(Date.now() / 1000) + 3600, // valid for one hour
+    "aud": "1"
+}
+
+// Sign with HMAC-SHA256; "123" is a placeholder for SERVICE_SHARED_SECRET.
+jwt.sign(payload, "123", { algorithm: 'HS256' }, function(err, token) {
+    console.log(token);
+});
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 00000000..adfd6db3
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,7644 @@
+{
+  "name": "ids-basecamp-clearinghouse",
+  "version": "1.0.0",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "name": "ids-basecamp-clearinghouse",
+      "version": "1.0.0",
+      "license": "Apache-2.0",
+      "devDependencies": {
+        "@semantic-release/changelog": "^6.0.3",
+        "@semantic-release/git": "^10.0.1",
+        "jsonwebtoken": "^9.0.2",
+        "semantic-release": "^21.0.7"
+      }
+    },
+    "node_modules/@babel/code-frame": {
+      "version": "7.23.5",
+      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz",
+      "integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==",
+      "dev": true,
+      "dependencies": {
+        "@babel/highlight": "^7.23.4",
+        "chalk": "^2.4.2"
+      },
+      "engines": {
+        "node": ">=6.9.0"
+      }
+    },
+    "node_modules/@babel/code-frame/node_modules/ansi-styles": {
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+      "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+      "dev": true,
+      "dependencies": {
+        "color-convert": "^1.9.0"
+      },
+      "engines": {
+        "node": ">=4"
+      }
+    },
+    "node_modules/@babel/code-frame/node_modules/chalk": {
+      "version": "2.4.2",
+      "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
+      "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
+      "dev": true,
+      "dependencies": {
+        "ansi-styles": "^3.2.1",
+        "escape-string-regexp": "^1.0.5",
+        "supports-color": "^5.3.0"
+      },
+      "engines": {
+        "node": ">=4"
+      }
+    },
+    "node_modules/@babel/code-frame/node_modules/color-convert": {
+      "version": "1.9.3",
+      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
+      "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
+      "dev": true,
+      "dependencies": {
+        "color-name": "1.1.3"
+      }
+    },
+    "node_modules/@babel/code-frame/node_modules/color-name": {
+      "version": "1.1.3",
+
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/code-frame/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", + "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.23.4", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz", + "integrity": "sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.22.20", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + 
"dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@colors/colors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "dev": true, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/core": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.1.0.tgz", + "integrity": "sha512-BDa2VAMLSh3otEiaMJ/3Y36GU4qf6GI+VivQ/P41NC6GHcdxpKlqV0ikSZ5gdQsmS3ojXeRx5vasgNTinF0Q4g==", + "dev": true, + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.0.0", + "@octokit/request": "^8.0.2", + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.4.tgz", + 
"integrity": "sha512-DWPLtr1Kz3tv8L0UvXTDP1fNwM0S+z6EJpRcvH66orY6Eld4XBMCSYsaWp4xIm61jTWxK68BrR7ibO+vSDnZqw==", + "dev": true, + "dependencies": { + "@octokit/types": "^12.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.0.2.tgz", + "integrity": "sha512-OJ2iGMtj5Tg3s6RaXH22cJcxXRi7Y3EBqbHTBRq+PQAqfaS8f/236fUrWhfSn8P4jovyzqucxme7/vWSSZBX2Q==", + "dev": true, + "dependencies": { + "@octokit/request": "^8.0.1", + "@octokit/types": "^12.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "dev": true + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz", + "integrity": "sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==", + "dev": true, + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-retry": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz", + "integrity": "sha512-SKs+Tz9oj0g4p28qkZwl/topGcb0k0qPNX/i7vBKmDsjoeqnVfFUquqrE/O9oJY7+oLzdCtkiWSXLpLjvl6uog==", + "dev": true, + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=5" + } + }, + "node_modules/@octokit/plugin-throttling": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-8.2.0.tgz", + "integrity": "sha512-nOpWtLayKFpgqmgD0y3GqXafMFuKcA4tRPZIfu7BArd2lEZeb1988nhWhwx4aZWmjDmUfdgVf7W+Tt4AmvRmMQ==", + "dev": true, + "dependencies": { + "@octokit/types": "^12.2.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "^5.0.0" + } + }, + "node_modules/@octokit/request": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.2.0.tgz", + "integrity": "sha512-exPif6x5uwLqv1N1irkLG1zZNJkOtj8bZxuVHd71U5Ftuxf2wGNvAJyNBcPbPC+EBzwYEbBDdSFb8EPcjpYxPQ==", + "dev": true, + "dependencies": { + "@octokit/endpoint": "^9.0.0", + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.0.1.tgz", + "integrity": "sha512-X7pnyTMV7MgtGmiXBwmO6M5kIPrntOXdyKZLigNfQWSEQzVxR4a4vo49vJjTWX70mPndj8KhfT4Dx+2Ng3vnBQ==", + "dev": true, + "dependencies": { + "@octokit/types": "^12.0.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": 
"sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "dev": true, + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@pnpm/config.env-replace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@pnpm/config.env-replace/-/config.env-replace-1.1.0.tgz", + "integrity": "sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w==", + "dev": true, + "engines": { + "node": ">=12.22.0" + } + }, + "node_modules/@pnpm/network.ca-file": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz", + "integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==", + "dev": true, + "dependencies": { + "graceful-fs": "4.2.10" + }, + "engines": { + "node": ">=12.22.0" + } + }, + "node_modules/@pnpm/network.ca-file/node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/@pnpm/npm-conf": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-2.2.2.tgz", + "integrity": "sha512-UA91GwWPhFExt3IizW6bOeY/pQ0BkuNwKjk9iQW9KqxluGCrg4VenZ0/L+2Y0+ZOtme72EVvg6v0zo3AMQRCeA==", + "dev": true, + "dependencies": { + "@pnpm/config.env-replace": "^1.1.0", + "@pnpm/network.ca-file": "^1.0.1", + "config-chain": "^1.1.11" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@semantic-release/changelog": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@semantic-release/changelog/-/changelog-6.0.3.tgz", + "integrity": "sha512-dZuR5qByyfe3Y03TpmCvAxCyTnp7r5XwtHRf/8vD9EAn4ZWbavUX8adMtXYzE86EVh0gyLA7lm5yW4IV30XUag==", + "dev": true, + "dependencies": { + "@semantic-release/error": "^3.0.0", + "aggregate-error": "^3.0.0", + "fs-extra": "^11.0.0", + "lodash": "^4.17.4" + }, + "engines": { + "node": ">=14.17" + }, + "peerDependencies": { + "semantic-release": ">=18.0.0" + } + }, + "node_modules/@semantic-release/commit-analyzer": { + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/@semantic-release/commit-analyzer/-/commit-analyzer-10.0.4.tgz", + "integrity": "sha512-pFGn99fn8w4/MHE0otb2A/l5kxgOuxaaauIh4u30ncoTJuqWj4hXTgEJ03REqjS+w1R2vPftSsO26WC61yOcpw==", + "dev": true, + "dependencies": { + "conventional-changelog-angular": "^6.0.0", + "conventional-commits-filter": "^3.0.0", + "conventional-commits-parser": "^5.0.0", + "debug": "^4.0.0", + "import-from": "^4.0.0", + "lodash-es": "^4.17.21", + "micromatch": "^4.0.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "semantic-release": ">=20.1.0" + } + }, + "node_modules/@semantic-release/error": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-3.0.0.tgz", + "integrity": "sha512-5hiM4Un+tpl4cKw3lV4UgzJj+SmfNIDCLLw0TepzQxz9ZGV5ixnqkzIVF+3tp0ZHgcMKE+VNGHJjEeyFG2dcSw==", + "dev": true, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/@semantic-release/git": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@semantic-release/git/-/git-10.0.1.tgz", + "integrity": "sha512-eWrx5KguUcU2wUPaO6sfvZI0wPafUKAMNC18aXY4EnNcrZL86dEmpNVnC9uMpGZkmZJ9EfCVJBQx4pV4EMGT1w==", + "dev": true, + "dependencies": { + "@semantic-release/error": "^3.0.0", + "aggregate-error": "^3.0.0", 
+ "debug": "^4.0.0", + "dir-glob": "^3.0.0", + "execa": "^5.0.0", + "lodash": "^4.17.4", + "micromatch": "^4.0.0", + "p-reduce": "^2.0.0" + }, + "engines": { + "node": ">=14.17" + }, + "peerDependencies": { + "semantic-release": ">=18.0.0" + } + }, + "node_modules/@semantic-release/github": { + "version": "9.2.6", + "resolved": "https://registry.npmjs.org/@semantic-release/github/-/github-9.2.6.tgz", + "integrity": "sha512-shi+Lrf6exeNZF+sBhK+P011LSbhmIAoUEgEY6SsxF8irJ+J2stwI5jkyDQ+4gzYyDImzV6LCKdYB9FXnQRWKA==", + "dev": true, + "dependencies": { + "@octokit/core": "^5.0.0", + "@octokit/plugin-paginate-rest": "^9.0.0", + "@octokit/plugin-retry": "^6.0.0", + "@octokit/plugin-throttling": "^8.0.0", + "@semantic-release/error": "^4.0.0", + "aggregate-error": "^5.0.0", + "debug": "^4.3.4", + "dir-glob": "^3.0.1", + "globby": "^14.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "issue-parser": "^6.0.0", + "lodash-es": "^4.17.21", + "mime": "^4.0.0", + "p-filter": "^4.0.0", + "url-join": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "semantic-release": ">=20.1.0" + } + }, + "node_modules/@semantic-release/github/node_modules/@semantic-release/error": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-4.0.0.tgz", + "integrity": "sha512-mgdxrHTLOjOddRVYIYDo0fR3/v61GNN1YGkfbrjuIKg/uMgCd+Qzo3UAXJ+woLQQpos4pl5Esuw5A7AoNlzjUQ==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/@semantic-release/github/node_modules/aggregate-error": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", + "integrity": "sha512-gOsf2YwSlleG6IjRYG2A7k0HmBMEo6qVNk9Bp/EaLgAJT5ngH6PXbqa4ItvnEwCm/velL5jAnQgsHsWnjhGmvw==", + "dev": true, + "dependencies": { + "clean-stack": "^5.2.0", + "indent-string": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/github/node_modules/clean-stack": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.2.0.tgz", + "integrity": "sha512-TyUIUJgdFnCISzG5zu3291TAsE77ddchd0bepon1VVQrKLGKFED4iXFEDQ24mIPdPBbyE16PK3F8MYE1CmcBEQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "5.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/github/node_modules/indent-string": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", + "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm": { + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-10.0.6.tgz", + "integrity": "sha512-DyqHrGE8aUyapA277BB+4kV0C4iMHh3sHzUWdf0jTgp5NNJxVUz76W1f57FB64Ue03him3CBXxFqQD2xGabxow==", + "dev": true, + "dependencies": { + "@semantic-release/error": "^4.0.0", + "aggregate-error": "^5.0.0", + "execa": "^8.0.0", + "fs-extra": "^11.0.0", + "lodash-es": "^4.17.21", + "nerf-dart": "^1.0.0", + "normalize-url": "^8.0.0", + "npm": "^9.5.0", + "rc": "^1.2.8", + "read-pkg": "^8.0.0", + "registry-auth-token": "^5.0.0", + "semver": "^7.1.2", + "tempy": "^3.0.0" + }, + "engines": { + 
"node": ">=18" + }, + "peerDependencies": { + "semantic-release": ">=20.1.0" + } + }, + "node_modules/@semantic-release/npm/node_modules/@semantic-release/error": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-4.0.0.tgz", + "integrity": "sha512-mgdxrHTLOjOddRVYIYDo0fR3/v61GNN1YGkfbrjuIKg/uMgCd+Qzo3UAXJ+woLQQpos4pl5Esuw5A7AoNlzjUQ==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/@semantic-release/npm/node_modules/aggregate-error": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", + "integrity": "sha512-gOsf2YwSlleG6IjRYG2A7k0HmBMEo6qVNk9Bp/EaLgAJT5ngH6PXbqa4ItvnEwCm/velL5jAnQgsHsWnjhGmvw==", + "dev": true, + "dependencies": { + "clean-stack": "^5.2.0", + "indent-string": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/clean-stack": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.2.0.tgz", + "integrity": "sha512-TyUIUJgdFnCISzG5zu3291TAsE77ddchd0bepon1VVQrKLGKFED4iXFEDQ24mIPdPBbyE16PK3F8MYE1CmcBEQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "5.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/@semantic-release/npm/node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/@semantic-release/npm/node_modules/indent-string": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", + "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": 
true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/npm/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@semantic-release/npm/node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/release-notes-generator": { + "version": "11.0.7", + "resolved": "https://registry.npmjs.org/@semantic-release/release-notes-generator/-/release-notes-generator-11.0.7.tgz", + "integrity": "sha512-T09QB9ImmNx7Q6hY6YnnEbw/rEJ6a+22LBxfZq+pSAXg/OL/k0siwEm5cK4k1f9dE2Z2mPIjJKKohzUm0jbxcQ==", + "dev": true, + "dependencies": { + "conventional-changelog-angular": "^6.0.0", + "conventional-changelog-writer": "^6.0.0", + "conventional-commits-filter": "^4.0.0", + "conventional-commits-parser": "^5.0.0", + "debug": "^4.0.0", + "get-stream": "^7.0.0", + "import-from": "^4.0.0", + "into-stream": "^7.0.0", + "lodash-es": "^4.17.21", + "read-pkg-up": "^10.0.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "semantic-release": ">=20.1.0" + } + }, + 
"node_modules/@semantic-release/release-notes-generator/node_modules/conventional-commits-filter": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-4.0.0.tgz", + "integrity": "sha512-rnpnibcSOdFcdclpFwWa+pPlZJhXE7l+XK04zxhbWrhgpR96h33QLz8hITTXbcYICxVr3HZFtbtUAQ+4LdBo9A==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "node_modules/@semantic-release/release-notes-generator/node_modules/get-stream": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-7.0.1.tgz", + "integrity": "sha512-3M8C1EOFN6r8AMUhwUAACIoXZJEOufDU5+0gFFN5uNs6XYOralD2Pqkl7m046va6x77FwposWXbAhPPIOus7mQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@types/minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", + "dev": true + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", + "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "dev": true + }, + "node_modules/agent-base": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", + "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==", + "dev": true, + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-escapes": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-6.2.0.tgz", + "integrity": "sha512-kzRaCqXnpzWs+3z5ABPQiVke+iq0KXkHo8xiWV4RPTi5Yli0l97BEQuhXV1s7+aSU/fu1kUuxgS4MsQ0fRuygw==", + "dev": true, + "dependencies": { + "type-fest": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + 
"dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/ansicolors": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz", + "integrity": "sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==", + "dev": true + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/argv-formatter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/argv-formatter/-/argv-formatter-1.0.0.tgz", + "integrity": "sha512-F2+Hkm9xFaRg+GkaNnbwXNDV5O6pnCFEmqyhvfC/Ic5LbgOWjJh3L+mN/s91rxVL3znE7DYVpW0GJFT+4YBgWw==", + "dev": true + }, + "node_modules/array-ify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz", + "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==", + "dev": true + }, + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", + "dev": true + }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==", + "dev": true + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "dev": true + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": 
"sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cardinal": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/cardinal/-/cardinal-2.1.1.tgz", + "integrity": "sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==", + "dev": true, + "dependencies": { + "ansicolors": "~0.3.2", + "redeyed": "~2.1.0" + }, + "bin": { + "cdl": "bin/cdl.js" + } + }, + "node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-table3": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz", + "integrity": "sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0" + }, + "engines": { + "node": "10.* || >= 12.*" + }, + "optionalDependencies": { + "@colors/colors": "1.5.0" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/compare-func": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz", + "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==", + "dev": true, + "dependencies": { + "array-ify": "^1.0.0", + "dot-prop": "^5.1.0" + } + }, + "node_modules/config-chain": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", + "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", + "dev": true, + "dependencies": { + "ini": "^1.3.4", + "proto-list": "~1.2.1" + } + }, + "node_modules/conventional-changelog-angular": { 
+ "version": "6.0.0", + "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-6.0.0.tgz", + "integrity": "sha512-6qLgrBF4gueoC7AFVHu51nHL9pF9FRjXrH+ceVf7WmAfH3gs+gEYOkvxhjMPjZu57I4AGUGoNTY8V7Hrgf1uqg==", + "dev": true, + "dependencies": { + "compare-func": "^2.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-changelog-writer": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-6.0.1.tgz", + "integrity": "sha512-359t9aHorPw+U+nHzUXHS5ZnPBOizRxfQsWT5ZDHBfvfxQOAik+yfuhKXG66CN5LEWPpMNnIMHUTCKeYNprvHQ==", + "dev": true, + "dependencies": { + "conventional-commits-filter": "^3.0.0", + "dateformat": "^3.0.3", + "handlebars": "^4.7.7", + "json-stringify-safe": "^5.0.1", + "meow": "^8.1.2", + "semver": "^7.0.0", + "split": "^1.0.1" + }, + "bin": { + "conventional-changelog-writer": "cli.js" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-commits-filter": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-3.0.0.tgz", + "integrity": "sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==", + "dev": true, + "dependencies": { + "lodash.ismatch": "^4.4.0", + "modify-values": "^1.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/conventional-commits-parser": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-5.0.0.tgz", + "integrity": "sha512-ZPMl0ZJbw74iS9LuX9YIAiW8pfM5p3yh2o/NbXHbkFuZzY5jvdi5jFycEOkmBW5H5I7nA+D6f3UcsCLP2vvSEA==", + "dev": true, + "dependencies": { + "is-text-path": "^2.0.0", + "JSONStream": "^1.3.5", + "meow": "^12.0.1", + "split2": "^4.0.0" + }, + "bin": { + "conventional-commits-parser": "cli.mjs" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/conventional-commits-parser/node_modules/meow": { + "version": "12.1.1", + "resolved": "https://registry.npmjs.org/meow/-/meow-12.1.1.tgz", + "integrity": "sha512-BhXM0Au22RwUneMPwSCnyhTOizdWoIEPU9sp0Aqa1PnDMR5Wv2FGXYDjuzJEIX+Eo2Rb8xuYe5jrnm5QowQFkw==", + "dev": true, + "engines": { + "node": ">=16.10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cosmiconfig": { + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", + "dev": true, + "dependencies": { + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + 
"path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/crypto-random-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", + "integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==", + "dev": true, + "dependencies": { + "type-fest": "^1.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/crypto-random-string/node_modules/type-fest": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", + "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/dateformat": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz", + "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", + "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", + "dev": true, + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decamelize-keys/node_modules/map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", + "dev": true + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": 
"sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dev": true, + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/duplexer2": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", + "integrity": "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==", + "dev": true, + "dependencies": { + "readable-stream": "^2.0.2" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/env-ci": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/env-ci/-/env-ci-9.1.1.tgz", + "integrity": "sha512-Im2yEWeF4b2RAMAaWvGioXk6m0UNaIjD8hj28j2ij5ldnIFrDQT0+pzDvpbRkcjurhXhf/AsBKv8P2rtmGi9Aw==", + "dev": true, + "dependencies": { + "execa": "^7.0.0", + "java-properties": "^1.0.2" + }, + "engines": { + "node": "^16.14 || >=18" + } + }, + "node_modules/env-ci/node_modules/execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/env-ci/node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "dev": true, + "engines": { + "node": ">=14.18.0" + } + }, + "node_modules/env-ci/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/env-ci/node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": 
true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/env-ci/node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/env-ci/node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/env-ci/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/env-ci/node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": 
"^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/figures": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-5.0.0.tgz", + "integrity": "sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^5.0.0", + "is-unicode-supported": "^1.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", + "integrity": "sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", + "dev": true, + "dependencies": { + "locate-path": "^7.1.0", + "path-exists": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/find-versions": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-5.1.0.tgz", + "integrity": "sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==", + "dev": true, + "dependencies": { + "semver-regex": "^4.0.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/from2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", + "dev": true, + "dependencies": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + } + }, + "node_modules/fs-extra": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", + "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/function-bind": { + 
"version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/git-log-parser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/git-log-parser/-/git-log-parser-1.2.0.tgz", + "integrity": "sha512-rnCVNfkTL8tdNryFuaY0fYiBWEBcgF748O6ZI61rslBvr2o7U65c2/6npCRqH40vuAhtgtDiqLTJjBVdrejCzA==", + "dev": true, + "dependencies": { + "argv-formatter": "~1.0.0", + "spawn-error-forwarder": "~1.0.0", + "split2": "~1.0.0", + "stream-combiner2": "~1.1.1", + "through2": "~2.0.0", + "traverse": "~0.6.6" + } + }, + "node_modules/git-log-parser/node_modules/split2": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-1.0.0.tgz", + "integrity": "sha512-NKywug4u4pX/AZBB1FCPzZ6/7O+Xhz1qMVbzTvvKvikjO99oPN87SkK08mEY9P63/5lWjK+wgOOgApnTg5r6qg==", + "dev": true, + "dependencies": { + "through2": "~2.0.0" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globby": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.0.1.tgz", + "integrity": "sha512-jOMLD2Z7MAhyG8aJpNOpmziMOP4rPLcc95oQPKXBazW82z+CEgPFBQvEpRUa1KeIMUJo4Wsm+q6uzO/Q/4BksQ==", + "dev": true, + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.2", + "ignore": "^5.2.4", + "path-type": "^5.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby/node_modules/path-type": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-5.0.0.tgz", + "integrity": "sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": 
"sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hook-std": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hook-std/-/hook-std-3.0.0.tgz", + "integrity": "sha512-jHRQzjSDzMtFy34AGj1DN+vq54WVuhSvKgrHf0OMiFQTwDD4L/qqofVEWjLOBMTn5+lCD3fPg32W9yOfnEJTTw==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/hosted-git-info": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.1.tgz", + "integrity": "sha512-+K84LB1DYwMHoHSgaOY/Jfhw3ucPmSET5v98Ke/HdNSw4a0UktWzyW1mjhjpuxxTqOOsfWT/7iVshHmVZ4IpOA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", + "dev": true, + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dev": true, + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + 
"resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-fresh/node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/import-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/import-from/-/import-from-4.0.0.tgz", + "integrity": "sha512-P9J71vT5nLlDeV8FHs5nNxaLbrpfAV5cF5srvbZfpwpcJoM/xZR3hiv+q+SAnuSmuGbXMWud063iIMx/V/EWZQ==", + "dev": true, + "engines": { + "node": ">=12.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + }, + "node_modules/into-stream": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-7.0.0.tgz", + "integrity": "sha512-2dYz766i9HprMBasCMvHMuazJ7u4WzhJwo5kb3iPSiW/iRYV6uPari3zHoqZlnuaR7V1bEiNMxikhp37rdBXbw==", + "dev": true, + "dependencies": { + "from2": "^2.3.0", + "p-is-promise": "^3.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-text-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-2.0.0.tgz", + "integrity": "sha512-+oDTluR6WEjdXEJMnC2z6A4FRwFoYuvShVVEGsS7ewc0UTi2QtAKMDJuL4BDEVt+5T7MjFo12RP8ghOM75oKJw==", + "dev": true, + "dependencies": { + "text-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-unicode-supported": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", + "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/issue-parser": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/issue-parser/-/issue-parser-6.0.0.tgz", + "integrity": "sha512-zKa/Dxq2lGsBIXQ7CUZWTHfvxPC2ej0KfO7fIPqLlHB9J2hJ7rGhZ5rilhuufylr4RXYPzJUeFjKxz305OsNlA==", + "dev": true, + "dependencies": { + "lodash.capitalize": "^4.2.1", + "lodash.escaperegexp": "^4.1.2", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.uniqby": "^4.7.0" + }, + "engines": { + "node": ">=10.13" + } + }, + 
"node_modules/java-properties": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/java-properties/-/java-properties-1.0.2.tgz", + "integrity": "sha512-qjdpeo2yKlYTH7nFdK0vbZWuTCesk4o63v5iVOlhMQPfuIZQfW/HI35SjfhA+4qpg36rnFSvUK5b1m+ckIblQQ==", + "dev": true, + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", + "dev": true + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dev": true, + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonparse": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", + "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==", + "dev": true, + "engines": [ + "node >= 0.2.0" + ] + }, + "node_modules/JSONStream": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz", + "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==", + "dev": true, + "dependencies": { + "jsonparse": "^1.2.0", + "through": ">=2.2.7 <3" + }, + "bin": { + "JSONStream": "bin.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dev": true, + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, 
+ "node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dev": true, + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dev": true, + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/load-json-file/node_modules/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==", + "dev": true, + "dependencies": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/locate-path": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", + "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", + "dev": true, + "dependencies": { + "p-locate": "^6.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "dev": true + }, + "node_modules/lodash.capitalize": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/lodash.capitalize/-/lodash.capitalize-4.2.1.tgz", + "integrity": "sha512-kZzYOKspf8XVX5AvmQF94gQW0lejFVgb80G85bU4ZWzoJ6C03PQg3coYAUpSTpQWelrZELd3XWgHzw4Ck5kaIw==", + "dev": true + }, + "node_modules/lodash.escaperegexp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.escaperegexp/-/lodash.escaperegexp-4.1.2.tgz", + "integrity": 
"sha512-TM9YBvyC84ZxE3rgfefxUWiQKLilstD6k7PTGt6wfbtXF8ixIJLOL3VYyV/z+ZiPLsVxAsKAFVwWlWeb2Y8Yyw==", + "dev": true + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "dev": true + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "dev": true + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "dev": true + }, + "node_modules/lodash.ismatch": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", + "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==", + "dev": true + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "dev": true + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "dev": true + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==", + "dev": true + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "dev": true + }, + "node_modules/lodash.uniqby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz", + "integrity": "sha512-e/zcLx6CSbmaEgFHCA7BnoQKyCtKMxnuWrJygbwPs/AIn+IMKl66L8/s+wBUn5LRw2pZx3bUHibiV1b6aTWIww==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.0.tgz", + "integrity": "sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==", + "dev": true, + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/map-obj": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", + "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/marked": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/marked/-/marked-5.1.2.tgz", + "integrity": "sha512-ahRPGXJpjMjwSOlBoTMZAK7ATXkli5qCPxZ21TG44rx1KEo44bii4ekgTDQPNRQ4Kh7JMb9Ub1PVk1NxRSsorg==", + "dev": true, + 
"bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 16" + } + }, + "node_modules/marked-terminal": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-5.2.0.tgz", + "integrity": "sha512-Piv6yNwAQXGFjZSaiNljyNFw7jKDdGrw70FSbtxEyldLsyeuV5ZHm/1wW++kWbrOF1VPnUgYOhB2oLL0ZpnekA==", + "dev": true, + "dependencies": { + "ansi-escapes": "^6.2.0", + "cardinal": "^2.1.1", + "chalk": "^5.2.0", + "cli-table3": "^0.6.3", + "node-emoji": "^1.11.0", + "supports-hyperlinks": "^2.3.0" + }, + "engines": { + "node": ">=14.13.1 || >=16.0.0" + }, + "peerDependencies": { + "marked": "^1.0.0 || ^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0" + } + }, + "node_modules/meow": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz", + "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==", + "dev": true, + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^3.0.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.18.0", + "yargs-parser": "^20.2.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/meow/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, 
+ "node_modules/meow/node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/read-pkg/node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/meow/node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/meow/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/meow/node_modules/type-fest": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", + "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": 
"sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/mime/-/mime-4.0.1.tgz", + "integrity": "sha512-5lZ5tyrIfliMXzFtkYyekWbtRXObT9OWa8IwQ5uxTBDHucNNwniRqo0yInflj+iYi5CBa6qxadGzGarDfuEOxA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa" + ], + "bin": { + "mime": "bin/cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "dependencies": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/modify-values": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz", + "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true + }, + "node_modules/nerf-dart": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/nerf-dart/-/nerf-dart-1.0.0.tgz", + "integrity": "sha512-EZSPZB70jiVsivaBLYDCyntd5eH8NTSMOn3rB+HxwdmKThGELLdYv8qVIMWvZEFy9w8ZZpW9h9OB32l1rGtj7g==", + "dev": true + }, + "node_modules/node-emoji": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz", + "integrity": "sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A==", + "dev": true, + "dependencies": { + "lodash": "^4.17.21" + } + }, + "node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": 
"sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/normalize-package-data/node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/normalize-package-data/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/normalize-url": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.0.1.tgz", + "integrity": "sha512-IO9QvjUMWxPQQhs60oOu10CRkWCiZzSUkzbXGGV9pviYl1fXYcvkzQ5jV9z8Y6un8ARoVRl4EtC6v6jNqbaJ/w==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm": { + "version": "9.9.3", + "resolved": "https://registry.npmjs.org/npm/-/npm-9.9.3.tgz", + "integrity": "sha512-Z1l+rcQ5kYb17F3hHtO601arEpvdRYnCLtg8xo3AGtyj3IthwaraEOexI9903uANkifFbqHC8hT53KIrozWg8A==", + "bundleDependencies": [ + "@isaacs/string-locale-compare", + "@npmcli/arborist", + "@npmcli/config", + "@npmcli/fs", + "@npmcli/map-workspaces", + "@npmcli/package-json", + "@npmcli/promise-spawn", + "@npmcli/run-script", + "abbrev", + "archy", + "cacache", + "chalk", + "ci-info", + "cli-columns", + "cli-table3", + "columnify", + "fastest-levenshtein", + "fs-minipass", + "glob", + "graceful-fs", + "hosted-git-info", + "ini", + "init-package-json", + "is-cidr", + "json-parse-even-better-errors", + "libnpmaccess", + "libnpmdiff", + "libnpmexec", + "libnpmfund", + "libnpmhook", + "libnpmorg", + "libnpmpack", + "libnpmpublish", + "libnpmsearch", + "libnpmteam", + "libnpmversion", + "make-fetch-happen", + "minimatch", + "minipass", + "minipass-pipeline", + "ms", + "node-gyp", + "nopt", + "normalize-package-data", + "npm-audit-report", + "npm-install-checks", + "npm-package-arg", + "npm-pick-manifest", + "npm-profile", + "npm-registry-fetch", + "npm-user-validate", + "npmlog", + "p-map", + "pacote", + "parse-conflict-json", + "proc-log", + "qrcode-terminal", + "read", + "semver", + "sigstore", + "spdx-expression-parse", + "ssri", + "supports-color", + "tar", + "text-table", + "tiny-relative-date", + "treeverse", + "validate-npm-package-name", + "which", + "write-file-atomic" + ], + "dev": true, + "workspaces": [ + "docs", + "smoke-tests", + "mock-globals", + "mock-registry", + "workspaces/*" + ], + "dependencies": { + "@isaacs/string-locale-compare": "^1.1.0", + "@npmcli/arborist": "^6.5.0", + "@npmcli/config": "^6.4.0", + "@npmcli/fs": "^3.1.0", + "@npmcli/map-workspaces": "^3.0.4", + "@npmcli/package-json": "^4.0.1", + "@npmcli/promise-spawn": "^6.0.2", + "@npmcli/run-script": "^6.0.2", + "abbrev": "^2.0.0", + "archy": "~1.0.0", + "cacache": "^17.1.4", + "chalk": "^5.3.0", + "ci-info": "^4.0.0", + "cli-columns": "^4.0.0", + 
"cli-table3": "^0.6.3", + "columnify": "^1.6.0", + "fastest-levenshtein": "^1.0.16", + "fs-minipass": "^3.0.3", + "glob": "^10.3.10", + "graceful-fs": "^4.2.11", + "hosted-git-info": "^6.1.1", + "ini": "^4.1.1", + "init-package-json": "^5.0.0", + "is-cidr": "^4.0.2", + "json-parse-even-better-errors": "^3.0.1", + "libnpmaccess": "^7.0.2", + "libnpmdiff": "^5.0.20", + "libnpmexec": "^6.0.4", + "libnpmfund": "^4.2.1", + "libnpmhook": "^9.0.3", + "libnpmorg": "^5.0.4", + "libnpmpack": "^5.0.20", + "libnpmpublish": "^7.5.1", + "libnpmsearch": "^6.0.2", + "libnpmteam": "^5.0.3", + "libnpmversion": "^4.0.2", + "make-fetch-happen": "^11.1.1", + "minimatch": "^9.0.3", + "minipass": "^7.0.4", + "minipass-pipeline": "^1.2.4", + "ms": "^2.1.2", + "node-gyp": "^9.4.1", + "nopt": "^7.2.0", + "normalize-package-data": "^5.0.0", + "npm-audit-report": "^5.0.0", + "npm-install-checks": "^6.3.0", + "npm-package-arg": "^10.1.0", + "npm-pick-manifest": "^8.0.2", + "npm-profile": "^7.0.1", + "npm-registry-fetch": "^14.0.5", + "npm-user-validate": "^2.0.0", + "npmlog": "^7.0.1", + "p-map": "^4.0.0", + "pacote": "^15.2.0", + "parse-conflict-json": "^3.0.1", + "proc-log": "^3.0.0", + "qrcode-terminal": "^0.12.0", + "read": "^2.1.0", + "semver": "^7.6.0", + "sigstore": "^1.9.0", + "spdx-expression-parse": "^3.0.1", + "ssri": "^10.0.5", + "supports-color": "^9.4.0", + "tar": "^6.2.0", + "text-table": "~0.2.0", + "tiny-relative-date": "^1.3.0", + "treeverse": "^3.0.0", + "validate-npm-package-name": "^5.0.0", + "which": "^3.0.1", + "write-file-atomic": "^5.0.1" + }, + "bin": { + "npm": "bin/npm-cli.js", + "npx": "bin/npx-cli.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/@colors/colors": { + "version": "1.5.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/npm/node_modules/@gar/promisify": { + "version": "1.1.3", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/@isaacs/cliui": { + "version": "8.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/@isaacs/string-locale-compare": { + "version": "1.1.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/@npmcli/arborist": { + "version": "6.5.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@isaacs/string-locale-compare": "^1.1.0", + "@npmcli/fs": "^3.1.0", + "@npmcli/installed-package-contents": "^2.0.2", + "@npmcli/map-workspaces": "^3.0.2", + "@npmcli/metavuln-calculator": "^5.0.0", + "@npmcli/name-from-folder": "^2.0.0", + "@npmcli/node-gyp": "^3.0.0", + "@npmcli/package-json": "^4.0.0", + "@npmcli/query": "^3.1.0", + "@npmcli/run-script": "^6.0.0", + "bin-links": "^4.0.1", + "cacache": "^17.0.4", + "common-ancestor-path": "^1.0.1", + "hosted-git-info": "^6.1.1", + "json-parse-even-better-errors": "^3.0.0", + "json-stringify-nice": "^1.1.4", + "minimatch": "^9.0.0", + "nopt": "^7.0.0", + "npm-install-checks": "^6.2.0", + "npm-package-arg": "^10.1.0", + "npm-pick-manifest": "^8.0.1", + "npm-registry-fetch": "^14.0.3", + "npmlog": "^7.0.1", + "pacote": "^15.0.8", + "parse-conflict-json": "^3.0.0", + "proc-log": "^3.0.0", + "promise-all-reject-late": "^1.0.0", + "promise-call-limit": "^1.0.2", + "read-package-json-fast": "^3.0.2", + "semver": "^7.3.7", + "ssri": "^10.0.1", + "treeverse": "^3.0.0", + "walk-up-path": "^3.0.1" + }, + "bin": { + "arborist": "bin/index.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/config": { + "version": "6.4.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/map-workspaces": "^3.0.2", + "ci-info": "^4.0.0", + "ini": "^4.1.0", + "nopt": "^7.0.0", + "proc-log": "^3.0.0", + "read-package-json-fast": "^3.0.2", + "semver": "^7.3.5", + "walk-up-path": "^3.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/disparity-colors": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "ansi-styles": "^4.3.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/fs": { + "version": "3.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/git": { + "version": "4.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/promise-spawn": "^6.0.0", + "lru-cache": "^7.4.4", + "npm-pick-manifest": "^8.0.0", + "proc-log": "^3.0.0", + "promise-inflight": "^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/installed-package-contents": { + "version": "2.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-bundled": "^3.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "bin": { + "installed-package-contents": "lib/index.js" + }, + "engines": { + "node": 
"^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/map-workspaces": { + "version": "3.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/name-from-folder": "^2.0.0", + "glob": "^10.2.2", + "minimatch": "^9.0.0", + "read-package-json-fast": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/metavuln-calculator": { + "version": "5.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "cacache": "^17.0.0", + "json-parse-even-better-errors": "^3.0.0", + "pacote": "^15.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/move-file": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/name-from-folder": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/node-gyp": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/package-json": { + "version": "4.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^4.1.0", + "glob": "^10.2.2", + "hosted-git-info": "^6.1.1", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^5.0.0", + "proc-log": "^3.0.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/promise-spawn": { + "version": "6.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/query": { + "version": "3.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@npmcli/run-script": { + "version": "6.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/node-gyp": "^3.0.0", + "@npmcli/promise-spawn": "^6.0.0", + "node-gyp": "^9.0.0", + "read-package-json-fast": "^3.0.0", + "which": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/npm/node_modules/@sigstore/bundle": { + "version": "1.1.0", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.2.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/protobuf-specs": { + "version": "0.2.1", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": 
"Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^1.1.0", + "@sigstore/protobuf-specs": "^0.2.0", + "make-fetch-happen": "^11.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@sigstore/tuf": { + "version": "1.0.3", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.2.0", + "tuf-js": "^1.1.7" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@tootallnate/once": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/npm/node_modules/@tufjs/canonical-json": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/@tufjs/models": { + "version": "1.0.4", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "1.0.0", + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/abbrev": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/agent-base": { + "version": "6.0.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/npm/node_modules/agentkeepalive": { + "version": "4.5.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/npm/node_modules/aggregate-error": { + "version": "3.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/ansi-regex": { + "version": "5.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/ansi-styles": { + "version": "4.3.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/npm/node_modules/aproba": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/archy": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/are-we-there-yet": { + "version": "4.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/balanced-match": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/bin-links": { + "version": "4.0.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "cmd-shim": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "read-cmd-shim": "^4.0.0", + "write-file-atomic": "^5.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/binary-extensions": { + "version": "2.2.0", + "dev": true, + "inBundle": true, + "license": 
"MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/brace-expansion": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/npm/node_modules/builtins": { + "version": "5.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "semver": "^7.0.0" + } + }, + "node_modules/npm/node_modules/cacache": { + "version": "17.1.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^7.7.1", + "minipass": "^7.0.3", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/chalk": { + "version": "5.3.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/npm/node_modules/chownr": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/ci-info": { + "version": "4.0.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/cidr-regex": { + "version": "3.1.1", + "dev": true, + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "ip-regex": "^4.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/clean-stack": { + "version": "2.2.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm/node_modules/cli-columns": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/npm/node_modules/cli-table3": { + "version": "0.6.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "string-width": "^4.2.0" + }, + "engines": { + "node": "10.* || >= 12.*" + }, + "optionalDependencies": { + "@colors/colors": "1.5.0" + } + }, + "node_modules/npm/node_modules/clone": { + "version": "1.0.4", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/npm/node_modules/cmd-shim": { + "version": "6.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/color-convert": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/npm/node_modules/color-name": { + "version": "1.1.4", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/color-support": { + "version": "1.1.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/npm/node_modules/columnify": { + "version": "1.6.0", + "dev": true, + "inBundle": true, + "license": "MIT", + 
"dependencies": { + "strip-ansi": "^6.0.1", + "wcwidth": "^1.0.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/npm/node_modules/common-ancestor-path": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/concat-map": { + "version": "0.0.1", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/console-control-strings": { + "version": "1.1.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/cross-spawn": { + "version": "7.0.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/cross-spawn/node_modules/which": { + "version": "2.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/cssesc": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm/node_modules/debug": { + "version": "4.3.4", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/npm/node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/defaults": { + "version": "1.0.4", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/delegates": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/diff": { + "version": "5.2.0", + "dev": true, + "inBundle": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/npm/node_modules/eastasianwidth": { + "version": "0.2.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/emoji-regex": { + "version": "8.0.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/encoding": { + "version": "0.1.13", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/npm/node_modules/env-paths": { + "version": "2.2.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm/node_modules/err-code": { + "version": "2.0.3", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/exponential-backoff": { + "version": "3.1.1", + "dev": true, + "inBundle": true, + "license": "Apache-2.0" + }, + "node_modules/npm/node_modules/fastest-levenshtein": { + "version": "1.0.16", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 4.9.1" + } + }, + "node_modules/npm/node_modules/foreground-child": { + "version": "3.1.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": 
">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/fs-minipass": { + "version": "3.0.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/fs.realpath": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/function-bind": { + "version": "1.1.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/npm/node_modules/gauge": { + "version": "5.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^4.0.1", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/glob": { + "version": "10.3.10", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.5", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/graceful-fs": { + "version": "4.2.11", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/has-unicode": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/hasown": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/npm/node_modules/hosted-git-info": { + "version": "6.1.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^7.5.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/http-cache-semantics": { + "version": "4.1.1", + "dev": true, + "inBundle": true, + "license": "BSD-2-Clause" + }, + "node_modules/npm/node_modules/http-proxy-agent": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/npm/node_modules/https-proxy-agent": { + "version": "5.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/npm/node_modules/humanize-ms": { + "version": "1.2.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ms": "^2.0.0" + } + }, + "node_modules/npm/node_modules/iconv-lite": { + "version": "0.6.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm/node_modules/ignore-walk": { + "version": "6.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minimatch": "^9.0.0" + }, + "engines": { + 
"node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/imurmurhash": { + "version": "0.1.4", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/npm/node_modules/indent-string": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/infer-owner": { + "version": "1.0.4", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/inflight": { + "version": "1.0.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/npm/node_modules/inherits": { + "version": "2.0.4", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/ini": { + "version": "4.1.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/init-package-json": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-package-arg": "^10.0.0", + "promzard": "^1.0.0", + "read": "^2.0.0", + "read-package-json": "^6.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/ip-address": { + "version": "9.0.5", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/npm/node_modules/ip-address/node_modules/sprintf-js": { + "version": "1.1.3", + "dev": true, + "inBundle": true, + "license": "BSD-3-Clause" + }, + "node_modules/npm/node_modules/ip-regex": { + "version": "4.3.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/is-cidr": { + "version": "4.0.2", + "dev": true, + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "cidr-regex": "^3.1.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/is-core-module": { + "version": "2.13.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/npm/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/is-lambda": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/isexe": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/jackspeak": { + "version": "2.3.6", + "dev": true, + "inBundle": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/npm/node_modules/jsbn": { + "version": "1.1.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/json-parse-even-better-errors": { + "version": "3.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + 
"engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/json-stringify-nice": { + "version": "1.1.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/jsonparse": { + "version": "1.3.1", + "dev": true, + "engines": [ + "node >= 0.2.0" + ], + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/just-diff": { + "version": "6.0.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/just-diff-apply": { + "version": "5.5.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/libnpmaccess": { + "version": "7.0.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-package-arg": "^10.1.0", + "npm-registry-fetch": "^14.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmdiff": { + "version": "5.0.21", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^6.5.0", + "@npmcli/disparity-colors": "^3.0.0", + "@npmcli/installed-package-contents": "^2.0.2", + "binary-extensions": "^2.2.0", + "diff": "^5.1.0", + "minimatch": "^9.0.0", + "npm-package-arg": "^10.1.0", + "pacote": "^15.0.8", + "tar": "^6.1.13" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmexec": { + "version": "6.0.5", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^6.5.0", + "@npmcli/run-script": "^6.0.0", + "ci-info": "^4.0.0", + "npm-package-arg": "^10.1.0", + "npmlog": "^7.0.1", + "pacote": "^15.0.8", + "proc-log": "^3.0.0", + "read": "^2.0.0", + "read-package-json-fast": "^3.0.2", + "semver": "^7.3.7", + "walk-up-path": "^3.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmfund": { + "version": "4.2.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^6.5.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmhook": { + "version": "9.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^2.0.0", + "npm-registry-fetch": "^14.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmorg": { + "version": "5.0.5", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^2.0.0", + "npm-registry-fetch": "^14.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmpack": { + "version": "5.0.21", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/arborist": "^6.5.0", + "@npmcli/run-script": "^6.0.0", + "npm-package-arg": "^10.1.0", + "pacote": "^15.0.8" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmpublish": { + "version": "7.5.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "ci-info": "^4.0.0", + "normalize-package-data": "^5.0.0", + "npm-package-arg": "^10.1.0", + "npm-registry-fetch": "^14.0.3", + "proc-log": "^3.0.0", + "semver": "^7.3.7", + "sigstore": "^1.4.0", + "ssri": "^10.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 
|| >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmsearch": { + "version": "6.0.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-registry-fetch": "^14.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmteam": { + "version": "5.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^2.0.0", + "npm-registry-fetch": "^14.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/libnpmversion": { + "version": "4.0.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^4.0.1", + "@npmcli/run-script": "^6.0.0", + "json-parse-even-better-errors": "^3.0.0", + "proc-log": "^3.0.0", + "semver": "^7.3.7" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/lru-cache": { + "version": "7.18.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/npm/node_modules/make-fetch-happen": { + "version": "11.1.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^17.0.0", + "http-cache-semantics": "^4.1.1", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/make-fetch-happen/node_modules/minipass": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minimatch": { + "version": "9.0.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/minipass": { + "version": "7.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/npm/node_modules/minipass-collect": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/minipass-collect/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-fetch": { + "version": "3.0.4", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/npm/node_modules/minipass-flush": { + "version": "1.0.5", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/minipass-flush/node_modules/minipass": { + "version": "3.3.6", + 
"dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-json-stream": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "jsonparse": "^1.3.1", + "minipass": "^3.0.0" + } + }, + "node_modules/npm/node_modules/minipass-json-stream/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-pipeline": { + "version": "1.2.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-sized": { + "version": "1.0.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minipass-sized/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/minizlib": { + "version": "2.1.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/mkdirp": { + "version": "1.0.4", + "dev": true, + "inBundle": true, + "license": "MIT", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/ms": { + "version": "2.1.3", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/mute-stream": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/negotiator": { + "version": "0.6.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/npm/node_modules/node-gyp": { + "version": "9.4.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", + "glob": "^7.1.4", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^10.0.3", + "nopt": "^6.0.0", + "npmlog": "^6.0.0", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^2.0.2" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": "^12.13 || ^14.13 || >=16" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs": { + "version": "2.1.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@gar/promisify": "^1.1.3", + "semver": "^7.3.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/abbrev": { + 
"version": "1.1.1", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/node-gyp/node_modules/are-we-there-yet": { + "version": "3.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/cacache": { + "version": "16.1.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^2.1.0", + "@npmcli/move-file": "^2.0.0", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^8.0.1", + "infer-owner": "^1.0.4", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^9.0.0", + "tar": "^6.1.11", + "unique-filename": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob": { + "version": "8.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch": { + "version": "5.1.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass": { + "version": "2.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/gauge": { + "version": "4.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/glob": { + "version": "7.2.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen": { + "version": "10.2.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.2.1", + 
"cacache": "^16.1.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^2.0.3", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^9.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch": { + "version": "2.1.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/nopt": { + "version": "6.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "abbrev": "^1.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/npmlog": { + "version": "6.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/signal-exit": { + "version": "3.0.7", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/node-gyp/node_modules/ssri": { + "version": "9.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.1.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/unique-filename": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^3.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/unique-slug": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/npm/node_modules/node-gyp/node_modules/which": { + "version": "2.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/nopt": { + "version": "7.2.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "abbrev": "^2.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + 
"node_modules/npm/node_modules/normalize-package-data": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^6.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-audit-report": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-bundled": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-install-checks": { + "version": "6.3.0", + "dev": true, + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "semver": "^7.1.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-normalize-package-bin": { + "version": "3.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-package-arg": { + "version": "10.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "hosted-git-info": "^6.0.0", + "proc-log": "^3.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-packlist": { + "version": "7.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "ignore-walk": "^6.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-pick-manifest": { + "version": "8.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-install-checks": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "npm-package-arg": "^10.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-profile": { + "version": "7.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "npm-registry-fetch": "^14.0.0", + "proc-log": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-registry-fetch": { + "version": "14.0.5", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "make-fetch-happen": "^11.0.0", + "minipass": "^5.0.0", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^10.0.0", + "proc-log": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npm-registry-fetch/node_modules/minipass": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/npm-user-validate": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "BSD-2-Clause", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/npmlog": { + "version": "7.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "are-we-there-yet": "^4.0.0", + "console-control-strings": 
"^1.1.0", + "gauge": "^5.0.0", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/once": { + "version": "1.4.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/npm/node_modules/p-map": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/pacote": { + "version": "15.2.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^4.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/promise-spawn": "^6.0.1", + "@npmcli/run-script": "^6.0.0", + "cacache": "^17.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^5.0.0", + "npm-package-arg": "^10.0.0", + "npm-packlist": "^7.0.0", + "npm-pick-manifest": "^8.0.0", + "npm-registry-fetch": "^14.0.0", + "proc-log": "^3.0.0", + "promise-retry": "^2.0.1", + "read-package-json": "^6.0.0", + "read-package-json-fast": "^3.0.0", + "sigstore": "^1.3.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "lib/bin.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/pacote/node_modules/minipass": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/parse-conflict-json": { + "version": "3.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^3.0.0", + "just-diff": "^6.0.0", + "just-diff-apply": "^5.2.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/path-is-absolute": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm/node_modules/path-key": { + "version": "3.1.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/path-scurry": { + "version": "1.10.1", + "dev": true, + "inBundle": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^9.1.1 || ^10.0.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.2.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/npm/node_modules/postcss-selector-parser": { + "version": "6.0.15", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm/node_modules/proc-log": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/promise-all-reject-late": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/promise-call-limit": { + "version": "1.0.2", + "dev": true, + "inBundle": 
true, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/promise-inflight": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/promise-retry": { + "version": "2.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/promzard": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "read": "^2.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/qrcode-terminal": { + "version": "0.12.0", + "dev": true, + "inBundle": true, + "bin": { + "qrcode-terminal": "bin/qrcode-terminal.js" + } + }, + "node_modules/npm/node_modules/read": { + "version": "2.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "mute-stream": "~1.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/read-cmd-shim": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/read-package-json": { + "version": "6.0.4", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^5.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/read-package-json-fast": { + "version": "3.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^3.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/readable-stream": { + "version": "3.6.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/npm/node_modules/retry": { + "version": "0.12.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/npm/node_modules/rimraf": { + "version": "3.0.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/rimraf/node_modules/brace-expansion": { + "version": "1.1.11", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/npm/node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/rimraf/node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": 
"^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/npm/node_modules/safe-buffer": { + "version": "5.2.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/safer-buffer": { + "version": "2.1.2", + "dev": true, + "inBundle": true, + "license": "MIT", + "optional": true + }, + "node_modules/npm/node_modules/semver": { + "version": "7.6.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/set-blocking": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/shebang-command": { + "version": "2.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/shebang-regex": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/signal-exit": { + "version": "4.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/npm/node_modules/sigstore": { + "version": "1.9.0", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^1.1.0", + "@sigstore/protobuf-specs": "^0.2.0", + "@sigstore/sign": "^1.0.0", + "@sigstore/tuf": "^1.0.3", + "make-fetch-happen": "^11.0.1" + }, + "bin": { + "sigstore": "bin/sigstore.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/smart-buffer": { + "version": "4.2.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/npm/node_modules/socks": { + "version": "2.8.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/npm/node_modules/socks-proxy-agent": { + "version": "7.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/npm/node_modules/spdx-correct": { + "version": "3.2.0", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/npm/node_modules/spdx-exceptions": { + "version": "2.5.0", + "dev": true, + "inBundle": true, + "license": "CC-BY-3.0" + }, + "node_modules/npm/node_modules/spdx-expression-parse": { + "version": "3.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": 
"^3.0.0" + } + }, + "node_modules/npm/node_modules/spdx-license-ids": { + "version": "3.0.17", + "dev": true, + "inBundle": true, + "license": "CC0-1.0" + }, + "node_modules/npm/node_modules/ssri": { + "version": "10.0.5", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/string_decoder": { + "version": "1.3.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/npm/node_modules/string-width": { + "version": "4.2.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/strip-ansi": { + "version": "6.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/supports-color": { + "version": "9.4.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/npm/node_modules/tar": { + "version": "6.2.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npm/node_modules/tar/node_modules/fs-minipass": { + "version": "2.1.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/npm/node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/npm/node_modules/text-table": { + "version": "0.2.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/tiny-relative-date": { + "version": "1.3.0", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/treeverse": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/tuf-js": { + "version": "1.1.7", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tufjs/models": "1.0.4", + "debug": "^4.3.4", + "make-fetch-happen": "^11.1.1" 
+ }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/unique-filename": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/unique-slug": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/util-deprecate": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/validate-npm-package-license": { + "version": "3.0.4", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/npm/node_modules/validate-npm-package-name": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "builtins": "^5.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/walk-up-path": { + "version": "3.0.1", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/wcwidth": { + "version": "1.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/npm/node_modules/which": { + "version": "3.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/wide-align": { + "version": "1.1.5", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/npm/node_modules/wrap-ansi": { + "version": "8.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "6.2.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "9.2.2", + "dev": true, + "inBundle": true, + "license": "MIT" + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/string-width": { + "version": "5.1.2", + "dev": true, + "inBundle": true, + "license": "MIT", + 
"dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/npm/node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/npm/node_modules/write-file-atomic": { + "version": "5.0.1", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "ISC" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-each-series": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-3.0.0.tgz", + "integrity": "sha512-lastgtAdoH9YaLyDa5i5z64q+kzOcQHsQ5SsZJD3q0VEyI8mq872S3geuNbRUQLVAE9siMfgKrpj7MloKFHruw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-filter": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-filter/-/p-filter-4.1.0.tgz", + "integrity": "sha512-37/tPdZ3oJwHaS3gNJdenCDB3Tz26i9sjhnguBtvN0vYlRIiDNnvTWkuh+0hETV9rLPdJ3rlL3yVOYPIAnM8rw==", + "dev": true, + "dependencies": { + "p-map": "^7.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-is-promise": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-3.0.0.tgz", + "integrity": "sha512-Wo8VsW4IRQSKVXsJCn7TomUaVtyfjVDn3nUP7kE967BQk0CwFpdbZs0X0uk5sW9mkBa9eNM7hCMaG93WUAwxYQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-limit": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", + "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz", + "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", + "dev": 
true, + "dependencies": { + "p-limit": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.1.tgz", + "integrity": "sha512-2wnaR0XL/FDOj+TgpDuRb2KTjLnu3Fma6b1ZUwGY7LcqenMcvP/YFpjpbPKY6WVGsbuJZRuoUz8iPrt8ORnAFw==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-reduce": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-reduce/-/p-reduce-2.1.0.tgz", + "integrity": "sha512-2USApvnsutq8uoxZBGbbWM0JIYLiEMJ9RlaN7fAzVNb9OZN0SHjjTTfIcb667XynS5Y1VhwDJVDa72TnPzAYWw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", + "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": 
"https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/pkg-conf": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-2.1.0.tgz", + "integrity": "sha512-C+VUP+8jis7EsQZIhDYmS5qlNtjv2yP4SNtjXK9AP1ZcTRlnSfuumaTnRfYZnYgUUYVIKqL0fRvmUGDV2fmp6g==", + "dev": true, + "dependencies": { + "find-up": "^2.0.0", + "load-json-file": "^4.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pkg-conf/node_modules/find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", + "dev": true, + "dependencies": { + "locate-path": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pkg-conf/node_modules/locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", + "dev": true, + "dependencies": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pkg-conf/node_modules/p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "dependencies": { + "p-try": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pkg-conf/node_modules/p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", + "dev": true, + "dependencies": { + "p-limit": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pkg-conf/node_modules/p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/pkg-conf/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/proto-list": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", + "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", + "dev": true + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": 
"sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dev": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/read-pkg": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-8.1.0.tgz", + "integrity": "sha512-PORM8AgzXeskHO/WEv312k9U03B8K9JSiWF/8N9sUuFjBa+9SF2u6K7VClzXwDXab51jCd8Nd36CNM+zR97ScQ==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.1", + "normalize-package-data": "^6.0.0", + "parse-json": "^7.0.0", + "type-fest": "^4.2.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-10.1.0.tgz", + "integrity": "sha512-aNtBq4jR8NawpKJQldrQcSW9y/d+KWH4v24HWkHljOZ7H0av+YTGANBzRh9A5pw7v/bLVsLVPpOhJ7gHNVy8lA==", + "dev": true, + "dependencies": { + "find-up": "^6.3.0", + "read-pkg": "^8.1.0", + "type-fest": "^4.2.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.12.0.tgz", + "integrity": "sha512-5Y2/pp2wtJk8o08G0CMkuFPCO354FGwk/vbidxrdhRGZfd0tFnb4Qb8anp9XxXriwBgVPjdWbKpGl4J9lJY2jQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg/node_modules/json-parse-even-better-errors": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.1.tgz", + "integrity": "sha512-aatBvbL26wVUCLmbWdCpeu9iF5wOyWpagiKkInA+kfws3sWdBrTnsvN2CKcyCYyUrc7rebNBlK6+kteg7ksecg==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/read-pkg/node_modules/lines-and-columns": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-2.0.4.tgz", + "integrity": "sha512-wM1+Z03eypVAVUCE7QdSqpVIvelbOakn1M0bPDoA4SGWPx3sNDVUiMo3L6To6WWGClB7VyXnhQ4Sn7gxiJbE6A==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/read-pkg/node_modules/normalize-package-data": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.0.tgz", + "integrity": "sha512-UL7ELRVxYBHBgYEtZCXjxuD5vPxnmvMGq0jp/dGPKKrN7tfsBh2IY7TlJ15WWwdjRWD3RJbnsygUurTK3xkPkg==", + "dev": true, + 
"dependencies": { + "hosted-git-info": "^7.0.0", + "is-core-module": "^2.8.1", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/read-pkg/node_modules/parse-json": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-7.1.1.tgz", + "integrity": "sha512-SgOTCX/EZXtZxBE5eJ97P4yGM5n37BwRU+YMsH4vNzFqJV/oWFXXCmwFlgWUM4PrakybVOueJJ6pwHqSVhTFDw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.21.4", + "error-ex": "^1.3.2", + "json-parse-even-better-errors": "^3.0.0", + "lines-and-columns": "^2.0.3", + "type-fest": "^3.8.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg/node_modules/parse-json/node_modules/type-fest": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-3.13.1.tgz", + "integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.12.0.tgz", + "integrity": "sha512-5Y2/pp2wtJk8o08G0CMkuFPCO354FGwk/vbidxrdhRGZfd0tFnb4Qb8anp9XxXriwBgVPjdWbKpGl4J9lJY2jQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/readable-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/redeyed": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/redeyed/-/redeyed-2.1.1.tgz", + "integrity": "sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ==", + "dev": true, + "dependencies": { + "esprima": "~4.0.0" + } + }, + "node_modules/registry-auth-token": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.2.tgz", + "integrity": "sha512-o/3ikDxtXaA59BmZuZrJZDJv8NMDGSj+6j6XaeBmHw8eY1i1qd9+6H+LjVvQXx3HN6aRCGa1cUdJ9RaJZUugnQ==", + "dev": true, + "dependencies": { + "@pnpm/npm-conf": "^2.1.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/semantic-release": { + "version": "21.1.2", + "resolved": "https://registry.npmjs.org/semantic-release/-/semantic-release-21.1.2.tgz", + "integrity": "sha512-kz76azHrT8+VEkQjoCBHE06JNQgTgsC4bT8XfCzb7DHcsk9vG3fqeMVik8h5rcWCYi2Fd+M3bwA7BG8Z8cRwtA==", + "dev": true, + "dependencies": { + "@semantic-release/commit-analyzer": "^10.0.0", + "@semantic-release/error": "^4.0.0", + "@semantic-release/github": "^9.0.0", + "@semantic-release/npm": "^10.0.2", + "@semantic-release/release-notes-generator": "^11.0.0", + "aggregate-error": "^5.0.0", + "cosmiconfig": "^8.0.0", + "debug": "^4.0.0", + "env-ci": "^9.0.0", + "execa": "^8.0.0", + "figures": "^5.0.0", + "find-versions": "^5.1.0", + "get-stream": "^6.0.0", + "git-log-parser": "^1.2.0", + "hook-std": "^3.0.0", + "hosted-git-info": "^7.0.0", + "lodash-es": "^4.17.21", + "marked": "^5.0.0", + "marked-terminal": "^5.1.1", + "micromatch": "^4.0.2", + "p-each-series": "^3.0.0", + "p-reduce": "^3.0.0", + "read-pkg-up": "^10.0.0", + "resolve-from": "^5.0.0", + "semver": "^7.3.2", + "semver-diff": "^4.0.0", + "signale": "^1.2.1", + "yargs": "^17.5.1" + 
}, + "bin": { + "semantic-release": "bin/semantic-release.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/semantic-release/node_modules/@semantic-release/error": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-4.0.0.tgz", + "integrity": "sha512-mgdxrHTLOjOddRVYIYDo0fR3/v61GNN1YGkfbrjuIKg/uMgCd+Qzo3UAXJ+woLQQpos4pl5Esuw5A7AoNlzjUQ==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/semantic-release/node_modules/aggregate-error": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", + "integrity": "sha512-gOsf2YwSlleG6IjRYG2A7k0HmBMEo6qVNk9Bp/EaLgAJT5ngH6PXbqa4ItvnEwCm/velL5jAnQgsHsWnjhGmvw==", + "dev": true, + "dependencies": { + "clean-stack": "^5.2.0", + "indent-string": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/clean-stack": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.2.0.tgz", + "integrity": "sha512-TyUIUJgdFnCISzG5zu3291TAsE77ddchd0bepon1VVQrKLGKFED4iXFEDQ24mIPdPBbyE16PK3F8MYE1CmcBEQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "5.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/semantic-release/node_modules/execa/node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/semantic-release/node_modules/indent-string": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", + "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + 
"dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/p-reduce": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-reduce/-/p-reduce-3.0.0.tgz", + "integrity": "sha512-xsrIUgI0Kn6iyDYm9StOpOeK29XM1aboGji26+QEortiFST1hGZaUQOLhtEbqHErPpGW/aSz6allwK2qcptp0Q==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/semantic-release/node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver-diff": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/semver-diff/-/semver-diff-4.0.0.tgz", + "integrity": "sha512-0Ju4+6A8iOnpL/Thra7dZsSlOHYAHIeMxfhWQRI1/VLcT3WDBZKKtQt/QkBOsiIN9ZpuvHE6cGZ0x4glCMmfiA==", + "dev": true, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semver-regex": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-4.0.5.tgz", + "integrity": "sha512-hunMQrEy1T6Jr2uEVjrAIqjwWcQTgOAcIM52C8MY1EZSD3DDNft04XzvYKPqjED65bNVVko0YI38nYeEHCX3yw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/signale": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/signale/-/signale-1.4.0.tgz", + "integrity": "sha512-iuh+gPf28RkltuJC7W5MRi6XAjTDCAPC/prJUpQoG4vIP3MJZ+GTydVnodXA7pwvTKb2cA0m9OFZW/cdWy/I/w==", + "dev": true, + "dependencies": { + "chalk": "^2.3.2", + "figures": "^2.0.0", + "pkg-conf": "^2.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/signale/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/signale/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/signale/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": 
"1.1.3" + } + }, + "node_modules/signale/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/signale/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/signale/node_modules/figures": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", + "integrity": "sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/signale/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/signale/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spawn-error-forwarder": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/spawn-error-forwarder/-/spawn-error-forwarder-1.0.0.tgz", + "integrity": "sha512-gRjMgK5uFjbCvdibeGJuy3I5OYz6VLoVdsOJdA6wV0WlfQVLFueoqMxwwYD9RODdgb6oUIvlRlsyFSiQkMKu0g==", + "dev": true + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": 
"sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.17", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz", + "integrity": "sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg==", + "dev": true + }, + "node_modules/split": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "dev": true, + "dependencies": { + "through": "2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "dev": true, + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/stream-combiner2": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/stream-combiner2/-/stream-combiner2-1.1.1.tgz", + "integrity": "sha512-3PnJbYgS56AeWgtKF5jtJRT6uFJe56Z0Hc5Ngg/6sI6rIt8iiMBTa9cvdyFfpMQjaVHr8dusbNeFGIIonxOvKw==", + "dev": true, + "dependencies": { + "duplexer2": "~0.1.0", + "readable-stream": "^2.0.2" + } + }, + "node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-hyperlinks": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz", + "integrity": "sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0", + "supports-color": "^7.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/temp-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-3.0.0.tgz", + "integrity": "sha512-nHc6S/bwIilKHNRgK/3jlhDoIHcp45YgyiwcAk46Tr0LfEqGBVpmiAyuiuxeVE44m3mXnEeVhaipLOEWmH+Njw==", + "dev": true, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/tempy": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tempy/-/tempy-3.1.0.tgz", + "integrity": "sha512-7jDLIdD2Zp0bDe5r3D2qtkd1QOCacylBuL7oa4udvN6v2pqr4+LcCr67C8DR1zkpaZ8XosF5m1yQSabKAW6f2g==", + "dev": true, + "dependencies": { + "is-stream": "^3.0.0", + "temp-dir": "^3.0.0", + "type-fest": "^2.12.2", + "unique-string": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/tempy/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/tempy/node_modules/type-fest": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/text-extensions": { + "version": "2.4.0", + "resolved": 
"https://registry.npmjs.org/text-extensions/-/text-extensions-2.4.0.tgz", + "integrity": "sha512-te/NtwBwfiNRLf9Ijqx3T0nlqZiQ2XrrtBvu+cLL8ZRrGkO0NHTug8MYFKyoSrv/sHTaSKfilUkizV6XhxMJ3g==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true + }, + "node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/traverse": { + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.8.tgz", + "integrity": "sha512-aXJDbk6SnumuaZSANd21XAo15ucCDE38H4fkqiGsc3MhCK+wOlZvLP9cB/TvpHT0mOyWgC4Z8EwRlzqYSUzdsA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/trim-newlines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", + "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/type-fest": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-3.13.1.tgz", + "integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/uglify-js": { + "version": "3.17.4", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.17.4.tgz", + "integrity": "sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==", + "dev": true, + "optional": true, + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/unicorn-magic": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", + "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unique-string": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz", + "integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==", + "dev": true, + "dependencies": { + "crypto-random-string": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/universal-user-agent": { + 
"version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", + "dev": true + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "dev": true, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/url-join": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-5.0.0.tgz", + "integrity": "sha512-n2huDr9h9yzd6exQVnH/jU5mr+Pfx08LRXXZhkLLetAMESRj+anQsTAh940iMrIetKAmry9coFuZQ2jY8/p3WA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "dev": true, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000..e36b8220 --- /dev/null +++ b/package.json @@ -0,0 +1,29 @@ +{ + "name": "ids-basecamp-clearinghouse", + "version": "1.0.0", + "description": "The IDS Clearing House Service is a prototype implementation of the [Clearing House](https://github.com/International-Data-Spaces-Association/IDS-RAM_4_0/blob/main/documentation/3_Layers_of_the_Reference_Architecture_Model/3_5_System_Layer/3_5_5_Clearing_House.md) component of the [Industrial Data Space](https://internationaldataspaces.org/).", + "main": "index.js", + "directories": { + "doc": "doc" + }, + "scripts": { + "test:app:int": "cd clearing-house-app && cargo test -- --ignored", + "test:app": "cd clearing-house-app && cargo test", + "test:edc": "cd clearing-house-edc && ./gradlew test", + "docs": "mdbook serve", + "start": "docker compose -f docker/docker-compose.yml up -d" + }, + "bugs": { + "url": "https://github.com/ids-basecamp/clearinghouse/issues" + }, + "keywords": [], + "repository": "https://github.com/ids-basecamp/clearinghouse", + "author": "Maximilian Schönenberg, Daniel Hommen", + "license": "Apache-2.0", + "devDependencies": { + "@semantic-release/changelog": "^6.0.3", + "@semantic-release/git": "^10.0.1", + "jsonwebtoken": "^9.0.2", + "semantic-release": "^21.0.7" + } +} diff --git a/tests/load.js b/tests/load.js new file mode 100644 index 00000000..743a22b4 --- /dev/null +++ b/tests/load.js @@ -0,0 +1,25 @@ +import http from 'k6/http'; +import { check } from 'k6'; +import logMessage from './util/logMessage.js'; +import header from './util/header.js'; + +export const options = { + vus: 10, + duration: "1m" +}; + +const url = `http://${__ENV.HOSTNAME}`; +const TOKEN = `${__ENV.TOKEN}` + +export default () => { + const logMessageHeader = { + 
"Content-Type": "application/json", + "CH-SERVICE": TOKEN + } + + const logMessageRes = http.post(`${url}/messages/log/6`, JSON.stringify(logMessage(), null, 2), { headers: header() }); + check(logMessageRes, { + 'ch-app POST logmessage is status 201': (r) => r.status === 201, + }); + +}; diff --git a/tests/smoke.js b/tests/smoke.js new file mode 100644 index 00000000..ba7df1b7 --- /dev/null +++ b/tests/smoke.js @@ -0,0 +1,23 @@ +import http from 'k6/http'; +import { check } from 'k6'; +import logMessage from './util/logMessage.js'; +import header from './util/header.js' + +export const options = { + vus: 2 +}; + +const url = `http://${__ENV.HOSTNAME}`; + +export default () => { + const jwksRes = http.get(`${url}/.well-known/jwks.json`); + check(jwksRes, { + 'ch-app GET jwks is status 200': (r) => r.status === 200, + }); + + + const logMessageRes = http.post(`${url}/messages/log/6`, JSON.stringify(logMessage(), null, 2), { headers: header() }); + check(logMessageRes, { + 'ch-app POST logmessage is status 201': (r) => r.status === 201, + }); +}; diff --git a/tests/util/header.js b/tests/util/header.js new file mode 100644 index 00000000..8f14f0a5 --- /dev/null +++ b/tests/util/header.js @@ -0,0 +1,6 @@ +export default () => { + return { + "Content-Type": "application/json", + "CH-SERVICE": __ENV.TOKEN + } +} \ No newline at end of file diff --git a/tests/util/logMessage.js b/tests/util/logMessage.js new file mode 100644 index 00000000..48f105fc --- /dev/null +++ b/tests/util/logMessage.js @@ -0,0 +1,32 @@ +const date = new Date(); + +export default () => { + return { + "header": { + + "@context": { + // ... (HashMap) + }, + "@type": "ids:LogMessage", + "@id": "String", + "modelVersion": "String", + "correlationMessage": "String", + "issued": date.toISOString(), + "issuerConnector": "InfoModelId", + "senderAgent": "String", + "recipientConnector": [ + "test" + ], + "recipientAgent": [ + "test" + ], + "transferContract": "String", + "contentVersion": "String", + "securityToken": null, + "authorizationToken": "String", + "payload": "String", + "payload_type": "String" + }, + payload: "hello world" + } +} \ No newline at end of file