diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..81fd6345a --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,16 @@ + + +### Requires + + +### Supports + diff --git a/.github/workflows/golangci_lint.yml b/.github/workflows/golangci_lint.yml index 2b787841e..22ef02836 100644 --- a/.github/workflows/golangci_lint.yml +++ b/.github/workflows/golangci_lint.yml @@ -5,23 +5,13 @@ on: [pull_request] jobs: golangci-lint: runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + actions: read steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Set up Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: "go.mod" - - name: Build binary - shell: bash - run: go build ./... - name: golangci-lint - uses: golangci/golangci-lint-action@aaa42aa0628b4ae2578232a66b541047968fac86 # v6.1.0 + uses: smartcontractkit/.github/actions/ci-lint-go@2ac9d97a83a5edded09af7fcf4ea5bce7a4473a4 # v0.2.6 with: - version: v1.60.1 - # only-new-issues is only applicable to PRs, otherwise it is always set to false - only-new-issues: true - args: --out-format colored-line-number,checkstyle:golangci-lint-report.xml - - name: Print lint report artifact - if: failure() - shell: bash - run: cat ./golangci-lint-report.xml + golangci-lint-version: v1.61.0 + \ No newline at end of file diff --git a/.github/workflows/llm-action-error-reporter.yml b/.github/workflows/llm-action-error-reporter.yml new file mode 100644 index 000000000..98622317a --- /dev/null +++ b/.github/workflows/llm-action-error-reporter.yml @@ -0,0 +1,23 @@ +name: LLM Action Error Reporter +on: + workflow_run: + workflows: ["PKG Build and Test"] # As soon as one of the listed workflows is completed, reporter is triggered + types: + - completed + +jobs: + analyze_logs: + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + repository-projects: read + actions: read + steps: + - name: Analyze logs + uses: smartcontractkit/.github/actions/llm-action-error-reporter@d125ca9fe5e3b410de7c6db4a4ce3ed7a0728cd6 # v0.3.0 + with: + parent-workflow-conclusion: ${{ github.event.workflow_run.conclusion }} + skip-on-success: true # Skip posting comment if no errors are found + gh-token: ${{ github.token }} + openai-api-key: ${{ secrets.OPENAI_API_KEY }} \ No newline at end of file diff --git a/.github/workflows/observability.yml b/.github/workflows/observability.yml index 7c9653f1e..f70124109 100644 --- a/.github/workflows/observability.yml +++ b/.github/workflows/observability.yml @@ -21,7 +21,7 @@ jobs: go-version-file: "go.mod" - name: Build - run: go build -v ./... + run: make build - name: Unit Tests - run: go test -v ./... + run: make test diff --git a/.github/workflows/pkg.yml b/.github/workflows/pkg.yml index 4140ba01a..e89eeaa56 100644 --- a/.github/workflows/pkg.yml +++ b/.github/workflows/pkg.yml @@ -18,7 +18,7 @@ jobs: run: go build -v ./... - name: Unit Tests - run: GORACE="log_path=$PWD/race" go test -race ./... -coverpkg=./... -coverprofile=pkg_coverage.out + run: GORACE="log_path=$PWD/race" go test -race ./... -coverpkg=./... -coverprofile=coverage.txt - name: Print Races if: failure() @@ -42,20 +42,13 @@ if: failure() run: find . 
-type f|fgrep '/testdata/fuzz/'|while read f; do echo $f; cat $f; done - - name: Upload Fuzz Tests Failing Inputs - if: failure() - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 - with: - name: failing-fuzz-inputs - path: "**/testdata/fuzz/**" - - name: Upload Go test results if: always() uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: go-test-results path: | - ./pkg_coverage.out + ./coverage.txt ./race.* check-tidy: diff --git a/.github/workflows/sonar-scan.yml b/.github/workflows/sonar-scan.yml index a1c2b77fb..8361b2eb2 100644 --- a/.github/workflows/sonar-scan.yml +++ b/.github/workflows/sonar-scan.yml @@ -14,11 +14,11 @@ jobs: ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - name: Wait for workflows - uses: smartcontractkit/chainlink-github-actions/utils/wait-for-workflows@e29366cdecfe6befff9ab8c3cfe4825218505d58 # v2.3.16 + uses: smartcontractkit/.github/actions/wait-for-workflows@dca9ab89d734e82738b8aa52bd25d09b205ec6ee # v0.1.1 with: - max-timeout: "900" + max-timeout: "1200" polling-interval: "30" - exclude-workflow-names: "" + exclude-workflow-names: "Build External Repositories, Observability Lib Checks, Run Benchmarks, LLM Action Error Reporter" exclude-workflow-ids: "" github-token: ${{ secrets.GITHUB_TOKEN }} env: @@ -30,46 +30,12 @@ jobs: runs-on: ubuntu-latest if: always() steps: - - name: Checkout the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - with: - fetch-depth: 0 # fetches all history for all tags and branches to provide more metadata for sonar reports - - - name: Download Golangci-lint report - if: always() - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 - with: - workflow: golangci_lint.yml - workflow_conclusion: "" - name_is_regexp: true - name: golangci-lint-report - if_no_artifact_found: warn - - - name: Download Go PKG test reports - if: always() - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 - with: - workflow: pkg.yml - workflow_conclusion: "" - name_is_regexp: true - name: go-test-results - if_no_artifact_found: warn - - - name: Set SonarQube Report Paths - if: always() - id: sonarqube_report_paths - shell: bash - run: | - echo "sonarqube_coverage_report_paths=$(find -type f -name '*coverage.out' -printf "%p,")" >> $GITHUB_OUTPUT - echo "sonarqube_golangci_report_paths=$(find -type f -name 'golangci-lint-report.xml' -printf "%p,")" >> $GITHUB_OUTPUT - - name: SonarQube Scan - if: always() - uses: sonarsource/sonarqube-scan-action@53c3e3207fe4b8d52e2f1ac9d6eb1d2506f626c0 # v2.0.2 + uses: smartcontractkit/.github/actions/ci-sonarqube-go@5f4a9c9c3407dd499a1ebbc658a45b9beb9bf675 # v0.3.0 with: - args: > - -Dsonar.go.coverage.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_coverage_report_paths }} - -Dsonar.go.golangci-lint.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_golangci_report_paths }} - env: - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }} + # sonarqube inputs + include-lint: "true" + test-report-workflow: pkg.yml + lint-report-workflow: golangci_lint.yml + sonar-token: ${{ secrets.SONAR_TOKEN }} + sonar-host-url: ${{ secrets.SONAR_HOST_URL }} diff --git a/.gitignore b/.gitignore index 6d1bc4012..493ae60ed 100644 --- a/.gitignore +++ b/.gitignore @@ -19,11 +19,13 @@ **/testdata/fuzz/* # Dependency directories (remove the comment below to include it) -# 
vendor/ +vendor/ # IntelliJ IDE .idea -vendor/ +# Visual Studio Code +.vscode +# Generated files *.wasm diff --git a/.golangci.yml b/.golangci.yml index 8c1a4b166..8b4070989 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -2,23 +2,29 @@ run: timeout: 15m0s linters: enable: + - containedctx + - depguard + - errname + - errorlint - exhaustive - exportloopref - - revive + - fatcontext + - ginkgolinter - goimports - gosec + - loggercheck + - mirror - misspell + - noctx + - perfsprint + - prealloc + - revive - rowserrcheck - - errorlint - - unconvert + - spancheck - sqlclosecheck - - noctx + - testifylint + - unconvert - whitespace - - depguard - - containedctx - - fatcontext - - mirror - - loggercheck linters-settings: exhaustive: default-signifies-exhaustive: true diff --git a/.tool-versions b/.tool-versions index b82d197d7..6b6bb428c 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,5 +1,5 @@ golang 1.22.7 protoc 25.1 protoc-gen-go-grpc 1.3.0 -golangci-lint 1.55.2 +golangci-lint 1.61.0 mockery 2.43.2 diff --git a/go.mod b/go.mod index 062c250d4..7e1981644 100644 --- a/go.mod +++ b/go.mod @@ -36,26 +36,29 @@ require ( github.com/smartcontractkit/libocr v0.0.0-20241007185508-adbe57025f12 github.com/stretchr/testify v1.9.0 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 - go.opentelemetry.io/otel v1.28.0 + go.opentelemetry.io/otel v1.30.0 go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9 + go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0 go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0 go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0 - go.opentelemetry.io/otel/log v0.4.0 - go.opentelemetry.io/otel/metric v1.28.0 - go.opentelemetry.io/otel/sdk v1.28.0 - go.opentelemetry.io/otel/sdk/log v0.4.0 - go.opentelemetry.io/otel/sdk/metric v1.28.0 - go.opentelemetry.io/otel/trace v1.28.0 + go.opentelemetry.io/otel/log v0.6.0 + go.opentelemetry.io/otel/metric v1.30.0 + go.opentelemetry.io/otel/sdk v1.30.0 + go.opentelemetry.io/otel/sdk/log v0.6.0 + go.opentelemetry.io/otel/sdk/metric v1.30.0 + go.opentelemetry.io/otel/trace v1.30.0 go.uber.org/goleak v1.3.0 go.uber.org/zap v1.27.0 golang.org/x/crypto v0.27.0 golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 golang.org/x/tools v0.25.0 gonum.org/v1/gonum v0.15.0 - google.golang.org/grpc v1.65.0 + google.golang.org/grpc v1.66.1 google.golang.org/protobuf v1.34.2 sigs.k8s.io/yaml v1.4.0 ) @@ -95,7 +98,7 @@ require ( github.com/stretchr/objx v0.5.2 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/x448/float16 v0.8.4 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0 // indirect go.opentelemetry.io/proto/otlp v1.3.1 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/mod v0.21.0 // indirect @@ -104,7 +107,7 @@ require ( golang.org/x/sys v0.25.0 // indirect golang.org/x/text v0.18.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd // indirect - 
google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index aa0c53162..10f49bb5d 100644 --- a/go.sum +++ b/go.sum @@ -253,34 +253,40 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 h1:vS1Ao/R55RNV4O7TA2Qopok8yN+X0LIP6RVWLFkprck= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0/go.mod h1:BMsdeOxN04K0L5FNUBfjFdvwWGNe/rkmSwH4Aelu/X0= -go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= -go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= +go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= +go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9 h1:UiRNKd1OgqsLbFwE+wkAWTdiAxXtCBqKIHeBIse4FUA= go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9/go.mod h1:eqZlW3pJWhjyexnDPrdQxix1pn0wwhI4AO4GKpP/bMI= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 h1:QSKmLBzbFULSyHzOdO9JsN9lpE4zkrz1byYGmJecdVE= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0/go.mod h1:sTQ/NH8Yrirf0sJ5rWqVu+oT82i4zL9FaF6rWcqnptM= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 h1:U2guen0GhqH8o/G2un8f/aG/y++OuW6MyCo6hT9prXk= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0/go.mod h1:yeGZANgEcpdx/WK0IvvRFC+2oLiMS2u4L/0Rj2M2Qr0= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 h1:3Q/xZUyC1BBkualc9ROb4G8qkH90LXEIICcs5zv1OYY= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0/go.mod h1:s75jGIWA9OfCMzF0xr+ZgfrB5FEbbV7UuYo32ahUiFI= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0 h1:VrMAbeJz4gnVDg2zEzjHG4dEH86j4jO6VYB+NgtGD8s= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0/go.mod h1:qqN/uFdpeitTvm+JDqqnjm517pmQRYxTORbETHq5tOc= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0 h1:lsInsfvhVIfOI6qHVyysXMNDnjO9Npvl7tlDPJFBVd4= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0/go.mod h1:KQsVNh4OjgjTG0G6EiNi1jVpnaeeKsKMRwbLN+f1+8M= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 h1:R3X6ZXmNPRR8ul6i3WgFURCHzaXjHdm0karRG/+dj3s= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0/go.mod h1:QWFXnDavXWwMx2EEcZsf3yxgEKAqsxQ+Syjp+seyInw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 h1:umZgi92IyxfXd/l4kaDhnKgY8rnN/cZcF1LKc6I8OQ8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0/go.mod h1:4lVs6obhSVRb1EW5FhOuBTyiQhtRtAnnva9vD3yRfq8= go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0 h1:0MH3f8lZrflbUWXVxyBg/zviDFdGE062uKh5+fu8Vv0= go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0/go.mod h1:Vh68vYiHY5mPdekTr0ox0sALsqjoVy0w3Os278yX5SQ= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0 h1:BJee2iLkfRfl9lc7aFmBwkWxY/RI1RDdXepSF6y8TPE= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric 
v1.28.0/go.mod h1:DIzlHs3DRscCIBU3Y9YSzPfScwnYnzfnCd4g8zA7bZc= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0 h1:EVSnY9JbEEW92bEkIYOVMw4q1WJxIAGoFTrtYOzWuRQ= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0/go.mod h1:Ea1N1QQryNXpCD0I1fdLibBAIpQuBkznMmkdKrapk1Y= -go.opentelemetry.io/otel/log v0.4.0 h1:/vZ+3Utqh18e8TPjuc3ecg284078KWrR8BRz+PQAj3o= -go.opentelemetry.io/otel/log v0.4.0/go.mod h1:DhGnQvky7pHy82MIRV43iXh3FlKN8UUKftn0KbLOq6I= -go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= -go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= -go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= -go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= -go.opentelemetry.io/otel/sdk/log v0.4.0 h1:1mMI22L82zLqf6KtkjrRy5BbagOTWdJsqMY/HSqILAA= -go.opentelemetry.io/otel/sdk/log v0.4.0/go.mod h1:AYJ9FVF0hNOgAVzUG/ybg/QttnXhUePWAupmCqtdESo= -go.opentelemetry.io/otel/sdk/metric v1.28.0 h1:OkuaKgKrgAbYrrY0t92c+cC+2F6hsFNnCQArXCKlg08= -go.opentelemetry.io/otel/sdk/metric v1.28.0/go.mod h1:cWPjykihLAPvXKi4iZc1dpER3Jdq2Z0YLse3moQUCpg= -go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= -go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= +go.opentelemetry.io/otel/log v0.6.0 h1:nH66tr+dmEgW5y+F9LanGJUBYPrRgP4g2EkmPE3LeK8= +go.opentelemetry.io/otel/log v0.6.0/go.mod h1:KdySypjQHhP069JX0z/t26VHwa8vSwzgaKmXtIB3fJM= +go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= +go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ= +go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE= +go.opentelemetry.io/otel/sdk v1.30.0/go.mod h1:p14X4Ok8S+sygzblytT1nqG98QG2KYKv++HE0LY/mhg= +go.opentelemetry.io/otel/sdk/log v0.6.0 h1:4J8BwXY4EeDE9Mowg+CyhWVBhTSLXVXodiXxS/+PGqI= +go.opentelemetry.io/otel/sdk/log v0.6.0/go.mod h1:L1DN8RMAduKkrwRAFDEX3E3TLOq46+XMGSbUfHU/+vE= +go.opentelemetry.io/otel/sdk/metric v1.30.0 h1:QJLT8Pe11jyHBHfSAgYH7kEmT24eX792jZO1bo4BXkM= +go.opentelemetry.io/otel/sdk/metric v1.30.0/go.mod h1:waS6P3YqFNzeP01kuo/MBBYqaoBJl7efRQHOaydhy1Y= +go.opentelemetry.io/otel/trace v1.30.0 h1:7UBkkYzeg3C7kQX8VAidWh2biiQbtAKjyIML8dQ9wmc= +go.opentelemetry.io/otel/trace v1.30.0/go.mod h1:5EyKqTzzmyqB9bwtCCq6pDLktPK6fmGf/Dph+8VI02o= go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= @@ -376,17 +382,17 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20210401141331-865547bb08e2/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd h1:BBOTEWLuuEGQy9n1y9MhVJ9Qt0BDu21X8qZs71/uPZo= -google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:fO8wJzT2zbQbAjbIoos1285VfEIYKDDY+Dt+WpTkh6g= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd 
h1:6TEm2ZxXoQmFWFlt1vNxvVOa1Q0dXFQD1m/rYjXmS0E= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 h1:hjSy6tcFQZ171igDaN5QHOw2n6vx40juYbC/x67CEhc= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:qpvKtACPCQhAdu3PyQgV4l3LMXZEtft7y8QcarRsp9I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= -google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= +google.golang.org/grpc v1.66.1 h1:hO5qAXR19+/Z44hmvIM4dQFMSYX9XcWsByfoxutBpAM= +google.golang.org/grpc v1.66.1/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/observability-lib/Makefile b/observability-lib/Makefile index ad7795deb..be7def023 100644 --- a/observability-lib/Makefile +++ b/observability-lib/Makefile @@ -12,4 +12,8 @@ lint: .PHONY: test test: - go test -v ./... \ No newline at end of file + go test ./... + +.PHONY: update +update: + go test ./dashboards/... -update=1 diff --git a/observability-lib/api/contact-point.go b/observability-lib/api/contact-point.go index b31f5bc0a..331a561d6 100644 --- a/observability-lib/api/contact-point.go +++ b/observability-lib/api/contact-point.go @@ -80,7 +80,7 @@ func (c *Client) DeleteContactPoint(uid string) (DeleteContactPointResponse, *re } statusCode := resp.StatusCode() - if statusCode != 204 { + if statusCode != 202 { return DeleteContactPointResponse{}, resp, fmt.Errorf("error deleting contact point, received unexpected status code %d: %s", statusCode, resp.String()) } @@ -95,6 +95,7 @@ func (c *Client) PostContactPoint(contactPoint alerting.ContactPoint) (PostConta resp, err := c.resty.R(). SetHeader("Content-Type", "application/json"). + SetHeader("X-Disable-Provenance", "true"). SetBody(contactPoint). SetResult(&grafanaResp). Post("/api/v1/provisioning/contact-points") @@ -119,6 +120,7 @@ func (c *Client) PutContactPoint(uid string, contactPoint alerting.ContactPoint) resp, err := c.resty.R(). SetHeader("Content-Type", "application/json"). + SetHeader("X-Disable-Provenance", "true"). SetBody(contactPoint). SetResult(&grafanaResp). 
Put(fmt.Sprintf("/api/v1/provisioning/contact-points/%s", uid)) diff --git a/observability-lib/api/datasource.go b/observability-lib/api/datasource.go index 1deefebfb..ee67a9be6 100644 --- a/observability-lib/api/datasource.go +++ b/observability-lib/api/datasource.go @@ -27,7 +27,7 @@ func (c *Client) GetDataSourceByName(name string) (*Datasource, *resty.Response, statusCode := resp.StatusCode() if statusCode != 200 { - return nil, resp, fmt.Errorf("error fetching datasource, received unexpected status code %d: %s", statusCode, resp.String()) + return nil, resp, fmt.Errorf("error fetching datasource %s, received unexpected status code %d: %s", name, statusCode, resp.String()) } return &grafanaResp, resp, nil } diff --git a/observability-lib/api/notification-policy.go b/observability-lib/api/notification-policy.go index 3a96f9b17..5792c1a8c 100644 --- a/observability-lib/api/notification-policy.go +++ b/observability-lib/api/notification-policy.go @@ -8,31 +8,57 @@ import ( "github.com/grafana/grafana-foundation-sdk/go/alerting" ) +func objectMatchersEqual(a alerting.ObjectMatchers, b alerting.ObjectMatchers) bool { + if len(a) != len(b) { + return false + } + + for i := range a { + foundMatch := false + for j := range b { + if reflect.DeepEqual(a[i], b[j]) { + foundMatch = true + break + } + } + if !foundMatch { + return false + } + } + + return true +} + +func policyExist(parent alerting.NotificationPolicy, newNotificationPolicy alerting.NotificationPolicy) bool { + for _, notificationPolicy := range parent.Routes { + matchersEqual := false + if notificationPolicy.ObjectMatchers != nil { + matchersEqual = objectMatchersEqual(*notificationPolicy.ObjectMatchers, *newNotificationPolicy.ObjectMatchers) + } + receiversEqual := reflect.DeepEqual(notificationPolicy.Receiver, newNotificationPolicy.Receiver) + if matchersEqual && receiversEqual { + return true + } + if notificationPolicy.Routes != nil { + policyExist(notificationPolicy, newNotificationPolicy) + } + } + return false +} + // AddNestedPolicy Add Nested Policy to Notification Policy Tree func (c *Client) AddNestedPolicy(newNotificationPolicy alerting.NotificationPolicy) error { notificationPolicyTree, _, err := c.GetNotificationPolicy() if err != nil { return err } - updatedNotificationPolicy := notificationPolicyTree - tagsEqual := false - for key, notificationPolicy := range updatedNotificationPolicy.Routes { - if notificationPolicy.ObjectMatchers != nil { - tagsEqual = reflect.DeepEqual(notificationPolicy.ObjectMatchers, newNotificationPolicy.ObjectMatchers) - if tagsEqual { - updatedNotificationPolicy.Routes[key] = newNotificationPolicy - } + if !policyExist(alerting.NotificationPolicy(notificationPolicyTree), newNotificationPolicy) { + notificationPolicyTree.Routes = append(notificationPolicyTree.Routes, newNotificationPolicy) + _, _, errPutNotificationPolicy := c.PutNotificationPolicy(alerting.NotificationPolicy(notificationPolicyTree)) + if errPutNotificationPolicy != nil { + return errPutNotificationPolicy } } - if !tagsEqual { - updatedNotificationPolicy.Routes = append(updatedNotificationPolicy.Routes, newNotificationPolicy) - } - - _, _, errPutNotificationPolicy := c.PutNotificationPolicy(alerting.NotificationPolicy(updatedNotificationPolicy)) - if errPutNotificationPolicy != nil { - return errPutNotificationPolicy - } - return nil } diff --git a/observability-lib/api/notification-policy_test.go b/observability-lib/api/notification-policy_test.go new file mode 100644 index 000000000..16ead9063 --- /dev/null +++ 
b/observability-lib/api/notification-policy_test.go @@ -0,0 +1,46 @@ +package api + +import ( + "testing" + + "github.com/grafana/grafana-foundation-sdk/go/alerting" + "github.com/stretchr/testify/require" +) + +func TestObjectMatchersEqual(t *testing.T) { + t.Run("returns true if the two object matchers are equal", func(t *testing.T) { + a := alerting.ObjectMatchers{{"team", "=", "chainlink"}} + b := alerting.ObjectMatchers{{"team", "=", "chainlink"}} + + result := objectMatchersEqual(a, b) + require.True(t, result) + }) + + t.Run("returns true if the two object matchers with multiple matches are equal", func(t *testing.T) { + a := alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + {"severity", "=", "critical"}, + } + b := alerting.ObjectMatchers{ + {"severity", "=", "critical"}, + {"team", "=", "chainlink"}, + } + + result := objectMatchersEqual(a, b) + require.True(t, result) + }) + + t.Run("returns false if the two object matchers with multiple matches are different", func(t *testing.T) { + a := alerting.ObjectMatchers{ + {"team", "=", "chainlink"}, + {"severity", "=", "critical"}, + } + b := alerting.ObjectMatchers{ + {"severity", "=", "warning"}, + {"team", "=", "chainlink"}, + } + + result := objectMatchersEqual(a, b) + require.False(t, result) + }) +} diff --git a/observability-lib/api/rule.go b/observability-lib/api/rule.go index e004df6d7..d5546752d 100644 --- a/observability-lib/api/rule.go +++ b/observability-lib/api/rule.go @@ -70,6 +70,31 @@ func (c *Client) PostAlertRule(alertRule alerting.Rule) (PostAlertRuleResponse, return grafanaResp, resp, nil } +type UpdateAlertRuleResponse struct{} + +// UpdateAlertRule Update a specific alert rule by UID +func (c *Client) UpdateAlertRule(uid string, alertRule alerting.Rule) (UpdateAlertRuleResponse, *resty.Response, error) { + var grafanaResp UpdateAlertRuleResponse + + resp, err := c.resty.R(). + SetHeader("Content-Type", "application/json"). + SetHeader("X-Disable-Provenance", "true"). + SetBody(alertRule). + SetResult(&grafanaResp). 
+ Put(fmt.Sprintf("/api/v1/provisioning/alert-rules/%s", uid)) + + if err != nil { + return UpdateAlertRuleResponse{}, resp, fmt.Errorf("error making API request: %w", err) + } + + statusCode := resp.StatusCode() + if statusCode != 200 { + return UpdateAlertRuleResponse{}, resp, fmt.Errorf("error updating alert rule, received unexpected status code %d: %s", statusCode, resp.String()) + } + + return grafanaResp, resp, nil +} + type DeleteAlertRuleResponse struct{} // DeleteAlertRule Delete a specific alert rule by UID diff --git a/observability-lib/cmd/builder.go b/observability-lib/cmd/builder.go index fdd849075..4be5289e2 100644 --- a/observability-lib/cmd/builder.go +++ b/observability-lib/cmd/builder.go @@ -46,13 +46,14 @@ type BuildOptions struct { AlertsFilters string } -func BuildDashboardWithType(options *BuildOptions) (*grafana.Dashboard, error) { +func BuildDashboardWithType(options *BuildOptions) (*grafana.Observability, error) { switch options.TypeDashboard { case TypeDashboardCoreNode: return corenode.NewDashboard(&corenode.Props{ Name: options.Name, Platform: options.Platform, MetricsDataSource: options.MetricsDataSource, + LogsDataSource: options.LogsDataSource, SlackChannel: options.SlackChannel, SlackWebhookURL: options.SlackWebhookURL, AlertsTags: options.AlertsTags, diff --git a/observability-lib/cmd/deploy.go b/observability-lib/cmd/deploy.go index daa63fd3f..17e2b0633 100644 --- a/observability-lib/cmd/deploy.go +++ b/observability-lib/cmd/deploy.go @@ -14,14 +14,18 @@ var DeployCmd = &cobra.Command{ return errAlertsTags } - metricsDataSource, errMetricsDataSource := grafana.GetDataSourceFromGrafana( - cmd.Flag("metrics-datasource").Value.String(), - cmd.Flag("grafana-url").Value.String(), - cmd.Flag("grafana-token").Value.String(), - ) + var metricsDataSource *grafana.DataSource + if cmd.Flag("metrics-datasource").Value.String() != "" { + var errMetricsDataSource error + metricsDataSource, errMetricsDataSource = grafana.GetDataSourceFromGrafana( + cmd.Flag("metrics-datasource").Value.String(), + cmd.Flag("grafana-url").Value.String(), + cmd.Flag("grafana-token").Value.String(), + ) - if errMetricsDataSource != nil { - return errMetricsDataSource + if errMetricsDataSource != nil { + return errMetricsDataSource + } } var logsDataSource *grafana.DataSource diff --git a/observability-lib/cmd/notification-templates.yaml b/observability-lib/cmd/notification-templates.yaml index f28351cb1..60df98cc6 100644 --- a/observability-lib/cmd/notification-templates.yaml +++ b/observability-lib/cmd/notification-templates.yaml @@ -53,4 +53,13 @@ slack: |- {{ define "slack.chainlink.title" }} {{ template "alert_severity_prefix_emoji" . 
}} [{{- if gt (len .Alerts.Resolved) 0}}{{ .Status | toUpper }}{{- else }}{{ .CommonLabels.severity | toUpper }}{{- end }}:{{ .Alerts | len }}] {{ .CommonLabels.alertname }} + {{ end }} + +pagerduty: |- + {{ define "pagerduty.chainlink.title" }} + [{{- if gt (len .Alerts.Resolved) 0}}{{ .Status | toUpper }}{{- else }}{{ .CommonLabels.severity | toUpper }}{{- end }}:{{ .Alerts | len }}] {{ .CommonLabels.alertname }} + {{ end }} + + {{ define "pagerduty.chainlink.severity" }} + {{ if .CommonLabels.severity }}{{ .CommonLabels.severity | toLower }}{{ else }}critical{{ end }} {{ end }} \ No newline at end of file diff --git a/observability-lib/dashboards/atlas-don/component.go b/observability-lib/dashboards/atlas-don/component.go index 5cb9f4e4f..098bda871 100644 --- a/observability-lib/dashboards/atlas-don/component.go +++ b/observability-lib/dashboards/atlas-don/component.go @@ -10,7 +10,7 @@ import ( "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" ) -func NewDashboard(props *Props) (*grafana.Dashboard, error) { +func NewDashboard(props *Props) (*grafana.Observability, error) { if props.Name == "" { return nil, fmt.Errorf("Name is required") } @@ -31,6 +31,9 @@ func NewDashboard(props *Props) (*grafana.Dashboard, error) { } props.platformOpts = platformPanelOpts(props.OCRVersion) + if props.Tested { + props.platformOpts.LabelQuery = "" + } builder := grafana.NewBuilder(&grafana.BuilderOptions{ Name: props.Name, diff --git a/observability-lib/dashboards/atlas-don/component_test.go b/observability-lib/dashboards/atlas-don/component_test.go index 919c01bc6..7ce5bb775 100644 --- a/observability-lib/dashboards/atlas-don/component_test.go +++ b/observability-lib/dashboards/atlas-don/component_test.go @@ -1,6 +1,7 @@ package atlasdon_test import ( + "flag" "os" "testing" @@ -11,28 +12,70 @@ import ( atlasdon "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/atlas-don" ) +var update = flag.Bool("update", false, "update golden test files") + +const fileOutput = "test-output.json" + +func TestGenerateFile(t *testing.T) { + if !*update { + t.Skip("skipping test") + } + + testDashboard, err := atlasdon.NewDashboard(&atlasdon.Props{ + Name: "DON OCR Dashboard", + MetricsDataSource: grafana.NewDataSource("Prometheus", "1"), + OCRVersion: "ocr2", + Tested: true, + }) + if err != nil { + t.Errorf("Error creating dashboard: %v", err) + } + json, errJSON := testDashboard.GenerateJSON() + if errJSON != nil { + t.Errorf("Error generating JSON: %v", errJSON) + } + if _, errExists := os.Stat(fileOutput); errExists == nil { + errRemove := os.Remove(fileOutput) + if errRemove != nil { + t.Errorf("Error removing file: %v", errRemove) + } + } + file, errFile := os.Create(fileOutput) + if errFile != nil { + t.Fatalf("Error creating file: %v", errFile) + } + _, err = file.WriteString(string(json)) + if err != nil { + t.Errorf("Error writing to file: %v", err) + } + t.Cleanup(func() { + file.Close() + }) +} + func TestNewDashboard(t *testing.T) { t.Run("NewDashboard creates a dashboard", func(t *testing.T) { testDashboard, err := atlasdon.NewDashboard(&atlasdon.Props{ Name: "DON OCR Dashboard", MetricsDataSource: grafana.NewDataSource("Prometheus", "1"), OCRVersion: "ocr2", + Tested: true, }) if err != nil { t.Errorf("Error creating dashboard: %v", err) } - require.IsType(t, grafana.Dashboard{}, *testDashboard) + require.IsType(t, grafana.Observability{}, *testDashboard) require.Equal(t, "DON OCR Dashboard", *testDashboard.Dashboard.Title) json, errJSON := 
testDashboard.GenerateJSON() if errJSON != nil { t.Errorf("Error generating JSON: %v", errJSON) } - jsonCompared, errCompared := os.ReadFile("test-output.json") + jsonCompared, errCompared := os.ReadFile(fileOutput) if errCompared != nil { t.Errorf("Error reading file: %v", errCompared) } - require.ElementsMatch(t, jsonCompared, json) + require.JSONEq(t, string(jsonCompared), string(json)) }) } diff --git a/observability-lib/dashboards/atlas-don/platform.go b/observability-lib/dashboards/atlas-don/platform.go index 8ac1bdbd2..dd953a2f7 100644 --- a/observability-lib/dashboards/atlas-don/platform.go +++ b/observability-lib/dashboards/atlas-don/platform.go @@ -16,6 +16,7 @@ type Props struct { MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics OCRVersion string // OCRVersion is the version of the OCR (ocr, ocr2, ocr3) platformOpts platformOpts + Tested bool } // PlatformPanelOpts generate different queries depending on params diff --git a/observability-lib/dashboards/atlas-don/test-output.json b/observability-lib/dashboards/atlas-don/test-output.json index 3f28f5859..65b2a8f14 100644 --- a/observability-lib/dashboards/atlas-don/test-output.json +++ b/observability-lib/dashboards/atlas-don/test-output.json @@ -6,6 +6,7 @@ "ocr2" ], "timezone": "browser", + "editable": true, "graphTooltip": 0, "time": { "from": "now-30m", @@ -13,7 +14,7 @@ }, "fiscalYearStartMonth": 0, "refresh": "30s", - "schemaVersion": 0, + "schemaVersion": 39, "panels": [ { "type": "row", @@ -30,10 +31,10 @@ }, { "type": "stat", - "id": 0, + "id": 1, "targets": [ { - "expr": "bool:ocr2_telemetry_down{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", } == 1", + "expr": "bool:ocr2_telemetry_down{} == 1", "format": "", "legendFormat": "{{job}} | {{report_type}}", "refId": "" @@ -57,6 +58,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -66,7 +68,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -97,7 +99,7 @@ }, { "type": "stat", - "id": 1, + "id": 2, "targets": [ { "expr": "bool:ocr2_oracle_telemetry_down_except_telemetry_down{job=~\"${job}\", oracle!=\"csa_unknown\"} == 1", @@ -133,6 +135,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -142,7 +145,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -173,7 +176,7 @@ }, { "type": "stat", - "id": 2, + "id": 3, "targets": [ { "expr": "bool:ocr2_feed_reporting_failure_except_feed_telemetry_down{job=~\"${job}\", oracle!=\"csa_unknown\"} == 1", @@ -200,6 +203,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -209,7 +213,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -240,7 +244,7 @@ }, { "type": "stat", - "id": 3, + "id": 4, "targets": [ { "expr": "bool:ocr2_feed_telemetry_down_except_telemetry_down{job=~\"${job}\"} == 1", @@ -267,6 +271,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -276,7 +281,7 @@ "titleSize": 
10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -307,7 +312,7 @@ }, { "type": "stat", - "id": 4, + "id": 5, "targets": [ { "expr": "bool:ocr2_oracle_blind_except_telemetry_down{job=~\"${job}\"} == 1", @@ -343,6 +348,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -352,7 +358,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -383,7 +389,7 @@ }, { "type": "stat", - "id": 5, + "id": 6, "targets": [ { "expr": "bool:ocr2_oracle_feed_no_observations_except_oracle_blind_except_feed_reporting_failure_except_feed_telemetry_down{job=~\"${job}\"} == 1", @@ -419,6 +425,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -428,7 +435,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -472,10 +479,10 @@ }, { "type": "stat", - "id": 6, + "id": 7, "targets": [ { - "expr": "sum(ocr2_contract_oracle_active{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }) by (contract, oracle)", + "expr": "sum(ocr2_contract_oracle_active{}) by (contract, oracle)", "format": "", "legendFormat": "{{oracle}}", "refId": "" @@ -508,6 +515,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -517,7 +525,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -561,22 +569,22 @@ }, { "type": "timeseries", - "id": 7, + "id": 8, "targets": [ { - "expr": "ocr2_contract_config_n{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_n{}", "format": "", "legendFormat": "{{feed_id}}", "refId": "" }, { - "expr": "ocr2_contract_config_r_max{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_r_max{}", "format": "", "legendFormat": "Max nodes", "refId": "" }, { - "expr": "avg(2 * ocr2_contract_config_f{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", } + 1)", + "expr": "avg(2 * ocr2_contract_config_f{} + 1)", "format": "", "legendFormat": "Min nodes", "refId": "" @@ -613,7 +621,7 @@ "min": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -637,7 +645,7 @@ }, { "type": "timeseries", - "id": 8, + "id": 9, "targets": [ { "expr": "sum by (sender, receiver) (increase(ocr2_telemetry_p2p_received_total{job=~\"${job}\"}[5m]))", @@ -676,7 +684,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -687,7 +695,7 @@ }, { "type": "timeseries", - "id": 9, + "id": 10, "targets": [ { "expr": "sum by (sender, receiver) (rate(ocr2_telemetry_p2p_received_total{job=~\"${job}\"}[5m]))", @@ -726,7 +734,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -737,10 +745,10 @@ }, { "type": "timeseries", - "id": 10, + "id": 11, 
"targets": [ { - "expr": "ocr2_telemetry_observation{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_telemetry_observation{}", "format": "", "legendFormat": "{{oracle}}", "refId": "" @@ -776,7 +784,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -787,10 +795,10 @@ }, { "type": "timeseries", - "id": 11, + "id": 12, "targets": [ { - "expr": "rate(ocr2_telemetry_message_observe_total{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }[5m])", + "expr": "rate(ocr2_telemetry_message_observe_total{}[5m])", "format": "", "legendFormat": "{{oracle}}", "refId": "" @@ -826,7 +834,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -850,10 +858,10 @@ }, { "type": "timeseries", - "id": 12, + "id": 13, "targets": [ { - "expr": "ocr2_telemetry_feed_agreed_epoch{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_telemetry_feed_agreed_epoch{}", "format": "", "legendFormat": "{{feed_id}}", "refId": "" @@ -889,7 +897,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -900,10 +908,10 @@ }, { "type": "timeseries", - "id": 13, + "id": 14, "targets": [ { - "expr": "ocr2_telemetry_epoch_round{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_telemetry_epoch_round{}", "format": "", "legendFormat": "{{oracle}}", "refId": "" @@ -939,7 +947,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -950,10 +958,10 @@ }, { "type": "timeseries", - "id": 14, + "id": 15, "targets": [ { - "expr": "rate(ocr2_telemetry_round_started_total{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }[1m])", + "expr": "rate(ocr2_telemetry_round_started_total{}[1m])", "format": "", "legendFormat": "{{oracle}}", "refId": "" @@ -989,7 +997,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1000,10 +1008,10 @@ }, { "type": "timeseries", - "id": 15, + "id": 16, "targets": [ { - "expr": "rate(ocr2_telemetry_ingested_total{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }[1m])", + "expr": "rate(ocr2_telemetry_ingested_total{}[1m])", "format": "", "legendFormat": "{{oracle}}", "refId": "" @@ -1039,7 +1047,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1063,10 +1071,10 @@ }, { "type": "stat", - "id": 16, + "id": 17, "targets": [ { - "expr": "ocr2_contract_config_alpha{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_alpha{}", "format": "", "legendFormat": "{{contract}}", "refId": "" @@ -1090,6 +1098,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1099,7 +1108,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1113,10 +1122,10 @@ }, { "type": 
"stat", - "id": 17, + "id": 18, "targets": [ { - "expr": "ocr2_contract_config_delta_c_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_delta_c_seconds{}", "format": "", "legendFormat": "{{contract}}", "refId": "" @@ -1140,6 +1149,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1149,7 +1159,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1163,10 +1173,10 @@ }, { "type": "stat", - "id": 18, + "id": 19, "targets": [ { - "expr": "ocr2_contract_config_delta_grace_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_delta_grace_seconds{}", "format": "", "legendFormat": "{{contract}}", "refId": "" @@ -1190,6 +1200,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1199,7 +1210,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1213,10 +1224,10 @@ }, { "type": "stat", - "id": 19, + "id": 20, "targets": [ { - "expr": "ocr2_contract_config_delta_progress_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_delta_progress_seconds{}", "format": "", "legendFormat": "{{contract}}", "refId": "" @@ -1240,6 +1251,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1249,7 +1261,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1263,10 +1275,10 @@ }, { "type": "stat", - "id": 20, + "id": 21, "targets": [ { - "expr": "ocr2_contract_config_delta_resend_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_delta_resend_seconds{}", "format": "", "legendFormat": "{{contract}}", "refId": "" @@ -1290,6 +1302,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1299,7 +1312,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1313,10 +1326,10 @@ }, { "type": "stat", - "id": 21, + "id": 22, "targets": [ { - "expr": "ocr2_contract_config_delta_round_seconds{contract=~\"${contract}\", feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_delta_round_seconds{}", "format": "", "legendFormat": "{{contract}}", "refId": "" @@ -1340,6 +1353,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1349,7 +1363,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1363,10 +1377,10 @@ }, { "type": "stat", - "id": 22, + "id": 23, "targets": [ { - "expr": "ocr2_contract_config_delta_stage_seconds{contract=~\"${contract}\", 
feed_id=~\"${feed_id}\", namespace=\"otpe2\", job=~\"${job}\", }", + "expr": "ocr2_contract_config_delta_stage_seconds{}", "format": "", "legendFormat": "{{contract}}", "refId": "" @@ -1390,6 +1404,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1399,7 +1414,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1418,6 +1433,7 @@ "type": "query", "name": "job", "label": "Job", + "description": "", "query": "label_values(up{namespace=\"otpe2\"}, job)", "datasource": { "uid": "Prometheus" @@ -1438,6 +1454,7 @@ "type": "query", "name": "contract", "label": "Contract", + "description": "", "query": "label_values(ocr2_contract_config_f{job=\"$job\"}, contract)", "datasource": { "uid": "Prometheus" @@ -1458,6 +1475,7 @@ "type": "query", "name": "feed_id", "label": "Feed ID", + "description": "", "query": "label_values(ocr2_contract_config_f{job=\"$job\", contract=\"$contract\"}, feed_id)", "datasource": { "uid": "Prometheus" diff --git a/observability-lib/dashboards/capabilities/component.go b/observability-lib/dashboards/capabilities/component.go index 27c035c97..9c36d9be7 100644 --- a/observability-lib/dashboards/capabilities/component.go +++ b/observability-lib/dashboards/capabilities/component.go @@ -15,7 +15,7 @@ type Props struct { } // NewDashboard creates a Capabilities dashboard -func NewDashboard(props *Props) (*grafana.Dashboard, error) { +func NewDashboard(props *Props) (*grafana.Observability, error) { if props.Name == "" { return nil, fmt.Errorf("Name is required") } diff --git a/observability-lib/dashboards/capabilities/component_test.go b/observability-lib/dashboards/capabilities/component_test.go index 48cd0003d..90ad5ce9e 100644 --- a/observability-lib/dashboards/capabilities/component_test.go +++ b/observability-lib/dashboards/capabilities/component_test.go @@ -1,6 +1,7 @@ package capabilities_test import ( + "flag" "os" "testing" @@ -11,6 +12,45 @@ import ( "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/capabilities" ) +var update = flag.Bool("update", false, "update golden test files") + +const fileOutput = "test-output.json" + +func TestGenerateFile(t *testing.T) { + if *update == false { + t.Skip("skipping test") + } + + testDashboard, err := capabilities.NewDashboard(&capabilities.Props{ + Name: "Capabilities Dashboard", + MetricsDataSource: grafana.NewDataSource("Prometheus", ""), + }) + if err != nil { + t.Errorf("Error creating dashboard: %v", err) + } + json, errJSON := testDashboard.GenerateJSON() + if errJSON != nil { + t.Errorf("Error generating JSON: %v", errJSON) + } + if _, errExists := os.Stat(fileOutput); errExists == nil { + errRemove := os.Remove(fileOutput) + if errRemove != nil { + t.Errorf("Error removing file: %v", errRemove) + } + } + file, errFile := os.Create(fileOutput) + if errFile != nil { + panic(errFile) + } + writeString, err := file.WriteString(string(json)) + if err != nil { + t.Errorf("Error writing to file: %v", writeString) + } + t.Cleanup(func() { + file.Close() + }) +} + func TestNewDashboard(t *testing.T) { t.Run("NewDashboard creates a dashboard", func(t *testing.T) { testDashboard, err := capabilities.NewDashboard(&capabilities.Props{ @@ -20,18 +60,18 @@ func TestNewDashboard(t *testing.T) { if err != nil { t.Errorf("Error creating dashboard: %v", err) } - require.IsType(t, grafana.Dashboard{}, 
*testDashboard) + require.IsType(t, grafana.Observability{}, *testDashboard) require.Equal(t, "Capabilities Dashboard", *testDashboard.Dashboard.Title) json, errJSON := testDashboard.GenerateJSON() if errJSON != nil { t.Errorf("Error generating JSON: %v", errJSON) } - jsonCompared, errCompared := os.ReadFile("test-output.json") + jsonCompared, errCompared := os.ReadFile(fileOutput) if errCompared != nil { t.Errorf("Error reading file: %v", errCompared) } - require.ElementsMatch(t, jsonCompared, json) + require.JSONEq(t, string(jsonCompared), string(json)) }) } diff --git a/observability-lib/dashboards/capabilities/test-output.json b/observability-lib/dashboards/capabilities/test-output.json index 192b6fe67..ff7a21a65 100644 --- a/observability-lib/dashboards/capabilities/test-output.json +++ b/observability-lib/dashboards/capabilities/test-output.json @@ -5,6 +5,7 @@ "Capabilities" ], "timezone": "browser", + "editable": true, "graphTooltip": 0, "time": { "from": "now-7d", @@ -12,7 +13,7 @@ }, "fiscalYearStartMonth": 0, "refresh": "30s", - "schemaVersion": 0, + "schemaVersion": 39, "panels": [ { "type": "row", @@ -29,7 +30,7 @@ }, { "type": "timeseries", - "id": 0, + "id": 1, "targets": [ { "expr": "capability_execution_time_ms", @@ -68,7 +69,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -79,7 +80,7 @@ }, { "type": "timeseries", - "id": 1, + "id": 2, "targets": [ { "expr": "capability_runs_count", @@ -118,7 +119,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -129,7 +130,7 @@ }, { "type": "timeseries", - "id": 2, + "id": 3, "targets": [ { "expr": "capability_runs_fault_count", @@ -168,7 +169,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -179,7 +180,7 @@ }, { "type": "timeseries", - "id": 3, + "id": 4, "targets": [ { "expr": "capability_runs_invalid_count", @@ -218,7 +219,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -229,7 +230,7 @@ }, { "type": "timeseries", - "id": 4, + "id": 5, "targets": [ { "expr": "capability_runs_unauthorized_count", @@ -268,7 +269,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -279,7 +280,7 @@ }, { "type": "timeseries", - "id": 5, + "id": 6, "targets": [ { "expr": "capability_runs_no_resource_count", @@ -318,7 +319,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -334,6 +335,7 @@ "type": "query", "name": "env", "label": "Environment", + "description": "", "query": "label_values(up, env)", "datasource": { "uid": "Prometheus" @@ -354,6 +356,7 @@ "type": "query", "name": "cluster", "label": "Cluster", + "description": "", "query": "label_values(up{env=\"$env\"}, cluster)", "datasource": { "uid": "Prometheus" @@ -374,6 +377,7 @@ "type": "query", "name": "namespace", "label": "Namespace", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\"}, namespace)", "datasource": { "uid": "Prometheus" @@ -394,6 +398,7 @@ "type": "query", "name": "job", "label": "Job", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\"}, job)", "datasource": { "uid": 
"Prometheus" @@ -414,6 +419,7 @@ "type": "query", "name": "pod", "label": "Pod", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\", job=\"$job\"}, pod)", "datasource": { "uid": "Prometheus" @@ -434,6 +440,7 @@ "type": "query", "name": "capability", "label": "Capability", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\", job=\"$job\"}, pod)", "datasource": { "uid": "Prometheus" diff --git a/observability-lib/dashboards/core-node-components/component.go b/observability-lib/dashboards/core-node-components/component.go index 5e2d824b7..6175fd438 100644 --- a/observability-lib/dashboards/core-node-components/component.go +++ b/observability-lib/dashboards/core-node-components/component.go @@ -9,12 +9,15 @@ import ( "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" ) -func NewDashboard(props *Props) (*grafana.Dashboard, error) { +func NewDashboard(props *Props) (*grafana.Observability, error) { if props.Name == "" { return nil, fmt.Errorf("Name is required") } props.platformOpts = platformPanelOpts() + if props.Tested { + props.platformOpts.LabelQuery = "" + } builder := grafana.NewBuilder(&grafana.BuilderOptions{ Name: props.Name, diff --git a/observability-lib/dashboards/core-node-components/component_test.go b/observability-lib/dashboards/core-node-components/component_test.go index 8d581da90..ce257164c 100644 --- a/observability-lib/dashboards/core-node-components/component_test.go +++ b/observability-lib/dashboards/core-node-components/component_test.go @@ -1,6 +1,7 @@ package corenodecomponents_test import ( + "flag" "os" "testing" @@ -11,28 +12,70 @@ import ( corenodecomponents "github.com/smartcontractkit/chainlink-common/observability-lib/dashboards/core-node-components" ) +var update = flag.Bool("update", false, "update golden test files") + +const fileOutput = "test-output.json" + +func TestGenerateFile(t *testing.T) { + if *update == false { + t.Skip("skipping test") + } + + testDashboard, err := corenodecomponents.NewDashboard(&corenodecomponents.Props{ + Name: "Core Node Components Dashboard", + MetricsDataSource: grafana.NewDataSource("Prometheus", ""), + LogsDataSource: grafana.NewDataSource("Loki", ""), + Tested: true, + }) + if err != nil { + t.Errorf("Error creating dashboard: %v", err) + } + json, errJSON := testDashboard.GenerateJSON() + if errJSON != nil { + t.Errorf("Error generating JSON: %v", errJSON) + } + if _, errExists := os.Stat(fileOutput); errExists == nil { + errRemove := os.Remove(fileOutput) + if errRemove != nil { + t.Errorf("Error removing file: %v", errRemove) + } + } + file, errFile := os.Create(fileOutput) + if errFile != nil { + panic(errFile) + } + writeString, err := file.WriteString(string(json)) + if err != nil { + t.Errorf("Error writing to file: %v", writeString) + } + t.Cleanup(func() { + file.Close() + }) +} + func TestNewDashboard(t *testing.T) { t.Run("NewDashboard creates a dashboard", func(t *testing.T) { testDashboard, err := corenodecomponents.NewDashboard(&corenodecomponents.Props{ Name: "Core Node Components Dashboard", MetricsDataSource: grafana.NewDataSource("Prometheus", ""), LogsDataSource: grafana.NewDataSource("Loki", ""), + Tested: true, }) if err != nil { t.Errorf("Error creating dashboard: %v", err) } - require.IsType(t, grafana.Dashboard{}, *testDashboard) + require.IsType(t, grafana.Observability{}, *testDashboard) require.Equal(t, "Core Node Components Dashboard", 
*testDashboard.Dashboard.Title) json, errJSON := testDashboard.GenerateJSON() if errJSON != nil { t.Errorf("Error generating JSON: %v", errJSON) } - jsonCompared, errCompared := os.ReadFile("test-output.json") + jsonCompared, errCompared := os.ReadFile(fileOutput) if errCompared != nil { t.Errorf("Error reading file: %v", errCompared) } - require.ElementsMatch(t, jsonCompared, json) + require.JSONEq(t, string(jsonCompared), string(json)) }) } diff --git a/observability-lib/dashboards/core-node-components/platform.go b/observability-lib/dashboards/core-node-components/platform.go index 6605e40a2..cd64ad669 100644 --- a/observability-lib/dashboards/core-node-components/platform.go +++ b/observability-lib/dashboards/core-node-components/platform.go @@ -17,6 +17,7 @@ type Props struct { MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics LogsDataSource *grafana.DataSource // LogsDataSource is the datasource for querying logs platformOpts platformOpts + Tested bool } // PlatformPanelOpts generate different queries for "docker" and "k8s" deployment platforms diff --git a/observability-lib/dashboards/core-node-components/test-output.json b/observability-lib/dashboards/core-node-components/test-output.json index f722b9335..734c36e22 100644 --- a/observability-lib/dashboards/core-node-components/test-output.json +++ b/observability-lib/dashboards/core-node-components/test-output.json @@ -7,6 +7,7 @@ "Components" ], "timezone": "browser", + "editable": true, "graphTooltip": 0, "time": { "from": "now-30m", @@ -14,14 +15,14 @@ }, "fiscalYearStartMonth": 0, "refresh": "30s", - "schemaVersion": 0, + "schemaVersion": 39, "panels": [ { "type": "stat", - "id": 0, + "id": 1, "targets": [ { - "expr": "100 * avg(avg_over_time(health{blockchain=~\"${blockchain}\", product=~\"${product}\", network_type=~\"${network_type}\", component=~\"${component}\", service=~\"${service}\", env=~\"${env}\", cluster=~\"${cluster}\", service_id=~\"${service_id}\"}[$interval])) by (service_id, version, service, cluster, env)", + "expr": "100 * avg(avg_over_time(health{service_id=~\"${service_id}\"}[$interval])) by (service_id, version, service, cluster, env)", "format": "", "legendFormat": "{{service_id}}", "refId": "" @@ -45,6 +46,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -54,7 +56,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "vertical" }, "fieldConfig": { @@ -89,10 +91,10 @@ }, { "type": "timeseries", - "id": 1, + "id": 2, "targets": [ { - "expr": "100 * (health{blockchain=~\"${blockchain}\", product=~\"${product}\", network_type=~\"${network_type}\", component=~\"${component}\", service=~\"${service}\", env=~\"${env}\", cluster=~\"${cluster}\", service_id=~\"${service_id}\"})", + "expr": "100 * (health{service_id=~\"${service_id}\"})", "format": "", "legendFormat": "{{service_id}}", "refId": "" @@ -130,7 +132,7 @@ "max": 100, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -141,10 +143,10 @@ }, { "type": "timeseries", - "id": 2, + "id": 3, "targets": [ { - "expr": "100 * (avg(avg_over_time(health{blockchain=~\"${blockchain}\", product=~\"${product}\", network_type=~\"${network_type}\", component=~\"${component}\", service=~\"${service}\", env=~\"${env}\", cluster=~\"${cluster}\", service_id=~\"${service_id}\"}[$interval])) by 
(service_id, version, service, cluster, env))", + "expr": "100 * (avg(avg_over_time(health{service_id=~\"${service_id}\"}[$interval])) by (service_id, version, service, cluster, env))", "format": "", "legendFormat": "{{service_id}}", "refId": "" @@ -182,7 +184,7 @@ "max": 100, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -193,7 +195,7 @@ }, { "type": "logs", - "id": 3, + "id": 4, "targets": [ { "expr": "{env=\"${env}\", cluster=\"${cluster}\", product=\"${product}\", network_type=\"${network_type}\", instance=~\"${service}\"} | json | level=~\"(error|panic|fatal|crit)\"", @@ -214,6 +216,17 @@ "x": 0, "y": 16 }, + "options": { + "showLabels": false, + "showCommonLabels": false, + "showTime": false, + "showLogContextToggle": false, + "wrapLogMessage": false, + "prettifyLogMessage": false, + "enableLogDetails": false, + "sortOrder": "", + "dedupStrategy": "" + }, "fieldConfig": { "defaults": { "noValue": "No data" @@ -228,6 +241,7 @@ "type": "interval", "name": "interval", "label": "Interval", + "description": "", "query": "30s,1m,5m,15m,30m,1h,6h,12h", "current": { "selected": true, @@ -243,6 +257,7 @@ "type": "query", "name": "env", "label": "Environment", + "description": "", "query": "label_values(up, env)", "datasource": { "uid": "Prometheus" @@ -263,6 +278,7 @@ "type": "query", "name": "cluster", "label": "Cluster", + "description": "", "query": "label_values(up{env=\"$env\"}, cluster)", "datasource": { "uid": "Prometheus" @@ -283,6 +299,7 @@ "type": "query", "name": "blockchain", "label": "Blockchain", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\"}, blockchain)", "datasource": { "uid": "Prometheus" @@ -303,6 +320,7 @@ "type": "query", "name": "product", "label": "Product", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\"}, product)", "datasource": { "uid": "Prometheus" @@ -323,6 +341,7 @@ "type": "query", "name": "network_type", "label": "Network Type", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\", product=\"$product\"}, network_type)", "datasource": { "uid": "Prometheus" @@ -343,6 +362,7 @@ "type": "query", "name": "component", "label": "Component", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\", network_type=\"$network_type\"}, component)", "datasource": { "uid": "Prometheus" @@ -363,6 +383,7 @@ "type": "query", "name": "service", "label": "Service", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", blockchain=\"$blockchain\", network_type=\"$network_type\", component=\"$component\"}, service)", "datasource": { "uid": "Prometheus" @@ -383,6 +404,7 @@ "type": "query", "name": "service_id", "label": "Service ID", + "description": "", "query": "label_values(health{cluster=\"$cluster\", blockchain=\"$blockchain\", network_type=\"$network_type\", component=\"$component\", service=\"$service\"}, service_id)", "datasource": { "uid": "Prometheus" diff --git a/observability-lib/dashboards/core-node/component.go b/observability-lib/dashboards/core-node/component.go index ec3392201..8374bad53 100644 --- a/observability-lib/dashboards/core-node/component.go +++ b/observability-lib/dashboards/core-node/component.go @@ -2,6 +2,7 @@ package corenode import ( "fmt" + "strconv" "github.com/grafana/grafana-foundation-sdk/go/alerting" 
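
The new strconv import exists so the healthAverageAlertRule helper added below can embed its threshold in the rule title and summary. FormatFloat with precision -1 uses the fewest digits that round-trip, so whole-number thresholds render cleanly; a quick illustration:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// precision -1: minimal digits, so 90 prints as "90", not "90.000000"
	title := "Health Avg by Service is less than " + strconv.FormatFloat(90, 'f', -1, 64) + "%"
	fmt.Println(title) // Health Avg by Service is less than 90%
}
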
"github.com/grafana/grafana-foundation-sdk/go/cog" @@ -13,7 +14,7 @@ import ( ) // NewDashboard creates a DON dashboard for the given OCR version -func NewDashboard(props *Props) (*grafana.Dashboard, error) { +func NewDashboard(props *Props) (*grafana.Observability, error) { if props.Name == "" { return nil, fmt.Errorf("Name is required") } @@ -33,7 +34,21 @@ func NewDashboard(props *Props) (*grafana.Dashboard, error) { } } + if props.LogsDataSource == nil { + return nil, fmt.Errorf("LogsDataSource is required") + } else { + if props.LogsDataSource.Name == "" { + return nil, fmt.Errorf("LogsDataSource.Name is required") + } + if props.LogsDataSource.UID == "" { + return nil, fmt.Errorf("LogsDataSource.UID is required") + } + } + props.platformOpts = platformPanelOpts(props.Platform) + if props.Tested { + props.platformOpts.LabelQuery = "" + } builder := grafana.NewBuilder(&grafana.BuilderOptions{ Name: props.Name, @@ -57,18 +72,18 @@ func NewDashboard(props *Props) (*grafana.Dashboard, error) { "color": `{{ template "slack.chainlink.color" . }}`, }, })) - } - notificationPolicyOptions := &grafana.NotificationPolicyOptions{ - Receiver: "chainlink-slack", - GroupBy: []string{"grafana_folder", "alertname"}, - } - for name, value := range props.AlertsTags { - notificationPolicyOptions.ObjectMatchers = append(notificationPolicyOptions.ObjectMatchers, alerting.ObjectMatcher{name, "=", value}) + notificationPolicySlackOptions := &grafana.NotificationPolicyOptions{ + Receiver: "chainlink-slack", + GroupBy: []string{"grafana_folder", "alertname"}, + Continue: grafana.Pointer(true), + } + for name, value := range props.AlertsTags { + notificationPolicySlackOptions.ObjectMatchers = append(notificationPolicySlackOptions.ObjectMatchers, alerting.ObjectMatcher{name, "=", value}) + } + builder.AddNotificationPolicy(grafana.NewNotificationPolicy(notificationPolicySlackOptions)) } - builder.AddNotificationPolicy(grafana.NewNotificationPolicy(notificationPolicyOptions)) - builder.AddVars(vars(props)...) 
builder.AddRow("Headlines") @@ -144,7 +159,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "env", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up, env)`, + Query: `label_values(uptime_seconds, env)`, })) variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ @@ -153,7 +168,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "cluster", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env"}, cluster)`, + Query: `label_values(uptime_seconds{env="$env"}, cluster)`, })) variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ @@ -162,7 +177,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "namespace", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster"}, namespace)`, + Query: `label_values(uptime_seconds{env="$env", cluster="$cluster"}, namespace)`, })) variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ @@ -171,7 +186,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "blockchain", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace"}, blockchain)`, + Query: `label_values(uptime_seconds{env="$env", cluster="$cluster", namespace="$namespace"}, blockchain)`, })) variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ @@ -180,7 +195,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "product", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain"}, product)`, + Query: `label_values(uptime_seconds{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain"}, product)`, })) variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ @@ -189,7 +204,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "network_type", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain", product="$product"}, network_type)`, + Query: `label_values(uptime_seconds{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain", product="$product"}, network_type)`, })) variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ @@ -198,8 +213,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "job", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain", product="$product", network_type="$network_type"}, job)`, - Multi: true, + Query: `label_values(uptime_seconds{env="$env", cluster="$cluster", namespace="$namespace", blockchain="$blockchain", product="$product", network_type="$network_type"}, job)`, })) variables = append(variables, grafana.NewQueryVariable(&grafana.QueryVariableOptions{ @@ -208,7 +222,7 @@ func vars(p *Props) []cog.Builder[dashboard.VariableModel] { Name: "pod", }, Datasource: p.MetricsDataSource.Name, - Query: `label_values(up{env="$env", cluster="$cluster", namespace="$namespace", job="$job"}, pod)`, + Query: `label_values(uptime_seconds{env="$env", cluster="$cluster", namespace="$namespace", job="$job"}, pod)`, Multi: true, IncludeAll: true, })) @@ -228,6 +242,50 @@ func vars(p *Props) 
[]cog.Builder[dashboard.VariableModel] { return variables } +func healthAverageAlertRule(p *Props, threshold float64, tags map[string]string) grafana.AlertOptions { + return grafana.AlertOptions{ + Title: `Health Avg by Service is less than ` + strconv.FormatFloat(threshold, 'f', -1, 64) + `%`, + Summary: `Uptime less than ` + strconv.FormatFloat(threshold, 'f', -1, 64) + `% over last 15 minutes on one component in a Node`, + Description: `Component {{ index $labels "service_id" }} uptime in the last 15m is {{ index $values "C" }}%`, + RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", + For: "15m", + Tags: tags, + Query: []grafana.RuleQuery{ + { + Expr: `health{` + p.AlertsFilters + `}`, + RefID: "A", + Datasource: p.MetricsDataSource.UID, + }, + }, + QueryRefCondition: "D", + Condition: []grafana.ConditionQuery{ + { + RefID: "B", + ReduceExpression: &grafana.ReduceExpression{ + Expression: "A", + Reducer: expr.TypeReduceReducerMean, + }, + }, + { + RefID: "C", + MathExpression: &grafana.MathExpression{ + Expression: "$B * 100", + }, + }, + { + RefID: "D", + ThresholdExpression: &grafana.ThresholdExpression{ + Expression: "C", + ThresholdConditionsOptions: grafana.ThresholdConditionsOption{ + Params: []float64{threshold}, + Type: grafana.TypeThresholdTypeLt, + }, + }, + }, + }, + } +} + func headlines(p *Props) []*grafana.Panel { var panels []*grafana.Panel @@ -333,7 +391,7 @@ func headlines(p *Props) []*grafana.Panel { PanelOptions: &grafana.PanelOptions{ Datasource: p.MetricsDataSource.Name, Title: "Health Avg by Service over 15m", - Span: 16, + Span: 24, Height: 6, Decimals: 1, Unit: "percent", @@ -345,55 +403,26 @@ func headlines(p *Props) []*grafana.Panel { }, Min: grafana.Pointer[float64](0), Max: grafana.Pointer[float64](100), - AlertOptions: &grafana.AlertOptions{ - Summary: `Uptime less than 90% over last 15 minutes on one component in a Node`, - Description: `Component {{ index $labels "service_id" }} uptime in the last 15m is {{ index $values "A" }}%`, - RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", - For: "15m", - Tags: map[string]string{ - "severity": "warning", - }, - Query: []grafana.RuleQuery{ - { - Expr: `health{` + p.AlertsFilters + `}`, - RefID: "A", - Datasource: p.MetricsDataSource.UID, - }, - }, - QueryRefCondition: "D", - Condition: []grafana.ConditionQuery{ - { - RefID: "B", - ReduceExpression: &grafana.ReduceExpression{ - Expression: "A", - Reducer: expr.TypeReduceReducerMean, - }, - }, - { - RefID: "C", - MathExpression: &grafana.MathExpression{ - Expression: "$B * 100", - }, - }, - { - RefID: "D", - ThresholdExpression: &grafana.ThresholdExpression{ - Expression: "C", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{90, 0}, - Type: expr.TypeThresholdTypeLt, - }, - }, - }, - }, + Threshold: &grafana.ThresholdOptions{ + Mode: dashboard.ThresholdsModeAbsolute, + Steps: []dashboard.Threshold{ + {Value: nil, Color: "green"}, + {Value: grafana.Pointer[float64](50), Color: "red"}, + {Value: grafana.Pointer[float64](70), Color: "orange"}, + {Value: grafana.Pointer[float64](90), Color: "green"}, }, }, }, + ThresholdStyle: common.GraphThresholdsStyleModeDashed, LegendOptions: &grafana.LegendOptions{ DisplayMode: common.LegendDisplayModeList, Placement: common.LegendPlacementRight, }, + AlertsOptions: []grafana.AlertOptions{ + healthAverageAlertRule(p, 90, map[string]string{"severity": "info"}), + healthAverageAlertRule(p, 70, 
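
Each rule produced by the helper evaluates a three-stage server-side expression chain: B reduces the raw health samples from query A to a mean, C rescales that fraction to a percentage, and D applies the < threshold comparison; QueryRefCondition: "D" makes that final boolean the firing condition. Isolating just the chain, using the option types as they appear in the diff (note ThresholdConditionsOptions is now a single grafana.ThresholdConditionsOption rather than a slice, and Params carries only the threshold):

// conditionChain builds reduce -> math -> threshold, wired by RefIDs.
func conditionChain(threshold float64) []grafana.ConditionQuery {
	return []grafana.ConditionQuery{
		{RefID: "B", ReduceExpression: &grafana.ReduceExpression{
			Expression: "A", // mean of the health series from query A
			Reducer:    expr.TypeReduceReducerMean,
		}},
		{RefID: "C", MathExpression: &grafana.MathExpression{
			Expression: "$B * 100", // fraction -> percent
		}},
		{RefID: "D", ThresholdExpression: &grafana.ThresholdExpression{
			Expression: "C",
			ThresholdConditionsOptions: grafana.ThresholdConditionsOption{
				Params: []float64{threshold}, // 90, 70 or 50 above
				Type:   grafana.TypeThresholdTypeLt,
			},
		}},
	}
}
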
map[string]string{"severity": "warning"}), + healthAverageAlertRule(p, 50, map[string]string{"severity": "critical"}), + }, })) panels = append(panels, grafana.NewStatPanel(&grafana.StatPanelOptions{ @@ -401,7 +430,7 @@ func headlines(p *Props) []*grafana.Panel { Datasource: p.MetricsDataSource.Name, Title: "Health Avg by Service over 15m with health < 90%", Description: "Only displays services with health average < 90%", - Span: 8, + Span: 24, Height: 6, Decimals: 1, Unit: "percent", @@ -427,6 +456,22 @@ func headlines(p *Props) []*grafana.Panel { Orientation: common.VizOrientationHorizontal, })) + panels = append(panels, grafana.NewLogPanel(&grafana.LogPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.LogsDataSource.Name, + Title: "Logs with severity >= error", + Span: 24, + Height: 10, + Query: []grafana.Query{ + { + Expr: `{env="${env}", cluster="${cluster}", product="${product}", network_type="${network_type}", namespace="${namespace}", pod="${pod}"} | json | level=~"(error|panic|fatal|crit)"`, + Legend: "", + }, + }, + }, + PrettifyJSON: true, + })) + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ PanelOptions: &grafana.PanelOptions{ Datasource: p.MetricsDataSource.Name, @@ -440,7 +485,9 @@ func headlines(p *Props) []*grafana.Panel { Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{account}}`, }, }, - AlertOptions: &grafana.AlertOptions{ + }, + AlertsOptions: []grafana.AlertOptions{ + { Summary: `ETH Balance is lower than threshold`, Description: `ETH Balance critically low at {{ index $values "A" }} on {{ index $labels "` + p.platformOpts.LabelFilter + `" }}`, RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", @@ -463,11 +510,9 @@ func headlines(p *Props) []*grafana.Panel { RefID: "B", ThresholdExpression: &grafana.ThresholdExpression{ Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{1, 0}, - Type: expr.TypeThresholdTypeLt, - }, + ThresholdConditionsOptions: grafana.ThresholdConditionsOption{ + Params: []float64{1}, + Type: grafana.TypeThresholdTypeLt, }, }, }, @@ -489,7 +534,9 @@ func headlines(p *Props) []*grafana.Panel { Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{account}}`, }, }, - AlertOptions: &grafana.AlertOptions{ + }, + AlertsOptions: []grafana.AlertOptions{ + { Summary: `Solana Balance is lower than threshold`, Description: `Solana Balance critically low at {{ index $values "A" }} on {{ index $labels "` + p.platformOpts.LabelFilter + `" }}`, RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", @@ -512,11 +559,9 @@ func headlines(p *Props) []*grafana.Panel { RefID: "B", ThresholdExpression: &grafana.ThresholdExpression{ Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{1, 0}, - Type: expr.TypeThresholdTypeLt, - }, + ThresholdConditionsOptions: grafana.ThresholdConditionsOption{ + Params: []float64{1}, + Type: grafana.TypeThresholdTypeLt, }, }, }, @@ -842,7 +887,9 @@ func headTracker(p *Props) []*grafana.Panel { Legend: `{{` + p.platformOpts.LabelFilter + `}}`, }, }, - AlertOptions: &grafana.AlertOptions{ + }, + AlertsOptions: []grafana.AlertOptions{ + { Summary: `No Headers Received`, Description: `{{ index $labels "` + p.platformOpts.LabelFilter + `" }} on ChainID {{ index $labels "ChainID" }} has received {{ index $values "A" }} heads over 10 minutes.`, RunbookURL: 
"https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", @@ -865,11 +912,9 @@ func headTracker(p *Props) []*grafana.Panel { RefID: "B", ThresholdExpression: &grafana.ThresholdExpression{ Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{1, 0}, - Type: expr.TypeThresholdTypeLt, - }, + ThresholdConditionsOptions: grafana.ThresholdConditionsOption{ + Params: []float64{1}, + Type: grafana.TypeThresholdTypeLt, }, }, }, @@ -970,57 +1015,146 @@ func headReporter(p *Props) []*grafana.Panel { func txManager(p *Props) []*grafana.Panel { var panels []*grafana.Panel - txStatus := map[string]string{ - "num_confirmed_transactions": "Confirmed", - "num_successful_transactions": "Successful", - "num_tx_reverted": "Reverted", - "num_gas_bumps": "Gas Bumps", - "fwd_tx_count": "Forwarded", - "tx_attempt_count": "Attempts", - "gas_bump_exceeds_limit": "Gas Bump Exceeds Limit", - } + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Confirmed", + Span: 6, + Height: 6, + Query: []grafana.Query{ + { + Expr: `sum(tx_manager_num_confirmed_transactions{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, + }, + }, + }, + })) - for status, title := range txStatus { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "TX Manager " + title, - Span: 6, - Height: 6, - Query: []grafana.Query{ - { - Expr: `sum(tx_manager_` + status + `{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, - }, + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Successful", + Span: 6, + Height: 6, + Query: []grafana.Query{ + { + Expr: `sum(tx_manager_num_successful_transactions{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, }, }, - })) - } + }, + })) - txUntilStatus := map[string]string{ - "broadcast": "The amount of time elapsed from when a transaction is enqueued to until it is broadcast", - "confirmed": "The amount of time elapsed from a transaction being broadcast to being included in a block", - } + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Reverted", + Span: 6, + Height: 6, + Query: []grafana.Query{ + { + Expr: `sum(tx_manager_num_tx_reverted{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, + }, + }, + }, + })) - for status, description := range txUntilStatus { - panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ - PanelOptions: &grafana.PanelOptions{ - Datasource: p.MetricsDataSource.Name, - Title: "TX Manager Time Until " + status, - Description: description, - Span: 6, - 
Height: 6, - Decimals: 1, - Unit: "ms", - Query: []grafana.Query{ - { - Expr: `histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_` + status + `_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le, ` + p.platformOpts.LabelFilter + `, blockchain, chainID)) / 1e6`, - Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, - }, + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Gas Bumps", + Span: 6, + Height: 6, + Query: []grafana.Query{ + { + Expr: `sum(tx_manager_num_gas_bumps{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, }, }, - })) - } + }, + })) + + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Forwarded", + Span: 6, + Height: 6, + Query: []grafana.Query{ + { + Expr: `sum(tx_manager_fwd_tx_count{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, + }, + }, + }, + })) + + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Attempts", + Span: 6, + Height: 6, + Query: []grafana.Query{ + { + Expr: `sum(tx_manager_tx_attempt_count{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, + }, + }, + }, + })) + + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Gas Bump Exceeds Limit", + Span: 6, + Height: 6, + Query: []grafana.Query{ + { + Expr: `sum(tx_manager_gas_bump_exceeds_limit{` + p.platformOpts.LabelQuery + `}) by (blockchain, chainID, ` + p.platformOpts.LabelFilter + `)`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, + }, + }, + }, + })) + + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Time Until Broadcast", + Description: "The amount of time elapsed from when a transaction is enqueued to until it is broadcast", + Span: 6, + Height: 6, + Decimals: 1, + Unit: "ms", + Query: []grafana.Query{ + { + Expr: `histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_broadcast_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le, ` + p.platformOpts.LabelFilter + `, blockchain, chainID)) / 1e6`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, + }, + }, + }, + })) + + panels = append(panels, grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{ + PanelOptions: &grafana.PanelOptions{ + Datasource: p.MetricsDataSource.Name, + Title: "TX Manager Time Until Confirmed", + Description: "The amount of time elapsed from a transaction being broadcast to being included in a block", + Span: 6, + Height: 6, + Decimals: 1, + Unit: "ms", + Query: []grafana.Query{ + { + Expr: `histogram_quantile(0.9, 
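
The two time-until panels chart the 90th percentile from Prometheus histogram buckets; the division by 1e6 suggests the source metric is recorded in nanoseconds and is being converted to the panel's ms unit. The query shape, with the label filter left empty as in the golden files:

// p90 time-until-broadcast in milliseconds. The le label must survive the
// inner sum for histogram_quantile to work; instance/blockchain/chainID
// keep one series per node and chain.
const p90Broadcast = `histogram_quantile(0.9, ` +
	`sum(rate(tx_manager_time_until_tx_broadcast_bucket{}[$__rate_interval])) ` +
	`by (le, instance, blockchain, chainID)) / 1e6`
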
sum(rate(tx_manager_time_until_tx_confirmed_bucket{` + p.platformOpts.LabelQuery + `}[$__rate_interval])) by (le, ` + p.platformOpts.LabelFilter + `, blockchain, chainID)) / 1e6`, + Legend: `{{` + p.platformOpts.LabelFilter + `}} - {{blockchain}} - {{chainID}}`, + }, + }, + }, + })) return panels } @@ -1472,6 +1606,7 @@ func evmNodeRPC(p *Props) []*grafana.Panel { Title: "EVM Pool RPC Node Calls Success Rate", Span: 24, Height: 6, + Decimals: 2, Unit: "percentunit", Max: grafana.Pointer[float64](1), Query: []grafana.Query{ @@ -1502,6 +1637,7 @@ func evmNodeRPC(p *Props) []*grafana.Panel { Title: "EVM Pool RPC Node Dials Failure Rate", Span: 24, Height: 6, + Decimals: 2, Unit: "percentunit", Max: grafana.Pointer[float64](1), Query: []grafana.Query{ diff --git a/observability-lib/dashboards/core-node/component_test.go b/observability-lib/dashboards/core-node/component_test.go index 33a408f48..965acc493 100644 --- a/observability-lib/dashboards/core-node/component_test.go +++ b/observability-lib/dashboards/core-node/component_test.go @@ -1,6 +1,7 @@ package corenode_test import ( + "flag" "os" "testing" @@ -10,28 +11,72 @@ import ( "github.com/stretchr/testify/require" ) +var update = flag.Bool("update", false, "update golden test files") + +const fileOutput = "test-output.json" + +func TestGenerateFile(t *testing.T) { + if *update == false { + t.Skip("skipping test") + } + + testDashboard, err := corenode.NewDashboard(&corenode.Props{ + Name: "Core Node Dashboard", + Platform: grafana.TypePlatformDocker, + MetricsDataSource: grafana.NewDataSource("Prometheus", "1"), + LogsDataSource: grafana.NewDataSource("Loki", "2"), + Tested: true, + }) + if err != nil { + t.Errorf("Error creating dashboard: %v", err) + } + json, errJSON := testDashboard.GenerateJSON() + if errJSON != nil { + t.Errorf("Error generating JSON: %v", errJSON) + } + if _, errExists := os.Stat(fileOutput); errExists == nil { + errRemove := os.Remove(fileOutput) + if errRemove != nil { + t.Errorf("Error removing file: %v", errRemove) + } + } + file, errFile := os.Create(fileOutput) + if errFile != nil { + panic(errFile) + } + writeString, err := file.WriteString(string(json)) + if err != nil { + t.Errorf("Error writing to file: %v", writeString) + } + t.Cleanup(func() { + file.Close() + }) +} + func TestNewDashboard(t *testing.T) { t.Run("NewDashboard creates a dashboard", func(t *testing.T) { testDashboard, err := corenode.NewDashboard(&corenode.Props{ Name: "Core Node Dashboard", Platform: grafana.TypePlatformDocker, MetricsDataSource: grafana.NewDataSource("Prometheus", "1"), + LogsDataSource: grafana.NewDataSource("Loki", "2"), + Tested: true, }) if err != nil { t.Errorf("Error creating dashboard: %v", err) } - require.IsType(t, grafana.Dashboard{}, *testDashboard) + require.IsType(t, grafana.Observability{}, *testDashboard) require.Equal(t, "Core Node Dashboard", *testDashboard.Dashboard.Title) json, errJSON := testDashboard.GenerateJSON() if errJSON != nil { t.Errorf("Error generating JSON: %v", errJSON) } - jsonCompared, errCompared := os.ReadFile("test-output.json") + jsonCompared, errCompared := os.ReadFile(fileOutput) if errCompared != nil { t.Errorf("Error reading file: %v", errCompared) } - require.ElementsMatch(t, jsonCompared, json) + require.JSONEq(t, string(jsonCompared), string(json)) }) } diff --git a/observability-lib/dashboards/core-node/platform.go b/observability-lib/dashboards/core-node/platform.go index 7a8ae8e26..fbd7b6c6c 100644 --- a/observability-lib/dashboards/core-node/platform.go +++ 
b/observability-lib/dashboards/core-node/platform.go @@ -19,11 +19,13 @@ type Props struct { Name string // Name is the name of the dashboard Platform grafana.TypePlatform // Platform is infrastructure deployment platform: docker or k8s MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics + LogsDataSource *grafana.DataSource // LogsDataSource is the datasource for querying logs SlackChannel string // SlackChannel is the channel to send alerts to SlackWebhookURL string // SlackWebhookURL is the URL to send alerts to AlertsTags map[string]string // AlertsTags is the tags to map with notification policy AlertsFilters string // AlertsFilters is the filters to apply to alerts platformOpts platformOpts + Tested bool } // PlatformPanelOpts generate different queries for "docker" and "k8s" deployment platforms diff --git a/observability-lib/dashboards/core-node/test-output.json b/observability-lib/dashboards/core-node/test-output.json index 1ce52864b..7f43d7735 100644 --- a/observability-lib/dashboards/core-node/test-output.json +++ b/observability-lib/dashboards/core-node/test-output.json @@ -6,6 +6,7 @@ "Node" ], "timezone": "browser", + "editable": true, "graphTooltip": 0, "time": { "from": "now-30m", @@ -13,7 +14,7 @@ }, "fiscalYearStartMonth": 0, "refresh": "30s", - "schemaVersion": 0, + "schemaVersion": 39, "panels": [ { "type": "row", @@ -30,10 +31,10 @@ }, { "type": "stat", - "id": 0, + "id": 1, "targets": [ { - "expr": "version{instance=~\"${instance}\", }", + "expr": "version{}", "instant": true, "range": false, "format": "", @@ -59,6 +60,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -68,7 +70,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -82,10 +84,10 @@ }, { "type": "stat", - "id": 1, + "id": 2, "targets": [ { - "expr": "uptime_seconds{instance=~\"${instance}\", }", + "expr": "uptime_seconds{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -109,6 +111,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -118,7 +121,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -132,10 +135,10 @@ }, { "type": "stat", - "id": 2, + "id": 3, "targets": [ { - "expr": "sum(eth_balance{instance=~\"${instance}\", }) by (instance, account)", + "expr": "sum(eth_balance{}) by (instance, account)", "instant": true, "range": false, "format": "", @@ -161,6 +164,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -170,7 +174,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -201,10 +205,10 @@ }, { "type": "stat", - "id": 3, + "id": 4, "targets": [ { - "expr": "sum(solana_balance{instance=~\"${instance}\", }) by (instance, account)", + "expr": "sum(solana_balance{}) by (instance, account)", "instant": true, "range": false, "format": "", @@ -230,6 +234,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -239,7 +244,7 @@ "titleSize": 10, "valueSize": 
18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -270,10 +275,10 @@ }, { "type": "timeseries", - "id": 4, + "id": 5, "targets": [ { - "expr": "100 * (avg(avg_over_time(health{instance=~\"${instance}\", }[15m])) by (instance, service_id, version, service, cluster, env))", + "expr": "100 * (avg(avg_over_time(health{}[15m])) by (instance, service_id, version, service, cluster, env))", "format": "", "legendFormat": "{{instance}} - {{service_id}}", "refId": "" @@ -287,7 +292,7 @@ }, "gridPos": { "h": 6, - "w": 16, + "w": 24, "x": 0, "y": 9 }, @@ -309,9 +314,33 @@ "decimals": 1, "min": 0, "max": 100, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "value": null, + "color": "green" + }, + { + "value": 50, + "color": "red" + }, + { + "value": 70, + "color": "orange" + }, + { + "value": 90, + "color": "green" + } + ] + }, "noValue": "No data", "custom": { - "fillOpacity": 2, + "thresholdsStyle": { + "mode": "dashed" + }, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -322,10 +351,10 @@ }, { "type": "stat", - "id": 5, + "id": 6, "targets": [ { - "expr": "100 * avg(avg_over_time(health{instance=~\"${instance}\", }[15m])) by (instance, service_id, version, service, cluster, env) \u003c 90", + "expr": "100 * avg(avg_over_time(health{}[15m])) by (instance, service_id, version, service, cluster, env) \u003c 90", "format": "", "legendFormat": "{{instance}} - {{service_id}}", "refId": "" @@ -339,9 +368,9 @@ }, "gridPos": { "h": 6, - "w": 8, - "x": 16, - "y": 9 + "w": 24, + "x": 0, + "y": 15 }, "options": { "graphMode": "line", @@ -349,6 +378,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -358,7 +388,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -391,12 +421,53 @@ "overrides": null } }, + { + "type": "logs", + "id": 7, + "targets": [ + { + "expr": "{env=\"${env}\", cluster=\"${cluster}\", product=\"${product}\", network_type=\"${network_type}\", namespace=\"${namespace}\", pod=\"${pod}\"} | json | level=~\"(error|panic|fatal|crit)\"", + "format": "", + "legendFormat": "", + "refId": "" + } + ], + "title": "Logs with severity \u003e= error", + "description": "", + "transparent": false, + "datasource": { + "uid": "Loki" + }, + "gridPos": { + "h": 10, + "w": 24, + "x": 0, + "y": 21 + }, + "options": { + "showLabels": false, + "showCommonLabels": false, + "showTime": false, + "showLogContextToggle": false, + "wrapLogMessage": false, + "prettifyLogMessage": true, + "enableLogDetails": false, + "sortOrder": "", + "dedupStrategy": "" + }, + "fieldConfig": { + "defaults": { + "noValue": "No data" + }, + "overrides": null + } + }, { "type": "timeseries", - "id": 6, + "id": 8, "targets": [ { - "expr": "sum(eth_balance{instance=~\"${instance}\", }) by (instance, account)", + "expr": "sum(eth_balance{}) by (instance, account)", "format": "", "legendFormat": "{{instance}} - {{account}}", "refId": "" @@ -412,7 +483,7 @@ "h": 6, "w": 12, "x": 0, - "y": 15 + "y": 31 }, "options": { "legend": { @@ -432,7 +503,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -443,10 +514,10 @@ }, { "type": "timeseries", - "id": 7, + "id": 9, "targets": [ { - "expr": "sum(solana_balance{instance=~\"${instance}\", }) by (instance, 
account)", + "expr": "sum(solana_balance{}) by (instance, account)", "format": "", "legendFormat": "{{instance}} - {{account}}", "refId": "" @@ -462,7 +533,7 @@ "h": 6, "w": 12, "x": 12, - "y": 15 + "y": 31 }, "options": { "legend": { @@ -482,7 +553,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -493,10 +564,10 @@ }, { "type": "stat", - "id": 8, + "id": 10, "targets": [ { - "expr": "process_open_fds{instance=~\"${instance}\", }", + "expr": "process_open_fds{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -512,7 +583,7 @@ "h": 4, "w": 6, "x": 0, - "y": 21 + "y": 37 }, "options": { "graphMode": "area", @@ -520,6 +591,7 @@ "justifyMode": "auto", "textMode": "value", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -529,7 +601,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -543,10 +615,10 @@ }, { "type": "stat", - "id": 9, + "id": 11, "targets": [ { - "expr": "go_info{instance=~\"${instance}\", }", + "expr": "go_info{}", "instant": true, "range": false, "format": "", @@ -564,7 +636,7 @@ "h": 4, "w": 4, "x": 6, - "y": 21 + "y": 37 }, "options": { "graphMode": "none", @@ -572,6 +644,7 @@ "justifyMode": "auto", "textMode": "name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -581,7 +654,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -601,35 +674,35 @@ "h": 1, "w": 24, "x": 0, - "y": 25 + "y": 41 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 10, + "id": 12, "targets": [ { - "expr": "sum(db_conns_max{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(db_conns_max{}) by (instance)", "format": "", "legendFormat": "{{instance}} - Max", "refId": "" }, { - "expr": "sum(db_conns_open{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(db_conns_open{}) by (instance)", "format": "", "legendFormat": "{{instance}} - Open", "refId": "" }, { - "expr": "sum(db_conns_used{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(db_conns_used{}) by (instance)", "format": "", "legendFormat": "{{instance}} - Used", "refId": "" }, { - "expr": "sum(db_conns_wait{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(db_conns_wait{}) by (instance)", "format": "", "legendFormat": "{{instance}} - Wait", "refId": "" @@ -645,7 +718,7 @@ "h": 6, "w": 24, "x": 0, - "y": 26 + "y": 42 }, "options": { "legend": { @@ -665,7 +738,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -676,10 +749,10 @@ }, { "type": "timeseries", - "id": 11, + "id": 13, "targets": [ { - "expr": "sum(db_wait_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(db_wait_count{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -695,7 +768,7 @@ "h": 6, "w": 12, "x": 0, - "y": 32 + "y": 48 }, "options": { "legend": { @@ -715,7 +788,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -726,10 +799,10 @@ }, { "type": "timeseries", - "id": 12, + "id": 14, "targets": [ { - "expr": "sum(db_wait_time_seconds{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(db_wait_time_seconds{}) by 
(instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -745,7 +818,7 @@ "h": 6, "w": 12, "x": 12, - "y": 32 + "y": 48 }, "options": { "legend": { @@ -765,7 +838,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -782,29 +855,29 @@ "h": 1, "w": 24, "x": 0, - "y": 38 + "y": 54 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 13, + "id": 15, "targets": [ { - "expr": "histogram_quantile(0.9, sum(rate(sql_query_timeout_percent_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le))", + "expr": "histogram_quantile(0.9, sum(rate(sql_query_timeout_percent_bucket{}[$__rate_interval])) by (le))", "format": "", "legendFormat": "p90", "refId": "" }, { - "expr": "histogram_quantile(0.95, sum(rate(sql_query_timeout_percent_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le))", + "expr": "histogram_quantile(0.95, sum(rate(sql_query_timeout_percent_bucket{}[$__rate_interval])) by (le))", "format": "", "legendFormat": "p95", "refId": "" }, { - "expr": "histogram_quantile(0.99, sum(rate(sql_query_timeout_percent_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le))", + "expr": "histogram_quantile(0.99, sum(rate(sql_query_timeout_percent_bucket{}[$__rate_interval])) by (le))", "format": "", "legendFormat": "p99", "refId": "" @@ -820,7 +893,7 @@ "h": 6, "w": 24, "x": 0, - "y": 39 + "y": 55 }, "options": { "legend": { @@ -840,7 +913,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -857,17 +930,17 @@ "h": 1, "w": 24, "x": 0, - "y": 45 + "y": 61 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 14, + "id": 16, "targets": [ { - "expr": "sum(head_tracker_current_head{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(head_tracker_current_head{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -883,7 +956,7 @@ "h": 6, "w": 18, "x": 0, - "y": 46 + "y": 62 }, "options": { "legend": { @@ -903,7 +976,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -914,10 +987,10 @@ }, { "type": "stat", - "id": 15, + "id": 17, "targets": [ { - "expr": "head_tracker_current_head{instance=~\"${instance}\", }", + "expr": "head_tracker_current_head{}", "instant": true, "range": false, "format": "", @@ -935,7 +1008,7 @@ "h": 6, "w": 6, "x": 18, - "y": 46 + "y": 62 }, "options": { "graphMode": "none", @@ -943,6 +1016,7 @@ "justifyMode": "auto", "textMode": "value", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -952,7 +1026,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -966,10 +1040,10 @@ }, { "type": "timeseries", - "id": 16, + "id": 18, "targets": [ { - "expr": "rate(head_tracker_heads_received{instance=~\"${instance}\", }[1m])", + "expr": "rate(head_tracker_heads_received{}[1m])", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -985,7 +1059,7 @@ "h": 6, "w": 24, "x": 0, - "y": 52 + "y": 68 }, "options": { "legend": { @@ -1005,7 +1079,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1016,10 +1090,10 @@ }, { "type": "timeseries", - "id": 17, + "id": 19, "targets": [ { - "expr": 
"head_tracker_very_old_head{instance=~\"${instance}\", }", + "expr": "head_tracker_very_old_head{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -1035,7 +1109,7 @@ "h": 6, "w": 12, "x": 0, - "y": 58 + "y": 74 }, "options": { "legend": { @@ -1055,7 +1129,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1066,10 +1140,10 @@ }, { "type": "timeseries", - "id": 18, + "id": 20, "targets": [ { - "expr": "rate(head_tracker_connection_errors{instance=~\"${instance}\", }[1m])", + "expr": "rate(head_tracker_connection_errors{}[1m])", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -1085,7 +1159,7 @@ "h": 6, "w": 12, "x": 12, - "y": 58 + "y": 74 }, "options": { "legend": { @@ -1105,7 +1179,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1122,17 +1196,17 @@ "h": 1, "w": 24, "x": 0, - "y": 64 + "y": 80 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 19, + "id": 21, "targets": [ { - "expr": "sum(unconfirmed_transactions{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(unconfirmed_transactions{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -1148,7 +1222,7 @@ "h": 6, "w": 8, "x": 0, - "y": 65 + "y": 81 }, "options": { "legend": { @@ -1168,7 +1242,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1179,10 +1253,10 @@ }, { "type": "timeseries", - "id": 20, + "id": 22, "targets": [ { - "expr": "sum(max_unconfirmed_tx_age{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(max_unconfirmed_tx_age{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -1198,7 +1272,7 @@ "h": 6, "w": 8, "x": 8, - "y": 65 + "y": 81 }, "options": { "legend": { @@ -1218,7 +1292,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1229,10 +1303,10 @@ }, { "type": "timeseries", - "id": 21, + "id": 23, "targets": [ { - "expr": "sum(max_unconfirmed_blocks{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(max_unconfirmed_blocks{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -1248,7 +1322,7 @@ "h": 6, "w": 8, "x": 16, - "y": 65 + "y": 81 }, "options": { "legend": { @@ -1268,7 +1342,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1285,17 +1359,17 @@ "h": 1, "w": 24, "x": 0, - "y": 71 + "y": 87 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 22, + "id": 24, "targets": [ { - "expr": "sum(tx_manager_num_confirmed_transactions{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", + "expr": "sum(tx_manager_num_confirmed_transactions{}) by (blockchain, chainID, instance)", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" @@ -1311,7 +1385,7 @@ "h": 6, "w": 6, "x": 0, - "y": 72 + "y": 88 }, "options": { "legend": { @@ -1331,7 +1405,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1342,10 +1416,10 @@ }, { "type": "timeseries", - "id": 23, + "id": 25, "targets": [ { - "expr": "sum(tx_manager_num_successful_transactions{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", + 
"expr": "sum(tx_manager_num_successful_transactions{}) by (blockchain, chainID, instance)", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" @@ -1361,7 +1435,7 @@ "h": 6, "w": 6, "x": 6, - "y": 72 + "y": 88 }, "options": { "legend": { @@ -1381,7 +1455,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1392,10 +1466,10 @@ }, { "type": "timeseries", - "id": 24, + "id": 26, "targets": [ { - "expr": "sum(tx_manager_num_tx_reverted{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", + "expr": "sum(tx_manager_num_tx_reverted{}) by (blockchain, chainID, instance)", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" @@ -1411,7 +1485,7 @@ "h": 6, "w": 6, "x": 12, - "y": 72 + "y": 88 }, "options": { "legend": { @@ -1431,7 +1505,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1442,10 +1516,10 @@ }, { "type": "timeseries", - "id": 25, + "id": 27, "targets": [ { - "expr": "sum(tx_manager_num_gas_bumps{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", + "expr": "sum(tx_manager_num_gas_bumps{}) by (blockchain, chainID, instance)", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" @@ -1461,7 +1535,7 @@ "h": 6, "w": 6, "x": 18, - "y": 72 + "y": 88 }, "options": { "legend": { @@ -1481,7 +1555,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1492,10 +1566,10 @@ }, { "type": "timeseries", - "id": 26, + "id": 28, "targets": [ { - "expr": "sum(tx_manager_fwd_tx_count{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", + "expr": "sum(tx_manager_fwd_tx_count{}) by (blockchain, chainID, instance)", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" @@ -1511,7 +1585,7 @@ "h": 6, "w": 6, "x": 0, - "y": 78 + "y": 94 }, "options": { "legend": { @@ -1531,7 +1605,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1542,10 +1616,10 @@ }, { "type": "timeseries", - "id": 27, + "id": 29, "targets": [ { - "expr": "sum(tx_manager_tx_attempt_count{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", + "expr": "sum(tx_manager_tx_attempt_count{}) by (blockchain, chainID, instance)", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" @@ -1561,7 +1635,7 @@ "h": 6, "w": 6, "x": 6, - "y": 78 + "y": 94 }, "options": { "legend": { @@ -1581,7 +1655,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1592,10 +1666,10 @@ }, { "type": "timeseries", - "id": 28, + "id": 30, "targets": [ { - "expr": "sum(tx_manager_gas_bump_exceeds_limit{instance=~\"${instance}\", }) by (blockchain, chainID, instance)", + "expr": "sum(tx_manager_gas_bump_exceeds_limit{}) by (blockchain, chainID, instance)", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" @@ -1611,7 +1685,7 @@ "h": 6, "w": 6, "x": 12, - "y": 78 + "y": 94 }, "options": { "legend": { @@ -1631,7 +1705,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1642,16 +1716,16 @@ }, { 
"type": "timeseries", - "id": 29, + "id": 31, "targets": [ { - "expr": "histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_broadcast_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, blockchain, chainID)) / 1e6", + "expr": "histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_broadcast_bucket{}[$__rate_interval])) by (le, instance, blockchain, chainID)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" } ], - "title": "TX Manager Time Until broadcast", + "title": "TX Manager Time Until Broadcast", "description": "The amount of time elapsed from when a transaction is enqueued to until it is broadcast", "transparent": false, "datasource": { @@ -1661,7 +1735,7 @@ "h": 6, "w": 6, "x": 18, - "y": 78 + "y": 94 }, "options": { "legend": { @@ -1681,7 +1755,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1692,16 +1766,16 @@ }, { "type": "timeseries", - "id": 30, + "id": 32, "targets": [ { - "expr": "histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_confirmed_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, blockchain, chainID)) / 1e6", + "expr": "histogram_quantile(0.9, sum(rate(tx_manager_time_until_tx_confirmed_bucket{}[$__rate_interval])) by (le, instance, blockchain, chainID)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{blockchain}} - {{chainID}}", "refId": "" } ], - "title": "TX Manager Time Until confirmed", + "title": "TX Manager Time Until Confirmed", "description": "The amount of time elapsed from a transaction being broadcast to being included in a block", "transparent": false, "datasource": { @@ -1711,7 +1785,7 @@ "h": 6, "w": 6, "x": 0, - "y": 84 + "y": 100 }, "options": { "legend": { @@ -1731,7 +1805,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1748,17 +1822,17 @@ "h": 1, "w": 24, "x": 0, - "y": 90 + "y": 106 }, "id": 0, "panels": null }, { "type": "stat", - "id": 31, + "id": 33, "targets": [ { - "expr": "count(log_poller_query_duration_sum{instance=~\"${instance}\", }) by (evmChainID)", + "expr": "count(log_poller_query_duration_sum{}) by (evmChainID)", "format": "", "legendFormat": "chainId: {{evmChainID}}", "refId": "" @@ -1774,7 +1848,7 @@ "h": 6, "w": 12, "x": 0, - "y": 91 + "y": 107 }, "options": { "graphMode": "line", @@ -1782,6 +1856,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -1791,7 +1866,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -1805,16 +1880,16 @@ }, { "type": "timeseries", - "id": 32, + "id": 34, "targets": [ { - "expr": "avg by (query, instance) (sum by (query, job) (rate(log_poller_query_duration_count{instance=~\"${instance}\", }[$__rate_interval])))", + "expr": "avg by (query, instance) (sum by (query, job) (rate(log_poller_query_duration_count{}[$__rate_interval])))", "format": "", "legendFormat": "{{instance}} - {{query}}", "refId": "" }, { - "expr": "avg (sum by(instance) (rate(log_poller_query_duration_count{instance=~\"${instance}\", }[$__rate_interval])))", + "expr": "avg (sum by(instance) (rate(log_poller_query_duration_count{}[$__rate_interval])))", "format": "", "legendFormat": "Total", "refId": "" @@ -1830,7 +1905,7 @@ "h": 6, 
"w": 12, "x": 12, - "y": 91 + "y": 107 }, "options": { "legend": { @@ -1850,7 +1925,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1861,10 +1936,10 @@ }, { "type": "timeseries", - "id": 33, + "id": 35, "targets": [ { - "expr": "avg by (instance, type) (sum by (type, instance) (rate(log_poller_query_duration_count{instance=~\"${instance}\", }[$__rate_interval])))", + "expr": "avg by (instance, type) (sum by (type, instance) (rate(log_poller_query_duration_count{}[$__rate_interval])))", "format": "", "legendFormat": "{{instance}} - {{type}}", "refId": "" @@ -1880,7 +1955,7 @@ "h": 6, "w": 12, "x": 0, - "y": 97 + "y": 113 }, "options": { "legend": { @@ -1900,7 +1975,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1911,10 +1986,10 @@ }, { "type": "timeseries", - "id": 34, + "id": 36, "targets": [ { - "expr": "avg by (instance, query) (log_poller_query_dataset_size{instance=~\"${instance}\", })", + "expr": "avg by (instance, query) (log_poller_query_dataset_size{})", "format": "", "legendFormat": "{{instance}} - {{query}}", "refId": "" @@ -1930,7 +2005,7 @@ "h": 6, "w": 12, "x": 12, - "y": 97 + "y": 113 }, "options": { "legend": { @@ -1950,7 +2025,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -1961,10 +2036,10 @@ }, { "type": "timeseries", - "id": 35, + "id": 37, "targets": [ { - "expr": "max by (instance, query) (log_poller_query_dataset_size{instance=~\"${instance}\", })", + "expr": "max by (instance, query) (log_poller_query_dataset_size{})", "format": "", "legendFormat": "{{instance}} - {{query}}", "refId": "" @@ -1980,7 +2055,7 @@ "h": 6, "w": 12, "x": 0, - "y": 103 + "y": 119 }, "options": { "legend": { @@ -2000,7 +2075,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2011,10 +2086,10 @@ }, { "type": "timeseries", - "id": 36, + "id": 38, "targets": [ { - "expr": "max by (evmChainID) (log_poller_query_dataset_size{instance=~\"${instance}\", })", + "expr": "max by (evmChainID) (log_poller_query_dataset_size{})", "format": "", "legendFormat": "{{evmChainID}}", "refId": "" @@ -2030,7 +2105,7 @@ "h": 6, "w": 12, "x": 12, - "y": 103 + "y": 119 }, "options": { "legend": { @@ -2050,7 +2125,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2061,10 +2136,10 @@ }, { "type": "timeseries", - "id": 37, + "id": 39, "targets": [ { - "expr": "histogram_quantile(0.5, sum(rate(log_poller_query_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, query)) / 1e6", + "expr": "histogram_quantile(0.5, sum(rate(log_poller_query_duration_bucket{}[$__rate_interval])) by (le, instance, query)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{query}}", "refId": "" @@ -2080,7 +2155,7 @@ "h": 6, "w": 24, "x": 0, - "y": 109 + "y": 125 }, "options": { "legend": { @@ -2100,7 +2175,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2111,10 +2186,10 @@ }, { "type": "timeseries", - "id": 38, + "id": 40, "targets": [ { - "expr": "histogram_quantile(0.9, sum(rate(log_poller_query_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, 
instance, query)) / 1e6", + "expr": "histogram_quantile(0.9, sum(rate(log_poller_query_duration_bucket{}[$__rate_interval])) by (le, instance, query)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{query}}", "refId": "" @@ -2130,7 +2205,7 @@ "h": 6, "w": 24, "x": 0, - "y": 115 + "y": 131 }, "options": { "legend": { @@ -2150,7 +2225,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2161,10 +2236,10 @@ }, { "type": "timeseries", - "id": 39, + "id": 41, "targets": [ { - "expr": "histogram_quantile(0.99, sum(rate(log_poller_query_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (le, instance, query)) / 1e6", + "expr": "histogram_quantile(0.99, sum(rate(log_poller_query_duration_bucket{}[$__rate_interval])) by (le, instance, query)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{query}}", "refId": "" @@ -2180,7 +2255,7 @@ "h": 6, "w": 24, "x": 0, - "y": 121 + "y": 137 }, "options": { "legend": { @@ -2200,7 +2275,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2211,10 +2286,10 @@ }, { "type": "timeseries", - "id": 40, + "id": 42, "targets": [ { - "expr": "avg by (evmChainID) (log_poller_logs_inserted{instance=~\"${instance}\", })", + "expr": "avg by (evmChainID) (log_poller_logs_inserted{})", "format": "", "legendFormat": "{{evmChainID}}", "refId": "" @@ -2230,7 +2305,7 @@ "h": 6, "w": 12, "x": 0, - "y": 127 + "y": 143 }, "options": { "legend": { @@ -2250,7 +2325,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2261,10 +2336,10 @@ }, { "type": "timeseries", - "id": 41, + "id": 43, "targets": [ { - "expr": "avg by (evmChainID) (rate(log_poller_logs_inserted{instance=~\"${instance}\", }[$__rate_interval]))", + "expr": "avg by (evmChainID) (rate(log_poller_logs_inserted{}[$__rate_interval]))", "format": "", "legendFormat": "{{evmChainID}}", "refId": "" @@ -2280,7 +2355,7 @@ "h": 6, "w": 12, "x": 12, - "y": 127 + "y": 143 }, "options": { "legend": { @@ -2300,7 +2375,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2311,10 +2386,10 @@ }, { "type": "timeseries", - "id": 42, + "id": 44, "targets": [ { - "expr": "avg by (evmChainID) (log_poller_blocks_inserted{instance=~\"${instance}\", })", + "expr": "avg by (evmChainID) (log_poller_blocks_inserted{})", "format": "", "legendFormat": "{{evmChainID}}", "refId": "" @@ -2330,7 +2405,7 @@ "h": 6, "w": 12, "x": 0, - "y": 133 + "y": 149 }, "options": { "legend": { @@ -2350,7 +2425,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2361,10 +2436,10 @@ }, { "type": "timeseries", - "id": 43, + "id": 45, "targets": [ { - "expr": "avg by (evmChainID) (rate(log_poller_blocks_inserted{instance=~\"${instance}\", }[$__rate_interval]))", + "expr": "avg by (evmChainID) (rate(log_poller_blocks_inserted{}[$__rate_interval]))", "format": "", "legendFormat": "{{evmChainID}}", "refId": "" @@ -2380,7 +2455,7 @@ "h": 6, "w": 12, "x": 12, - "y": 133 + "y": 149 }, "options": { "legend": { @@ -2400,7 +2475,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2417,17 +2492,17 @@ "h": 1, "w": 24, "x": 0, - "y": 139 
+ "y": 155 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 44, + "id": 46, "targets": [ { - "expr": "sum(feeds_job_proposal_requests{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(feeds_job_proposal_requests{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -2443,7 +2518,7 @@ "h": 6, "w": 12, "x": 0, - "y": 140 + "y": 156 }, "options": { "legend": { @@ -2463,7 +2538,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2474,10 +2549,10 @@ }, { "type": "timeseries", - "id": 45, + "id": 47, "targets": [ { - "expr": "sum(feeds_job_proposal_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(feeds_job_proposal_count{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -2493,7 +2568,7 @@ "h": 6, "w": 12, "x": 12, - "y": 140 + "y": 156 }, "options": { "legend": { @@ -2513,7 +2588,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2530,17 +2605,17 @@ "h": 1, "w": 24, "x": 0, - "y": 146 + "y": 162 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 46, + "id": 48, "targets": [ { - "expr": "sum(mailbox_load_percent{instance=~\"${instance}\", }) by (capacity, name, instance)", + "expr": "sum(mailbox_load_percent{}) by (capacity, name, instance)", "format": "", "legendFormat": "{{instance}} - Capacity: {{capacity}} - {{name}}", "refId": "" @@ -2556,7 +2631,7 @@ "h": 6, "w": 24, "x": 0, - "y": 147 + "y": 163 }, "options": { "legend": { @@ -2576,7 +2651,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2593,17 +2668,17 @@ "h": 1, "w": 24, "x": 0, - "y": 153 + "y": 169 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 47, + "id": 49, "targets": [ { - "expr": "sum(log_panic_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(log_panic_count{}) by (instance)", "format": "", "legendFormat": "{{instance}} - panic", "refId": "" @@ -2619,7 +2694,7 @@ "h": 6, "w": 8, "x": 0, - "y": 154 + "y": 170 }, "options": { "legend": { @@ -2639,7 +2714,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2650,10 +2725,10 @@ }, { "type": "timeseries", - "id": 48, + "id": 50, "targets": [ { - "expr": "sum(log_fatal_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(log_fatal_count{}) by (instance)", "format": "", "legendFormat": "{{instance}} - fatal", "refId": "" @@ -2669,7 +2744,7 @@ "h": 6, "w": 8, "x": 8, - "y": 154 + "y": 170 }, "options": { "legend": { @@ -2689,7 +2764,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2700,10 +2775,10 @@ }, { "type": "timeseries", - "id": 49, + "id": 51, "targets": [ { - "expr": "sum(log_critical_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(log_critical_count{}) by (instance)", "format": "", "legendFormat": "{{instance}} - critical", "refId": "" @@ -2719,7 +2794,7 @@ "h": 6, "w": 8, "x": 16, - "y": 154 + "y": 170 }, "options": { "legend": { @@ -2739,7 +2814,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2750,10 +2825,10 @@ }, { "type": "timeseries", - "id": 50, + "id": 52, "targets": [ { 
- "expr": "sum(log_warn_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(log_warn_count{}) by (instance)", "format": "", "legendFormat": "{{instance}} - warn", "refId": "" @@ -2769,7 +2844,7 @@ "h": 6, "w": 8, "x": 0, - "y": 160 + "y": 176 }, "options": { "legend": { @@ -2789,7 +2864,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2800,10 +2875,10 @@ }, { "type": "timeseries", - "id": 51, + "id": 53, "targets": [ { - "expr": "sum(log_error_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(log_error_count{}) by (instance)", "format": "", "legendFormat": "{{instance}} - error", "refId": "" @@ -2819,7 +2894,7 @@ "h": 6, "w": 8, "x": 8, - "y": 160 + "y": 176 }, "options": { "legend": { @@ -2839,7 +2914,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2856,17 +2931,17 @@ "h": 1, "w": 24, "x": 0, - "y": 166 + "y": 182 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 52, + "id": 54, "targets": [ { - "expr": "sum(rate(log_panic_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", + "expr": "sum(rate(log_panic_count{}[$__rate_interval])) by (instance)", "format": "", "legendFormat": "{{instance}} - error", "refId": "" @@ -2882,7 +2957,7 @@ "h": 6, "w": 8, "x": 0, - "y": 167 + "y": 183 }, "options": { "legend": { @@ -2902,7 +2977,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2913,10 +2988,10 @@ }, { "type": "timeseries", - "id": 53, + "id": 55, "targets": [ { - "expr": "sum(rate(log_fatal_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", + "expr": "sum(rate(log_fatal_count{}[$__rate_interval])) by (instance)", "format": "", "legendFormat": "{{instance}} - error", "refId": "" @@ -2932,7 +3007,7 @@ "h": 6, "w": 8, "x": 8, - "y": 167 + "y": 183 }, "options": { "legend": { @@ -2952,7 +3027,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -2963,10 +3038,10 @@ }, { "type": "timeseries", - "id": 54, + "id": 56, "targets": [ { - "expr": "sum(rate(log_critical_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", + "expr": "sum(rate(log_critical_count{}[$__rate_interval])) by (instance)", "format": "", "legendFormat": "{{instance}} - error", "refId": "" @@ -2982,7 +3057,7 @@ "h": 6, "w": 8, "x": 16, - "y": 167 + "y": 183 }, "options": { "legend": { @@ -3002,7 +3077,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3013,10 +3088,10 @@ }, { "type": "timeseries", - "id": 55, + "id": 57, "targets": [ { - "expr": "sum(rate(log_warn_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", + "expr": "sum(rate(log_warn_count{}[$__rate_interval])) by (instance)", "format": "", "legendFormat": "{{instance}} - error", "refId": "" @@ -3032,7 +3107,7 @@ "h": 6, "w": 8, "x": 0, - "y": 173 + "y": 189 }, "options": { "legend": { @@ -3052,7 +3127,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3063,10 +3138,10 @@ }, { "type": "timeseries", - "id": 56, + "id": 58, "targets": [ { - "expr": "sum(rate(log_error_count{instance=~\"${instance}\", }[$__rate_interval])) by 
(instance)", + "expr": "sum(rate(log_error_count{}[$__rate_interval])) by (instance)", "format": "", "legendFormat": "{{instance}} - error", "refId": "" @@ -3082,7 +3157,7 @@ "h": 6, "w": 8, "x": 8, - "y": 173 + "y": 189 }, "options": { "legend": { @@ -3102,7 +3177,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3119,17 +3194,17 @@ "h": 1, "w": 24, "x": 0, - "y": 179 + "y": 195 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 57, + "id": 59, "targets": [ { - "expr": "evm_pool_rpc_node_highest_seen_block{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_highest_seen_block{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -3145,7 +3220,7 @@ "h": 6, "w": 12, "x": 0, - "y": 180 + "y": 196 }, "options": { "legend": { @@ -3165,7 +3240,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3176,10 +3251,10 @@ }, { "type": "timeseries", - "id": 58, + "id": 60, "targets": [ { - "expr": "evm_pool_rpc_node_num_seen_blocks{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_num_seen_blocks{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -3195,7 +3270,7 @@ "h": 6, "w": 12, "x": 12, - "y": 180 + "y": 196 }, "options": { "legend": { @@ -3215,7 +3290,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3226,10 +3301,10 @@ }, { "type": "timeseries", - "id": 59, + "id": 61, "targets": [ { - "expr": "evm_pool_rpc_node_polls_total{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_polls_total{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -3245,7 +3320,7 @@ "h": 6, "w": 12, "x": 0, - "y": 186 + "y": 202 }, "options": { "legend": { @@ -3265,7 +3340,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3276,10 +3351,10 @@ }, { "type": "timeseries", - "id": 60, + "id": 62, "targets": [ { - "expr": "evm_pool_rpc_node_polls_failed{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_polls_failed{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -3295,7 +3370,7 @@ "h": 6, "w": 12, "x": 12, - "y": 186 + "y": 202 }, "options": { "legend": { @@ -3315,7 +3390,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3326,10 +3401,10 @@ }, { "type": "timeseries", - "id": 61, + "id": 63, "targets": [ { - "expr": "evm_pool_rpc_node_polls_success{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_polls_success{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -3345,7 +3420,7 @@ "h": 6, "w": 12, "x": 0, - "y": 192 + "y": 208 }, "options": { "legend": { @@ -3365,7 +3440,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3382,17 +3457,17 @@ "h": 1, "w": 24, "x": 0, - "y": 198 + "y": 214 }, "id": 0, "panels": null }, { "type": "stat", - "id": 62, + "id": 64, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Alive\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"Alive\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3408,7 +3483,7 @@ "h": 6, "w": 6, "x": 0, - "y": 199 + "y": 
215 }, "options": { "graphMode": "none", @@ -3416,6 +3491,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3425,7 +3501,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3439,10 +3515,10 @@ }, { "type": "stat", - "id": 63, + "id": 65, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Closed\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"Closed\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3458,7 +3534,7 @@ "h": 6, "w": 6, "x": 6, - "y": 199 + "y": 215 }, "options": { "graphMode": "none", @@ -3466,6 +3542,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3475,7 +3552,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3489,10 +3566,10 @@ }, { "type": "stat", - "id": 64, + "id": 66, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Dialed\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"Dialed\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3508,7 +3585,7 @@ "h": 6, "w": 6, "x": 12, - "y": 199 + "y": 215 }, "options": { "graphMode": "none", @@ -3516,6 +3593,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3525,7 +3603,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3539,10 +3617,10 @@ }, { "type": "stat", - "id": 65, + "id": 67, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"InvalidChainID\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"InvalidChainID\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3558,7 +3636,7 @@ "h": 6, "w": 6, "x": 18, - "y": 199 + "y": 215 }, "options": { "graphMode": "none", @@ -3566,6 +3644,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3575,7 +3654,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3589,10 +3668,10 @@ }, { "type": "stat", - "id": 66, + "id": 68, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"OutOfSync\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"OutOfSync\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3608,7 +3687,7 @@ "h": 6, "w": 6, "x": 0, - "y": 205 + "y": 221 }, "options": { "graphMode": "none", @@ -3616,6 +3695,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3625,7 +3705,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3639,10 +3719,10 @@ }, { 
"type": "stat", - "id": 67, + "id": 69, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Undialed\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"Undialed\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3658,7 +3738,7 @@ "h": 6, "w": 6, "x": 6, - "y": 205 + "y": 221 }, "options": { "graphMode": "none", @@ -3666,6 +3746,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3675,7 +3756,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3689,10 +3770,10 @@ }, { "type": "stat", - "id": 68, + "id": 70, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Unreachable\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"Unreachable\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3708,7 +3789,7 @@ "h": 6, "w": 6, "x": 12, - "y": 205 + "y": 221 }, "options": { "graphMode": "none", @@ -3716,6 +3797,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3725,7 +3807,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3739,10 +3821,10 @@ }, { "type": "stat", - "id": 69, + "id": 71, "targets": [ { - "expr": "sum(multi_node_states{instance=~\"${instance}\", state=\"Unusable\"}) by (instance, chainId)", + "expr": "sum(multi_node_states{state=\"Unusable\"}) by (instance, chainId)", "format": "", "legendFormat": "{{instance}} - {{chainId}}", "refId": "" @@ -3758,7 +3840,7 @@ "h": 6, "w": 6, "x": 18, - "y": 205 + "y": 221 }, "options": { "graphMode": "none", @@ -3766,6 +3848,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -3775,7 +3858,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -3795,17 +3878,17 @@ "h": 1, "w": 24, "x": 0, - "y": 211 + "y": 227 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 70, + "id": 72, "targets": [ { - "expr": "sum(increase(evm_pool_rpc_node_calls_success{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName)", + "expr": "sum(increase(evm_pool_rpc_node_calls_success{}[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{}[$__rate_interval])) by (instance, evmChainID, nodeName)", "format": "", "legendFormat": "{{instance}} - {{nodeName}}", "refId": "" @@ -3821,7 +3904,7 @@ "h": 6, "w": 24, "x": 0, - "y": 212 + "y": 228 }, "options": { "legend": { @@ -3838,7 +3921,7 @@ "fieldConfig": { "defaults": { "unit": "percentunit", - "decimals": 0, + "decimals": 2, "max": 1, "thresholds": { "mode": "absolute", @@ -3863,7 +3946,7 @@ }, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3874,10 +3957,10 @@ }, { "type": "timeseries", - "id": 71, + "id": 73, "targets": [ { - "expr": 
"sum(increase(evm_pool_rpc_node_dials_failed{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName)", + "expr": "sum(increase(evm_pool_rpc_node_dials_failed{}[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_calls_total{}[$__rate_interval])) by (instance, evmChainID, nodeName)", "format": "", "legendFormat": "{{instance}} - {{evmChainID}} - {{nodeName}}", "refId": "" @@ -3893,7 +3976,7 @@ "h": 6, "w": 24, "x": 0, - "y": 218 + "y": 234 }, "options": { "legend": { @@ -3910,7 +3993,7 @@ "fieldConfig": { "defaults": { "unit": "percentunit", - "decimals": 0, + "decimals": 2, "max": 1, "thresholds": { "mode": "absolute", @@ -3935,7 +4018,7 @@ }, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -3946,40 +4029,40 @@ }, { "type": "timeseries", - "id": 72, + "id": 74, "targets": [ { - "expr": "evm_pool_rpc_node_num_transitions_to_alive{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_num_transitions_to_alive{}", "format": "", "legendFormat": "Alive", "refId": "" }, { - "expr": "evm_pool_rpc_node_num_transitions_to_in_sync{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_num_transitions_to_in_sync{}", "format": "", "legendFormat": "InSync", "refId": "" }, { - "expr": "evm_pool_rpc_node_num_transitions_to_out_of_sync{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_num_transitions_to_out_of_sync{}", "format": "", "legendFormat": "OutOfSync", "refId": "" }, { - "expr": "evm_pool_rpc_node_num_transitions_to_unreachable{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_num_transitions_to_unreachable{}", "format": "", "legendFormat": "UnReachable", "refId": "" }, { - "expr": "evm_pool_rpc_node_num_transitions_to_invalid_chain_id{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_num_transitions_to_invalid_chain_id{}", "format": "", "legendFormat": "InvalidChainID", "refId": "" }, { - "expr": "evm_pool_rpc_node_num_transitions_to_unusable{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_num_transitions_to_unusable{}", "format": "", "legendFormat": "TransitionToUnusable", "refId": "" @@ -3995,7 +4078,7 @@ "h": 6, "w": 12, "x": 0, - "y": 224 + "y": 240 }, "options": { "legend": { @@ -4015,7 +4098,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4026,10 +4109,10 @@ }, { "type": "timeseries", - "id": 73, + "id": 75, "targets": [ { - "expr": "evm_pool_rpc_node_states{instance=~\"${instance}\", }", + "expr": "evm_pool_rpc_node_states{}", "format": "", "legendFormat": "{{instance}} - {{evmChainID}} - {{state}}", "refId": "" @@ -4045,7 +4128,7 @@ "h": 6, "w": 12, "x": 12, - "y": 224 + "y": 240 }, "options": { "legend": { @@ -4065,7 +4148,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4076,10 +4159,10 @@ }, { "type": "timeseries", - "id": 74, + "id": 76, "targets": [ { - "expr": "sum(increase(evm_pool_rpc_node_verifies_success{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) * 100", + "expr": 
"sum(increase(evm_pool_rpc_node_verifies_success{}[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{}[$__rate_interval])) by (instance, evmChainID, nodeName) * 100", "format": "", "legendFormat": "{{instance}} - {{evmChainID}} - {{nodeName}}", "refId": "" @@ -4095,7 +4178,7 @@ "h": 6, "w": 12, "x": 0, - "y": 230 + "y": 246 }, "options": { "legend": { @@ -4115,7 +4198,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4126,10 +4209,10 @@ }, { "type": "timeseries", - "id": 75, + "id": 77, "targets": [ { - "expr": "sum(increase(evm_pool_rpc_node_verifies_failed{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{instance=~\"${instance}\", }[$__rate_interval])) by (instance, evmChainID, nodeName) * 100", + "expr": "sum(increase(evm_pool_rpc_node_verifies_failed{}[$__rate_interval])) by (instance, evmChainID, nodeName) / sum(increase(evm_pool_rpc_node_verifies{}[$__rate_interval])) by (instance, evmChainID, nodeName) * 100", "format": "", "legendFormat": "{{instance}} - {{evmChainID}} - {{nodeName}}", "refId": "" @@ -4145,7 +4228,7 @@ "h": 6, "w": 12, "x": 12, - "y": 230 + "y": 246 }, "options": { "legend": { @@ -4165,7 +4248,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4182,17 +4265,17 @@ "h": 1, "w": 24, "x": 0, - "y": 236 + "y": 252 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 76, + "id": 78, "targets": [ { - "expr": "histogram_quantile(0.90, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", + "expr": "histogram_quantile(0.90, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{}[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{rpcCallName}}", "refId": "" @@ -4208,7 +4291,7 @@ "h": 6, "w": 24, "x": 0, - "y": 237 + "y": 253 }, "options": { "legend": { @@ -4228,7 +4311,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4239,10 +4322,10 @@ }, { "type": "timeseries", - "id": 77, + "id": 79, "targets": [ { - "expr": "histogram_quantile(0.95, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", + "expr": "histogram_quantile(0.95, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{}[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{rpcCallName}}", "refId": "" @@ -4258,7 +4341,7 @@ "h": 6, "w": 24, "x": 0, - "y": 243 + "y": 259 }, "options": { "legend": { @@ -4278,7 +4361,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4289,10 +4372,10 @@ }, { "type": "timeseries", - "id": 78, + "id": 80, "targets": [ { - "expr": "histogram_quantile(0.99, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", + "expr": "histogram_quantile(0.99, sum(rate(evm_pool_rpc_node_rpc_call_time_bucket{}[$__rate_interval])) by (instance, le, rpcCallName)) / 1e6", "format": "", "legendFormat": "{{instance}} - {{rpcCallName}}", "refId": "" @@ -4308,7 +4391,7 @@ "h": 6, "w": 
24, "x": 0, - "y": 249 + "y": 265 }, "options": { "legend": { @@ -4328,7 +4411,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4345,17 +4428,17 @@ "h": 1, "w": 24, "x": 0, - "y": 255 + "y": 271 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 79, + "id": 81, "targets": [ { - "expr": "sum(gas_updater_all_gas_price_percentiles{instance=~\"${instance}\", }) by (instance, evmChainID, percentile)", + "expr": "sum(gas_updater_all_gas_price_percentiles{}) by (instance, evmChainID, percentile)", "format": "", "legendFormat": "{{instance}} - {{evmChainID}} - {{percentile}}", "refId": "" @@ -4371,7 +4454,7 @@ "h": 6, "w": 24, "x": 0, - "y": 256 + "y": 272 }, "options": { "legend": { @@ -4391,7 +4474,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4402,10 +4485,10 @@ }, { "type": "timeseries", - "id": 80, + "id": 82, "targets": [ { - "expr": "sum(gas_updater_all_tip_cap_percentiles{instance=~\"${instance}\", }) by (instance, evmChainID, percentile)", + "expr": "sum(gas_updater_all_tip_cap_percentiles{}) by (instance, evmChainID, percentile)", "format": "", "legendFormat": "{{instance}} - {{evmChainID}} - {{percentile}}", "refId": "" @@ -4421,7 +4504,7 @@ "h": 6, "w": 24, "x": 0, - "y": 262 + "y": 278 }, "options": { "legend": { @@ -4441,7 +4524,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4452,10 +4535,10 @@ }, { "type": "timeseries", - "id": 81, + "id": 83, "targets": [ { - "expr": "sum(gas_updater_set_gas_price{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(gas_updater_set_gas_price{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -4471,7 +4554,7 @@ "h": 6, "w": 12, "x": 0, - "y": 268 + "y": 284 }, "options": { "legend": { @@ -4491,7 +4574,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4502,10 +4585,10 @@ }, { "type": "timeseries", - "id": 82, + "id": 84, "targets": [ { - "expr": "sum(gas_updater_set_tip_cap{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(gas_updater_set_tip_cap{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -4521,7 +4604,7 @@ "h": 6, "w": 12, "x": 12, - "y": 268 + "y": 284 }, "options": { "legend": { @@ -4541,7 +4624,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4552,10 +4635,10 @@ }, { "type": "timeseries", - "id": 83, + "id": 85, "targets": [ { - "expr": "sum(gas_updater_current_base_fee{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(gas_updater_current_base_fee{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -4571,7 +4654,7 @@ "h": 6, "w": 12, "x": 0, - "y": 274 + "y": 290 }, "options": { "legend": { @@ -4591,7 +4674,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4602,10 +4685,10 @@ }, { "type": "timeseries", - "id": 84, + "id": 86, "targets": [ { - "expr": "sum(block_history_estimator_connectivity_failure_count{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(block_history_estimator_connectivity_failure_count{}) by (instance)", "format": "", "legendFormat": 
"{{instance}}", "refId": "" @@ -4621,7 +4704,7 @@ "h": 6, "w": 12, "x": 12, - "y": 274 + "y": 290 }, "options": { "legend": { @@ -4641,7 +4724,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4658,17 +4741,17 @@ "h": 1, "w": 24, "x": 0, - "y": 280 + "y": 296 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 85, + "id": 87, "targets": [ { - "expr": "pipeline_task_execution_time{instance=~\"${instance}\", } / 1e6", + "expr": "pipeline_task_execution_time{} / 1e6", "format": "", "legendFormat": "{{instance}} JobID: {{job_id}}", "refId": "" @@ -4684,7 +4767,7 @@ "h": 6, "w": 24, "x": 0, - "y": 281 + "y": 297 }, "options": { "legend": { @@ -4704,7 +4787,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4715,10 +4798,10 @@ }, { "type": "timeseries", - "id": 86, + "id": 88, "targets": [ { - "expr": "pipeline_run_errors{instance=~\"${instance}\", }", + "expr": "pipeline_run_errors{}", "format": "", "legendFormat": "{{instance}} JobID: {{job_id}}", "refId": "" @@ -4734,7 +4817,7 @@ "h": 6, "w": 24, "x": 0, - "y": 287 + "y": 303 }, "options": { "legend": { @@ -4754,7 +4837,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4765,10 +4848,10 @@ }, { "type": "timeseries", - "id": 87, + "id": 89, "targets": [ { - "expr": "pipeline_run_total_time_to_completion{instance=~\"${instance}\", } / 1e6", + "expr": "pipeline_run_total_time_to_completion{} / 1e6", "format": "", "legendFormat": "{{instance}} JobID: {{job_id}}", "refId": "" @@ -4784,7 +4867,7 @@ "h": 6, "w": 24, "x": 0, - "y": 293 + "y": 309 }, "options": { "legend": { @@ -4804,7 +4887,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4815,10 +4898,10 @@ }, { "type": "timeseries", - "id": 88, + "id": 90, "targets": [ { - "expr": "pipeline_tasks_total_finished{instance=~\"${instance}\", }", + "expr": "pipeline_tasks_total_finished{}", "format": "", "legendFormat": "{{instance}} JobID: {{job_id}}", "refId": "" @@ -4834,7 +4917,7 @@ "h": 6, "w": 24, "x": 0, - "y": 299 + "y": 315 }, "options": { "legend": { @@ -4854,7 +4937,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4871,17 +4954,17 @@ "h": 1, "w": 24, "x": 0, - "y": 305 + "y": 321 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 89, + "id": 91, "targets": [ { - "expr": "histogram_quantile(0.95, sum(rate(service_gonic_request_duration_bucket{instance=~\"${instance}\", }[$__rate_interval])) by (instance, le, path, method))", + "expr": "histogram_quantile(0.95, sum(rate(service_gonic_request_duration_bucket{}[$__rate_interval])) by (instance, le, path, method))", "format": "", "legendFormat": "{{instance}} - {{method}} - {{path}}", "refId": "" @@ -4897,7 +4980,7 @@ "h": 6, "w": 24, "x": 0, - "y": 306 + "y": 322 }, "options": { "legend": { @@ -4917,7 +5000,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4928,10 +5011,10 @@ }, { "type": "timeseries", - "id": 90, + "id": 92, "targets": [ { - "expr": "sum(rate(service_gonic_requests_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, path, method, code)", + "expr": 
"sum(rate(service_gonic_requests_total{}[$__rate_interval])) by (instance, path, method, code)", "format": "", "legendFormat": "{{instance}} - {{method}} - {{path}} - {{code}}", "refId": "" @@ -4947,7 +5030,7 @@ "h": 6, "w": 24, "x": 0, - "y": 312 + "y": 328 }, "options": { "legend": { @@ -4967,7 +5050,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -4978,10 +5061,10 @@ }, { "type": "timeseries", - "id": 91, + "id": 93, "targets": [ { - "expr": "avg(rate(service_gonic_request_size_bytes_sum{instance=~\"${instance}\", }[$__rate_interval])) by (instance)/avg(rate(service_gonic_request_size_bytes_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", + "expr": "avg(rate(service_gonic_request_size_bytes_sum{}[$__rate_interval])) by (instance)/avg(rate(service_gonic_request_size_bytes_count{}[$__rate_interval])) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -4997,7 +5080,7 @@ "h": 6, "w": 12, "x": 0, - "y": 318 + "y": 334 }, "options": { "legend": { @@ -5017,7 +5100,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5028,10 +5111,10 @@ }, { "type": "timeseries", - "id": 92, + "id": 94, "targets": [ { - "expr": "avg(rate(service_gonic_response_size_bytes_sum{instance=~\"${instance}\", }[$__rate_interval])) by (instance)/avg(rate(service_gonic_response_size_bytes_count{instance=~\"${instance}\", }[$__rate_interval])) by (instance)", + "expr": "avg(rate(service_gonic_response_size_bytes_sum{}[$__rate_interval])) by (instance)/avg(rate(service_gonic_response_size_bytes_count{}[$__rate_interval])) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5047,7 +5130,7 @@ "h": 6, "w": 12, "x": 12, - "y": 318 + "y": 334 }, "options": { "legend": { @@ -5067,7 +5150,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5084,17 +5167,17 @@ "h": 1, "w": 24, "x": 0, - "y": 324 + "y": 340 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 93, + "id": 95, "targets": [ { - "expr": "sum(rate(promhttp_metric_handler_requests_total{instance=~\"${instance}\", }[$__rate_interval])) by (instance, code)", + "expr": "sum(rate(promhttp_metric_handler_requests_total{}[$__rate_interval])) by (instance, code)", "format": "", "legendFormat": "{{instance}} - {{code}}", "refId": "" @@ -5110,7 +5193,7 @@ "h": 6, "w": 24, "x": 0, - "y": 325 + "y": 341 }, "options": { "legend": { @@ -5130,7 +5213,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5147,17 +5230,17 @@ "h": 1, "w": 24, "x": 0, - "y": 331 + "y": 347 }, "id": 0, "panels": null }, { "type": "timeseries", - "id": 94, + "id": 96, "targets": [ { - "expr": "sum(go_threads{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(go_threads{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5173,7 +5256,7 @@ "h": 6, "w": 24, "x": 0, - "y": 332 + "y": 348 }, "options": { "legend": { @@ -5193,7 +5276,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5204,10 +5287,10 @@ }, { "type": "stat", - "id": 95, + "id": 97, "targets": [ { - "expr": "sum(go_memstats_heap_alloc_bytes{instance=~\"${instance}\", }) by (instance)", + "expr": 
"sum(go_memstats_heap_alloc_bytes{}) by (instance)", "format": "", "legendFormat": "", "refId": "" @@ -5223,7 +5306,7 @@ "h": 6, "w": 24, "x": 0, - "y": 338 + "y": 354 }, "options": { "graphMode": "none", @@ -5231,6 +5314,7 @@ "justifyMode": "auto", "textMode": "value", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -5240,7 +5324,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -5254,10 +5338,10 @@ }, { "type": "timeseries", - "id": 96, + "id": 98, "targets": [ { - "expr": "sum(go_memstats_heap_alloc_bytes{instance=~\"${instance}\", }) by (instance)", + "expr": "sum(go_memstats_heap_alloc_bytes{}) by (instance)", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5273,7 +5357,7 @@ "h": 6, "w": 24, "x": 0, - "y": 344 + "y": 360 }, "options": { "legend": { @@ -5293,7 +5377,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5304,34 +5388,34 @@ }, { "type": "timeseries", - "id": 97, + "id": 99, "targets": [ { - "expr": "go_memstats_heap_alloc_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_heap_alloc_bytes{}", "format": "", "legendFormat": "{{instance}} - Alloc", "refId": "" }, { - "expr": "go_memstats_heap_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_heap_sys_bytes{}", "format": "", "legendFormat": "{{instance}} - Sys", "refId": "" }, { - "expr": "go_memstats_heap_idle_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_heap_idle_bytes{}", "format": "", "legendFormat": "{{instance}} - Idle", "refId": "" }, { - "expr": "go_memstats_heap_inuse_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_heap_inuse_bytes{}", "format": "", "legendFormat": "{{instance}} - InUse", "refId": "" }, { - "expr": "go_memstats_heap_released_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_heap_released_bytes{}", "format": "", "legendFormat": "{{instance}} - Released", "refId": "" @@ -5347,7 +5431,7 @@ "h": 6, "w": 12, "x": 0, - "y": 350 + "y": 366 }, "options": { "legend": { @@ -5367,7 +5451,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5378,52 +5462,52 @@ }, { "type": "timeseries", - "id": 98, + "id": 100, "targets": [ { - "expr": "go_memstats_mspan_inuse_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_mspan_inuse_bytes{}", "format": "", "legendFormat": "{{instance}} - Total InUse", "refId": "" }, { - "expr": "go_memstats_mspan_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_mspan_sys_bytes{}", "format": "", "legendFormat": "{{instance}} - Total Sys", "refId": "" }, { - "expr": "go_memstats_mcache_inuse_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_mcache_inuse_bytes{}", "format": "", "legendFormat": "{{instance}} - Cache InUse", "refId": "" }, { - "expr": "go_memstats_mcache_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_mcache_sys_bytes{}", "format": "", "legendFormat": "{{instance}} - Cache Sys", "refId": "" }, { - "expr": "go_memstats_buck_hash_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_buck_hash_sys_bytes{}", "format": "", "legendFormat": "{{instance}} - Hash Sys", "refId": "" }, { - "expr": "go_memstats_gc_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_gc_sys_bytes{}", "format": "", "legendFormat": "{{instance}} 
- GC Sys", "refId": "" }, { - "expr": "go_memstats_other_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_other_sys_bytes{}", "format": "", "legendFormat": "{{instance}} - bytes of memory are used for other runtime allocations", "refId": "" }, { - "expr": "go_memstats_next_gc_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_next_gc_bytes{}", "format": "", "legendFormat": "{{instance}} - Next GC", "refId": "" @@ -5439,7 +5523,7 @@ "h": 6, "w": 12, "x": 12, - "y": 350 + "y": 366 }, "options": { "legend": { @@ -5459,7 +5543,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5470,16 +5554,16 @@ }, { "type": "timeseries", - "id": 99, + "id": 101, "targets": [ { - "expr": "go_memstats_stack_inuse_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_stack_inuse_bytes{}", "format": "", "legendFormat": "{{instance}} - InUse", "refId": "" }, { - "expr": "go_memstats_stack_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_stack_sys_bytes{}", "format": "", "legendFormat": "{{instance}} - Sys", "refId": "" @@ -5495,7 +5579,7 @@ "h": 6, "w": 12, "x": 0, - "y": 356 + "y": 372 }, "options": { "legend": { @@ -5515,7 +5599,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5526,10 +5610,10 @@ }, { "type": "timeseries", - "id": 100, + "id": 102, "targets": [ { - "expr": "go_memstats_sys_bytes{instance=~\"${instance}\", }", + "expr": "go_memstats_sys_bytes{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5545,7 +5629,7 @@ "h": 6, "w": 12, "x": 12, - "y": 356 + "y": 372 }, "options": { "legend": { @@ -5565,7 +5649,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5576,10 +5660,10 @@ }, { "type": "timeseries", - "id": 101, + "id": 103, "targets": [ { - "expr": "go_memstats_mallocs_total{instance=~\"${instance}\", } - go_memstats_frees_total{instance=~\"${instance}\", }", + "expr": "go_memstats_mallocs_total{} - go_memstats_frees_total{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5595,7 +5679,7 @@ "h": 6, "w": 12, "x": 0, - "y": 362 + "y": 378 }, "options": { "legend": { @@ -5615,7 +5699,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5626,10 +5710,10 @@ }, { "type": "timeseries", - "id": 102, + "id": 104, "targets": [ { - "expr": "rate(go_memstats_mallocs_total{instance=~\"${instance}\", }[1m])", + "expr": "rate(go_memstats_mallocs_total{}[1m])", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5645,7 +5729,7 @@ "h": 6, "w": 12, "x": 12, - "y": 362 + "y": 378 }, "options": { "legend": { @@ -5665,7 +5749,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5676,10 +5760,10 @@ }, { "type": "timeseries", - "id": 103, + "id": 105, "targets": [ { - "expr": "rate(go_memstats_lookups_total{instance=~\"${instance}\", }[1m])", + "expr": "rate(go_memstats_lookups_total{}[1m])", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5695,7 +5779,7 @@ "h": 6, "w": 12, "x": 0, - "y": 368 + "y": 384 }, "options": { "legend": { @@ -5715,7 +5799,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ 
-5726,10 +5810,10 @@ }, { "type": "timeseries", - "id": 104, + "id": 106, "targets": [ { - "expr": "go_goroutines{instance=~\"${instance}\", }", + "expr": "go_goroutines{}", "format": "", "legendFormat": "{{instance}}", "refId": "" @@ -5745,7 +5829,7 @@ "h": 6, "w": 12, "x": 12, - "y": 368 + "y": 384 }, "options": { "legend": { @@ -5765,7 +5849,7 @@ "decimals": 1, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -5781,6 +5865,7 @@ "type": "query", "name": "instance", "label": "Instance", + "description": "", "query": "label_values(instance)", "datasource": { "uid": "Prometheus" @@ -5805,7 +5890,8 @@ "Alerts": [ { "annotations": { - "description": "Component {{ index $labels \"service_id\" }} uptime in the last 15m is {{ index $values \"A\" }}%", + "description": "Component {{ index $labels \"service_id\" }} uptime in the last 15m is {{ index $values \"C\" }}%", + "panel_title": "Health Avg by Service over 15m", "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", "summary": "Uptime less than 90% over last 15 minutes on one component in a Node" }, @@ -5885,17 +5971,200 @@ "execErrState": "Alerting", "folderUID": "", "for": "15m", + "labels": { + "severity": "info" + }, + "noDataState": "NoData", + "orgID": 0, + "ruleGroup": "", + "title": "Health Avg by Service is less than 90%" + }, + { + "annotations": { + "description": "Component {{ index $labels \"service_id\" }} uptime in the last 15m is {{ index $values \"C\" }}%", + "panel_title": "Health Avg by Service over 15m", + "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", + "summary": "Uptime less than 70% over last 15 minutes on one component in a Node" + }, + "condition": "D", + "data": [ + { + "datasourceUid": "1", + "model": { + "expr": "health{}", + "legendFormat": "__auto", + "refId": "A" + }, + "refId": "A", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + }, + { + "datasourceUid": "__expr__", + "model": { + "expression": "A", + "intervalMs": 1000, + "maxDataPoints": 43200, + "reducer": "mean", + "refId": "B", + "type": "reduce" + }, + "refId": "B", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + }, + { + "datasourceUid": "__expr__", + "model": { + "expression": "$B * 100", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "C", + "type": "math" + }, + "refId": "C", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + }, + { + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 70, + 0 + ], + "type": "lt" + } + } + ], + "expression": "C", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "D", + "type": "threshold" + }, + "refId": "D", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + } + ], + "execErrState": "Alerting", + "folderUID": "", + "for": "15m", "labels": { "severity": "warning" }, "noDataState": "NoData", "orgID": 0, "ruleGroup": "", - "title": "Health Avg by Service over 15m" + "title": "Health Avg by Service is less than 70%" + }, + { + "annotations": { + "description": "Component {{ index $labels \"service_id\" }} uptime in the last 15m is {{ index $values \"C\" }}%", + "panel_title": "Health Avg by Service over 15m", + "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", + "summary": "Uptime less than 50% over last 15 minutes on one component in a Node" + }, + "condition": "D", + "data": [ + { + "datasourceUid": "1", + "model": 
{ + "expr": "health{}", + "legendFormat": "__auto", + "refId": "A" + }, + "refId": "A", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + }, + { + "datasourceUid": "__expr__", + "model": { + "expression": "A", + "intervalMs": 1000, + "maxDataPoints": 43200, + "reducer": "mean", + "refId": "B", + "type": "reduce" + }, + "refId": "B", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + }, + { + "datasourceUid": "__expr__", + "model": { + "expression": "$B * 100", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "C", + "type": "math" + }, + "refId": "C", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + }, + { + "datasourceUid": "__expr__", + "model": { + "conditions": [ + { + "evaluator": { + "params": [ + 50, + 0 + ], + "type": "lt" + } + } + ], + "expression": "C", + "intervalMs": 1000, + "maxDataPoints": 43200, + "refId": "D", + "type": "threshold" + }, + "refId": "D", + "relativeTimeRange": { + "from": 600, + "to": 0 + } + } + ], + "execErrState": "Alerting", + "folderUID": "", + "for": "15m", + "labels": { + "severity": "critical" + }, + "noDataState": "NoData", + "orgID": 0, + "ruleGroup": "", + "title": "Health Avg by Service is less than 50%" }, { "annotations": { "description": "ETH Balance critically low at {{ index $values \"A\" }} on {{ index $labels \"instance\" }}", + "panel_title": "ETH Balance", "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", "summary": "ETH Balance is lower than threshold" }, @@ -5957,6 +6226,7 @@ { "annotations": { "description": "Solana Balance critically low at {{ index $values \"A\" }} on {{ index $labels \"instance\" }}", + "panel_title": "SOL Balance", "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", "summary": "Solana Balance is lower than threshold" }, @@ -6018,6 +6288,7 @@ { "annotations": { "description": "{{ index $labels \"instance\" }} on ChainID {{ index $labels \"ChainID\" }} has received {{ index $values \"A\" }} heads over 10 minutes.", + "panel_title": "Head Tracker Heads Received Rate", "runbook_url": "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", "summary": "No Headers Received" }, @@ -6078,13 +6349,5 @@ } ], "ContactPoints": null, - "NotificationPolicies": [ - { - "group_by": [ - "grafana_folder", - "alertname" - ], - "receiver": "chainlink-slack" - } - ] + "NotificationPolicies": null } \ No newline at end of file diff --git a/observability-lib/dashboards/k8s-resources/component.go b/observability-lib/dashboards/k8s-resources/component.go index 1268113a7..2e3b19d9e 100644 --- a/observability-lib/dashboards/k8s-resources/component.go +++ b/observability-lib/dashboards/k8s-resources/component.go @@ -15,7 +15,7 @@ type Props struct { MetricsDataSource *grafana.DataSource // MetricsDataSource is the datasource for querying metrics } -func NewDashboard(props *Props) (*grafana.Dashboard, error) { +func NewDashboard(props *Props) (*grafana.Observability, error) { if props.Name == "" { return nil, fmt.Errorf("Name is required") } diff --git a/observability-lib/dashboards/k8s-resources/component_test.go b/observability-lib/dashboards/k8s-resources/component_test.go index 32ebbde54..a32c7cda6 100644 --- a/observability-lib/dashboards/k8s-resources/component_test.go +++ b/observability-lib/dashboards/k8s-resources/component_test.go @@ -1,6 +1,7 @@ package k8sresources_test import ( + "flag" "os" "testing" @@ -10,6 +11,44 @@ import ( "github.com/stretchr/testify/require" ) +var update = 
flag.Bool("update", false, "update golden test files") +var fileOutput = "test-output.json" + +func TestGenerateFile(t *testing.T) { + if *update == false { + t.Skip("skipping test") + } + + testDashboard, err := k8sresources.NewDashboard(&k8sresources.Props{ + Name: "K8s resources", + MetricsDataSource: grafana.NewDataSource("Prometheus", ""), + }) + if err != nil { + t.Errorf("Error creating dashboard: %v", err) + } + json, errJSON := testDashboard.GenerateJSON() + if errJSON != nil { + t.Errorf("Error generating JSON: %v", errJSON) + } + if _, errExists := os.Stat(fileOutput); errExists == nil { + errRemove := os.Remove(fileOutput) + if errRemove != nil { + t.Errorf("Error removing file: %v", errRemove) + } + } + file, errFile := os.Create(fileOutput) + if errFile != nil { + panic(errFile) + } + _, err = file.WriteString(string(json)) + if err != nil { + t.Errorf("Error writing to file: %v", err) + } + t.Cleanup(func() { + file.Close() + }) +} + func TestNewDashboard(t *testing.T) { t.Run("NewDashboard creates a dashboard", func(t *testing.T) { testDashboard, err := k8sresources.NewDashboard(&k8sresources.Props{ @@ -19,18 +58,18 @@ func TestNewDashboard(t *testing.T) { if err != nil { t.Errorf("Error creating dashboard: %v", err) } - require.IsType(t, grafana.Dashboard{}, *testDashboard) + require.IsType(t, grafana.Observability{}, *testDashboard) require.Equal(t, "K8s resources", *testDashboard.Dashboard.Title) json, errJSON := testDashboard.GenerateJSON() if errJSON != nil { t.Errorf("Error generating JSON: %v", errJSON) } - jsonCompared, errCompared := os.ReadFile("test-output.json") + jsonCompared, errCompared := os.ReadFile(fileOutput) if errCompared != nil { t.Errorf("Error reading file: %v", errCompared) } - require.ElementsMatch(t, jsonCompared, json) + require.JSONEq(t, string(jsonCompared), string(json)) }) } diff --git a/observability-lib/dashboards/k8s-resources/test-output.json b/observability-lib/dashboards/k8s-resources/test-output.json index 4667b4d38..2a38e07e3 100644 --- a/observability-lib/dashboards/k8s-resources/test-output.json +++ b/observability-lib/dashboards/k8s-resources/test-output.json @@ -8,6 +8,7 @@ "Resources" ], "timezone": "browser", + "editable": true, "graphTooltip": 0, "time": { "from": "now-30m", @@ -15,7 +16,7 @@ }, "fiscalYearStartMonth": 0, "refresh": "30s", - "schemaVersion": 0, + "schemaVersion": 39, "panels": [ { "type": "row", @@ -32,7 +33,7 @@ }, { "type": "stat", - "id": 0, + "id": 1, "targets": [ { "expr": "100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_requests{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container)", @@ -61,6 +62,7 @@ "justifyMode": "auto", "textMode": "value", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -70,7 +72,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -84,7 +86,7 @@ }, { "type": "stat", - "id": 1, + "id": 2, "targets": [ { "expr": "100 * sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container) / sum(cluster:namespace:pod_cpu:active:kube_pod_container_resource_limits{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by 
(container)", @@ -113,6 +115,7 @@ "justifyMode": "auto", "textMode": "value", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -122,7 +125,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -136,7 +139,7 @@ }, { "type": "stat", - "id": 2, + "id": 3, "targets": [ { "expr": "100 * sum(container_memory_working_set_bytes{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", image!=\"\"}) by (container) / sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_requests{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container)", @@ -165,6 +168,7 @@ "justifyMode": "auto", "textMode": "value", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -174,7 +178,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -188,7 +192,7 @@ }, { "type": "stat", - "id": 3, + "id": 4, "targets": [ { "expr": "100 * sum(container_memory_working_set_bytes{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\", container!=\"\", image!=\"\"}) by (container) / sum(cluster:namespace:pod_memory:active:kube_pod_container_resource_limits{cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}) by (container)", @@ -217,6 +221,7 @@ "justifyMode": "auto", "textMode": "value", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -226,7 +231,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -253,7 +258,7 @@ }, { "type": "stat", - "id": 4, + "id": 5, "targets": [ { "expr": "sum(increase(kube_pod_container_status_restarts_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", @@ -280,6 +285,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -289,7 +295,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -303,7 +309,7 @@ }, { "type": "stat", - "id": 5, + "id": 6, "targets": [ { "expr": "sum(container_oom_events_total{pod=~\"$pod\", namespace=~\"${namespace}\"}) by (pod)", @@ -330,6 +336,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -339,7 +346,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -353,7 +360,7 @@ }, { "type": "stat", - "id": 6, + "id": 7, "targets": [ { "expr": "kube_pod_container_status_last_terminated_reason{reason=\"OOMKilled\", pod=~\"$pod\", namespace=~\"${namespace}\"}", @@ -380,6 +387,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -389,7 +397,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "horizontal" }, "fieldConfig": { @@ -416,7 +424,7 @@ }, { "type": "timeseries", - "id": 7, + "id": 8, "targets": [ { 
"expr": "sum(node_namespace_pod_container:container_cpu_usage_seconds_total:sum_irate{pod=~\"$pod\", namespace=~\"${namespace}\"}) by (pod)", @@ -467,7 +475,7 @@ "decimals": 3, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -478,7 +486,7 @@ }, { "type": "timeseries", - "id": 8, + "id": 9, "targets": [ { "expr": "sum(container_memory_rss{pod=~\"$pod\", namespace=~\"${namespace}\", container!=\"\"}) by (pod)", @@ -529,7 +537,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -553,7 +561,7 @@ }, { "type": "timeseries", - "id": 9, + "id": 10, "targets": [ { "expr": "sum(irate(container_network_receive_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", @@ -592,7 +600,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -603,7 +611,7 @@ }, { "type": "timeseries", - "id": 10, + "id": 11, "targets": [ { "expr": "sum(irate(container_network_transmit_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", @@ -642,7 +650,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -653,7 +661,7 @@ }, { "type": "timeseries", - "id": 11, + "id": 12, "targets": [ { "expr": "avg(irate(container_network_receive_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", @@ -692,7 +700,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -703,7 +711,7 @@ }, { "type": "timeseries", - "id": 12, + "id": 13, "targets": [ { "expr": "avg(irate(container_network_transmit_bytes_total{pod=~\"$pod\", namespace=~\"${namespace}\"}[$__rate_interval])) by (pod)", @@ -742,7 +750,7 @@ "decimals": 0, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -766,7 +774,7 @@ }, { "type": "timeseries", - "id": 13, + "id": 14, "targets": [ { "expr": "ceil(sum by(container, pod) (rate(container_fs_reads_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval]) + rate(container_fs_writes_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval])))", @@ -805,7 +813,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -816,7 +824,7 @@ }, { "type": "timeseries", - "id": 14, + "id": 15, "targets": [ { "expr": "sum by(container, pod) (rate(container_fs_reads_bytes_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval]) + rate(container_fs_writes_bytes_total{job=\"kubelet\", metrics_path=\"/metrics/cadvisor\", container!=\"\", cluster=\"$cluster\", namespace=\"$namespace\", pod=\"$pod\"}[$__rate_interval]))", @@ -855,7 +863,7 @@ "decimals": 2, "noValue": "No data", "custom": { - "fillOpacity": 2, + "fillOpacity": 0, "scaleDistribution": { "type": "linear" } @@ -871,6 +879,7 @@ "type": "query", "name": "env", "label": "Environment", + "description": "", "query": "label_values(up, env)", "datasource": { 
"uid": "Prometheus" @@ -891,6 +900,7 @@ "type": "query", "name": "cluster", "label": "Cluster", + "description": "", "query": "label_values(up{env=\"$env\"}, cluster)", "datasource": { "uid": "Prometheus" @@ -911,6 +921,7 @@ "type": "query", "name": "namespace", "label": "Namespace", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\"}, namespace)", "datasource": { "uid": "Prometheus" @@ -931,6 +942,7 @@ "type": "query", "name": "job", "label": "Job", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\"}, job)", "datasource": { "uid": "Prometheus" @@ -951,6 +963,7 @@ "type": "query", "name": "pod", "label": "Pod", + "description": "", "query": "label_values(up{env=\"$env\", cluster=\"$cluster\", namespace=\"$namespace\", job=\"$job\"}, pod)", "datasource": { "uid": "Prometheus" diff --git a/observability-lib/dashboards/nop-ocr/component.go b/observability-lib/dashboards/nop-ocr/component.go index 2c58a40ce..9978c5f73 100644 --- a/observability-lib/dashboards/nop-ocr/component.go +++ b/observability-lib/dashboards/nop-ocr/component.go @@ -16,7 +16,7 @@ type Props struct { OCRVersion string // OCRVersion is the version of the OCR (ocr, ocr2, ocr3) } -func NewDashboard(props *Props) (*grafana.Dashboard, error) { +func NewDashboard(props *Props) (*grafana.Observability, error) { if props.Name == "" { return nil, fmt.Errorf("Name is required") } diff --git a/observability-lib/dashboards/nop-ocr/component_test.go b/observability-lib/dashboards/nop-ocr/component_test.go index 3f4c36692..cfa4009a3 100644 --- a/observability-lib/dashboards/nop-ocr/component_test.go +++ b/observability-lib/dashboards/nop-ocr/component_test.go @@ -1,6 +1,7 @@ package nopocr_test import ( + "flag" "os" "testing" @@ -10,6 +11,45 @@ import ( "github.com/stretchr/testify/require" ) +var update = flag.Bool("update", false, "update golden test files") + +const fileOutput = "test-output.json" + +func TestGenerateFile(t *testing.T) { + if *update == false { + t.Skip("skipping test") + } + + testDashboard, err := nopocr.NewDashboard(&nopocr.Props{ + Name: "NOP OCR Dashboard", + MetricsDataSource: grafana.NewDataSource("Prometheus", ""), + }) + if err != nil { + t.Errorf("Error creating dashboard: %v", err) + } + json, errJSON := testDashboard.GenerateJSON() + if errJSON != nil { + t.Errorf("Error generating JSON: %v", errJSON) + } + if _, errExists := os.Stat(fileOutput); errExists == nil { + errRemove := os.Remove(fileOutput) + if errRemove != nil { + t.Errorf("Error removing file: %v", errRemove) + } + } + file, errFile := os.Create(fileOutput) + if errFile != nil { + panic(errFile) + } + writeString, err := file.WriteString(string(json)) + if err != nil { + t.Errorf("Error writing to file: %v", writeString) + } + t.Cleanup(func() { + file.Close() + }) +} + func TestNewDashboard(t *testing.T) { t.Run("NewDashboard creates a dashboard", func(t *testing.T) { testDashboard, err := nopocr.NewDashboard(&nopocr.Props{ @@ -19,18 +59,18 @@ func TestNewDashboard(t *testing.T) { if err != nil { t.Errorf("Error creating dashboard: %v", err) } - require.IsType(t, grafana.Dashboard{}, *testDashboard) + require.IsType(t, grafana.Observability{}, *testDashboard) require.Equal(t, "NOP OCR Dashboard", *testDashboard.Dashboard.Title) json, errJSON := testDashboard.GenerateJSON() if errJSON != nil { t.Errorf("Error generating JSON: %v", errJSON) } - jsonCompared, errCompared := os.ReadFile("test-output.json") + jsonCompared, errCompared := 
os.ReadFile(fileOutput) if errCompared != nil { t.Errorf("Error reading file: %v", errCompared) } - require.ElementsMatch(t, jsonCompared, json) + require.JSONEq(t, string(jsonCompared), string(json)) }) } diff --git a/observability-lib/dashboards/nop-ocr/test-output.json b/observability-lib/dashboards/nop-ocr/test-output.json index 5cbabe132..006074e65 100644 --- a/observability-lib/dashboards/nop-ocr/test-output.json +++ b/observability-lib/dashboards/nop-ocr/test-output.json @@ -7,6 +7,7 @@ "" ], "timezone": "browser", + "editable": true, "graphTooltip": 0, "time": { "from": "now-1d", @@ -14,7 +15,7 @@ }, "fiscalYearStartMonth": 0, "refresh": "30s", - "schemaVersion": 0, + "schemaVersion": 39, "panels": [ { "type": "row", @@ -31,7 +32,7 @@ }, { "type": "stat", - "id": 0, + "id": 1, "targets": [ { "expr": "avg_over_time((sum(changes(_telemetry_epoch_round{env=~\"${env}\", contract=~\"${contract}\"}[90s])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", @@ -67,6 +68,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -76,7 +78,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -111,7 +113,7 @@ }, { "type": "stat", - "id": 1, + "id": 2, "targets": [ { "expr": "avg_over_time((sum(changes(_telemetry_message_observe_total{env=~\"${env}\", contract=~\"${contract}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", @@ -147,6 +149,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -156,7 +159,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -191,7 +194,7 @@ }, { "type": "stat", - "id": 2, + "id": 3, "targets": [ { "expr": "avg_over_time((sum(changes(_telemetry_message_report_req_observation_total{env=~\"${env}\", contract=~\"${contract}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", @@ -227,6 +230,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -236,7 +240,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -284,7 +288,7 @@ }, { "type": "stat", - "id": 3, + "id": 4, "targets": [ { "expr": "avg_over_time((sum(changes(_telemetry_epoch_round{env=~\"${env}\", oracle=~\"${oracle}\"}[90s])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", @@ -320,6 +324,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -329,7 +334,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -364,7 +369,7 @@ }, { "type": "stat", - "id": 4, + "id": 5, "targets": [ { "expr": "avg_over_time((sum(changes(_telemetry_message_observe_total{env=~\"${env}\", oracle=~\"${oracle}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", @@ -400,6 +405,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { 
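A note on the golden-file pattern introduced in component_test.go above: the go tool forwards the package-level -update flag to the test binary, so `go test ./dashboards/nop-ocr -run TestGenerateFile -update` regenerates test-output.json, while a plain `go test` run skips regeneration and TestNewDashboard asserts against the committed file with require.JSONEq, an order-insensitive JSON comparison, unlike the old ElementsMatch over raw bytes. A condensed sketch of the same idiom, with generic names that are not part of this diff:

```go
package example_test

import (
	"flag"
	"os"
	"testing"

	"github.com/stretchr/testify/require"
)

var update = flag.Bool("update", false, "update golden test files")

func TestGolden(t *testing.T) {
	got := []byte(`{"title":"example"}`) // stand-in for GenerateJSON output

	if *update {
		// regenerate with: go test -run TestGolden -update
		require.NoError(t, os.WriteFile("test-output.json", got, 0o600))
		return
	}
	want, err := os.ReadFile("test-output.json")
	require.NoError(t, err)
	require.JSONEq(t, string(want), string(got)) // semantic JSON equality
}
```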
"calcs": [ "last" @@ -409,7 +415,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -444,7 +450,7 @@ }, { "type": "stat", - "id": 5, + "id": 6, "targets": [ { "expr": "avg_over_time((sum(changes(_telemetry_message_report_req_observation_total{env=~\"${env}\", oracle=~\"${oracle}\"}[3m])) by (env, contract, feed_id, network_name, oracle) \u003ebool 0)[$__range:])", @@ -480,6 +486,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -489,7 +496,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -524,7 +531,7 @@ }, { "type": "stat", - "id": 6, + "id": 7, "targets": [ { "expr": "avg_over_time((sum(changes(_telemetry_p2p_received_total{env=~\"${env}\", receiver=~\"${oracle}\"}[3m])) by (sender, receiver) \u003ebool 0)[$__range:])", @@ -560,6 +567,7 @@ "justifyMode": "auto", "textMode": "value_and_name", "wideLayout": true, + "showPercentChange": false, "reduceOptions": { "calcs": [ "last" @@ -569,7 +577,7 @@ "titleSize": 10, "valueSize": 18 }, - "showPercentChange": false, + "percentChangeColorMode": "standard", "orientation": "auto" }, "fieldConfig": { @@ -609,6 +617,7 @@ "type": "query", "name": "env", "label": "Environment", + "description": "", "query": "label_values(_contract_config_f{}, env)", "datasource": { "uid": "Prometheus" @@ -629,6 +638,7 @@ "type": "query", "name": "contract", "label": "Contract", + "description": "", "query": "label_values(_contract_oracle_active{env=\"$env\"}, contract)", "datasource": { "uid": "Prometheus" @@ -649,6 +659,7 @@ "type": "query", "name": "oracle", "label": "NOP", + "description": "", "query": "label_values(_contract_oracle_active{env=\"$env\"}, oracle)", "datasource": { "uid": "Prometheus" diff --git a/observability-lib/go.mod b/observability-lib/go.mod index e74087574..e03ce2f63 100644 --- a/observability-lib/go.mod +++ b/observability-lib/go.mod @@ -3,8 +3,8 @@ module github.com/smartcontractkit/chainlink-common/observability-lib go 1.21.4 require ( - github.com/go-resty/resty/v2 v2.14.0 - github.com/grafana/grafana-foundation-sdk/go v0.0.0-20240717180137-18b7def9b008 + github.com/go-resty/resty/v2 v2.15.3 + github.com/grafana/grafana-foundation-sdk/go v0.0.0-20241009194022-923b32e3e69b github.com/rs/zerolog v1.33.0 github.com/spf13/cobra v1.8.1 github.com/stretchr/testify v1.9.0 @@ -16,11 +16,11 @@ require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rogpeppe/go-internal v1.10.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - golang.org/x/net v0.27.0 // indirect - golang.org/x/sys v0.22.0 // indirect + golang.org/x/net v0.30.0 // indirect + golang.org/x/sys v0.26.0 // indirect gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect ) diff --git a/observability-lib/go.sum b/observability-lib/go.sum index 040c1778f..f59de2c33 100644 --- a/observability-lib/go.sum +++ b/observability-lib/go.sum @@ -3,12 +3,11 @@ github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46t github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= 
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-resty/resty/v2 v2.14.0 h1:/rhkzsAqGQkozwfKS5aFAbb6TyKd3zyFRWcdRXLPCAU= -github.com/go-resty/resty/v2 v2.14.0/go.mod h1:IW6mekUOsElt9C7oWr0XRt9BNSD6D5rr9mhk6NjmNHg= +github.com/go-resty/resty/v2 v2.15.3 h1:bqff+hcqAflpiF591hhJzNdkRsFhlB96CYfBwSFvql8= +github.com/go-resty/resty/v2 v2.15.3/go.mod h1:0fHAoK7JoBy/Ch36N8VFeMsK7xQOHhvWaC3iOktwmIU= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/grafana/grafana-foundation-sdk/go v0.0.0-20240717180137-18b7def9b008 h1:QEqDMW+20VJTkqU892tb9FbvCtI1uxxGvyXwulRhpAU= -github.com/grafana/grafana-foundation-sdk/go v0.0.0-20240717180137-18b7def9b008/go.mod h1:WtWosval1KCZP9BGa42b8aVoJmVXSg0EvQXi9LDSVZQ= +github.com/grafana/grafana-foundation-sdk/go v0.0.0-20241009194022-923b32e3e69b h1:YxlugK0wL5hh86wT0hZSGw9cPTvacOUmHxjP15fsIlE= +github.com/grafana/grafana-foundation-sdk/go v0.0.0-20241009194022-923b32e3e69b/go.mod h1:WtWosval1KCZP9BGa42b8aVoJmVXSg0EvQXi9LDSVZQ= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= @@ -21,8 +20,9 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -40,75 +40,15 @@ github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= -golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= -golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M= -golang.org/x/mod 
v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= -golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys= -golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= -golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= +golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= -golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term 
v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= -golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= -golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U= golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= diff --git a/observability-lib/grafana/alerts.go b/observability-lib/grafana/alerts.go index 2c426376f..980d3ac74 100644 --- a/observability-lib/grafana/alerts.go +++ b/observability-lib/grafana/alerts.go @@ -2,6 +2,7 @@ package grafana import ( "github.com/grafana/grafana-foundation-sdk/go/alerting" + "github.com/grafana/grafana-foundation-sdk/go/cog" "github.com/grafana/grafana-foundation-sdk/go/expr" "github.com/grafana/grafana-foundation-sdk/go/prometheus" ) @@ -62,57 +63,41 @@ type ResampleExpression struct { type ThresholdExpression struct { Expression string - ThresholdConditionsOptions []ThresholdConditionsOption + ThresholdConditionsOptions ThresholdConditionsOption } +type TypeThresholdType string + +const ( + TypeThresholdTypeGt TypeThresholdType = "gt" + TypeThresholdTypeLt TypeThresholdType = "lt" + TypeThresholdTypeWithinRange TypeThresholdType = "within_range" + TypeThresholdTypeOutsideRange 
TypeThresholdType = "outside_range" +) + type ThresholdConditionsOption struct { Params []float64 - Type expr.TypeThresholdType + Type TypeThresholdType } -func newThresholdConditionsOptions(options []ThresholdConditionsOption) []struct { - Evaluator struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"evaluator"` - LoadedDimensions any `json:"loadedDimensions,omitempty"` - UnloadEvaluator *struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"unloadEvaluator,omitempty"` -} { - var conditions []struct { - Evaluator struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"evaluator"` - LoadedDimensions any `json:"loadedDimensions,omitempty"` - UnloadEvaluator *struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"unloadEvaluator,omitempty"` - } - for _, option := range options { - conditions = append(conditions, struct { - Evaluator struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"evaluator"` - LoadedDimensions any `json:"loadedDimensions,omitempty"` - UnloadEvaluator *struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - } `json:"unloadEvaluator,omitempty"` - }{ - Evaluator: struct { - Params []float64 `json:"params"` - Type expr.TypeThresholdType `json:"type"` - }{ - Params: option.Params, - Type: option.Type, - }, - }) +func newThresholdConditionsOptions(options ThresholdConditionsOption) []cog.Builder[expr.ExprTypeThresholdConditions] { + var conditions []cog.Builder[expr.ExprTypeThresholdConditions] + + var params []float64 + params = append(params, options.Params...) + + if len(options.Params) == 1 { + params = append(params, 0) } + + conditions = append(conditions, expr.NewExprTypeThresholdConditionsBuilder(). + Evaluator( + expr.NewExprTypeThresholdConditionsEvaluatorBuilder(). + Params(params). + Type(expr.TypeThresholdType(options.Type)), + ), + ) + return conditions } @@ -173,8 +158,7 @@ func newConditionQuery(options ConditionQuery) *alerting.QueryBuilder { } type AlertOptions struct { - Name string - Datasource string + Title string Summary string Description string RunbookURL string @@ -185,6 +169,7 @@ type AlertOptions struct { Query []RuleQuery QueryRefCondition string Condition []ConditionQuery + PanelTitle string } func NewAlertRule(options *AlertOptions) *alerting.RuleBuilder { @@ -204,16 +189,22 @@ func NewAlertRule(options *AlertOptions) *alerting.RuleBuilder { options.QueryRefCondition = "A" } - rule := alerting.NewRuleBuilder(options.Name). + annotations := map[string]string{ + "summary": options.Summary, + "description": options.Description, + "runbook_url": options.RunbookURL, + } + + if options.PanelTitle != "" { + annotations["panel_title"] = options.PanelTitle + } + + rule := alerting.NewRuleBuilder(options.Title). For(options.For). NoDataState(options.NoDataState). ExecErrState(options.RuleExecErrState). Condition(options.QueryRefCondition). - Annotations(map[string]string{ - "summary": options.Summary, - "description": options.Description, - "runbook_url": options.RunbookURL, - }). + Annotations(annotations). 
Labels(options.Tags) for _, query := range options.Query { diff --git a/observability-lib/grafana/builder.go b/observability-lib/grafana/builder.go index 11fa1305e..10d5813c9 100644 --- a/observability-lib/grafana/builder.go +++ b/observability-lib/grafana/builder.go @@ -29,16 +29,23 @@ type BuilderOptions struct { } func NewBuilder(options *BuilderOptions) *Builder { - if options.TimeZone == "" { - options.TimeZone = common.TimeZoneBrowser - } + builder := &Builder{} - builder := &Builder{ - dashboardBuilder: dashboard.NewDashboardBuilder(options.Name). - Tags(options.Tags). - Refresh(options.Refresh). - Time(options.TimeFrom, options.TimeTo). - Timezone(options.TimeZone), + if options.Name != "" { + builder.dashboardBuilder = dashboard.NewDashboardBuilder(options.Name) + if options.Tags != nil { + builder.dashboardBuilder.Tags(options.Tags) + } + if options.Refresh != "" { + builder.dashboardBuilder.Refresh(options.Refresh) + } + if options.TimeFrom != "" && options.TimeTo != "" { + builder.dashboardBuilder.Time(options.TimeFrom, options.TimeTo) + } + if options.TimeZone == "" { + options.TimeZone = common.TimeZoneBrowser + } + builder.dashboardBuilder.Timezone(options.TimeZone) } if options.AlertsTags != nil { @@ -59,8 +66,8 @@ func (b *Builder) AddRow(title string) { } func (b *Builder) getPanelCounter() uint32 { - res := b.panelCounter b.panelCounter = inc(&b.panelCounter) + res := b.panelCounter return res } @@ -82,13 +89,20 @@ func (b *Builder) AddPanel(panel ...*Panel) { } else if item.logPanelBuilder != nil { item.logPanelBuilder.Id(panelID) b.dashboardBuilder.WithPanel(item.logPanelBuilder) + } else if item.heatmapBuilder != nil { + item.heatmapBuilder.Id(panelID) + b.dashboardBuilder.WithPanel(item.heatmapBuilder) } - if item.alertBuilder != nil { - b.alertsBuilder = append(b.alertsBuilder, item.alertBuilder) + if item.alertBuilders != nil && len(item.alertBuilders) > 0 { + b.AddAlert(item.alertBuilders...) } } } +func (b *Builder) AddAlert(alerts ...*alerting.RuleBuilder) { + b.alertsBuilder = append(b.alertsBuilder, alerts...) +} + func (b *Builder) AddContactPoint(contactPoints ...*alerting.ContactPointBuilder) { b.contactPointsBuilder = append(b.contactPointsBuilder, contactPoints...) } @@ -97,33 +111,39 @@ func (b *Builder) AddNotificationPolicy(notificationPolicies ...*alerting.Notifi b.notificationPoliciesBuilder = append(b.notificationPoliciesBuilder, notificationPolicies...) 
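Taken together, the alerts.go changes above hide the SDK's verbose threshold-condition structs behind cog builders and a smaller surface: ThresholdConditionsOptions becomes a single struct rather than a slice, a one-element Params gets a trailing 0 appended for the SDK, the rule name moves from Name to Title, and PanelTitle travels through the annotations so DeployToGrafana can resolve __panelId__. A sketch of the resulting call site, wired through the new Builder.AddAlert; all values are illustrative:

```go
// Illustrative only; field names follow the types introduced in this diff.
func addBalanceAlert(builder *grafana.Builder) {
	builder.AddAlert(grafana.NewAlertRule(&grafana.AlertOptions{
		Title:      "ETH balance low", // rule name (formerly the Name field)
		Summary:    "Balance is under the configured threshold",
		PanelTitle: "ETH Balance", // internal: resolved to __panelId__ at deploy time
		Condition: []grafana.ConditionQuery{{
			RefID: "B",
			ThresholdExpression: &grafana.ThresholdExpression{
				Expression: "A",
				ThresholdConditionsOptions: grafana.ThresholdConditionsOption{
					Params: []float64{2}, // single bound; the builder appends the 0
					Type:   grafana.TypeThresholdTypeLt,
				},
			},
		}},
	}))
}
```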
} -func (b *Builder) Build() (*Dashboard, error) { - db, errBuildDashboard := b.dashboardBuilder.Build() - if errBuildDashboard != nil { - return nil, errBuildDashboard - } +func (b *Builder) Build() (*Observability, error) { + observability := Observability{} - var alerts []alerting.Rule - for _, alertBuilder := range b.alertsBuilder { - alert, errBuildAlert := alertBuilder.Build() - if errBuildAlert != nil { - return nil, errBuildAlert + if b.dashboardBuilder != nil { + db, errBuildDashboard := b.dashboardBuilder.Build() + if errBuildDashboard != nil { + return nil, errBuildDashboard } + observability.Dashboard = &db - // Add common tags to alerts - if b.alertsTags != nil && len(b.alertsTags) > 0 { - tags := maps.Clone(b.alertsTags) - maps.Copy(tags, alert.Labels) + var alerts []alerting.Rule + for _, alertBuilder := range b.alertsBuilder { + alert, errBuildAlert := alertBuilder.Build() + if errBuildAlert != nil { + return nil, errBuildAlert + } - alertBuildWithTags := alertBuilder.Labels(tags) - alertWithTags, errBuildAlertWithTags := alertBuildWithTags.Build() - if errBuildAlertWithTags != nil { - return nil, errBuildAlertWithTags + // Add common tags to alerts + if b.alertsTags != nil && len(b.alertsTags) > 0 { + tags := maps.Clone(b.alertsTags) + maps.Copy(tags, alert.Labels) + + alertBuildWithTags := alertBuilder.Labels(tags) + alertWithTags, errBuildAlertWithTags := alertBuildWithTags.Build() + if errBuildAlertWithTags != nil { + return nil, errBuildAlertWithTags + } + alerts = append(alerts, alertWithTags) + } else { + alerts = append(alerts, alert) } - alerts = append(alerts, alertWithTags) - } else { - alerts = append(alerts, alert) } + observability.Alerts = alerts } var contactPoints []alerting.ContactPoint @@ -134,6 +154,7 @@ func (b *Builder) Build() (*Dashboard, error) { } contactPoints = append(contactPoints, contactPoint) } + observability.ContactPoints = contactPoints var notificationPolicies []alerting.NotificationPolicy for _, notificationPolicyBuilder := range b.notificationPoliciesBuilder { @@ -143,11 +164,7 @@ func (b *Builder) Build() (*Dashboard, error) { } notificationPolicies = append(notificationPolicies, notificationPolicy) } + observability.NotificationPolicies = notificationPolicies - return &Dashboard{ - Dashboard: &db, - Alerts: alerts, - ContactPoints: contactPoints, - NotificationPolicies: notificationPolicies, - }, nil + return &observability, nil } diff --git a/observability-lib/grafana/builder_test.go b/observability-lib/grafana/builder_test.go index 003072176..e31db74de 100644 --- a/observability-lib/grafana/builder_test.go +++ b/observability-lib/grafana/builder_test.go @@ -3,6 +3,7 @@ package grafana_test import ( "testing" + "github.com/grafana/grafana-foundation-sdk/go/alerting" "github.com/stretchr/testify/require" "github.com/grafana/grafana-foundation-sdk/go/dashboard" @@ -21,18 +22,86 @@ func TestNewBuilder(t *testing.T) { TimeZone: "UTC", }) - db, err := builder.Build() + o, err := builder.Build() if err != nil { - t.Errorf("Error building dashboard: %v", err) + t.Errorf("Error during build: %v", err) + } + + require.NotEmpty(t, o.Dashboard) + require.Empty(t, o.Alerts) + require.Empty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) + + t.Run("NewBuilder builds a dashboard with alerts", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + Tags: []string{"foo", "bar"}, + Refresh: "1m", + TimeFrom: "now-1h", + TimeTo: "now", + TimeZone: "UTC", + }) + 
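Since Build now tolerates a missing dashboard, and DeployToGrafana (further down in this diff) gates dashboard and alert-rule handling on FolderName while updating existing rules in place instead of deleting and recreating them, an end-to-end call looks roughly like the sketch below; the environment variable names are illustrative only:

```go
// Sketch: build and deploy the new Observability artifact.
func buildAndDeploy(builder *grafana.Builder) error {
	o, err := builder.Build()
	if err != nil {
		return err
	}
	return o.DeployToGrafana(&grafana.DeployOptions{
		GrafanaURL:   os.Getenv("GRAFANA_URL"),   // illustrative
		GrafanaToken: os.Getenv("GRAFANA_TOKEN"), // illustrative
		FolderName:   "Team Dashboards", // empty skips the dashboard and its alert rules
		EnableAlerts: true,              // false removes previously deployed rules instead
	})
}
```

Contact points and notification policies are deployed regardless of FolderName, which is what makes the dashboard-less builds exercised in the tests above useful.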
builder.AddAlert(grafana.NewAlertRule(&grafana.AlertOptions{ + Title: "Alert Title", + })) + + o, err := builder.Build() + if err != nil { + t.Errorf("Error during build: %v", err) } - require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + require.NotEmpty(t, o.Dashboard) + require.NotEmpty(t, o.Alerts) + require.Empty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) + + t.Run("NewBuilder builds a contact point", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder.AddContactPoint(grafana.NewContactPoint(&grafana.ContactPointOptions{ + Name: "slack", + Type: "slack", + })) + + o, err := builder.Build() + if err != nil { + t.Errorf("Error during build: %v", err) + } + + require.Empty(t, o.Dashboard) + require.Empty(t, o.Alerts) + require.NotEmpty(t, o.ContactPoints) + require.Empty(t, o.NotificationPolicies) + }) + + t.Run("NewBuilder builds a notification policy", func(t *testing.T) { + builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder.AddNotificationPolicy(grafana.NewNotificationPolicy(&grafana.NotificationPolicyOptions{ + Receiver: "slack", + GroupBy: []string{"grafana_folder", "alertname"}, + ObjectMatchers: []alerting.ObjectMatcher{ + {"team", "=", "chainlink"}, + }, + })) + + o, err := builder.Build() + if err != nil { + t.Errorf("Error during build: %v", err) + } + + require.Empty(t, o.Dashboard) + require.Empty(t, o.Alerts) + require.Empty(t, o.ContactPoints) + require.NotEmpty(t, o.NotificationPolicies) }) } func TestBuilder_AddVars(t *testing.T) { t.Run("AddVars adds variables to the dashboard", func(t *testing.T) { - builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + }) variable := grafana.NewQueryVariable(&grafana.QueryVariableOptions{ VariableOption: &grafana.VariableOption{ @@ -44,30 +113,34 @@ func TestBuilder_AddVars(t *testing.T) { }) builder.AddVars(variable) - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + require.Len(t, o.Dashboard.Templating.List, 1) }) } func TestBuilder_AddRow(t *testing.T) { t.Run("AddRow adds a row to the dashboard", func(t *testing.T) { - builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + }) builder.AddRow("Row Title") - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + require.IsType(t, dashboard.RowPanel{}, *o.Dashboard.Panels[0].RowPanel) }) } func TestBuilder_AddPanel(t *testing.T) { t.Run("AddPanel adds a panel to the dashboard", func(t *testing.T) { - builder := grafana.NewBuilder(&grafana.BuilderOptions{}) + builder := grafana.NewBuilder(&grafana.BuilderOptions{ + Name: "Dashboard Name", + }) panel := grafana.NewStatPanel(&grafana.StatPanelOptions{ PanelOptions: &grafana.PanelOptions{ @@ -76,10 +149,10 @@ func TestBuilder_AddPanel(t *testing.T) { }) builder.AddPanel(panel) - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - require.IsType(t, dashboard.Dashboard{}, *db.Dashboard) + require.IsType(t, dashboard.Panel{}, *o.Dashboard.Panels[0].Panel) }) } diff --git a/observability-lib/grafana/dashboard.go b/observability-lib/grafana/dashboard.go index 
425660969..a9b31fa59 100644 --- a/observability-lib/grafana/dashboard.go +++ b/observability-lib/grafana/dashboard.go @@ -3,6 +3,7 @@ package grafana import ( "encoding/json" "fmt" + "reflect" "github.com/grafana/grafana-foundation-sdk/go/alerting" "github.com/grafana/grafana-foundation-sdk/go/dashboard" @@ -16,20 +17,20 @@ const ( TypePlatformDocker TypePlatform = "docker" ) -type Dashboard struct { +type Observability struct { Dashboard *dashboard.Dashboard Alerts []alerting.Rule ContactPoints []alerting.ContactPoint NotificationPolicies []alerting.NotificationPolicy } -func (db *Dashboard) GenerateJSON() ([]byte, error) { - dashboardJSON, err := json.MarshalIndent(db, "", " ") +func (o *Observability) GenerateJSON() ([]byte, error) { + output, err := json.MarshalIndent(o, "", " ") if err != nil { return nil, err } - return dashboardJSON, nil + return output, nil } type DeployOptions struct { @@ -40,52 +41,107 @@ type DeployOptions struct { NotificationTemplates string } -func (db *Dashboard) DeployToGrafana(options *DeployOptions) error { +func alertRuleExist(alerts []alerting.Rule, alert alerting.Rule) bool { + for _, a := range alerts { + if reflect.DeepEqual(a, alert) { + return true + } + } + return false +} + +func getAlertRuleByTitle(alerts []alerting.Rule, title string) *alerting.Rule { + for _, a := range alerts { + if a.Title == title { + return &a + } + } + return nil +} + +func (o *Observability) DeployToGrafana(options *DeployOptions) error { grafanaClient := api.NewClient( options.GrafanaURL, options.GrafanaToken, ) - folder, errFolder := grafanaClient.FindOrCreateFolder(options.FolderName) - if errFolder != nil { - return errFolder - } + if options.FolderName != "" { + folder, errFolder := grafanaClient.FindOrCreateFolder(options.FolderName) + if errFolder != nil { + return errFolder + } + newDashboard, _, errPostDashboard := grafanaClient.PostDashboard(api.PostDashboardRequest{ + Dashboard: o.Dashboard, + Overwrite: true, + FolderID: int(folder.ID), + }) + if errPostDashboard != nil { + return errPostDashboard + } - newDashboard, _, errPostDashboard := grafanaClient.PostDashboard(api.PostDashboardRequest{ - Dashboard: db.Dashboard, - Overwrite: true, - FolderID: int(folder.ID), - }) - if errPostDashboard != nil { - return errPostDashboard - } + if !options.EnableAlerts && o.Alerts != nil && len(o.Alerts) > 0 { + // Get alert rules for the dashboard + alertsRule, errGetAlertRules := grafanaClient.GetAlertRulesByDashboardUID(*newDashboard.UID) + if errGetAlertRules != nil { + return errGetAlertRules + } - // Create alerts for the dashboard - if options.EnableAlerts && db.Alerts != nil && len(db.Alerts) > 0 { - // Get alert rules for the dashboard - alertsRule, errGetAlertRules := grafanaClient.GetAlertRulesByDashboardUID(*newDashboard.UID) - if errGetAlertRules != nil { - return errGetAlertRules + // delete existing alert rules for the dashboard if alerts are disabled + for _, rule := range alertsRule { + _, _, errDeleteAlertRule := grafanaClient.DeleteAlertRule(*rule.Uid) + if errDeleteAlertRule != nil { + return errDeleteAlertRule + } + } } - // delete alert rules for the dashboard - for _, rule := range alertsRule { - _, _, errDeleteAlertRule := grafanaClient.DeleteAlertRule(*rule.Uid) - if errDeleteAlertRule != nil { - return errDeleteAlertRule + // Create alerts for the dashboard + if options.EnableAlerts && o.Alerts != nil && len(o.Alerts) > 0 { + // Get alert rules for the dashboard + alertsRule, errGetAlertRules := 
grafanaClient.GetAlertRulesByDashboardUID(*newDashboard.UID) + if errGetAlertRules != nil { + return errGetAlertRules } - } - // Create alert rules for the dashboard - for _, alert := range db.Alerts { - alert.RuleGroup = *db.Dashboard.Title - alert.FolderUID = folder.UID - alert.Annotations["__dashboardUid__"] = *newDashboard.UID - alert.Annotations["__panelId__"] = panelIDByTitle(db.Dashboard, alert.Title) + // delete alert rules for the dashboard + for _, rule := range alertsRule { + // delete alert rule only if it won't be created again from code + if !alertRuleExist(o.Alerts, rule) { + _, _, errDeleteAlertRule := grafanaClient.DeleteAlertRule(*rule.Uid) + if errDeleteAlertRule != nil { + return errDeleteAlertRule + } + } + } - _, _, errPostAlertRule := grafanaClient.PostAlertRule(alert) - if errPostAlertRule != nil { - return errPostAlertRule + // Create alert rules for the dashboard + for _, alert := range o.Alerts { + alert.RuleGroup = *o.Dashboard.Title + alert.FolderUID = folder.UID + alert.Annotations["__dashboardUid__"] = *newDashboard.UID + + panelId := panelIDByTitle(o.Dashboard, alert.Annotations["panel_title"]) + // we can clean it up as it was only used to get the panelId + delete(alert.Annotations, "panel_title") + if panelId != "" { + alert.Annotations["__panelId__"] = panelId + } + if alertRuleExist(alertsRule, alert) { + // update alert rule if it already exists + alertToUpdate := getAlertRuleByTitle(alertsRule, alert.Title) + if alertToUpdate != nil { + _, _, errPutAlertRule := grafanaClient.UpdateAlertRule(*alertToUpdate.Uid, alert) + if errPutAlertRule != nil { + return errPutAlertRule + } + } + } else { + // create alert rule if it doesn't exist + _, _, errPostAlertRule := grafanaClient.PostAlertRule(alert) + if errPostAlertRule != nil { + return errPostAlertRule + } + } } } } @@ -107,8 +163,8 @@ func (db *Dashboard) DeployToGrafana(options *DeployOptions) error { } // Create contact points for the alerts - if db.ContactPoints != nil && len(db.ContactPoints) > 0 { - for _, contactPoint := range db.ContactPoints { + if o.ContactPoints != nil && len(o.ContactPoints) > 0 { + for _, contactPoint := range o.ContactPoints { errCreateOrUpdateContactPoint := grafanaClient.CreateOrUpdateContactPoint(contactPoint) if errCreateOrUpdateContactPoint != nil { return errCreateOrUpdateContactPoint @@ -117,8 +173,8 @@ func (db *Dashboard) DeployToGrafana(options *DeployOptions) error { } // Create notification policies for the alerts - if db.NotificationPolicies != nil && len(db.NotificationPolicies) > 0 { - for _, notificationPolicy := range db.NotificationPolicies { + if o.NotificationPolicies != nil && len(o.NotificationPolicies) > 0 { + for _, notificationPolicy := range o.NotificationPolicies { errAddNestedPolicy := grafanaClient.AddNestedPolicy(notificationPolicy) if errAddNestedPolicy != nil { return errAddNestedPolicy diff --git a/observability-lib/grafana/dashboard_test.go b/observability-lib/grafana/dashboard_test.go index 4a46ed17a..ecac1d04a 100644 --- a/observability-lib/grafana/dashboard_test.go +++ b/observability-lib/grafana/dashboard_test.go @@ -3,7 +3,6 @@ package grafana_test import ( "testing" - "github.com/grafana/grafana-foundation-sdk/go/expr" "github.com/smartcontractkit/chainlink-common/observability-lib/grafana" "github.com/stretchr/testify/require" ) @@ -32,7 +31,9 @@ func TestGenerateJSON(t *testing.T) { Legend: `{{account}}`, }, }, - AlertOptions: &grafana.AlertOptions{ + }, + AlertsOptions: []grafana.AlertOptions{ + { Summary: `ETH Balance is lower than 
threshold`, Description: `ETH Balance critically low at {{ index $values "A" }}`, RunbookURL: "https://github.com/smartcontractkit/chainlink-common/tree/main/observability-lib", @@ -54,11 +55,9 @@ func TestGenerateJSON(t *testing.T) { RefID: "B", ThresholdExpression: &grafana.ThresholdExpression{ Expression: "A", - ThresholdConditionsOptions: []grafana.ThresholdConditionsOption{ - { - Params: []float64{2, 0}, - Type: expr.TypeThresholdTypeLt, - }, + ThresholdConditionsOptions: grafana.ThresholdConditionsOption{ + Params: []float64{2}, + Type: grafana.TypeThresholdTypeLt, }, }, }, @@ -67,12 +66,12 @@ func TestGenerateJSON(t *testing.T) { }, })) - db, err := builder.Build() + o, err := builder.Build() if err != nil { t.Errorf("Error building dashboard: %v", err) } - json, err := db.GenerateJSON() + json, err := o.GenerateJSON() require.IsType(t, json, []byte{}) }) } diff --git a/observability-lib/grafana/panels.go b/observability-lib/grafana/panels.go index 4466a1085..9869a2317 100644 --- a/observability-lib/grafana/panels.go +++ b/observability-lib/grafana/panels.go @@ -5,6 +5,7 @@ import ( "github.com/grafana/grafana-foundation-sdk/go/common" "github.com/grafana/grafana-foundation-sdk/go/dashboard" "github.com/grafana/grafana-foundation-sdk/go/gauge" + "github.com/grafana/grafana-foundation-sdk/go/heatmap" "github.com/grafana/grafana-foundation-sdk/go/logs" "github.com/grafana/grafana-foundation-sdk/go/prometheus" "github.com/grafana/grafana-foundation-sdk/go/stat" @@ -36,6 +37,7 @@ func newQuery(query Query) *prometheus.DataqueryBuilder { type LegendOptions struct { Placement common.LegendPlacement DisplayMode common.LegendDisplayMode + Calcs []string } func newLegend(options *LegendOptions) *common.VizLegendOptionsBuilder { @@ -49,8 +51,14 @@ func newLegend(options *LegendOptions) *common.VizLegendOptionsBuilder { builder := common.NewVizLegendOptionsBuilder(). ShowLegend(true). - Placement(options.Placement). 
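One behavioral nuance in the legend change above: supplying Calcs now implicitly forces the legend into table display mode before the calculations are applied, so DisplayMode no longer needs to be set by hand in that case. Illustratively (the placement constant is assumed from the foundation SDK):

```go
legend := &grafana.LegendOptions{
	Placement: common.LegendPlacementBottom,   // assumed SDK constant
	Calcs:     []string{"min", "max", "mean"}, // non-empty Calcs forces LegendDisplayModeTable
}
```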
- DisplayMode(options.DisplayMode) + Placement(options.Placement) + + if len(options.Calcs) > 0 { + options.DisplayMode = common.LegendDisplayModeTable + builder.Calcs(options.Calcs) + } + + builder.DisplayMode(options.DisplayMode) return builder } @@ -81,20 +89,30 @@ func newTransform(options *TransformOptions) dashboard.DataTransformerConfig { } type PanelOptions struct { - Datasource string - Title string - Description string - Span uint32 - Height uint32 - Decimals float64 - Unit string - NoValue string - Min *float64 - Max *float64 - Query []Query - Threshold *ThresholdOptions - Transform *TransformOptions - AlertOptions *AlertOptions + Datasource string + Title string + Description string + Span uint32 + Height uint32 + Decimals float64 + Unit string + NoValue string + Min *float64 + Max *float64 + Query []Query + Threshold *ThresholdOptions + Transform *TransformOptions + ColorScheme dashboard.FieldColorModeId +} + +type Panel struct { + statPanelBuilder *stat.PanelBuilder + timeSeriesPanelBuilder *timeseries.PanelBuilder + gaugePanelBuilder *gauge.PanelBuilder + tablePanelBuilder *table.PanelBuilder + logPanelBuilder *logs.PanelBuilder + heatmapBuilder *heatmap.PanelBuilder + alertBuilders []*alerting.RuleBuilder } // panel defaults @@ -125,15 +143,7 @@ type StatPanelOptions struct { GraphMode common.BigValueGraphMode TextMode common.BigValueTextMode Orientation common.VizOrientation -} - -type Panel struct { - statPanelBuilder *stat.PanelBuilder - timeSeriesPanelBuilder *timeseries.PanelBuilder - gaugePanelBuilder *gauge.PanelBuilder - tablePanelBuilder *table.PanelBuilder - logPanelBuilder *logs.PanelBuilder - alertBuilder *alerting.RuleBuilder + Mappings []dashboard.ValueMapping } func NewStatPanel(options *StatPanelOptions) *Panel { @@ -170,6 +180,7 @@ func NewStatPanel(options *StatPanelOptions) *Panel { TextMode(options.TextMode). Orientation(options.Orientation). JustifyMode(options.JustifyMode). + Mappings(options.Mappings). 
ReduceOptions(common.NewReduceDataOptionsBuilder().Calcs([]string{"last"})) if options.Min != nil { @@ -204,13 +215,8 @@ func NewStatPanel(options *StatPanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - statPanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) } return &Panel{ @@ -220,18 +226,16 @@ func NewStatPanel(options *StatPanelOptions) *Panel { type TimeSeriesPanelOptions struct { *PanelOptions + AlertsOptions []AlertOptions FillOpacity float64 ScaleDistribution common.ScaleDistribution LegendOptions *LegendOptions + ThresholdStyle common.GraphThresholdsStyleMode } func NewTimeSeriesPanel(options *TimeSeriesPanelOptions) *Panel { setDefaults(options.PanelOptions) - if options.FillOpacity == 0 { - options.FillOpacity = 2 - } - if options.ScaleDistribution == "" { options.ScaleDistribution = common.ScaleDistributionLinear } @@ -269,23 +273,36 @@ func NewTimeSeriesPanel(options *TimeSeriesPanelOptions) *Panel { if options.Threshold != nil { newPanel.Thresholds(newThresholds(options.Threshold)) + + if options.ThresholdStyle != "" { + newPanel.ThresholdsStyle(common.NewGraphThresholdsStyleConfigBuilder().Mode(options.ThresholdStyle)) + } } if options.Transform != nil { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) + } - return &Panel{ - timeSeriesPanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), + var alertBuilders []*alerting.RuleBuilder + if options.AlertsOptions != nil && len(options.AlertsOptions) > 0 { + for _, alert := range options.AlertsOptions { + // this is used as an internal mechanism to set the panel title in the alert to associate panelId with alert + alert.PanelTitle = options.Title + // if name is provided use it, otherwise use panel title + if alert.Title == "" { + alert.Title = options.Title + } + alertBuilders = append(alertBuilders, NewAlertRule(&alert)) } } return &Panel{ timeSeriesPanelBuilder: newPanel, + alertBuilders: alertBuilders, } } @@ -303,7 +320,11 @@ func NewGaugePanel(options *GaugePanelOptions) *Panel { Span(options.Span). Height(options.Height). Decimals(options.Decimals). - Unit(options.Unit) + Unit(options.Unit). + ReduceOptions( + common.NewReduceDataOptionsBuilder(). 
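The time-series changes above replace the single AlertOptions with a per-panel AlertsOptions slice: each alert inherits the panel title unless it sets its own Title, and PanelTitle is stamped internally to link the alert back to its panel at deploy time. The hard-coded fill opacity of 2 also gives way to a zero default. A sketch with illustrative values; the thresholds-style constant is assumed from the foundation SDK:

```go
panel := grafana.NewTimeSeriesPanel(&grafana.TimeSeriesPanelOptions{
	PanelOptions: &grafana.PanelOptions{
		Title: "ETH Balance",
	},
	ThresholdStyle: common.GraphThresholdsStyleModeDashed, // assumed constant
	AlertsOptions: []grafana.AlertOptions{
		{Summary: "Balance low"},                       // Title defaults to "ETH Balance"
		{Title: "Balance critical", Summary: "Urgent"}, // explicit Title wins
	},
})
```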
+ Calcs([]string{"lastNotNull"}).Values(false), + ) if options.Min != nil { newPanel.Min(*options.Min) @@ -325,15 +346,6 @@ func NewGaugePanel(options *GaugePanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - gaugePanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } - } - return &Panel{ gaugePanelBuilder: newPanel, } @@ -376,13 +388,8 @@ func NewTablePanel(options *TablePanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - tablePanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) } return &Panel{ @@ -392,6 +399,7 @@ func NewTablePanel(options *TablePanelOptions) *Panel { type LogPanelOptions struct { *PanelOptions + PrettifyJSON bool } func NewLogPanel(options *LogPanelOptions) *Panel { @@ -403,7 +411,8 @@ func NewLogPanel(options *LogPanelOptions) *Panel { Description(options.Description). Span(options.Span). Height(options.Height). - NoValue(options.NoValue) + NoValue(options.NoValue). + PrettifyLogMessage(options.PrettifyJSON) if options.Min != nil { newPanel.Min(*options.Min) @@ -425,16 +434,58 @@ func NewLogPanel(options *LogPanelOptions) *Panel { newPanel.WithTransformation(newTransform(options.Transform)) } - if options.AlertOptions != nil { - options.AlertOptions.Name = options.Title - - return &Panel{ - logPanelBuilder: newPanel, - alertBuilder: NewAlertRule(options.AlertOptions), - } + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) } return &Panel{ logPanelBuilder: newPanel, } } + +type HeatmapPanelOptions struct { + *PanelOptions +} + +func NewHeatmapPanel(options *HeatmapPanelOptions) *Panel { + setDefaults(options.PanelOptions) + + newPanel := heatmap.NewPanelBuilder(). + Datasource(datasourceRef(options.Datasource)). + Title(options.Title). + Description(options.Description). + Span(options.Span). + Height(options.Height). + Decimals(options.Decimals). + Unit(options.Unit). 
+ NoValue(options.NoValue) + + if options.Min != nil { + newPanel.Min(*options.Min) + } + + if options.Max != nil { + newPanel.Max(*options.Max) + } + + for _, q := range options.Query { + q.Format = prometheus.PromQueryFormatHeatmap + newPanel.WithTarget(newQuery(q)) + } + + if options.Threshold != nil { + newPanel.Thresholds(newThresholds(options.Threshold)) + } + + if options.Transform != nil { + newPanel.WithTransformation(newTransform(options.Transform)) + } + + if options.ColorScheme != "" { + newPanel.ColorScheme(dashboard.NewFieldColorBuilder().Mode(options.ColorScheme)) + } + + return &Panel{ + heatmapBuilder: newPanel, + } +} diff --git a/observability-lib/grafana/variables.go b/observability-lib/grafana/variables.go index 5ad263368..445a071cb 100644 --- a/observability-lib/grafana/variables.go +++ b/observability-lib/grafana/variables.go @@ -1,38 +1,93 @@ package grafana import ( + "strings" + "github.com/grafana/grafana-foundation-sdk/go/cog" "github.com/grafana/grafana-foundation-sdk/go/dashboard" ) -type VariableOption struct { - Name string - Label string +type VariableOptionValues struct { } -type QueryVariableOptions struct { - *VariableOption - Datasource string - Query string - Multi bool - Regex string +type VariableOption struct { + Name string + Label string + Description string CurrentText string CurrentValue string - IncludeAll bool } -func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBuilder { - if options.CurrentText == "" { +type CustomVariableOptions struct { + *VariableOption + Values map[string]any +} + +func NewCustomVariable(options *CustomVariableOptions) *dashboard.CustomVariableBuilder { + if options.CurrentText == "" && options.CurrentValue == "" { options.CurrentText = "All" + options.CurrentValue = "$__all" + } + + variable := dashboard.NewCustomVariableBuilder(options.Name). + Label(options.Label). + Description(options.Description). 
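NewHeatmapPanel above rounds out the panel constructors; note that it forces PromQueryFormatHeatmap on every target, so callers never set Query.Format themselves. Minimal illustrative usage (the metric and query field values are examples, not from this diff):

```go
heatmapPanel := grafana.NewHeatmapPanel(&grafana.HeatmapPanelOptions{
	PanelOptions: &grafana.PanelOptions{
		Title: "Request Duration", // example title
		Query: []grafana.Query{{
			Expr:   `sum by (le) (rate(request_duration_seconds_bucket[$__rate_interval]))`,
			Legend: "{{le}}",
		}},
	},
})
```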
+ Current(dashboard.VariableOption{ + Selected: cog.ToPtr[bool](true), + Text: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentText)}, + Value: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentValue)}, + }) + + optionsList := []dashboard.VariableOption{ + { + Selected: cog.ToPtr[bool](true), + Text: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentText)}, + Value: dashboard.StringOrArrayOfString{String: cog.ToPtr(options.CurrentValue)}, + }, + } + for key, value := range options.Values { + if key != options.CurrentText { + option := dashboard.VariableOption{ + Text: dashboard.StringOrArrayOfString{String: cog.ToPtr(key)}, + Value: dashboard.StringOrArrayOfString{String: cog.ToPtr(value.(string))}, + } + optionsList = append(optionsList, option) + } + } + variable.Options(optionsList) + + valuesString := "" + for key, value := range options.Values { + // Escape commas and colons in the value which are reserved characters for values string + cleanValue := strings.ReplaceAll(value.(string), ",", "\\,") + cleanValue = strings.ReplaceAll(cleanValue, ":", "\\:") + valuesString += key + " : " + cleanValue + " , " } + variable.Values(dashboard.StringOrMap{String: cog.ToPtr(strings.TrimSuffix(valuesString, ", "))}) + + return variable +} + +type QueryVariableOptions struct { + *VariableOption + Datasource string + Query string + Multi bool + Regex string + IncludeAll bool + QueryWithType map[string]any + Hide *dashboard.VariableHide +} - if options.CurrentValue == "" { +func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBuilder { + if options.CurrentText == "" && options.CurrentValue == "" { + options.CurrentText = "All" options.CurrentValue = "$__all" } variable := dashboard.NewQueryVariableBuilder(options.Name). Label(options.Label). - Query(dashboard.StringOrMap{String: cog.ToPtr[string](options.Query)}). + Description(options.Description). Datasource(datasourceRef(options.Datasource)). Current(dashboard.VariableOption{ Selected: cog.ToPtr[bool](true), @@ -42,6 +97,12 @@ func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBui Sort(dashboard.VariableSortAlphabeticalAsc). Multi(options.Multi) + if options.Query != "" { + variable.Query(dashboard.StringOrMap{String: cog.ToPtr[string](options.Query)}) + } else if options.QueryWithType != nil { + variable.Query(dashboard.StringOrMap{Map: options.QueryWithType}) + } + if options.Regex != "" { variable.Regex(options.Regex) } @@ -50,6 +111,10 @@ func NewQueryVariable(options *QueryVariableOptions) *dashboard.QueryVariableBui variable.IncludeAll(options.IncludeAll) } + if options.Hide != nil { + variable.Hide(*options.Hide) + } + return variable } @@ -59,12 +124,18 @@ type IntervalVariableOptions struct { } func NewIntervalVariable(options *IntervalVariableOptions) *dashboard.IntervalVariableBuilder { + if options.CurrentText == "" && options.CurrentValue == "" { + options.CurrentText = "All" + options.CurrentValue = "$__all" + } + return dashboard.NewIntervalVariableBuilder(options.Name). Label(options.Label). + Description(options.Description). Values(dashboard.StringOrMap{String: cog.ToPtr[string](options.Interval)}). 
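The new NewCustomVariable assembles both the options list and Grafana's flat values string itself, escaping commas and colons in values because both are reserved separators in that string. Illustrative usage:

```go
regionVar := grafana.NewCustomVariable(&grafana.CustomVariableOptions{
	VariableOption: &grafana.VariableOption{
		Name:  "region", // example variable
		Label: "Region",
	},
	Values: map[string]any{
		"US East": "us-east-1", // key is display text, value is substituted
		"EU West": "eu-west-1",
	},
})
```

QueryVariableOptions similarly gains QueryWithType for map-style queries and Hide for hidden variables.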
Current(dashboard.VariableOption{ Selected: cog.ToPtr[bool](true), - Text: dashboard.StringOrArrayOfString{ArrayOfString: []string{"All"}}, - Value: dashboard.StringOrArrayOfString{ArrayOfString: []string{"$__all"}}, + Text: dashboard.StringOrArrayOfString{ArrayOfString: []string{options.CurrentText}}, + Value: dashboard.StringOrArrayOfString{ArrayOfString: []string{options.CurrentValue}}, }) } diff --git a/pkg/beholder/client.go b/pkg/beholder/client.go index 63a91922e..05f874910 100644 --- a/pkg/beholder/client.go +++ b/pkg/beholder/client.go @@ -6,6 +6,7 @@ import ( "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc" "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc" otellog "go.opentelemetry.io/otel/log" @@ -53,35 +54,58 @@ type Client struct { // NewClient creates a new Client with initialized OpenTelemetry components // To handle OpenTelemetry errors use [otel.SetErrorHandler](https://pkg.go.dev/go.opentelemetry.io/otel#SetErrorHandler) func NewClient(cfg Config) (*Client, error) { + if cfg.OtelExporterGRPCEndpoint != "" && cfg.OtelExporterHTTPEndpoint != "" { + return nil, errors.New("only one exporter endpoint should be set") + } + if cfg.OtelExporterGRPCEndpoint == "" && cfg.OtelExporterHTTPEndpoint == "" { + return nil, errors.New("at least one exporter endpoint should be set") + } + if cfg.OtelExporterHTTPEndpoint != "" { + factory := func(options ...otlploghttp.Option) (sdklog.Exporter, error) { + // note: context is unused internally + return otlploghttp.New(context.Background(), options...) //nolint + } + return newHTTPClient(cfg, factory) + } + factory := func(options ...otlploggrpc.Option) (sdklog.Exporter, error) { // note: context is unused internally return otlploggrpc.New(context.Background(), options...) //nolint } - return newClient(cfg, factory) + return newGRPCClient(cfg, factory) } // Used for testing to override the default exporter type otlploggrpcFactory func(options ...otlploggrpc.Option) (sdklog.Exporter, error) -func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { +func newGRPCClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, err := newOtelResource(cfg) - noop := NewNoopClient() if err != nil { - return noop, err + return nil, err } creds := insecure.NewCredentials() if !cfg.InsecureConnection && cfg.CACertFile != "" { creds, err = credentials.NewClientTLSFromFile(cfg.CACertFile, "") if err != nil { - return noop, err + return nil, err } } - sharedLogExporter, err := otlploggrpcNew( + opts := []otlploggrpc.Option{ otlploggrpc.WithTLSCredentials(creds), otlploggrpc.WithEndpoint(cfg.OtelExporterGRPCEndpoint), - ) + } + if cfg.LogRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlploggrpc.WithRetry(otlploggrpc.RetryConfig{ + Enabled: cfg.LogRetryConfig.Enabled(), + InitialInterval: cfg.LogRetryConfig.GetInitialInterval(), + MaxInterval: cfg.LogRetryConfig.GetMaxInterval(), + MaxElapsedTime: cfg.LogRetryConfig.GetMaxElapsedTime(), + })) + } + sharedLogExporter, err := otlploggrpcNew(opts...) 
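// Illustrative sketch (not from the patch; endpoint value is an assumption): the retry
// options built above come from Config.LogRetryConfig, which defaults to the OTel SDK
// values. A caller could tune or disable them like so:
//
//	cfg := beholder.DefaultConfig()
//	cfg.OtelExporterGRPCEndpoint = "localhost:4317"
//	cfg.LogRetryConfig = &beholder.RetryConfig{
//		InitialInterval: 5 * time.Second,
//		MaxInterval:     30 * time.Second,
//		MaxElapsedTime:  time.Minute, // set to zero to disable retry
//	}
//	client, err := beholder.NewClient(cfg)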
if err != nil { - return noop, err + return nil, err } // Logger @@ -102,7 +126,7 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, ) if err != nil { - return noop, err + return nil, err } loggerProvider := sdklog.NewLoggerProvider( sdklog.WithResource(loggerResource), @@ -113,14 +137,14 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { // Tracer tracerProvider, err := newTracerProvider(cfg, baseResource, creds) if err != nil { - return noop, err + return nil, err } tracer := tracerProvider.Tracer(defaultPackageName) // Meter meterProvider, err := newMeterProvider(cfg, baseResource, creds) if err != nil { - return noop, err + return nil, err } meter := meterProvider.Meter(defaultPackageName) @@ -143,7 +167,7 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, ) if err != nil { - return noop, err + return nil, err } messageLoggerProvider := sdklog.NewLoggerProvider( @@ -174,7 +198,15 @@ func (c Client) Close() (err error) { } // Returns a new Client with the same configuration but with a different package name +// Deprecated: Use ForName func (c Client) ForPackage(name string) Client { + return c.ForName(name) +} + +// ForName returns a new Client with the same configuration but with a different name. +// For global package-scoped telemetry, use the package name. +// For injected component-scoped telemetry, use a fully qualified name that uniquely identifies this instance. +func (c Client) ForName(name string) Client { // Logger logger := c.LoggerProvider.Logger(name) // Tracer @@ -247,10 +279,21 @@ type shutdowner interface { func newTracerProvider(config Config, resource *sdkresource.Resource, creds credentials.TransportCredentials) (*sdktrace.TracerProvider, error) { ctx := context.Background() - exporter, err := otlptracegrpc.New(ctx, + exporterOpts := []otlptracegrpc.Option{ otlptracegrpc.WithTLSCredentials(creds), otlptracegrpc.WithEndpoint(config.OtelExporterGRPCEndpoint), - ) + } + if config.TraceRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + exporterOpts = append(exporterOpts, otlptracegrpc.WithRetry(otlptracegrpc.RetryConfig{ + Enabled: config.TraceRetryConfig.Enabled(), + InitialInterval: config.TraceRetryConfig.GetInitialInterval(), + MaxInterval: config.TraceRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.TraceRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is used internally + exporter, err := otlptracegrpc.New(ctx, exporterOpts...) 
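// Illustrative sketch: TraceRetryConfig has the same shape as LogRetryConfig, so trace
// retries can be disabled independently, as TestDefaultConfig does:
//
//	cfg := beholder.DefaultConfig()
//	cfg.TraceRetryConfig.MaxElapsedTime = 0 // RetryConfig.Enabled() then reports false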
if err != nil { return nil, err } @@ -272,12 +315,21 @@ func newTracerProvider(config Config, resource *sdkresource.Resource, creds cred func newMeterProvider(config Config, resource *sdkresource.Resource, creds credentials.TransportCredentials) (*sdkmetric.MeterProvider, error) { ctx := context.Background() - - exporter, err := otlpmetricgrpc.New( - ctx, + opts := []otlpmetricgrpc.Option{ otlpmetricgrpc.WithTLSCredentials(creds), otlpmetricgrpc.WithEndpoint(config.OtelExporterGRPCEndpoint), - ) + } + if config.MetricRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlpmetricgrpc.WithRetry(otlpmetricgrpc.RetryConfig{ + Enabled: config.MetricRetryConfig.Enabled(), + InitialInterval: config.MetricRetryConfig.GetInitialInterval(), + MaxInterval: config.MetricRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.MetricRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is unused internally + exporter, err := otlpmetricgrpc.New(ctx, opts...) if err != nil { return nil, err } diff --git a/pkg/beholder/client_test.go b/pkg/beholder/client_test.go index aad3f475f..c76e9dfdd 100644 --- a/pkg/beholder/client_test.go +++ b/pkg/beholder/client_test.go @@ -2,14 +2,16 @@ package beholder import ( "context" - "fmt" + "errors" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" otellog "go.opentelemetry.io/otel/log" sdklog "go.opentelemetry.io/otel/sdk/log" @@ -49,11 +51,37 @@ func TestClient(t *testing.T) { "byte_key_1": []byte("byte_val_1"), "str_slice_key_1": []string{"str_val_1", "str_val_2"}, "nil_key_1": nil, + "beholder_domain": "TestDomain", // Required field + "beholder_entity": "TestEntity", // Required field "beholder_data_schema": "/schemas/ids/1001", // Required field, URI } } defaultMessageBody := []byte("body bytes") + mustNewGRPCClient := func(t *testing.T, exporterMock *mocks.OTLPExporter) *Client { + // Override exporter factory which is used by Client + exporterFactory := func(...otlploggrpc.Option) (sdklog.Exporter, error) { + return exporterMock, nil + } + client, err := newGRPCClient(TestDefaultConfig(), exporterFactory) + if err != nil { + t.Fatalf("Error creating beholder client: %v", err) + } + return client + } + + mustNewHTTPClient := func(t *testing.T, exporterMock *mocks.OTLPExporter) *Client { + // Override exporter factory which is used by Client + exporterFactory := func(...otlploghttp.Option) (sdklog.Exporter, error) { + return exporterMock, nil + } + client, err := newHTTPClient(TestDefaultConfigHTTPClient(), exporterFactory) + if err != nil { + t.Fatalf("Error creating beholder client: %v", err) + } + return client + } + testCases := []struct { name string makeCustomAttributes func() map[string]any @@ -62,9 +90,24 @@ func TestClient(t *testing.T) { exporterMockErrorCount int exporterOutputExpected bool messageGenerator func(client *Client, messageBody []byte, customAttributes map[string]any) + mustNewGrpcClient func(*testing.T, *mocks.OTLPExporter) *Client }{ { - name: "Test Emit", + name: "Test Emit (GRPC Client)", + makeCustomAttributes: defaultCustomAttributes, + messageBody: defaultMessageBody, + messageCount: 10, + exporterMockErrorCount: 0, + exporterOutputExpected: true, + messageGenerator: func(client *Client, messageBody []byte, customAttributes map[string]any) { + err 
:= client.Emitter.Emit(tests.Context(t), messageBody, customAttributes) + assert.NoError(t, err) + }, + mustNewGrpcClient: mustNewGRPCClient, + }, + + { + name: "Test Emit (HTTP Client)", makeCustomAttributes: defaultCustomAttributes, messageBody: defaultMessageBody, messageCount: 10, @@ -74,6 +117,7 @@ err := client.Emitter.Emit(tests.Context(t), messageBody, customAttributes) assert.NoError(t, err) }, + mustNewGrpcClient: mustNewHTTPClient, }, } @@ -82,29 +126,23 @@ exporterMock := mocks.NewOTLPExporter(t) defer exporterMock.AssertExpectations(t) - // Override exporter factory which is used by Client - exporterFactory := func(...otlploggrpc.Option) (sdklog.Exporter, error) { - return exporterMock, nil - } - client, err := newClient(TestDefaultConfig(), exporterFactory) - if err != nil { - t.Fatalf("Error creating beholder client: %v", err) - } + client := tc.mustNewGrpcClient(t, exporterMock) + otel.SetErrorHandler(otelMustNotErr(t)) // Number of exported messages exportedMessageCount := 0 // Simulate exporter error if configured if tc.exporterMockErrorCount > 0 { - exporterMock.On("Export", mock.Anything, mock.Anything).Return(fmt.Errorf("an error occurred")).Times(tc.exporterMockErrorCount) + exporterMock.On("Export", mock.Anything, mock.Anything).Return(errors.New("an error occurred")).Times(tc.exporterMockErrorCount) } customAttributes := tc.makeCustomAttributes() if tc.exporterOutputExpected { exporterMock.On("Export", mock.Anything, mock.Anything).Return(nil).Times(tc.messageCount). Run(func(args mock.Arguments) { - assert.IsType(t, args.Get(1), []sdklog.Record{}, "Record type mismatch") + assert.IsType(t, []sdklog.Record{}, args.Get(1), "Record type mismatch") records := args.Get(1).([]sdklog.Record) - assert.Equal(t, 1, len(records), "batching is disabled, expecte 1 record") + assert.Len(t, records, 1, "batching is disabled, expected 1 record") record := records[0] assert.Equal(t, tc.messageBody, record.Body().AsBytes(), "Record body mismatch") actualAttributeKeys := map[string]struct{}{} @@ -117,7 +155,7 @@ } expectedKv := OtelAttr(key, expectedValue) equal := kv.Value.Equal(expectedKv.Value) - assert.True(t, equal, fmt.Sprintf("Record attributes mismatch for key %v", key)) + assert.True(t, equal, "Record attributes mismatch for key %v", key) return true }) for key := range customAttributes { @@ -138,7 +176,7 @@ func TestEmitterMessageValidation(t *testing.T) { getEmitter := func(exporterMock *mocks.OTLPExporter) Emitter { - client, err := newClient( + client, err := newGRPCClient( TestDefaultConfig(), // Override exporter factory which is used by Client func(...otlploggrpc.Option) (sdklog.Exporter, error) { @@ -167,15 +205,69 @@ { name: "Invalid URI", attrs: Attributes{ + "beholder_domain": "TestDomain", + "beholder_entity": "TestEntity", "beholder_data_schema": "example-schema", }, exporterCalledTimes: 0, expectedError: "'Metadata.BeholderDataSchema' Error:Field validation for 'BeholderDataSchema' failed on the 'uri' tag", }, { - name: "Valid URI", + name: "Invalid Beholder domain (double underscore)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity", + "beholder_domain": "Test__Domain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderDomain' Error:Field validation for 'BeholderDomain' failed on the 'domain_entity' tag", +
}, + { + name: "Invalid Beholder domain (special characters)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity", + "beholder_domain": "TestDomain*$", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderDomain' Error:Field validation for 'BeholderDomain' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder entity (double underscore)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "Test__Entity", + "beholder_domain": "TestDomain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderEntity' Error:Field validation for 'BeholderEntity' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder entity (special characters)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity*$", + "beholder_domain": "TestDomain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderEntity' Error:Field validation for 'BeholderEntity' failed on the 'domain_entity' tag", + }, + { + name: "Valid Attributes", + exporterCalledTimes: 1, + attrs: Attributes{ + "beholder_domain": "TestDomain", + "beholder_entity": "TestEntity", + "beholder_data_schema": "/example-schema/versions/1", + }, + expectedError: "", + }, + { + name: "Valid Attributes (special characters)", exporterCalledTimes: 1, attrs: Attributes{ + "beholder_domain": "Test.Domain_42-1", + "beholder_entity": "Test.Entity_42-1", "beholder_data_schema": "/example-schema/versions/1", }, expectedError: "", @@ -194,9 +286,9 @@ func TestEmitterMessageValidation(t *testing.T) { err := emitter.Emit(tests.Context(t), message.Body, tc.attrs) // Assert expectations if tc.expectedError != "" { - assert.ErrorContains(t, err, tc.expectedError) + require.ErrorContains(t, err, tc.expectedError) } else { - assert.NoError(t, err) + require.NoError(t, err) } if tc.exporterCalledTimes > 0 { exporterMock.AssertExpectations(t) @@ -213,10 +305,10 @@ func TestClient_Close(t *testing.T) { defer exporterMock.AssertExpectations(t) client, err := NewStdoutClient() - assert.NoError(t, err) + require.NoError(t, err) err = client.Close() - assert.NoError(t, err) + require.NoError(t, err) exporterMock.AssertExpectations(t) } @@ -226,7 +318,7 @@ func TestClient_ForPackage(t *testing.T) { defer exporterMock.AssertExpectations(t) var b strings.Builder client, err := NewWriterClient(&b) - assert.NoError(t, err) + require.NoError(t, err) clientForTest := client.ForPackage("TestClient_ForPackage") // Log @@ -252,3 +344,40 @@ func TestClient_ForPackage(t *testing.T) { func otelMustNotErr(t *testing.T) otel.ErrorHandlerFunc { return func(err error) { t.Fatalf("otel error: %v", err) } } + +func TestNewClient(t *testing.T) { + t.Run("both endpoints set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterGRPCEndpoint: "grpc-endpoint", + OtelExporterHTTPEndpoint: "http-endpoint", + }) + require.Error(t, err) + assert.Nil(t, client) + assert.Equal(t, "only one exporter endpoint should be set", err.Error()) + }) + + t.Run("no endpoints set", func(t *testing.T) { + client, err := NewClient(Config{}) + require.Error(t, err) + assert.Nil(t, client) + assert.Equal(t, "at least one exporter endpoint should be set", err.Error()) + }) + + t.Run("GRPC endpoint set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterGRPCEndpoint: "grpc-endpoint", + }) + require.NoError(t, err) + assert.NotNil(t, client) + assert.IsType(t, &Client{}, 
client) + }) + + t.Run("HTTP endpoint set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterHTTPEndpoint: "http-endpoint", + }) + require.NoError(t, err) + assert.NotNil(t, client) + assert.IsType(t, &Client{}, client) + }) +} diff --git a/pkg/beholder/config.go b/pkg/beholder/config.go index b80021b44..6b023edcc 100644 --- a/pkg/beholder/config.go +++ b/pkg/beholder/config.go @@ -11,6 +11,7 @@ type Config struct { InsecureConnection bool CACertFile string OtelExporterGRPCEndpoint string + OtelExporterHTTPEndpoint string // OTel Resource ResourceAttributes []otelattr.KeyValue @@ -23,13 +24,39 @@ type Config struct { TraceSampleRatio float64 TraceBatchTimeout time.Duration TraceSpanExporter sdktrace.SpanExporter // optional additional exporter + TraceRetryConfig *RetryConfig // OTel Metric MetricReaderInterval time.Duration + MetricRetryConfig *RetryConfig // OTel Log LogExportTimeout time.Duration // Batch processing is enabled by default // Disable it only for testing LogBatchProcessor bool + // Retry config for shared log exporter, used by Emitter and Logger + LogRetryConfig *RetryConfig +} + +type RetryConfig struct { + // InitialInterval the time to wait after the first failure before + // retrying. + InitialInterval time.Duration + // MaxInterval is the upper bound on backoff interval. Once this value is + // reached the delay between consecutive retries will always be + // `MaxInterval`. + MaxInterval time.Duration + // MaxElapsedTime is the maximum amount of time (including retries) spent + // trying to send a request/batch. Once this value is reached, the data + // is discarded. + // Set to zero to disable retry + MaxElapsedTime time.Duration +} + +// Same defaults as used by the OTel SDK +var defaultRetryConfig = RetryConfig{ + InitialInterval: 5 * time.Second, + MaxInterval: 30 * time.Second, + MaxElapsedTime: 1 * time.Minute, // Retry is enabled } const ( @@ -50,11 +77,17 @@ func DefaultConfig() Config { // Message Emitter EmitterExportTimeout: 1 * time.Second, EmitterBatchProcessor: true, + // OTel message log exporter retry config + LogRetryConfig: defaultRetryConfig.Copy(), // Trace TraceSampleRatio: 1, TraceBatchTimeout: 1 * time.Second, + // OTel trace exporter retry config + TraceRetryConfig: defaultRetryConfig.Copy(), // Metric MetricReaderInterval: 1 * time.Second, + // OTel metric exporter retry config + MetricRetryConfig: defaultRetryConfig.Copy(), // Log LogExportTimeout: 1 * time.Second, LogBatchProcessor: true, @@ -66,5 +99,54 @@ func TestDefaultConfig() Config { // Should be only disabled for testing config.EmitterBatchProcessor = false config.LogBatchProcessor = false + // Retries are disabled for testing + config.LogRetryConfig.MaxElapsedTime = 0 // Retry is disabled + config.TraceRetryConfig.MaxElapsedTime = 0 // Retry is disabled + config.MetricRetryConfig.MaxElapsedTime = 0 // Retry is disabled return config } + +func TestDefaultConfigHTTPClient() Config { + config := DefaultConfig() + // Should be only disabled for testing + config.EmitterBatchProcessor = false + config.LogBatchProcessor = false + config.OtelExporterGRPCEndpoint = "" + config.OtelExporterHTTPEndpoint = "localhost:4318" + return config +} + +func (c *RetryConfig) Copy() *RetryConfig { + newConfig := *c + return &newConfig +} + +// Calculate if retry is enabled +func (c *RetryConfig) Enabled() bool { + if c == nil { + return false + } + return c.InitialInterval > 0 && c.MaxInterval > 0 && c.MaxElapsedTime > 0 +} + +// Implement getters for fields to avoid nil pointer 
dereference in case the config is not set +func (c *RetryConfig) GetInitialInterval() time.Duration { + if c == nil { + return 0 + } + return c.InitialInterval +} + +func (c *RetryConfig) GetMaxInterval() time.Duration { + if c == nil { + return 0 + } + return c.MaxInterval +} + +func (c *RetryConfig) GetMaxElapsedTime() time.Duration { + if c == nil { + return 0 + } + return c.MaxElapsedTime +} diff --git a/pkg/beholder/config_test.go b/pkg/beholder/config_test.go index 5bc81c7e6..f0d311826 100644 --- a/pkg/beholder/config_test.go +++ b/pkg/beholder/config_test.go @@ -18,6 +18,7 @@ func ExampleConfig() { InsecureConnection: true, CACertFile: "", OtelExporterGRPCEndpoint: "localhost:4317", + OtelExporterHTTPEndpoint: "localhost:4318", // Resource ResourceAttributes: []otelattr.KeyValue{ otelattr.String("package_name", packageName), @@ -26,16 +27,29 @@ func ExampleConfig() { // Message Emitter EmitterExportTimeout: 1 * time.Second, EmitterBatchProcessor: true, + // OTel message log exporter retry config + LogRetryConfig: nil, // Trace TraceSampleRatio: 1, TraceBatchTimeout: 1 * time.Second, + // OTel trace exporter retry config + TraceRetryConfig: nil, // Metric MetricReaderInterval: 1 * time.Second, + // OTel metric exporter retry config + MetricRetryConfig: nil, // Log LogExportTimeout: 1 * time.Second, LogBatchProcessor: true, } - fmt.Printf("%+v", config) + fmt.Printf("%+v\n", config) + config.LogRetryConfig = &beholder.RetryConfig{ + InitialInterval: 5 * time.Second, + MaxInterval: 30 * time.Second, + MaxElapsedTime: 1 * time.Minute, // Set to zero to disable retry + } + fmt.Printf("%+v\n", *config.LogRetryConfig) // Output: - // {InsecureConnection:true CACertFile: OtelExporterGRPCEndpoint:localhost:4317 ResourceAttributes:[{Key:package_name Value:{vtype:4 numeric:0 stringly:beholder slice:}} {Key:sender Value:{vtype:4 numeric:0 stringly:beholderclient slice:}}] EmitterExportTimeout:1s EmitterBatchProcessor:true TraceSampleRatio:1 TraceBatchTimeout:1s TraceSpanExporter: MetricReaderInterval:1s LogExportTimeout:1s LogBatchProcessor:true} + // {InsecureConnection:true CACertFile: OtelExporterGRPCEndpoint:localhost:4317 OtelExporterHTTPEndpoint:localhost:4318 ResourceAttributes:[{Key:package_name Value:{vtype:4 numeric:0 stringly:beholder slice:}} {Key:sender Value:{vtype:4 numeric:0 stringly:beholderclient slice:}}] EmitterExportTimeout:1s EmitterBatchProcessor:true TraceSampleRatio:1 TraceBatchTimeout:1s TraceSpanExporter: TraceRetryConfig: MetricReaderInterval:1s MetricRetryConfig: LogExportTimeout:1s LogBatchProcessor:true LogRetryConfig:} + // {InitialInterval:5s MaxInterval:30s MaxElapsedTime:1m0s} } diff --git a/pkg/beholder/example_test.go b/pkg/beholder/example_test.go index 04c895104..2045c3bb8 100644 --- a/pkg/beholder/example_test.go +++ b/pkg/beholder/example_test.go @@ -45,6 +45,8 @@ func ExampleNewClient() { for range 10 { err := beholder.GetEmitter().Emit(context.Background(), payloadBytes, "beholder_data_schema", "/custom-message/versions/1", // required + "beholder_domain", "ExampleDomain", // required + "beholder_entity", "ExampleEntity", // required "beholder_data_type", "custom_message", "foo", "bar", ) @@ -105,6 +107,8 @@ func ExampleNewNoopClient() { err := beholder.GetEmitter().Emit(context.Background(), []byte("test message"), "beholder_data_schema", "/custom-message/versions/1", // required + "beholder_domain", "ExampleDomain", // required + "beholder_entity", "ExampleEntity", // required ) if err != nil { log.Printf("Error emitting message: %v", err) diff --git 
a/pkg/beholder/global_test.go b/pkg/beholder/global_test.go index 1dbc9e373..2bb5c51f8 100644 --- a/pkg/beholder/global_test.go +++ b/pkg/beholder/global_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "go.opentelemetry.io/otel" otelattribute "go.opentelemetry.io/otel/attribute" @@ -76,7 +77,7 @@ func TestClient_SetGlobalOtelProviders(t *testing.T) { var b strings.Builder client, err := beholder.NewWriterClient(&b) - assert.NoError(t, err) + require.NoError(t, err) // Set global Otel Client beholder.SetClient(client) diff --git a/pkg/beholder/httpclient.go b/pkg/beholder/httpclient.go new file mode 100644 index 000000000..6f05cb8c8 --- /dev/null +++ b/pkg/beholder/httpclient.go @@ -0,0 +1,233 @@ +package beholder + +import ( + "context" + "crypto/tls" + "crypto/x509" + "errors" + "os" + + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" + "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + sdklog "go.opentelemetry.io/otel/sdk/log" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + sdkresource "go.opentelemetry.io/otel/sdk/resource" + "go.opentelemetry.io/otel/sdk/trace" + sdktrace "go.opentelemetry.io/otel/sdk/trace" +) + +// Used for testing to override the default exporter +type otlploghttpFactory func(options ...otlploghttp.Option) (sdklog.Exporter, error) + +func newCertFromFile(certFile string) (*x509.CertPool, error) { + b, err := os.ReadFile(certFile) + if err != nil { + return nil, err + } + cp := x509.NewCertPool() + if !cp.AppendCertsFromPEM(b) { + return nil, errors.New("credentials: failed to append certificates") + } + return cp, nil +} + +func newHTTPClient(cfg Config, otlploghttpNew otlploghttpFactory) (*Client, error) { + baseResource, err := newOtelResource(cfg) + if err != nil { + return nil, err + } + var tlsConfig *tls.Config + if !cfg.InsecureConnection { + tlsConfig = &tls.Config{ + MinVersion: tls.VersionTLS12, + } + if cfg.CACertFile != "" { + rootCAs, e := newCertFromFile(cfg.CACertFile) + if e != nil { + return nil, e + } + tlsConfig.RootCAs = rootCAs + } + } + tlsConfigOption := otlploghttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlploghttp.WithTLSClientConfig(tlsConfig) + } + opts := []otlploghttp.Option{ + tlsConfigOption, + otlploghttp.WithEndpoint(cfg.OtelExporterHTTPEndpoint), + } + if cfg.LogRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlploghttp.WithRetry(otlploghttp.RetryConfig{ + Enabled: cfg.LogRetryConfig.Enabled(), + InitialInterval: cfg.LogRetryConfig.GetInitialInterval(), + MaxInterval: cfg.LogRetryConfig.GetMaxInterval(), + MaxElapsedTime: cfg.LogRetryConfig.GetMaxElapsedTime(), + })) + } + sharedLogExporter, err := otlploghttpNew(opts...) 
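// Illustrative sketch: newHTTPClient is reached when only OtelExporterHTTPEndpoint is
// set on the Config; the endpoint and CA path below are assumptions for the example:
//
//	cfg := beholder.DefaultConfig()
//	cfg.OtelExporterGRPCEndpoint = "" // exactly one exporter endpoint may be set
//	cfg.OtelExporterHTTPEndpoint = "collector.example.com:4318"
//	cfg.InsecureConnection = false
//	cfg.CACertFile = "/etc/ssl/collector-ca.pem" // loaded by newCertFromFile above
//	client, err := beholder.NewClient(cfg)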
+ if err != nil { + return nil, err + } + + // Logger + var loggerProcessor sdklog.Processor + if cfg.LogBatchProcessor { + loggerProcessor = sdklog.NewBatchProcessor( + sharedLogExporter, + sdklog.WithExportTimeout(cfg.LogExportTimeout), // Default is 30s + ) + } else { + loggerProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) + } + loggerAttributes := []attribute.KeyValue{ + attribute.String("beholder_data_type", "zap_log_message"), + } + loggerResource, err := sdkresource.Merge( + sdkresource.NewSchemaless(loggerAttributes...), + baseResource, + ) + if err != nil { + return nil, err + } + loggerProvider := sdklog.NewLoggerProvider( + sdklog.WithResource(loggerResource), + sdklog.WithProcessor(loggerProcessor), + ) + logger := loggerProvider.Logger(defaultPackageName) + + // Tracer + tracerProvider, err := newHTTPTracerProvider(cfg, baseResource, tlsConfig) + if err != nil { + return nil, err + } + tracer := tracerProvider.Tracer(defaultPackageName) + + // Meter + meterProvider, err := newHTTPMeterProvider(cfg, baseResource, tlsConfig) + if err != nil { + return nil, err + } + meter := meterProvider.Meter(defaultPackageName) + + // Message Emitter + var messageLogProcessor sdklog.Processor + if cfg.EmitterBatchProcessor { + messageLogProcessor = sdklog.NewBatchProcessor( + sharedLogExporter, + sdklog.WithExportTimeout(cfg.EmitterExportTimeout), // Default is 30s + ) + } else { + messageLogProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) + } + + messageAttributes := []attribute.KeyValue{ + attribute.String("beholder_data_type", "custom_message"), + } + messageLoggerResource, err := sdkresource.Merge( + sdkresource.NewSchemaless(messageAttributes...), + baseResource, + ) + if err != nil { + return nil, err + } + + messageLoggerProvider := sdklog.NewLoggerProvider( + sdklog.WithResource(messageLoggerResource), + sdklog.WithProcessor(messageLogProcessor), + ) + messageLogger := messageLoggerProvider.Logger(defaultPackageName) + + emitter := messageEmitter{ + messageLogger: messageLogger, + } + + onClose := func() (err error) { + for _, provider := range []shutdowner{messageLoggerProvider, loggerProvider, tracerProvider, meterProvider} { + err = errors.Join(err, provider.Shutdown(context.Background())) + } + return + } + return &Client{cfg, logger, tracer, meter, emitter, loggerProvider, tracerProvider, meterProvider, messageLoggerProvider, onClose}, nil +} + +func newHTTPTracerProvider(config Config, resource *sdkresource.Resource, tlsConfig *tls.Config) (*sdktrace.TracerProvider, error) { + ctx := context.Background() + + tlsConfigOption := otlptracehttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlptracehttp.WithTLSClientConfig(tlsConfig) + } + exporterOpts := []otlptracehttp.Option{ + tlsConfigOption, + otlptracehttp.WithEndpoint(config.OtelExporterHTTPEndpoint), + } + if config.TraceRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + exporterOpts = append(exporterOpts, otlptracehttp.WithRetry(otlptracehttp.RetryConfig{ + Enabled: config.TraceRetryConfig.Enabled(), + InitialInterval: config.TraceRetryConfig.GetInitialInterval(), + MaxInterval: config.TraceRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.TraceRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is unused internally + exporter, err := otlptracehttp.New(ctx, exporterOpts...)
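// Illustrative sketch: Config.TraceSpanExporter (attached as an extra batcher below) can
// tee spans to a second destination, e.g. the OTel stdout exporter
// (go.opentelemetry.io/otel/exporters/stdout/stdouttrace):
//
//	spanExporter, err := stdouttrace.New()
//	if err != nil {
//		return nil, err
//	}
//	cfg.TraceSpanExporter = spanExporter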
+ if err != nil { return nil, err } + + opts := []sdktrace.TracerProviderOption{ + sdktrace.WithBatcher(exporter, trace.WithBatchTimeout(config.TraceBatchTimeout)), // Default is 5s + sdktrace.WithResource(resource), + sdktrace.WithSampler( + sdktrace.ParentBased( + sdktrace.TraceIDRatioBased(config.TraceSampleRatio), + ), + ), + } + if config.TraceSpanExporter != nil { + opts = append(opts, sdktrace.WithBatcher(config.TraceSpanExporter)) + } + return sdktrace.NewTracerProvider(opts...), nil +} + +func newHTTPMeterProvider(config Config, resource *sdkresource.Resource, tlsConfig *tls.Config) (*sdkmetric.MeterProvider, error) { + ctx := context.Background() + + tlsConfigOption := otlpmetrichttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlpmetrichttp.WithTLSClientConfig(tlsConfig) + } + opts := []otlpmetrichttp.Option{ + tlsConfigOption, + otlpmetrichttp.WithEndpoint(config.OtelExporterHTTPEndpoint), + } + if config.MetricRetryConfig != nil { + // NOTE: By default, the retry is enabled in the OTel SDK + opts = append(opts, otlpmetrichttp.WithRetry(otlpmetrichttp.RetryConfig{ + Enabled: config.MetricRetryConfig.Enabled(), + InitialInterval: config.MetricRetryConfig.GetInitialInterval(), + MaxInterval: config.MetricRetryConfig.GetMaxInterval(), + MaxElapsedTime: config.MetricRetryConfig.GetMaxElapsedTime(), + })) + } + // note: context is unused internally + exporter, err := otlpmetrichttp.New(ctx, opts...) + if err != nil { + return nil, err + } + + mp := sdkmetric.NewMeterProvider( + sdkmetric.WithReader( + sdkmetric.NewPeriodicReader( + exporter, + sdkmetric.WithInterval(config.MetricReaderInterval), // Default is 10s + )), + sdkmetric.WithResource(resource), + ) + return mp, nil +} diff --git a/pkg/beholder/internal/exporter.go b/pkg/beholder/internal/exporter.go index 271077a5c..033854dcc 100644 --- a/pkg/beholder/internal/exporter.go +++ b/pkg/beholder/internal/exporter.go @@ -4,12 +4,17 @@ import ( "context" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" + sdklog "go.opentelemetry.io/otel/sdk/log" ) var _ sdklog.Exporter = (*otlploggrpc.Exporter)(nil) var _ OTLPExporter = (*otlploggrpc.Exporter)(nil) +var _ sdklog.Exporter = (*otlploghttp.Exporter)(nil) +var _ OTLPExporter = (*otlploghttp.Exporter)(nil) + // Copy of sdklog.Exporter interface, used for mocking type OTLPExporter interface { Export(ctx context.Context, records []sdklog.Record) error diff --git a/pkg/beholder/message.go b/pkg/beholder/message.go index 2b1d89d3a..7cb6f1bdb 100644 --- a/pkg/beholder/message.go +++ b/pkg/beholder/message.go @@ -1,7 +1,10 @@ package beholder import ( + "errors" "fmt" + "regexp" + "strings" "github.com/go-playground/validator/v10" "go.opentelemetry.io/otel/attribute" @@ -16,6 +19,8 @@ type Message struct { type Metadata struct { // REQUIRED FIELDS // Schema Registry URI to fetch schema + BeholderDomain string `validate:"required,domain_entity"` + BeholderEntity string `validate:"required,domain_entity"` BeholderDataSchema string `validate:"required,uri"` // OPTIONAL FIELDS @@ -55,6 +60,8 @@ func (m Metadata) Attributes() Attributes { "workflow_owner_address": m.WorkflowOwnerAddress, "workflow_spec_id": m.WorkflowSpecID, "workflow_execution_id": m.WorkflowExecutionID, + "beholder_domain": m.BeholderDomain, + "beholder_entity": m.BeholderEntity, "beholder_data_schema": m.BeholderDataSchema, "capability_contract_address": m.CapabilityContractAddress, "capability_id": m.CapabilityID, @@ -199,6
+206,10 @@ func (m *Metadata) FromAttributes(attrs Attributes) *Metadata { m.WorkflowSpecID = v.(string) case "workflow_execution_id": m.WorkflowExecutionID = v.(string) + case "beholder_domain": + m.BeholderDomain = v.(string) + case "beholder_entity": + m.BeholderEntity = v.(string) case "beholder_data_schema": m.BeholderDataSchema = v.(string) case "capability_contract_address": @@ -222,17 +233,44 @@ func NewMetadata(attrs Attributes) *Metadata { return m } -func (m *Metadata) Validate() error { +// validDomainAndEntityRegex allows for alphanumeric characters and ._- +var validDomainAndEntityRegex = regexp.MustCompile(`^[a-zA-Z0-9._-]+$`) + +func NewMetadataValidator() (*validator.Validate, error) { validate := validator.New() + err := validate.RegisterValidation("domain_entity", func(fl validator.FieldLevel) bool { + str, isStr := fl.Field().Interface().(string) + if !isStr { + return false + } + if strings.Contains(str, "__") { + return false + } + if !validDomainAndEntityRegex.MatchString(str) { + return false + } + return true + }) + if err != nil { + return nil, err + } + return validate, nil +} + +func (m *Metadata) Validate() error { + validate, err := NewMetadataValidator() + if err != nil { + return err + } return validate.Struct(m) } func (e Message) Validate() error { if e.Body == nil { - return fmt.Errorf("message body is required") + return errors.New("message body is required") } if len(e.Attrs) == 0 { - return fmt.Errorf("message attributes are required") + return errors.New("message attributes are required") } metadata := NewMetadata(e.Attrs) return metadata.Validate() diff --git a/pkg/beholder/message_test.go b/pkg/beholder/message_test.go index 1f8f990fb..266908733 100644 --- a/pkg/beholder/message_test.go +++ b/pkg/beholder/message_test.go @@ -6,7 +6,6 @@ import ( "strings" "testing" - "github.com/go-playground/validator/v10" "github.com/stretchr/testify/assert" otellog "go.opentelemetry.io/otel/log" @@ -110,6 +109,8 @@ func testMetadata() beholder.Metadata { WorkflowOwnerAddress: "test_owner_address", WorkflowSpecID: "test_spec_id", WorkflowExecutionID: "test_execution_id", + BeholderDomain: "TestDomain", // required field + BeholderEntity: "TestEntity", // required field BeholderDataSchema: "/schemas/ids/test_schema", // required field, URI CapabilityContractAddress: "test_contract_address", CapabilityID: "test_capability_id", @@ -123,14 +124,20 @@ func ExampleMetadata() { fmt.Printf("%#v\n", m) fmt.Println(m.Attributes()) // Output: - // beholder.Metadata{BeholderDataSchema:"/schemas/ids/test_schema", NodeVersion:"v1.0.0", NodeCsaKey:"test_key", NodeCsaSignature:"test_signature", DonID:"test_don_id", NetworkName:[]string{"test_network"}, WorkflowID:"test_workflow_id", WorkflowName:"test_workflow_name", WorkflowOwnerAddress:"test_owner_address", WorkflowSpecID:"test_spec_id", WorkflowExecutionID:"test_execution_id", CapabilityContractAddress:"test_contract_address", CapabilityID:"test_capability_id", CapabilityVersion:"test_capability_version", CapabilityName:"test_capability_name", NetworkChainID:"test_chain_id"} - // map[beholder_data_schema:/schemas/ids/test_schema capability_contract_address:test_contract_address capability_id:test_capability_id capability_name:test_capability_name capability_version:test_capability_version don_id:test_don_id network_chain_id:test_chain_id network_name:[test_network] node_csa_key:test_key node_csa_signature:test_signature node_version:v1.0.0 workflow_execution_id:test_execution_id workflow_id:test_workflow_id 
workflow_name:test_workflow_name workflow_owner_address:test_owner_address workflow_spec_id:test_spec_id] + // beholder.Metadata{BeholderDomain:"TestDomain", BeholderEntity:"TestEntity", BeholderDataSchema:"/schemas/ids/test_schema", NodeVersion:"v1.0.0", NodeCsaKey:"test_key", NodeCsaSignature:"test_signature", DonID:"test_don_id", NetworkName:[]string{"test_network"}, WorkflowID:"test_workflow_id", WorkflowName:"test_workflow_name", WorkflowOwnerAddress:"test_owner_address", WorkflowSpecID:"test_spec_id", WorkflowExecutionID:"test_execution_id", CapabilityContractAddress:"test_contract_address", CapabilityID:"test_capability_id", CapabilityVersion:"test_capability_version", CapabilityName:"test_capability_name", NetworkChainID:"test_chain_id"} + // map[beholder_data_schema:/schemas/ids/test_schema beholder_domain:TestDomain beholder_entity:TestEntity capability_contract_address:test_contract_address capability_id:test_capability_id capability_name:test_capability_name capability_version:test_capability_version don_id:test_don_id network_chain_id:test_chain_id network_name:[test_network] node_csa_key:test_key node_csa_signature:test_signature node_version:v1.0.0 workflow_execution_id:test_execution_id workflow_id:test_workflow_id workflow_name:test_workflow_name workflow_owner_address:test_owner_address workflow_spec_id:test_spec_id] } -func ExampleValidate() { - validate := validator.New() +func ExampleMetadata_Validate() { + validate, err := beholder.NewMetadataValidator() + if err != nil { + fmt.Println(err) + } - metadata := beholder.Metadata{} + metadata := beholder.Metadata{ + BeholderDomain: "TestDomain", + BeholderEntity: "TestEntity", + } if err := validate.Struct(metadata); err != nil { fmt.Println(err) } @@ -174,7 +181,7 @@ func TestMessage_OtelAttributes(t *testing.T) { return strings.Compare(a.Key, b.Key) }) - assert.Equal(t, 3, len(otelAttrs)) + assert.Len(t, otelAttrs, 3) assert.Equal(t, "key_int", otelAttrs[0].Key) assert.Equal(t, int64(1), otelAttrs[0].Value.AsInt64()) assert.Equal(t, "key_string", otelAttrs[1].Key) diff --git a/pkg/beholder/noop_test.go b/pkg/beholder/noop_test.go index ee1fb7209..7258a0208 100644 --- a/pkg/beholder/noop_test.go +++ b/pkg/beholder/noop_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "go.opentelemetry.io/otel/attribute" otellog "go.opentelemetry.io/otel/log" "go.opentelemetry.io/otel/trace" @@ -22,7 +23,7 @@ func TestNoopClient(t *testing.T) { err := noopClient.Emitter.Emit(tests.Context(t), []byte("test"), "key1", "value1", ) - assert.NoError(t, err) + require.NoError(t, err) // Logger noopClient.Logger.Emit(tests.Context(t), otellog.Record{}) @@ -38,7 +39,7 @@ func TestNoopClient(t *testing.T) { if err != nil { log.Fatalf("failed to create new gauge") } - assert.NoError(t, err) + require.NoError(t, err) // Use the counter and gauge for metrics within application logic counter.Add(tests.Context(t), 1) diff --git a/pkg/beholder/pb/base_message.pb.go b/pkg/beholder/pb/base_message.pb.go index 606ba85ca..c9a69c3b0 100644 --- a/pkg/beholder/pb/base_message.pb.go +++ b/pkg/beholder/pb/base_message.pb.go @@ -7,7 +7,6 @@ package pb import ( - pb "github.com/smartcontractkit/chainlink-common/pkg/values/pb" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" @@ -23,7 +22,8 @@ const ( // BaseMessage is a basic custom message, allowing the consumer to send // a string msg with some key-value 
pairs for labels. Consumers can consume -// BaseMessage directly or extend it by addding use-case specific fields +// BaseMessage directly or extend it by adding use-case specific fields +// NOTE: do not compose protos for Beholder until INFOPLAT-1386 is completed type BaseMessage struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -32,7 +32,7 @@ type BaseMessage struct { Msg string `protobuf:"bytes,1,opt,name=msg,proto3" json:"msg,omitempty"` // https://protobuf.dev/programming-guides/proto3/#maps // In go: if Value is empty for a key, nothing will be serialized - Labels map[string]*pb.Value `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *BaseMessage) Reset() { @@ -74,7 +74,7 @@ func (x *BaseMessage) GetMsg() string { return "" } -func (x *BaseMessage) GetLabels() map[string]*pb.Value { +func (x *BaseMessage) GetLabels() map[string]string { if x != nil { return x.Labels } @@ -86,23 +86,20 @@ var File_beholder_pb_base_message_proto protoreflect.FileDescriptor var file_beholder_pb_base_message_proto_rawDesc = []byte{ 0x0a, 0x1e, 0x62, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x62, 0x2f, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x12, 0x02, 0x70, 0x62, 0x1a, 0x16, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2f, 0x70, 0x62, 0x2f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x9e, 0x01, 0x0a, - 0x0b, 0x42, 0x61, 0x73, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, - 0x6d, 0x73, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6d, 0x73, 0x67, 0x12, 0x33, - 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, - 0x2e, 0x70, 0x62, 0x2e, 0x42, 0x61, 0x73, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, - 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x73, 0x1a, 0x48, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x23, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x3f, 0x5a, - 0x3d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, - 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, - 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, - 0x67, 0x2f, 0x62, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x62, 0x2f, 0x62, 0x06, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x12, 0x02, 0x70, 0x62, 0x22, 0x8f, 0x01, 0x0a, 0x0b, 0x42, 0x61, 0x73, 0x65, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6d, 0x73, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x6d, 0x73, 0x67, 0x12, 0x33, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x62, 0x2e, 0x42, 0x61, 0x73, 0x65, + 0x4d, 
0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, + 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x3f, 0x5a, 0x3d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, + 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x62, 0x65, 0x68, 0x6f, 0x6c, + 0x64, 0x65, 0x72, 0x2f, 0x70, 0x62, 0x2f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -121,16 +118,14 @@ var file_beholder_pb_base_message_proto_msgTypes = make([]protoimpl.MessageInfo, var file_beholder_pb_base_message_proto_goTypes = []interface{}{ (*BaseMessage)(nil), // 0: pb.BaseMessage nil, // 1: pb.BaseMessage.LabelsEntry - (*pb.Value)(nil), // 2: values.Value } var file_beholder_pb_base_message_proto_depIdxs = []int32{ 1, // 0: pb.BaseMessage.labels:type_name -> pb.BaseMessage.LabelsEntry - 2, // 1: pb.BaseMessage.LabelsEntry.value:type_name -> values.Value - 2, // [2:2] is the sub-list for method output_type - 2, // [2:2] is the sub-list for method input_type - 2, // [2:2] is the sub-list for extension type_name - 2, // [2:2] is the sub-list for extension extendee - 0, // [0:2] is the sub-list for field type_name + 1, // [1:1] is the sub-list for method output_type + 1, // [1:1] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name } func init() { file_beholder_pb_base_message_proto_init() } diff --git a/pkg/beholder/pb/base_message.proto b/pkg/beholder/pb/base_message.proto index 341d1f3e6..0913a77ee 100644 --- a/pkg/beholder/pb/base_message.proto +++ b/pkg/beholder/pb/base_message.proto @@ -1,17 +1,16 @@ syntax = "proto3"; -import "values/pb/values.proto"; - option go_package = "github.com/smartcontractkit/chainlink-common/pkg/beholder/pb/"; package pb; // BaseMessage is a basic custom message, allowing the consumer to send // a string msg with some key-value pairs for labels. 
Consumers can consume -// BaseMessage directly or extend it by addding use-case specific fields +// BaseMessage directly or extend it by adding use-case specific fields +// NOTE: do not compose protos for Beholder until INFOPLAT-1386 is completed message BaseMessage { string msg=1; // https://protobuf.dev/programming-guides/proto3/#maps // In go: if Value is empty for a key, nothing will be serialized - map<string, values.Value> labels=2; -} \ No newline at end of file + map<string, string> labels = 2; +} diff --git a/pkg/capabilities/cli/cmd/built_in_generators.go b/pkg/capabilities/cli/cmd/built_in_generators.go new file mode 100644 index 000000000..7f9a960d0 --- /dev/null +++ b/pkg/capabilities/cli/cmd/built_in_generators.go @@ -0,0 +1,16 @@ +package cmd + +import _ "embed" + +//go:embed go_workflow_builder.go.tmpl +var goWorkflowTemplate string + +//go:embed go_mock_capability_builder.go.tmpl +var goWorkflowTestTemplate string + +func AddDefaultGoTemplates(to map[string]TemplateAndCondition, includeMocks bool) { + to["{{if .BaseName}}{{.BaseName|ToSnake}}_builders{{ else }}wrappers{{ end }}_generated.go"] = BaseGenerate{TemplateValue: goWorkflowTemplate} + if includeMocks { + to["{{.Package}}test/{{.BaseName|ToSnake}}_mock_generated.go"] = TestHelperGenerate{TemplateValue: goWorkflowTestTemplate} + } +} diff --git a/pkg/capabilities/cli/cmd/field.go b/pkg/capabilities/cli/cmd/field.go index ca9297ec8..37f946393 100644 --- a/pkg/capabilities/cli/cmd/field.go +++ b/pkg/capabilities/cli/cmd/field.go @@ -5,4 +5,9 @@ type Field struct { NumSlice int IsPrimitive bool ConfigName string + SkipCap bool +} + +func (f Field) WrapCap() bool { + return !f.SkipCap && !f.IsPrimitive && f.NumSlice == 0 } diff --git a/pkg/capabilities/cli/cmd/generate-types/main.go b/pkg/capabilities/cli/cmd/generate-types/main.go index b0b9d2e7e..f78267e0d 100644 --- a/pkg/capabilities/cli/cmd/generate-types/main.go +++ b/pkg/capabilities/cli/cmd/generate-types/main.go @@ -1,7 +1,6 @@ package main import ( - _ "embed" "flag" "fmt" "os" @@ -11,12 +10,6 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd" ) -//go:embed go_workflow_builder.go.tmpl -var goWorkflowTemplate string - -//go:embed go_mock_capability_builder.go.tmpl -var goWorkflowTestTemplate string - var dir = flag.String("dir", "", fmt.Sprintf("Directory to search for %s files, if a file is provided, the directory it is in will be used", cmd.CapabilitySchemaFilePattern.String())) var localPrefix = flag.String("local_prefix", "github.com/smartcontractkit", "The local prefix to use when formatting go files") var extraUrls = flag.String("extra_urls", "", "Comma separated list of extra URLs to fetch schemas from") @@ -48,12 +41,10 @@ func run(dir string) error { extras = strings.Split(*extraUrls, ",") } + templates := map[string]cmd.TemplateAndCondition{} + cmd.AddDefaultGoTemplates(templates, true) + return cmd.GenerateTypes(dir, *localPrefix, extras, []cmd.WorkflowHelperGenerator{ - &cmd.TemplateWorkflowGeneratorHelper{ - Templates: map[string]cmd.TemplateAndCondition{ - "{{.BaseName|ToSnake}}_builders_generated.go": cmd.BaseGenerate{TemplateValue: goWorkflowTemplate}, - "{{.Package}}test/{{.BaseName|ToSnake}}_mock_generated.go": cmd.TestHelperGenerate{TemplateValue: goWorkflowTestTemplate}, - }, - }, + &cmd.TemplateWorkflowGeneratorHelper{Templates: templates}, }) } diff --git a/pkg/capabilities/cli/cmd/generate-user-types/main.go b/pkg/capabilities/cli/cmd/generate-user-types/main.go new file mode 100644 index 000000000..49e1ea0ea --- /dev/null +++
b/pkg/capabilities/cli/cmd/generate-user-types/main.go @@ -0,0 +1,98 @@ +package main + +import ( + "flag" + "fmt" + "strings" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd" +) + +var localPrefix = flag.String( + "local_prefix", + "github.com/smartcontractkit", + "The local prefix to use when formatting go files.", +) + +var types = flag.String( + "types", + "", + "Comma separated list of types to generate for. If empty, all types created in the package will be generated."+ + " If set, other types in the same package will automatically be added to the skip_cap list.", +) + +var skipCap = flag.String( + "skip_cap", + "", + "Comma separated list of types (including the import name), or an import, for which no capability definition is expected."+ + " By default, this generator assumes that all types referenced (aside from primitives) will either be generated with this call or already have a Cap type", +) + +var dir = flag.String("dir", ".", "The input directory, defaults to the running directory") + +func main() { + flag.Parse() + templates := map[string]cmd.TemplateAndCondition{} + cmd.AddDefaultGoTemplates(templates, false) + helpers := []cmd.WorkflowHelperGenerator{&cmd.TemplateWorkflowGeneratorHelper{Templates: templates}} + + info := cmd.UserGenerationInfo{ + Dir: *dir, + LocalPrefix: *localPrefix, + Helpers: helpers, + GenForStruct: genForStruct(), + } + + if err := cmd.GenerateUserTypes(info); err != nil { + panic(err) + } +} + +func genForStruct() func(string) bool { + skipGen := buildSkipGen() + genPackageType := buildGenPkgType() + return func(s string) bool { + if skipGen[s] { + return false + } + + pkgAndStruct := strings.Split(s, ".") + + switch len(pkgAndStruct) { + case 1: + return genPackageType(pkgAndStruct[0]) + + case 2: + if skipGen[pkgAndStruct[0]] { + return false + } + default: + panic(fmt.Sprintf("invalid type %s", s)) + } + + return true + } +} + +func buildSkipGen() map[string]bool { + skipGen := map[string]bool{} + for _, skip := range strings.Split(*skipCap, ",") { + skipGen[skip] = true + } + return skipGen +} + +func buildGenPkgType() func(string) bool { + genPkgType := func(_ string) bool { return true } + if *types != "" { + genPkg := map[string]bool{} + for _, t := range strings.Split(*types, ",") { + genPkg[t] = true + } + genPkgType = func(s string) bool { + return genPkg[s] + } + } + + return genPkgType +} diff --git a/pkg/capabilities/cli/cmd/generate_types.go b/pkg/capabilities/cli/cmd/generate_types.go index 2db528263..9a1648c98 100644 --- a/pkg/capabilities/cli/cmd/generate_types.go +++ b/pkg/capabilities/cli/cmd/generate_types.go @@ -58,20 +58,39 @@ func generateFromSchema(schemaPath, localPrefix string, cfgInfo ConfigInfo, help allFiles[file] = content typeInfo := cfgInfo.SchemaToTypeInfo[schemaPath] - structs, err := generatedInfoFromSrc(content, getCapID(typeInfo), typeInfo) + err = generateSchemaTypes(schemaPath, localPrefix, content, typeInfo, helpers, allFiles) if err != nil { return err } + return nil +} + +func generateSchemaTypes(schemaPath string, localPrefix string, content string, typeInfo TypeInfo, helpers []WorkflowHelperGenerator, allFiles map[string]string) error { + fullPkg, err := packageFromSchemaID(typeInfo.SchemaID) + if err != nil { + return err + } + + generatedInfo, err := generatedInfoFromSrc(content, fullPkg, getCapID(typeInfo), typeInfo, func(string) bool { + return true + }) + if err != nil { + return err + } + + return generateFromGoSrc(generatedInfo, path.Dir(schemaPath), localPrefix, helpers,
allFiles) +} - if err = generateHelpers(helpers, structs, allFiles); err != nil { +func generateFromGoSrc(generatedInfo GeneratedInfo, dir, localPrefix string, helpers []WorkflowHelperGenerator, allFiles map[string]string) error { + if err := generateHelpers(helpers, generatedInfo, allFiles); err != nil { return err } - if err = codegen.WriteFiles(path.Dir(schemaPath), localPrefix, toolName, allFiles); err != nil { + if err := codegen.WriteFiles(dir, localPrefix, toolName, allFiles); err != nil { return err } - fmt.Println("Generated types for", schemaPath) + fmt.Println("Generated types for", dir) return nil } @@ -102,7 +121,7 @@ func schemaFilesFromDir(dir string) ([]string, error) { schemaPaths = append(schemaPaths, path) return nil }); err != nil { - return nil, fmt.Errorf("error walking the directory %v: %v", dir, err) + return nil, fmt.Errorf("error walking the directory %v: %w", dir, err) } return schemaPaths, nil } diff --git a/pkg/capabilities/cli/cmd/generate_user_types.go b/pkg/capabilities/cli/cmd/generate_user_types.go new file mode 100644 index 000000000..47a1ba0a7 --- /dev/null +++ b/pkg/capabilities/cli/cmd/generate_user_types.go @@ -0,0 +1,64 @@ +package cmd + +import ( + "errors" + "os" + "path" + "strings" +) + +func GenerateUserTypes(info UserGenerationInfo) error { + dir, err := os.ReadDir(info.Dir) + if err != nil { + return err + } + + generatedInfo := GeneratedInfo{} + // err accumulates failures across files so one bad file does not halt generation + for _, file := range dir { + fileName := file.Name() + if file.IsDir() || !strings.HasSuffix(fileName, ".go") { + continue + } + + rawContent, err2 := os.ReadFile(path.Join(info.Dir, fileName)) + if err2 != nil { + err = errors.Join(err, err2) + } + + content := string(rawContent) + if strings.HasPrefix(content, "// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT.") { + continue + } + + typeInfo := TypeInfo{CapabilityType: "common"} + + fileGeneratedInfo, err2 := generatedInfoFromSrc(content, "", getCapID(typeInfo), typeInfo, info.GenForStruct) + + if err2 != nil { + err = errors.Join(err, err2) + continue + } + + if generatedInfo.Types == nil { + generatedInfo = fileGeneratedInfo + } else { + for name, strct := range fileGeneratedInfo.Types { + generatedInfo.Types[name] = strct + } + } + } + + if err != nil { + return err + } + + return generateFromGoSrc(generatedInfo, info.Dir, info.LocalPrefix, info.Helpers, map[string]string{}) +} + +type UserGenerationInfo struct { + Dir string + LocalPrefix string + Helpers []WorkflowHelperGenerator + GenForStruct func(string) bool +} diff --git a/pkg/capabilities/cli/cmd/generate_user_types_test.go b/pkg/capabilities/cli/cmd/generate_user_types_test.go new file mode 100644 index 000000000..0d9a3ee9c --- /dev/null +++ b/pkg/capabilities/cli/cmd/generate_user_types_test.go @@ -0,0 +1,59 @@ +package cmd_test + +import ( + "os" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +//go:generate go run github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/generate-user-types -dir ./testdata/fixtures/usercode/pkg -skip_cap time.Time +//go:generate go run github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/generate-user-types -dir ./testdata/fixtures/usercode/pkg2 -types
OtherPackage + +func TestGenerateUserTypes(t *testing.T) { + t.Parallel() + + t.Run("generated types work as expected", func(t *testing.T) { + onlyVerifySyntax(func() { + myVal := pkg.ConstantMyType(pkg.MyType{I: 10}) + // verify both types were generated from different files + pkg.ConstantMyType2(pkg.MyType2{I: 10}) + + var tmp sdk.CapDefinition[pkg.MyType] = myVal // nolint + _ = tmp + + other := pkg2.ConstantOtherPackage(pkg2.OtherPackage{X: "x", Z: "z"}) //nolint + other = myVal.O() // nolint + _ = other + + var s sdk.CapDefinition[string] = myVal.S() // nolint + _ = s + }) + }) + + t.Run("specifying types to generate ignores other types", func(t *testing.T) { + content, err := os.ReadFile("./testdata/fixtures/usercode/pkg2/wrappers_generated.go") + require.NoError(t, err) + + require.False(t, strings.Contains(string(content), "NotWrappedCap")) + }) + + t.Run("Wrapping wrapped type is no-op", func(t *testing.T) { + original := pkg.NewMyTypeFromFields( + sdk.ConstantDefinition(1), + pkg.ConstantMyNestedType(pkg.MyNestedType{}), + pkg2.ConstantOtherPackage(pkg2.OtherPackage{}), + sdk.ConstantDefinition(""), + sdk.ConstantDefinition(time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)), + ) + + wrapped := pkg.MyTypeWrapper(original) + require.Same(t, original, wrapped) + }) +} diff --git a/pkg/capabilities/cli/cmd/generated_info.go b/pkg/capabilities/cli/cmd/generated_info.go index 84d72dcb5..766e5ec45 100644 --- a/pkg/capabilities/cli/cmd/generated_info.go +++ b/pkg/capabilities/cli/cmd/generated_info.go @@ -2,12 +2,7 @@ package cmd import ( "fmt" - "go/ast" - "go/parser" - "go/token" - "reflect" "strings" - "unicode" "github.com/smartcontractkit/chainlink-common/pkg/capabilities" ) @@ -36,44 +31,20 @@ func (g GeneratedInfo) RootType() Struct { } } -func generatedInfoFromSrc(src string, capID *string, typeInfo TypeInfo) (GeneratedInfo, error) { - fset := token.NewFileSet() +func generatedInfoFromSrc( + src, fullPkg string, capID *string, typeInfo TypeInfo, includeType func(name string) bool) (GeneratedInfo, error) { + reader := GoStructReader{IncludeType: includeType} - // Parse the source code string - node, err := parser.ParseFile(fset, "", src, parser.AllErrors) + generatedStructs, pkg, extraImports, err := reader.Read(src) if err != nil { return GeneratedInfo{}, err } - pkg := node.Name.Name - - generatedStructs := map[string]Struct{} - var extraImports []string - ast.Inspect(node, func(n ast.Node) bool { - return inspectNode(n, fset, src, generatedStructs, &extraImports) - }) root := generatedStructs[typeInfo.RootType] input, config := extractInputAndConfig(generatedStructs, typeInfo, root) output := root.Outputs["Outputs"] - fullPkg := typeInfo.SchemaID - - // drop protocol - index := strings.Index(typeInfo.SchemaID, "//") - if index != -1 { - fullPkg = fullPkg[index+2:] - } - - // drop the capability name and version - index = strings.LastIndex(fullPkg, "/") - if index == -1 { - return GeneratedInfo{}, - fmt.Errorf("invalid schema ID: %s must end in /capability_name and optioanlly a version", typeInfo.SchemaID) - } - - fullPkg = fullPkg[:index] - return GeneratedInfo{ Package: pkg, Config: config, @@ -89,6 +60,25 @@ func generatedInfoFromSrc(src string, capID *string, typeInfo TypeInfo) (Generat }, nil } +func packageFromSchemaID(schemaID string) (string, error) { + fullPkg := schemaID + + // drop protocol + index := strings.Index(fullPkg, "//") + if index != -1 { + fullPkg = fullPkg[index+2:] + } + + // drop the capability name and version + index = strings.LastIndex(fullPkg, "/") + if 
index == -1 {
+		return "", fmt.Errorf("invalid schema ID: %s must end in /capability_name and optionally a version", schemaID)
+	}
+
+	fullPkg = fullPkg[:index]
+	return fullPkg, nil
+}
+
 func extractInputAndConfig(generatedStructs map[string]Struct, typeInfo TypeInfo, root Struct) (*Struct, Struct) {
 	delete(generatedStructs, typeInfo.RootType)
 	inputField, ok := root.Outputs["Inputs"]
@@ -124,76 +114,6 @@ func extractInputAndConfig(generatedStructs map[string]Struct, typeInfo TypeInfo
 	return input, config
 }
 
-func inspectNode(n ast.Node, fset *token.FileSet, src string, rawInfo map[string]Struct, extraImports *[]string) bool {
-	if ts, ok := n.(*ast.TypeSpec); ok {
-		s := Struct{
-			Name:    strings.TrimSpace(ts.Name.Name),
-			Outputs: map[string]Field{},
-		}
-
-		if structType, ok := ts.Type.(*ast.StructType); ok {
-			for _, field := range structType.Fields.List {
-				start := fset.Position(field.Type.Pos()).Offset
-				end := fset.Position(field.Type.End()).Offset
-				typeStr := src[start:end]
-				if typeStr == "interface{}" {
-					typeStr = "any"
-				}
-				f := Field{}
-
-				if field.Tag != nil {
-					// This is safe because the generator used to create the structs from jsonschema
-					// will always have json tag if there's tags on the field, per configuration.
-					// The substring removes the quotes around that tag.
-					tag := reflect.StructTag(field.Tag.Value[1 : len(field.Tag.Value)-1])
-					jsonTag := tag.Get("json")
-					if jsonTag != "" {
-						jsonName := strings.Split(jsonTag, ",")[0]
-						if jsonName != "" {
-							f.ConfigName = jsonName
-						}
-					}
-				}
-
-				f.Type = typeStr
-				if f.ConfigName == "" {
-					f.ConfigName = field.Names[0].Name
-				}
-
-				for strings.HasPrefix(f.Type, "[]") {
-					f.NumSlice++
-					f.Type = f.Type[2:]
-				}
-
-				f.Type = strings.TrimPrefix(f.Type, "*")
-				t := f.Type
-				for t[0] == '*' {
-					t = t[1:]
-				}
-
-				f.IsPrimitive = unicode.IsLower(rune(t[0]))
-				s.Outputs[field.Names[0].Name] = f
-			}
-		}
-
-		// artifact used for deserializing
-		if s.Name != "Plain" {
-			rawInfo[ts.Name.Name] = s
-		}
-	} else if imp, ok := n.(*ast.ImportSpec); ok {
-		switch imp.Path.Value {
-		case `"reflect"`, `"fmt"`, `"encoding/json"`, `"regexp"`:
-		default:
-			importStr := imp.Path.Value
-			if imp.Name != nil {
-				importStr = imp.Name.Name + " " + importStr
-			}
-			*extraImports = append(*extraImports, importStr)
-		}
-	}
-	return true
-}
-
 func lastAfterDot(s string) string {
 	parts := strings.Split(s, ".")
 	return parts[len(parts)-1]
diff --git a/pkg/capabilities/cli/cmd/generator_test.go b/pkg/capabilities/cli/cmd/generator_test.go
index d95eb5f78..e47bef18a 100644
--- a/pkg/capabilities/cli/cmd/generator_test.go
+++ b/pkg/capabilities/cli/cmd/generator_test.go
@@ -53,6 +53,8 @@ func TestTypeGeneration(t *testing.T) {
 			var expectedOutput sdk.CapDefinition[string] //nolint
 			expectedOutput = trigger.CoolOutput()
 			_ = expectedOutput
+
+			trigger = basictrigger.ConstantTriggerOutputs(basictrigger.TriggerOutputs{}) //nolint
 		})
 	})
@@ -76,6 +78,8 @@ func TestTypeGeneration(t *testing.T) {
 			var expectedOutput sdk.CapDefinition[string] //nolint
 			expectedOutput = action.AdaptedThing()
 			_ = expectedOutput
+
+			action = basicaction.ConstantActionOutputs(basicaction.ActionOutputs{}) //nolint
 		})
 	})
@@ -103,6 +107,8 @@ func TestTypeGeneration(t *testing.T) {
 			var expectedSigsField sdk.CapDefinition[[]string] //nolint
 			expectedSigsField = consensus.Sigs()
 			_ = expectedSigsField
+
+			consensus = basicconsensus.ConstantConsensusOutputs(basicconsensus.ConsensusOutputs{}) //nolint
 		})
 	})
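The generator_test.go additions above exercise the new Constant* constructors, which pair with the *Wrapper functions that replace the old *CapFromStep helpers throughout the generated builders below. Wrapping is idempotent: if the definition already satisfies the Cap interface it is returned unchanged (the no-op case TestGenerateUserTypes asserts with require.Same), so call sites can wrap unconditionally. A minimal sketch against the basictrigger fixture, with empty literals purely for illustration:

```go
package example

import (
	"github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger"
	"github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk"
)

func demo() {
	// ConstantTriggerOutputs already returns a TriggerOutputsCap...
	outs := basictrigger.ConstantTriggerOutputs(basictrigger.TriggerOutputs{})

	// ...so wrapping it again is a no-op; a bare sdk.CapDefinition[TriggerOutputs]
	// would instead be wrapped in the unexported cap struct.
	wrapped := basictrigger.TriggerOutputsWrapper(outs)

	// Field accessors are unchanged and still return lazy references.
	var cool sdk.CapDefinition[string] = wrapped.CoolOutput()
	_ = cool
}
```

diff --git a/pkg/capabilities/cli/cmd/generate-types/go_mock_capability_builder.go.tmpl 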
b/pkg/capabilities/cli/cmd/go_mock_capability_builder.go.tmpl similarity index 100% rename from pkg/capabilities/cli/cmd/generate-types/go_mock_capability_builder.go.tmpl rename to pkg/capabilities/cli/cmd/go_mock_capability_builder.go.tmpl diff --git a/pkg/capabilities/cli/cmd/go_reader.go b/pkg/capabilities/cli/cmd/go_reader.go new file mode 100644 index 000000000..aad2e9ad6 --- /dev/null +++ b/pkg/capabilities/cli/cmd/go_reader.go @@ -0,0 +1,160 @@ +package cmd + +import ( + "go/ast" + "go/parser" + "go/token" + "reflect" + "strings" + "unicode" +) + +type GoStructReader struct { + IncludeType func(name string) bool +} + +func (g *GoStructReader) Read(src string) (map[string]Struct, string, []string, error) { + fset := token.NewFileSet() + + // Parse the source code string + node, err := parser.ParseFile(fset, "", src, parser.AllErrors) + if err != nil { + return nil, "", nil, err + } + + structs := g.gatherStructs(node, fset, src) + return structs, node.Name.Name, g.gatherImports(node, structs), nil +} + +func (g *GoStructReader) gatherStructs(node *ast.File, fset *token.FileSet, src string) map[string]Struct { + generatedStructs := map[string]Struct{} + for _, decl := range node.Decls { + gd, ok := decl.(*ast.GenDecl) + if !ok || gd.Tok != token.TYPE { + continue + } + + for _, spec := range gd.Specs { + if strct := g.getStructFromSpec(spec, fset, src); strct != nil { + generatedStructs[strct.Name] = *strct + } + } + } + return generatedStructs +} + +func (g *GoStructReader) getStructFromSpec(spec ast.Spec, fset *token.FileSet, src string) *Struct { + ts, ok := spec.(*ast.TypeSpec) + if !ok { + return nil + } + + name := ts.Name.Name + if !g.IncludeType(name) { + return nil + } + + switch declType := ts.Type.(type) { + case *ast.StructType: + return g.structFromGoStruct(name, declType, fset, src) + case *ast.MapType, *ast.Ident: + return &Struct{Name: name} + default: + return nil + } +} + +func (g *GoStructReader) structFromGoStruct(name string, structType *ast.StructType, fset *token.FileSet, src string) *Struct { + s := Struct{ + Name: strings.TrimSpace(name), + Outputs: map[string]Field{}, + } + + for _, field := range structType.Fields.List { + start := fset.Position(field.Type.Pos()).Offset + end := fset.Position(field.Type.End()).Offset + typeStr := src[start:end] + if typeStr == "interface{}" { + typeStr = "any" + } + + f := Field{ + Type: typeStr, + ConfigName: g.configName(field), + SkipCap: !g.IncludeType(typeStr), + } + + for strings.HasPrefix(f.Type, "[]") { + f.NumSlice++ + f.Type = f.Type[2:] + } + + f.Type = strings.TrimPrefix(f.Type, "*") + t := f.Type + for t[0] == '*' { + t = t[1:] + } + + importLoc := strings.Index(t, ".") + if importLoc != -1 { + t = t[importLoc+1:] + } + f.IsPrimitive = unicode.IsLower(rune(t[0])) + s.Outputs[field.Names[0].Name] = f + } + + return &s +} + +func (g *GoStructReader) configName(field *ast.Field) string { + defaultName := field.Names[0].Name + if field.Tag == nil { + return defaultName + } + + // Tags have string values, so we need to strip the quotes + tag := reflect.StructTag(field.Tag.Value[1 : len(field.Tag.Value)-1]) + jsonTag := tag.Get("json") + if jsonTag != "" { + jsonName := strings.Split(jsonTag, ",")[0] + if jsonName != "" { + return jsonName + } + } + + return defaultName +} + +func (g *GoStructReader) gatherImports(node *ast.File, structs map[string]Struct) []string { + requiredImports := map[string]bool{} + for _, strct := range structs { + for _, field := range strct.Outputs { + parts := strings.Split(field.Type, 
".") + if len(parts) > 1 { + requiredImports[parts[0]] = true + } + } + } + + var allValues []string + var imports []string + var check []bool + for _, imp := range node.Imports { + var importName string + if imp.Name != nil { + importName = imp.Name.Name + } else { + importParts := strings.Split(imp.Path.Value, "/") + importName = importParts[len(importParts)-1] + } + importName = strings.Trim(importName, "\"") + + allValues = append(allValues, importName) + check = append(check, requiredImports[importName]) + if requiredImports[importName] { + imports = append(imports, imp.Path.Value) + } + } + + return imports +} diff --git a/pkg/capabilities/cli/cmd/generate-types/go_workflow_builder.go.tmpl b/pkg/capabilities/cli/cmd/go_workflow_builder.go.tmpl similarity index 79% rename from pkg/capabilities/cli/cmd/generate-types/go_workflow_builder.go.tmpl rename to pkg/capabilities/cli/cmd/go_workflow_builder.go.tmpl index d68a3d75b..8a2fab611 100644 --- a/pkg/capabilities/cli/cmd/generate-types/go_workflow_builder.go.tmpl +++ b/pkg/capabilities/cli/cmd/go_workflow_builder.go.tmpl @@ -33,7 +33,8 @@ func (cfg {{.Config.Name}}) New(w *sdk.WorkflowSpecFactory, {{- if not .ID }}id {{- if eq .CapabilityType "target" }} step.AddTo(w) {{- else if eq 0 .RootNumSlice }} - return {{.RootType.Name}}CapFromStep(w, step) + raw := step.AddTo(w) + return {{.RootType.Name}}Wrapper(raw) {{- else }} return step.AddTo(w) {{- end }} @@ -41,54 +42,60 @@ func (cfg {{.Config.Name}}) New(w *sdk.WorkflowSpecFactory, {{- if not .ID }}id {{- end }} {{ range $key, $value := .Types }} + +// {{$key}}Wrapper allows access to field from an sdk.CapDefinition[{{$key}}] +func {{$key}}Wrapper(raw sdk.CapDefinition[{{$key}}]) {{$key}}Cap { + wrapped, ok := raw.({{$key}}Cap) + if ok { + return wrapped + } + + {{- if .Outputs }} + return &{{$key|LowerFirst}}Cap{CapDefinition: raw} + {{- else }} + return {{$key}}Cap(raw) + {{- end }} +} + {{- if .Outputs }} type {{$key}}Cap interface { sdk.CapDefinition[{{ $key }}] {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} - {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] - {{- else }} + {{- if or $type.WrapCap }} {{$fieldName}}() {{ $type.Type }}Cap + {{- else }} + {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] {{- end }} {{- end }} private() } -{{ if ne $.CapabilityType "target" }} -// {{$key}}CapFromStep should only be called from generated code to assure type safety -func {{$key}}CapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[{{$key}}]) {{$key}}Cap { - raw := step.AddTo(w) - return &{{$key|LowerFirst}}{CapDefinition: raw} -} -{{ end }} - -type {{$key|LowerFirst}} struct { +type {{$key|LowerFirst}}Cap struct { sdk.CapDefinition[{{ $key }}] } -func (*{{$key|LowerFirst}}) private() {} +func (*{{$key|LowerFirst}}Cap) private() {} {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} -func (c *{{$key|LowerFirst}}) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { - return sdk.AccessField[{{$value.Name}}, {{Repeat "[]" $type.NumSlice}}{{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}") -} + {{- if or $type.WrapCap }} +func (c *{{$key|LowerFirst}}Cap) {{$fieldName}}() {{ $type.Type }}Cap { + return {{ $type.Type }}Wrapper(sdk.AccessField[{{$value.Name}}, {{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}")) {{- else }} -func (c *{{$key|LowerFirst}}) {{$fieldName}}() {{ $type.Type }}Cap { 
- {{- if $type.Type|HasOutputs }} - return &{{ $type.Type | LowerFirst }}{ CapDefinition: sdk.AccessField[{{$value.Name}}, {{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}")} - {{- else }} - return {{ $type.Type }}Cap(sdk.AccessField[{{$value.Name}}, {{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}")) - {{- end }} -} +func (c *{{$key|LowerFirst}}Cap) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { + return sdk.AccessField[{{$value.Name}}, {{Repeat "[]" $type.NumSlice}}{{$type.Type}}](c.CapDefinition, "{{$type.ConfigName}}") {{- end }} +} {{- end }} +func Constant{{$key}}(value {{$key}}) {{$key}}Cap { + return &{{$key|LowerFirst}}Cap{CapDefinition: sdk.ConstantDefinition(value)} +} + func New{{$key}}FromFields({{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} - {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}], - {{- else }} + {{- if or $type.WrapCap }} {{$fieldName|LowerFirst}} {{ $type.Type }}Cap, + {{- else }} + {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}], {{- end }} {{- end }}) {{$key}}Cap { return &simple{{$key}}{ CapDefinition: sdk.ComponentCapDefinition[{{$value.Name}}]{ {{- range $fieldName, $type := .Outputs }} @@ -104,19 +111,19 @@ func New{{$key}}FromFields({{- range $fieldName, $type := .Outputs }} type simple{{$key}} struct { sdk.CapDefinition[{{ $key }}] {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} - {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] - {{- else }} + {{- if $type.WrapCap }} {{$fieldName|LowerFirst}} {{ $type.Type }}Cap + {{- else }} + {{$fieldName|LowerFirst}} sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] {{- end }} {{- end }} } {{- range $fieldName, $type := .Outputs }} - {{- if or $type.IsPrimitive (ne $type.NumSlice 0) }} -func (c *simple{{$key}}) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { - {{- else }} + {{- if or $type.WrapCap }} func (c *simple{{$key}}) {{$fieldName}}() {{ $type.Type }}Cap { + {{- else }} +func (c *simple{{$key}}) {{$fieldName}}() sdk.CapDefinition[{{Repeat "[]" $type.NumSlice}}{{ $type.Type }}] { {{- end }} return c.{{$fieldName|LowerFirst}} } diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go index b70987ede..d46d5d7c7 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/anymapaction/map_action_builders_generated.go @@ -17,7 +17,17 @@ func (cfg MapActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Map } step := sdk.Step[MapActionOutputs]{Definition: def} - return MapActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return MapActionOutputsWrapper(raw) +} + +// MapActionOutputsWrapper allows access to field from an sdk.CapDefinition[MapActionOutputs] +func MapActionOutputsWrapper(raw sdk.CapDefinition[MapActionOutputs]) MapActionOutputsCap { + wrapped, ok := raw.(MapActionOutputsCap) + if ok { + return wrapped + } + return &mapActionOutputsCap{CapDefinition: raw} } type MapActionOutputsCap interface { @@ -26,19 +36,17 @@ type MapActionOutputsCap interface { private() } -// 
MapActionOutputsCapFromStep should only be called from generated code to assure type safety -func MapActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[MapActionOutputs]) MapActionOutputsCap { - raw := step.AddTo(w) - return &mapActionOutputs{CapDefinition: raw} +type mapActionOutputsCap struct { + sdk.CapDefinition[MapActionOutputs] } -type mapActionOutputs struct { - sdk.CapDefinition[MapActionOutputs] +func (*mapActionOutputsCap) private() {} +func (c *mapActionOutputsCap) Payload() MapActionOutputsPayloadCap { + return MapActionOutputsPayloadWrapper(sdk.AccessField[MapActionOutputs, MapActionOutputsPayload](c.CapDefinition, "payload")) } -func (*mapActionOutputs) private() {} -func (c *mapActionOutputs) Payload() MapActionOutputsPayloadCap { - return MapActionOutputsPayloadCap(sdk.AccessField[MapActionOutputs, MapActionOutputsPayload](c.CapDefinition, "payload")) +func ConstantMapActionOutputs(value MapActionOutputs) MapActionOutputsCap { + return &mapActionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewMapActionOutputsFromFields( @@ -62,6 +70,15 @@ func (c *simpleMapActionOutputs) Payload() MapActionOutputsPayloadCap { func (c *simpleMapActionOutputs) private() {} +// MapActionOutputsPayloadWrapper allows access to field from an sdk.CapDefinition[MapActionOutputsPayload] +func MapActionOutputsPayloadWrapper(raw sdk.CapDefinition[MapActionOutputsPayload]) MapActionOutputsPayloadCap { + wrapped, ok := raw.(MapActionOutputsPayloadCap) + if ok { + return wrapped + } + return MapActionOutputsPayloadCap(raw) +} + type MapActionOutputsPayloadCap sdk.CapDefinition[MapActionOutputsPayload] type MapActionInput struct { diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go index f5e449ad0..ee3c64a96 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/arrayaction/action_builders_generated.go @@ -22,25 +22,32 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action return step.AddTo(w) } +// ActionOutputsElemWrapper allows access to field from an sdk.CapDefinition[ActionOutputsElem] +func ActionOutputsElemWrapper(raw sdk.CapDefinition[ActionOutputsElem]) ActionOutputsElemCap { + wrapped, ok := raw.(ActionOutputsElemCap) + if ok { + return wrapped + } + return &actionOutputsElemCap{CapDefinition: raw} +} + type ActionOutputsElemCap interface { sdk.CapDefinition[ActionOutputsElem] Results() ActionOutputsElemResultsCap private() } -// ActionOutputsElemCapFromStep should only be called from generated code to assure type safety -func ActionOutputsElemCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputsElem]) ActionOutputsElemCap { - raw := step.AddTo(w) - return &actionOutputsElem{CapDefinition: raw} +type actionOutputsElemCap struct { + sdk.CapDefinition[ActionOutputsElem] } -type actionOutputsElem struct { - sdk.CapDefinition[ActionOutputsElem] +func (*actionOutputsElemCap) private() {} +func (c *actionOutputsElemCap) Results() ActionOutputsElemResultsCap { + return ActionOutputsElemResultsWrapper(sdk.AccessField[ActionOutputsElem, ActionOutputsElemResults](c.CapDefinition, "results")) } -func (*actionOutputsElem) private() {} -func (c *actionOutputsElem) Results() ActionOutputsElemResultsCap { - return &actionOutputsElemResults{CapDefinition: 
sdk.AccessField[ActionOutputsElem, ActionOutputsElemResults](c.CapDefinition, "results")} +func ConstantActionOutputsElem(value ActionOutputsElem) ActionOutputsElemCap { + return &actionOutputsElemCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewActionOutputsElemFromFields( @@ -64,27 +71,34 @@ func (c *simpleActionOutputsElem) Results() ActionOutputsElemResultsCap { func (c *simpleActionOutputsElem) private() {} +// ActionOutputsElemResultsWrapper allows access to field from an sdk.CapDefinition[ActionOutputsElemResults] +func ActionOutputsElemResultsWrapper(raw sdk.CapDefinition[ActionOutputsElemResults]) ActionOutputsElemResultsCap { + wrapped, ok := raw.(ActionOutputsElemResultsCap) + if ok { + return wrapped + } + return &actionOutputsElemResultsCap{CapDefinition: raw} +} + type ActionOutputsElemResultsCap interface { sdk.CapDefinition[ActionOutputsElemResults] AdaptedThing() sdk.CapDefinition[string] private() } -// ActionOutputsElemResultsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsElemResultsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputsElemResults]) ActionOutputsElemResultsCap { - raw := step.AddTo(w) - return &actionOutputsElemResults{CapDefinition: raw} -} - -type actionOutputsElemResults struct { +type actionOutputsElemResultsCap struct { sdk.CapDefinition[ActionOutputsElemResults] } -func (*actionOutputsElemResults) private() {} -func (c *actionOutputsElemResults) AdaptedThing() sdk.CapDefinition[string] { +func (*actionOutputsElemResultsCap) private() {} +func (c *actionOutputsElemResultsCap) AdaptedThing() sdk.CapDefinition[string] { return sdk.AccessField[ActionOutputsElemResults, string](c.CapDefinition, "adapted_thing") } +func ConstantActionOutputsElemResults(value ActionOutputsElemResults) ActionOutputsElemResultsCap { + return &actionOutputsElemResultsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewActionOutputsElemResultsFromFields( adaptedThing sdk.CapDefinition[string]) ActionOutputsElemResultsCap { return &simpleActionOutputsElemResults{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go index dc36bfd92..2b40fea6d 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicaction/action_builders_generated.go @@ -20,7 +20,17 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action } step := sdk.Step[ActionOutputs]{Definition: def} - return ActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ActionOutputsWrapper(raw) +} + +// ActionOutputsWrapper allows access to field from an sdk.CapDefinition[ActionOutputs] +func ActionOutputsWrapper(raw sdk.CapDefinition[ActionOutputs]) ActionOutputsCap { + wrapped, ok := raw.(ActionOutputsCap) + if ok { + return wrapped + } + return &actionOutputsCap{CapDefinition: raw} } type ActionOutputsCap interface { @@ -29,21 +39,19 @@ type ActionOutputsCap interface { private() } -// ActionOutputsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputs]) ActionOutputsCap { - raw := step.AddTo(w) - return &actionOutputs{CapDefinition: raw} -} - -type actionOutputs struct { +type actionOutputsCap struct { 
sdk.CapDefinition[ActionOutputs] } -func (*actionOutputs) private() {} -func (c *actionOutputs) AdaptedThing() sdk.CapDefinition[string] { +func (*actionOutputsCap) private() {} +func (c *actionOutputsCap) AdaptedThing() sdk.CapDefinition[string] { return sdk.AccessField[ActionOutputs, string](c.CapDefinition, "adapted_thing") } +func ConstantActionOutputs(value ActionOutputs) ActionOutputsCap { + return &actionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewActionOutputsFromFields( adaptedThing sdk.CapDefinition[string]) ActionOutputsCap { return &simpleActionOutputs{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go index 938709be3..f8bee1ebb 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basicconsensus/consensus_builders_generated.go @@ -20,7 +20,17 @@ func (cfg ConsensusConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Con } step := sdk.Step[ConsensusOutputs]{Definition: def} - return ConsensusOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ConsensusOutputsWrapper(raw) +} + +// ConsensusOutputsWrapper allows access to field from an sdk.CapDefinition[ConsensusOutputs] +func ConsensusOutputsWrapper(raw sdk.CapDefinition[ConsensusOutputs]) ConsensusOutputsCap { + wrapped, ok := raw.(ConsensusOutputsCap) + if ok { + return wrapped + } + return &consensusOutputsCap{CapDefinition: raw} } type ConsensusOutputsCap interface { @@ -30,24 +40,22 @@ type ConsensusOutputsCap interface { private() } -// ConsensusOutputsCapFromStep should only be called from generated code to assure type safety -func ConsensusOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ConsensusOutputs]) ConsensusOutputsCap { - raw := step.AddTo(w) - return &consensusOutputs{CapDefinition: raw} -} - -type consensusOutputs struct { +type consensusOutputsCap struct { sdk.CapDefinition[ConsensusOutputs] } -func (*consensusOutputs) private() {} -func (c *consensusOutputs) Consensus() sdk.CapDefinition[[]string] { +func (*consensusOutputsCap) private() {} +func (c *consensusOutputsCap) Consensus() sdk.CapDefinition[[]string] { return sdk.AccessField[ConsensusOutputs, []string](c.CapDefinition, "consensus") } -func (c *consensusOutputs) Sigs() sdk.CapDefinition[[]string] { +func (c *consensusOutputsCap) Sigs() sdk.CapDefinition[[]string] { return sdk.AccessField[ConsensusOutputs, []string](c.CapDefinition, "sigs") } +func ConstantConsensusOutputs(value ConsensusOutputs) ConsensusOutputsCap { + return &consensusOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewConsensusOutputsFromFields( consensus sdk.CapDefinition[[]string], sigs sdk.CapDefinition[[]string]) ConsensusOutputsCap { diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go index 350408a3d..ba92f4162 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger/trigger_builders_generated.go @@ -20,7 +20,17 @@ func (cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) TriggerOutputsCap { } step := 
sdk.Step[TriggerOutputs]{Definition: def} - return TriggerOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return TriggerOutputsWrapper(raw) +} + +// TriggerOutputsWrapper allows access to field from an sdk.CapDefinition[TriggerOutputs] +func TriggerOutputsWrapper(raw sdk.CapDefinition[TriggerOutputs]) TriggerOutputsCap { + wrapped, ok := raw.(TriggerOutputsCap) + if ok { + return wrapped + } + return &triggerOutputsCap{CapDefinition: raw} } type TriggerOutputsCap interface { @@ -29,21 +39,19 @@ type TriggerOutputsCap interface { private() } -// TriggerOutputsCapFromStep should only be called from generated code to assure type safety -func TriggerOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[TriggerOutputs]) TriggerOutputsCap { - raw := step.AddTo(w) - return &triggerOutputs{CapDefinition: raw} -} - -type triggerOutputs struct { +type triggerOutputsCap struct { sdk.CapDefinition[TriggerOutputs] } -func (*triggerOutputs) private() {} -func (c *triggerOutputs) CoolOutput() sdk.CapDefinition[string] { +func (*triggerOutputsCap) private() {} +func (c *triggerOutputsCap) CoolOutput() sdk.CapDefinition[string] { return sdk.AccessField[TriggerOutputs, string](c.CapDefinition, "cool_output") } +func ConstantTriggerOutputs(value TriggerOutputs) TriggerOutputsCap { + return &triggerOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewTriggerOutputsFromFields( coolOutput sdk.CapDefinition[string]) TriggerOutputsCap { return &simpleTriggerOutputs{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go index 61a18d965..754c529de 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/action_builders_generated.go @@ -4,7 +4,7 @@ package externalreferenceaction import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" - referenceaction "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) @@ -20,7 +20,8 @@ func (cfg SomeConfig) New(w *sdk.WorkflowSpecFactory, ref string, input ActionIn } step := sdk.Step[referenceaction.SomeOutputs]{Definition: def} - return referenceaction.SomeOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return referenceaction.SomeOutputsWrapper(raw) } type ActionInput = referenceaction.ActionInput diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go index 37ff271fc..e00c9b842 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/externalreferenceaction/externalreferenceactiontest/action_mock_generated.go @@ -5,7 +5,7 @@ package externalreferenceactiontest import ( - referenceaction 
"github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" ) diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go index 9cd7e5aef..0d419feba 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/mapaction/action_builders_generated.go @@ -17,7 +17,17 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action } step := sdk.Step[ActionOutputs]{Definition: def} - return ActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ActionOutputsWrapper(raw) +} + +// ActionOutputsWrapper allows access to field from an sdk.CapDefinition[ActionOutputs] +func ActionOutputsWrapper(raw sdk.CapDefinition[ActionOutputs]) ActionOutputsCap { + wrapped, ok := raw.(ActionOutputsCap) + if ok { + return wrapped + } + return &actionOutputsCap{CapDefinition: raw} } type ActionOutputsCap interface { @@ -26,19 +36,17 @@ type ActionOutputsCap interface { private() } -// ActionOutputsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputs]) ActionOutputsCap { - raw := step.AddTo(w) - return &actionOutputs{CapDefinition: raw} +type actionOutputsCap struct { + sdk.CapDefinition[ActionOutputs] } -type actionOutputs struct { - sdk.CapDefinition[ActionOutputs] +func (*actionOutputsCap) private() {} +func (c *actionOutputsCap) Payload() ActionOutputsPayloadCap { + return ActionOutputsPayloadWrapper(sdk.AccessField[ActionOutputs, ActionOutputsPayload](c.CapDefinition, "payload")) } -func (*actionOutputs) private() {} -func (c *actionOutputs) Payload() ActionOutputsPayloadCap { - return ActionOutputsPayloadCap(sdk.AccessField[ActionOutputs, ActionOutputsPayload](c.CapDefinition, "payload")) +func ConstantActionOutputs(value ActionOutputs) ActionOutputsCap { + return &actionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewActionOutputsFromFields( @@ -62,6 +70,15 @@ func (c *simpleActionOutputs) Payload() ActionOutputsPayloadCap { func (c *simpleActionOutputs) private() {} +// ActionOutputsPayloadWrapper allows access to field from an sdk.CapDefinition[ActionOutputsPayload] +func ActionOutputsPayloadWrapper(raw sdk.CapDefinition[ActionOutputsPayload]) ActionOutputsPayloadCap { + wrapped, ok := raw.(ActionOutputsPayloadCap) + if ok { + return wrapped + } + return ActionOutputsPayloadCap(raw) +} + type ActionOutputsPayloadCap sdk.CapDefinition[ActionOutputsPayload] type ActionInput struct { diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go index f2d1d9731..4d7051a63 100644 --- a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/nestedaction/action_builders_generated.go @@ -19,7 +19,17 @@ func (cfg ActionConfig) New(w *sdk.WorkflowSpecFactory, ref string, input Action } step := 
sdk.Step[ActionOutputs]{Definition: def} - return ActionOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return ActionOutputsWrapper(raw) +} + +// ActionOutputsWrapper allows access to field from an sdk.CapDefinition[ActionOutputs] +func ActionOutputsWrapper(raw sdk.CapDefinition[ActionOutputs]) ActionOutputsCap { + wrapped, ok := raw.(ActionOutputsCap) + if ok { + return wrapped + } + return &actionOutputsCap{CapDefinition: raw} } type ActionOutputsCap interface { @@ -28,19 +38,17 @@ type ActionOutputsCap interface { private() } -// ActionOutputsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputs]) ActionOutputsCap { - raw := step.AddTo(w) - return &actionOutputs{CapDefinition: raw} +type actionOutputsCap struct { + sdk.CapDefinition[ActionOutputs] } -type actionOutputs struct { - sdk.CapDefinition[ActionOutputs] +func (*actionOutputsCap) private() {} +func (c *actionOutputsCap) Results() ActionOutputsResultsCap { + return ActionOutputsResultsWrapper(sdk.AccessField[ActionOutputs, ActionOutputsResults](c.CapDefinition, "results")) } -func (*actionOutputs) private() {} -func (c *actionOutputs) Results() ActionOutputsResultsCap { - return &actionOutputsResults{CapDefinition: sdk.AccessField[ActionOutputs, ActionOutputsResults](c.CapDefinition, "results")} +func ConstantActionOutputs(value ActionOutputs) ActionOutputsCap { + return &actionOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} } func NewActionOutputsFromFields( @@ -64,27 +72,34 @@ func (c *simpleActionOutputs) Results() ActionOutputsResultsCap { func (c *simpleActionOutputs) private() {} +// ActionOutputsResultsWrapper allows access to field from an sdk.CapDefinition[ActionOutputsResults] +func ActionOutputsResultsWrapper(raw sdk.CapDefinition[ActionOutputsResults]) ActionOutputsResultsCap { + wrapped, ok := raw.(ActionOutputsResultsCap) + if ok { + return wrapped + } + return &actionOutputsResultsCap{CapDefinition: raw} +} + type ActionOutputsResultsCap interface { sdk.CapDefinition[ActionOutputsResults] AdaptedThing() sdk.CapDefinition[string] private() } -// ActionOutputsResultsCapFromStep should only be called from generated code to assure type safety -func ActionOutputsResultsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[ActionOutputsResults]) ActionOutputsResultsCap { - raw := step.AddTo(w) - return &actionOutputsResults{CapDefinition: raw} -} - -type actionOutputsResults struct { +type actionOutputsResultsCap struct { sdk.CapDefinition[ActionOutputsResults] } -func (*actionOutputsResults) private() {} -func (c *actionOutputsResults) AdaptedThing() sdk.CapDefinition[string] { +func (*actionOutputsResultsCap) private() {} +func (c *actionOutputsResultsCap) AdaptedThing() sdk.CapDefinition[string] { return sdk.AccessField[ActionOutputsResults, string](c.CapDefinition, "adapted_thing") } +func ConstantActionOutputsResults(value ActionOutputsResults) ActionOutputsResultsCap { + return &actionOutputsResultsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewActionOutputsResultsFromFields( adaptedThing sdk.CapDefinition[string]) ActionOutputsResultsCap { return &simpleActionOutputsResults{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go index db2d9b59d..4d399fb69 100644 --- 
a/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/referenceaction/action_builders_generated.go @@ -20,7 +20,17 @@ func (cfg SomeConfig) New(w *sdk.WorkflowSpecFactory, ref string, input ActionIn } step := sdk.Step[SomeOutputs]{Definition: def} - return SomeOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return SomeOutputsWrapper(raw) +} + +// SomeOutputsWrapper allows access to field from an sdk.CapDefinition[SomeOutputs] +func SomeOutputsWrapper(raw sdk.CapDefinition[SomeOutputs]) SomeOutputsCap { + wrapped, ok := raw.(SomeOutputsCap) + if ok { + return wrapped + } + return &someOutputsCap{CapDefinition: raw} } type SomeOutputsCap interface { @@ -29,21 +39,19 @@ type SomeOutputsCap interface { private() } -// SomeOutputsCapFromStep should only be called from generated code to assure type safety -func SomeOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[SomeOutputs]) SomeOutputsCap { - raw := step.AddTo(w) - return &someOutputs{CapDefinition: raw} -} - -type someOutputs struct { +type someOutputsCap struct { sdk.CapDefinition[SomeOutputs] } -func (*someOutputs) private() {} -func (c *someOutputs) AdaptedThing() sdk.CapDefinition[string] { +func (*someOutputsCap) private() {} +func (c *someOutputsCap) AdaptedThing() sdk.CapDefinition[string] { return sdk.AccessField[SomeOutputs, string](c.CapDefinition, "adapted_thing") } +func ConstantSomeOutputs(value SomeOutputs) SomeOutputsCap { + return &someOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewSomeOutputsFromFields( adaptedThing sdk.CapDefinition[string]) SomeOutputsCap { return &simpleSomeOutputs{ diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types.go new file mode 100644 index 000000000..e41fa97d1 --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types.go @@ -0,0 +1,20 @@ +package pkg + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" +) + +type MyType struct { + Nested MyNestedType + I int + S string + T time.Time + O pkg2.OtherPackage +} + +type MyNestedType struct { + II int + SS string +} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types_2.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types_2.go new file mode 100644 index 000000000..c123504ff --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/custom_types_2.go @@ -0,0 +1,17 @@ +package pkg + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" +) + +// A second file is used to make sure that all files in the package are collapsed into one correctly. + +type MyType2 struct { + Nested MyNestedType + I int + S string + T time.Time + O pkg2.OtherPackage +} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/wrappers_generated.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/wrappers_generated.go new file mode 100644 index 000000000..6384a42ee --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg/wrappers_generated.go @@ -0,0 +1,256 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. 
+ +package pkg + +import ( + "time" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +// MyNestedTypeWrapper allows access to field from an sdk.CapDefinition[MyNestedType] +func MyNestedTypeWrapper(raw sdk.CapDefinition[MyNestedType]) MyNestedTypeCap { + wrapped, ok := raw.(MyNestedTypeCap) + if ok { + return wrapped + } + return &myNestedTypeCap{CapDefinition: raw} +} + +type MyNestedTypeCap interface { + sdk.CapDefinition[MyNestedType] + II() sdk.CapDefinition[int] + SS() sdk.CapDefinition[string] + private() +} + +type myNestedTypeCap struct { + sdk.CapDefinition[MyNestedType] +} + +func (*myNestedTypeCap) private() {} +func (c *myNestedTypeCap) II() sdk.CapDefinition[int] { + return sdk.AccessField[MyNestedType, int](c.CapDefinition, "II") +} +func (c *myNestedTypeCap) SS() sdk.CapDefinition[string] { + return sdk.AccessField[MyNestedType, string](c.CapDefinition, "SS") +} + +func ConstantMyNestedType(value MyNestedType) MyNestedTypeCap { + return &myNestedTypeCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewMyNestedTypeFromFields( + iI sdk.CapDefinition[int], + sS sdk.CapDefinition[string]) MyNestedTypeCap { + return &simpleMyNestedType{ + CapDefinition: sdk.ComponentCapDefinition[MyNestedType]{ + "II": iI.Ref(), + "SS": sS.Ref(), + }, + iI: iI, + sS: sS, + } +} + +type simpleMyNestedType struct { + sdk.CapDefinition[MyNestedType] + iI sdk.CapDefinition[int] + sS sdk.CapDefinition[string] +} + +func (c *simpleMyNestedType) II() sdk.CapDefinition[int] { + return c.iI +} +func (c *simpleMyNestedType) SS() sdk.CapDefinition[string] { + return c.sS +} + +func (c *simpleMyNestedType) private() {} + +// MyTypeWrapper allows access to field from an sdk.CapDefinition[MyType] +func MyTypeWrapper(raw sdk.CapDefinition[MyType]) MyTypeCap { + wrapped, ok := raw.(MyTypeCap) + if ok { + return wrapped + } + return &myTypeCap{CapDefinition: raw} +} + +type MyTypeCap interface { + sdk.CapDefinition[MyType] + I() sdk.CapDefinition[int] + Nested() MyNestedTypeCap + O() pkg2.OtherPackageCap + S() sdk.CapDefinition[string] + T() sdk.CapDefinition[time.Time] + private() +} + +type myTypeCap struct { + sdk.CapDefinition[MyType] +} + +func (*myTypeCap) private() {} +func (c *myTypeCap) I() sdk.CapDefinition[int] { + return sdk.AccessField[MyType, int](c.CapDefinition, "I") +} +func (c *myTypeCap) Nested() MyNestedTypeCap { + return MyNestedTypeWrapper(sdk.AccessField[MyType, MyNestedType](c.CapDefinition, "Nested")) +} +func (c *myTypeCap) O() pkg2.OtherPackageCap { + return pkg2.OtherPackageWrapper(sdk.AccessField[MyType, pkg2.OtherPackage](c.CapDefinition, "O")) +} +func (c *myTypeCap) S() sdk.CapDefinition[string] { + return sdk.AccessField[MyType, string](c.CapDefinition, "S") +} +func (c *myTypeCap) T() sdk.CapDefinition[time.Time] { + return sdk.AccessField[MyType, time.Time](c.CapDefinition, "T") +} + +func ConstantMyType(value MyType) MyTypeCap { + return &myTypeCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewMyTypeFromFields( + i sdk.CapDefinition[int], + nested MyNestedTypeCap, + o pkg2.OtherPackageCap, + s sdk.CapDefinition[string], + t sdk.CapDefinition[time.Time]) MyTypeCap { + return &simpleMyType{ + CapDefinition: sdk.ComponentCapDefinition[MyType]{ + "I": i.Ref(), + "Nested": nested.Ref(), + "O": o.Ref(), + "S": s.Ref(), + "T": t.Ref(), + }, + i: i, + nested: nested, + o: o, + s: s, + t: t, + } +} + +type simpleMyType struct { + 
sdk.CapDefinition[MyType] + i sdk.CapDefinition[int] + nested MyNestedTypeCap + o pkg2.OtherPackageCap + s sdk.CapDefinition[string] + t sdk.CapDefinition[time.Time] +} + +func (c *simpleMyType) I() sdk.CapDefinition[int] { + return c.i +} +func (c *simpleMyType) Nested() MyNestedTypeCap { + return c.nested +} +func (c *simpleMyType) O() pkg2.OtherPackageCap { + return c.o +} +func (c *simpleMyType) S() sdk.CapDefinition[string] { + return c.s +} +func (c *simpleMyType) T() sdk.CapDefinition[time.Time] { + return c.t +} + +func (c *simpleMyType) private() {} + +// MyType2Wrapper allows access to field from an sdk.CapDefinition[MyType2] +func MyType2Wrapper(raw sdk.CapDefinition[MyType2]) MyType2Cap { + wrapped, ok := raw.(MyType2Cap) + if ok { + return wrapped + } + return &myType2Cap{CapDefinition: raw} +} + +type MyType2Cap interface { + sdk.CapDefinition[MyType2] + I() sdk.CapDefinition[int] + Nested() MyNestedTypeCap + O() pkg2.OtherPackageCap + S() sdk.CapDefinition[string] + T() sdk.CapDefinition[time.Time] + private() +} + +type myType2Cap struct { + sdk.CapDefinition[MyType2] +} + +func (*myType2Cap) private() {} +func (c *myType2Cap) I() sdk.CapDefinition[int] { + return sdk.AccessField[MyType2, int](c.CapDefinition, "I") +} +func (c *myType2Cap) Nested() MyNestedTypeCap { + return MyNestedTypeWrapper(sdk.AccessField[MyType2, MyNestedType](c.CapDefinition, "Nested")) +} +func (c *myType2Cap) O() pkg2.OtherPackageCap { + return pkg2.OtherPackageWrapper(sdk.AccessField[MyType2, pkg2.OtherPackage](c.CapDefinition, "O")) +} +func (c *myType2Cap) S() sdk.CapDefinition[string] { + return sdk.AccessField[MyType2, string](c.CapDefinition, "S") +} +func (c *myType2Cap) T() sdk.CapDefinition[time.Time] { + return sdk.AccessField[MyType2, time.Time](c.CapDefinition, "T") +} + +func ConstantMyType2(value MyType2) MyType2Cap { + return &myType2Cap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewMyType2FromFields( + i sdk.CapDefinition[int], + nested MyNestedTypeCap, + o pkg2.OtherPackageCap, + s sdk.CapDefinition[string], + t sdk.CapDefinition[time.Time]) MyType2Cap { + return &simpleMyType2{ + CapDefinition: sdk.ComponentCapDefinition[MyType2]{ + "I": i.Ref(), + "Nested": nested.Ref(), + "O": o.Ref(), + "S": s.Ref(), + "T": t.Ref(), + }, + i: i, + nested: nested, + o: o, + s: s, + t: t, + } +} + +type simpleMyType2 struct { + sdk.CapDefinition[MyType2] + i sdk.CapDefinition[int] + nested MyNestedTypeCap + o pkg2.OtherPackageCap + s sdk.CapDefinition[string] + t sdk.CapDefinition[time.Time] +} + +func (c *simpleMyType2) I() sdk.CapDefinition[int] { + return c.i +} +func (c *simpleMyType2) Nested() MyNestedTypeCap { + return c.nested +} +func (c *simpleMyType2) O() pkg2.OtherPackageCap { + return c.o +} +func (c *simpleMyType2) S() sdk.CapDefinition[string] { + return c.s +} +func (c *simpleMyType2) T() sdk.CapDefinition[time.Time] { + return c.t +} + +func (c *simpleMyType2) private() {} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/custom_type_2.go b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/custom_type_2.go new file mode 100644 index 000000000..e839d3a52 --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/custom_type_2.go @@ -0,0 +1,11 @@ +package pkg2 + +type OtherPackage struct { + X string + Z string + Nr NotWrapped +} + +type NotWrapped struct { + A string +} diff --git a/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/wrappers_generated.go 
b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/wrappers_generated.go new file mode 100644 index 000000000..b6993f7c6 --- /dev/null +++ b/pkg/capabilities/cli/cmd/testdata/fixtures/usercode/pkg2/wrappers_generated.go @@ -0,0 +1,78 @@ +// Code generated by github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli, DO NOT EDIT. + +package pkg2 + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +// OtherPackageWrapper allows access to field from an sdk.CapDefinition[OtherPackage] +func OtherPackageWrapper(raw sdk.CapDefinition[OtherPackage]) OtherPackageCap { + wrapped, ok := raw.(OtherPackageCap) + if ok { + return wrapped + } + return &otherPackageCap{CapDefinition: raw} +} + +type OtherPackageCap interface { + sdk.CapDefinition[OtherPackage] + Nr() sdk.CapDefinition[NotWrapped] + X() sdk.CapDefinition[string] + Z() sdk.CapDefinition[string] + private() +} + +type otherPackageCap struct { + sdk.CapDefinition[OtherPackage] +} + +func (*otherPackageCap) private() {} +func (c *otherPackageCap) Nr() sdk.CapDefinition[NotWrapped] { + return sdk.AccessField[OtherPackage, NotWrapped](c.CapDefinition, "Nr") +} +func (c *otherPackageCap) X() sdk.CapDefinition[string] { + return sdk.AccessField[OtherPackage, string](c.CapDefinition, "X") +} +func (c *otherPackageCap) Z() sdk.CapDefinition[string] { + return sdk.AccessField[OtherPackage, string](c.CapDefinition, "Z") +} + +func ConstantOtherPackage(value OtherPackage) OtherPackageCap { + return &otherPackageCap{CapDefinition: sdk.ConstantDefinition(value)} +} + +func NewOtherPackageFromFields( + nr sdk.CapDefinition[NotWrapped], + x sdk.CapDefinition[string], + z sdk.CapDefinition[string]) OtherPackageCap { + return &simpleOtherPackage{ + CapDefinition: sdk.ComponentCapDefinition[OtherPackage]{ + "Nr": nr.Ref(), + "X": x.Ref(), + "Z": z.Ref(), + }, + nr: nr, + x: x, + z: z, + } +} + +type simpleOtherPackage struct { + sdk.CapDefinition[OtherPackage] + nr sdk.CapDefinition[NotWrapped] + x sdk.CapDefinition[string] + z sdk.CapDefinition[string] +} + +func (c *simpleOtherPackage) Nr() sdk.CapDefinition[NotWrapped] { + return c.nr +} +func (c *simpleOtherPackage) X() sdk.CapDefinition[string] { + return c.x +} +func (c *simpleOtherPackage) Z() sdk.CapDefinition[string] { + return c.z +} + +func (c *simpleOtherPackage) private() {} diff --git a/pkg/capabilities/consensus/ocr3/aggregators/identical.go b/pkg/capabilities/consensus/ocr3/aggregators/identical.go index 389390882..aa05e7cf4 100644 --- a/pkg/capabilities/consensus/ocr3/aggregators/identical.go +++ b/pkg/capabilities/consensus/ocr3/aggregators/identical.go @@ -13,12 +13,12 @@ import ( ocrcommon "github.com/smartcontractkit/libocr/commontypes" ) +// Aggregates by the most frequent observation for each index of a data set type identicalAggregator struct { - config aggregatorConfig - lggr logger.Logger + config identicalAggConfig } -type aggregatorConfig struct { +type identicalAggConfig struct { // Length of the list of observations that each node is expected to provide. // Aggregator's output (i.e. EncodableOutcome) will be a values.Map with the same // number of elements and keyed by indices 0,1,2,... (unless KeyOverrides are provided). 
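The identical.go changes in this region rename aggregatorConfig and ParseConfig to identicalAggConfig and ParseConfigIdenticalAggregator, making room for the reduce aggregator added below. A hedged sketch of configuring each; key names follow the mapstructure tags shown in this diff, values.NewMap mirrors the updated test helper, and the reduce constructor is not shown in this hunk, so only the config literal is sketched:

```go
package example

import (
	"github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/aggregators"
	"github.com/smartcontractkit/chainlink-common/pkg/values"
)

func demo() error {
	// Identical aggregator: config arrives as a values.Map, as in the tests.
	cfg, err := values.NewMap(map[string]any{
		"expectedObservationsLen": 1,
		"keyOverrides":            []string{"outcome"}, // illustrative override key
	})
	if err != nil {
		return err
	}
	if _, err = aggregators.NewIdenticalAggregator(*cfg); err != nil {
		return err
	}

	// Reduce aggregator (see the file below): one possible field layout.
	// DeviationString is the serialized form; the parsed decimal.Decimal in
	// Deviation is assumed to be populated when the config is unwrapped.
	_ = aggregators.ReduceAggConfig{
		OutputFieldName: aggregators.DEFAULT_OUTPUT_FIELD_NAME,
		ReportFormat:    aggregators.REPORT_FORMAT_MAP,
		Fields: []aggregators.AggregationField{
			{
				InputKey:        "price",
				OutputKey:       "Price",
				Method:          aggregators.AGGREGATION_METHOD_MEDIAN,
				DeviationString: "1.5", // report when the median moves more than 1.5%
				DeviationType:   aggregators.DEVIATION_TYPE_PERCENT,
			},
			{
				InputKey:      "feedID",
				OutputKey:     "FeedID",
				Method:        aggregators.AGGREGATION_METHOD_MODE,
				DeviationType: aggregators.DEVIATION_TYPE_NONE,
			},
		},
	}
	return nil
}
```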
@@ -103,7 +103,7 @@ func (a *identicalAggregator) collectHighestCounts(counters []map[[32]byte]*coun } func NewIdenticalAggregator(config values.Map) (*identicalAggregator, error) { - parsedConfig, err := ParseConfig(config) + parsedConfig, err := ParseConfigIdenticalAggregator(config) if err != nil { return nil, fmt.Errorf("failed to parse config (%+v): %w", config, err) } @@ -112,10 +112,10 @@ func NewIdenticalAggregator(config values.Map) (*identicalAggregator, error) { }, nil } -func ParseConfig(config values.Map) (aggregatorConfig, error) { - parsedConfig := aggregatorConfig{} +func ParseConfigIdenticalAggregator(config values.Map) (identicalAggConfig, error) { + parsedConfig := identicalAggConfig{} if err := config.UnwrapTo(&parsedConfig); err != nil { - return aggregatorConfig{}, err + return identicalAggConfig{}, err } if parsedConfig.ExpectedObservationsLen == 0 { parsedConfig.ExpectedObservationsLen = 1 diff --git a/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go b/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go index 711b1ab25..95688e894 100644 --- a/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go +++ b/pkg/capabilities/consensus/ocr3/aggregators/identical_test.go @@ -13,7 +13,7 @@ import ( ) func TestDataFeedsAggregator_Aggregate(t *testing.T) { - config := getConfig(t, nil) + config := getConfigIdenticalAggregator(t, nil) agg, err := aggregators.NewIdenticalAggregator(*config) require.NoError(t, err) @@ -37,7 +37,7 @@ func TestDataFeedsAggregator_Aggregate(t *testing.T) { } func TestDataFeedsAggregator_Aggregate_OverrideWithKeys(t *testing.T) { - config := getConfig(t, []string{"outcome"}) + config := getConfigIdenticalAggregator(t, []string{"outcome"}) agg, err := aggregators.NewIdenticalAggregator(*config) require.NoError(t, err) @@ -61,7 +61,7 @@ func TestDataFeedsAggregator_Aggregate_OverrideWithKeys(t *testing.T) { } func TestDataFeedsAggregator_Aggregate_NoConsensus(t *testing.T) { - config := getConfig(t, []string{"outcome"}) + config := getConfigIdenticalAggregator(t, []string{"outcome"}) agg, err := aggregators.NewIdenticalAggregator(*config) require.NoError(t, err) @@ -81,7 +81,7 @@ func TestDataFeedsAggregator_Aggregate_NoConsensus(t *testing.T) { require.ErrorContains(t, err, "can't reach consensus on observations with index 0") } -func getConfig(t *testing.T, overrideKeys []string) *values.Map { +func getConfigIdenticalAggregator(t *testing.T, overrideKeys []string) *values.Map { unwrappedConfig := map[string]any{ "expectedObservationsLen": len(overrideKeys), "keyOverrides": overrideKeys, diff --git a/pkg/capabilities/consensus/ocr3/aggregators/reduce_aggregator.go b/pkg/capabilities/consensus/ocr3/aggregators/reduce_aggregator.go new file mode 100644 index 000000000..f8c32af4a --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/aggregators/reduce_aggregator.go @@ -0,0 +1,523 @@ +package aggregators + +import ( + "crypto/sha256" + "errors" + "fmt" + "math" + "math/big" + "sort" + "strconv" + "time" + + "github.com/shopspring/decimal" + "google.golang.org/protobuf/proto" + + ocrcommon "github.com/smartcontractkit/libocr/commontypes" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/types" + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink-common/pkg/values/pb" +) + +const ( + AGGREGATION_METHOD_MEDIAN = "median" + AGGREGATION_METHOD_MODE = "mode" + DEVIATION_TYPE_NONE = "none" + 
DEVIATION_TYPE_PERCENT = "percent"
+	DEVIATION_TYPE_ABSOLUTE = "absolute"
+	REPORT_FORMAT_MAP = "map"
+	REPORT_FORMAT_ARRAY = "array"
+	REPORT_FORMAT_VALUE = "value"
+
+	DEFAULT_REPORT_FORMAT = REPORT_FORMAT_MAP
+	DEFAULT_OUTPUT_FIELD_NAME = "Reports"
+)
+
+type ReduceAggConfig struct {
+	// Configuration on how to aggregate one or more data points
+	Fields []AggregationField `mapstructure:"fields" required:"true"`
+	// The top level field name that report data is put into
+	OutputFieldName string `mapstructure:"outputFieldName" json:"outputFieldName" default:"Reports"`
+	// The structure surrounding the report data that is put onto "OutputFieldName"
+	ReportFormat string `mapstructure:"reportFormat" json:"reportFormat" default:"map" jsonschema:"enum=map,enum=array,enum=value"`
+	// Optional key name that, when given, will contain a nested map with the designated Fields moved into it
+	// If given, one or more fields must set SubMapField: true
+	SubMapKey string `mapstructure:"subMapKey" json:"subMapKey" default:""`
+}
+
+type AggregationField struct {
+	// An optional check to only report when the difference from the previous report exceeds a certain threshold.
+	// Can only be used when the field is one of the supported comparable types: string, decimal, int64, big.Int, time.Time, float64.
+	// If no deviation is provided on any field, there will always be a report once minimum observations are reached.
+	Deviation       decimal.Decimal `mapstructure:"-" json:"-"`
+	DeviationString string          `mapstructure:"deviation" json:"deviation,omitempty"`
+	// The format of the deviation being provided
+	// * percent - a percentage deviation
+	// * absolute - an unsigned numeric difference
+	DeviationType string `mapstructure:"deviationType" json:"deviationType,omitempty" jsonschema:"enum=percent,enum=absolute,enum=none"`
+	// The key to find a data point within the input data
+	// If omitted, the entire input will be used
+	InputKey string `mapstructure:"inputKey" json:"inputKey"`
+	// How the data set should be aggregated to a single value
+	// * median - take the centermost value of the sorted data set of observations. Can only be used on numeric types. Not a strict median: the two middle values of an even-length set are not averaged.
+	// * mode - take the most frequent value. If tied, the first of the tied values is used. 
+ Method string `mapstructure:"method" json:"method" jsonschema:"enum=median,enum=mode" required:"true"` + // The key that the aggregated data is put under + // If omitted, the InputKey will be used + OutputKey string `mapstructure:"outputKey" json:"outputKey"` + // If enabled, this field will be moved from the top level map + // into a nested map on the key defined by "SubMapKey" + SubMapField bool `mapstructure:"subMapField" json:"subMapField,omitempty"` +} + +type reduceAggregator struct { + config ReduceAggConfig +} + +var _ types.Aggregator = (*reduceAggregator)(nil) + +// Condenses multiple observations into a single encodable outcome +func (a *reduceAggregator) Aggregate(lggr logger.Logger, previousOutcome *types.AggregationOutcome, observations map[ocrcommon.OracleID][]values.Value, f int) (*types.AggregationOutcome, error) { + if len(observations) < 2*f+1 { + return nil, fmt.Errorf("not enough observations, have %d want %d", len(observations), 2*f+1) + } + + currentState, err := a.initializeCurrentState(lggr, previousOutcome) + if err != nil { + return nil, err + } + + report := map[string]any{} + shouldReport := false + + for _, field := range a.config.Fields { + vals := a.extractValues(lggr, observations, field.InputKey) + + // only proceed if every field has reached the minimum number of observations + if len(vals) < 2*f+1 { + return nil, fmt.Errorf("not enough observations provided %s, have %d want %d", field.InputKey, len(vals), 2*f+1) + } + + singleValue, err := reduce(field.Method, vals) + if err != nil { + return nil, fmt.Errorf("unable to reduce on method %s, err: %s", field.Method, err.Error()) + } + + if field.DeviationType != DEVIATION_TYPE_NONE { + oldValue := (*currentState)[field.OutputKey] + currDeviation, err := deviation(field.DeviationType, oldValue, singleValue) + if oldValue != nil && err != nil { + return nil, fmt.Errorf("unable to determine deviation %s", err.Error()) + } + if oldValue == nil || currDeviation.GreaterThan(field.Deviation) { + shouldReport = true + } + lggr.Debugw("checked deviation", "key", field.OutputKey, "deviationType", field.DeviationType, "currentDeviation", currDeviation.String(), "targetDeviation", field.Deviation.String(), "shouldReport", shouldReport) + } + + (*currentState)[field.OutputKey] = singleValue + if len(field.OutputKey) > 0 { + report[field.OutputKey] = singleValue + } else { + report[field.InputKey] = singleValue + } + } + + // if SubMapKey is provided, move fields in a nested map + if len(a.config.SubMapKey) > 0 { + subMap := map[string]any{} + for _, field := range a.config.Fields { + if field.SubMapField { + if len(field.OutputKey) > 0 { + subMap[field.OutputKey] = report[field.OutputKey] + delete(report, field.OutputKey) + } else { + subMap[field.InputKey] = report[field.InputKey] + delete(report, field.InputKey) + } + } + } + report[a.config.SubMapKey] = subMap + } + + // if none of the AggregationFields define deviation, always report + hasNoDeviation := true + for _, field := range a.config.Fields { + if field.DeviationType != DEVIATION_TYPE_NONE { + hasNoDeviation = false + break + } + } + if hasNoDeviation { + lggr.Debugw("no deviation defined, reporting") + shouldReport = true + } + + stateValuesMap, err := values.WrapMap(currentState) + if err != nil { + return nil, fmt.Errorf("aggregate state wrapmap error: %s", err.Error()) + } + stateBytes, err := proto.Marshal(values.ProtoMap(stateValuesMap)) + if err != nil { + return nil, fmt.Errorf("aggregate state proto marshal error: %s", err.Error()) + } + + toWrap, 
err := formatReport(report, a.config.ReportFormat)
+	if err != nil {
+		return nil, fmt.Errorf("aggregate formatReport error: %s", err.Error())
+	}
+	reportValuesMap, err := values.NewMap(map[string]any{
+		a.config.OutputFieldName: toWrap,
+	})
+	if err != nil {
+		return nil, fmt.Errorf("aggregate new map error: %s", err.Error())
+	}
+	reportProtoMap := values.Proto(reportValuesMap).GetMapValue()
+
+	lggr.Debugw("Aggregation complete", "shouldReport", shouldReport)
+
+	return &types.AggregationOutcome{
+		EncodableOutcome: reportProtoMap,
+		Metadata:         stateBytes,
+		ShouldReport:     shouldReport,
+	}, nil
+}
+
+func (a *reduceAggregator) initializeCurrentState(lggr logger.Logger, previousOutcome *types.AggregationOutcome) (*map[string]values.Value, error) {
+	currentState := map[string]values.Value{}
+
+	if previousOutcome != nil {
+		pb := &pb.Map{}
+		if err := proto.Unmarshal(previousOutcome.Metadata, pb); err != nil {
+			return nil, fmt.Errorf("initializeCurrentState proto unmarshal error: %s", err.Error())
+		}
+		mv, err := values.FromMapValueProto(pb)
+		if err != nil {
+			return nil, fmt.Errorf("initializeCurrentState FromMapValueProto error: %s", err.Error())
+		}
+		err = mv.UnwrapTo(currentState)
+		if err != nil {
+			return nil, fmt.Errorf("initializeCurrentState UnwrapTo error: %s", err.Error())
+		}
+	}
+
+	zeroValue := values.NewDecimal(decimal.Zero)
+	for _, field := range a.config.Fields {
+		if _, ok := currentState[field.OutputKey]; !ok {
+			currentState[field.OutputKey] = zeroValue
+			lggr.Debugw("initializing empty onchain state for feed", "fieldOutputKey", field.OutputKey)
+		}
+	}
+
+	lggr.Debugw("current state initialized", "state", currentState, "previousOutcome", previousOutcome)
+	return &currentState, nil
+}
+
+func (a *reduceAggregator) extractValues(lggr logger.Logger, observations map[ocrcommon.OracleID][]values.Value, aggregationKey string) (vals []values.Value) {
+	for nodeID, nodeObservations := range observations {
+		// we only expect a single observation per node
+		if len(nodeObservations) == 0 || nodeObservations[0] == nil {
+			lggr.Warnf("node %d contributed with empty observations", nodeID)
+			continue
+		}
+		if len(nodeObservations) > 1 {
+			lggr.Warnf("node %d contributed with more than one observation", nodeID)
+			continue
+		}
+
+		val, err := nodeObservations[0].Unwrap()
+		if err != nil {
+			lggr.Warnf("node %d contributed a Value that could not be unwrapped", nodeID)
+			continue
+		}
+
+		// if the observation data is a complex type, extract the value using the inputKey
+		// values are then re-wrapped here to handle aggregating against Value types
+		// which is used for mode aggregation
+		switch val := val.(type) {
+		case map[string]interface{}:
+			_, ok := val[aggregationKey]
+			if !ok {
+				continue
+			}
+
+			rewrapped, err := values.Wrap(val[aggregationKey])
+			if err != nil {
+				lggr.Warnf("unable to wrap value %s", val[aggregationKey])
+				continue
+			}
+			vals = append(vals, rewrapped)
+		case []interface{}:
+			i, err := strconv.Atoi(aggregationKey)
+			if err != nil {
+				lggr.Warnf("aggregation key %s could not be used to index a list type", aggregationKey)
+				continue
+			}
+			// guard against an out-of-range index before accessing the list
+			if i < 0 || i >= len(val) {
+				lggr.Warnf("aggregation key %s is out of range for the observed list", aggregationKey)
+				continue
+			}
+			rewrapped, err := values.Wrap(val[i])
+			if err != nil {
+				lggr.Warnf("unable to wrap value %s", val[i])
+				continue
+			}
+			vals = append(vals, rewrapped)
+		default:
+			// not a complex type, use raw value
+			if len(aggregationKey) == 0 {
+				vals = append(vals, nodeObservations[0])
+			} else {
+				lggr.Warnf("aggregation key %s provided, but value is not an indexable type", aggregationKey)
+			}
+		}
+	}
+
+	return vals
+}
+
+func reduce(method string, items []values.Value) (values.Value, error) {
+	switch method {
+	case 
AGGREGATION_METHOD_MEDIAN:
+		return median(items)
+	case AGGREGATION_METHOD_MODE:
+		return mode(items)
+	default:
+		// invariant, config should be validated
+		return nil, fmt.Errorf("unsupported aggregation method %s", method)
+	}
+}
+
+func median(items []values.Value) (values.Value, error) {
+	if len(items) == 0 {
+		// invariant, as long as f > 0 there should be items
+		return nil, errors.New("items cannot be empty")
+	}
+	err := sortAsDecimal(items)
+	if err != nil {
+		return nil, err
+	}
+	return items[(len(items)-1)/2], nil
+}
+
+// sortAsDecimal sorts the items in place, in descending order of their decimal value
+func sortAsDecimal(items []values.Value) error {
+	var err error
+	sort.Slice(items, func(i, j int) bool {
+		decimalI, errI := toDecimal(items[i])
+		if errI != nil {
+			err = errI
+		}
+		decimalJ, errJ := toDecimal(items[j])
+		if errJ != nil {
+			err = errJ
+		}
+		return decimalI.GreaterThan(decimalJ)
+	})
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+func toDecimal(item values.Value) (decimal.Decimal, error) {
+	unwrapped, err := item.Unwrap()
+	if err != nil {
+		return decimal.NewFromInt(0), err
+	}
+
+	switch v := unwrapped.(type) {
+	case string:
+		deci, err := decimal.NewFromString(v)
+		if err != nil {
+			return decimal.NewFromInt(0), err
+		}
+		return deci, nil
+	case decimal.Decimal:
+		return v, nil
+	case int64:
+		return decimal.NewFromInt(v), nil
+	case *big.Int:
+		// exponent 0: interpret the big.Int as a whole, unscaled number
+		return decimal.NewFromBigInt(v, 0), nil
+	case time.Time:
+		return decimal.NewFromInt(v.Unix()), nil
+	case float64:
+		return decimal.NewFromFloat(v), nil
+	default:
+		// unsupported type
+		return decimal.NewFromInt(0), fmt.Errorf("unable to convert type %T to decimal", v)
+	}
+}
+
+func mode(items []values.Value) (values.Value, error) {
+	if len(items) == 0 {
+		// invariant, as long as f > 0 there should be items
+		return nil, errors.New("items cannot be empty")
+	}
+
+	counts := make(map[[32]byte]*counter)
+	for _, item := range items {
+		marshalled, err := proto.MarshalOptions{Deterministic: true}.Marshal(values.Proto(item))
+		if err != nil {
+			// invariant: values should always be able to be proto marshalled
+			return nil, err
+		}
+		sha := sha256.Sum256(marshalled)
+		elem, ok := counts[sha]
+		if !ok {
+			counts[sha] = &counter{
+				fullObservation: item,
+				count:           1,
+			}
+		} else {
+			elem.count++
+		}
+	}
+
+	var maxCount int
+	for _, ctr := range counts {
+		if ctr.count > maxCount {
+			maxCount = ctr.count
+		}
+	}
+
+	var modes []values.Value
+	for _, ctr := range counts {
+		if ctr.count == maxCount {
+			modes = append(modes, ctr.fullObservation)
+		}
+	}
+
+	// if more than one mode is found, choose the first
+	return modes[0], nil
+}
+
+func deviation(method string, previousValue values.Value, nextValue values.Value) (decimal.Decimal, error) {
+	prevDeci, err := toDecimal(previousValue)
+	if err != nil {
+		return decimal.NewFromInt(0), err
+	}
+	nextDeci, err := toDecimal(nextValue)
+	if err != nil {
+		return decimal.NewFromInt(0), err
+	}
+
+	diff := prevDeci.Sub(nextDeci).Abs()
+
+	switch method {
+	case DEVIATION_TYPE_ABSOLUTE:
+		return diff, nil
+	case DEVIATION_TYPE_PERCENT:
+		if prevDeci.Cmp(decimal.NewFromInt(0)) == 0 {
+			if diff.Cmp(decimal.NewFromInt(0)) == 0 {
+				return decimal.NewFromInt(0), nil
+			}
+			// any change away from a zero baseline is treated as maximal deviation
+			return decimal.NewFromInt(math.MaxInt), nil
+		}
+		return diff.Div(prevDeci), nil
+	default:
+		return decimal.NewFromInt(0), fmt.Errorf("unsupported deviation method %s", method)
+	}
+}
+
+func formatReport(report map[string]any, format string) (any, error) {
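+	// A sketch of the three formats for a hypothetical report of
+	// {"Price": 1}: "map" returns {"Price": 1} unchanged, "array" wraps it
+	// as [{"Price": 1}], and "value" returns the bare 1.
+	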
switch format { + case REPORT_FORMAT_ARRAY: + return []map[string]any{report}, nil + case REPORT_FORMAT_MAP: + return report, nil + case REPORT_FORMAT_VALUE: + for _, value := range report { + return value, nil + } + // invariant: validation enforces only one output value + return nil, errors.New("value format must contain at least one output") + default: + return nil, errors.New("unsupported report format") + } +} + +func isOneOf(toCheck string, options []string) bool { + for _, option := range options { + if toCheck == option { + return true + } + } + return false +} + +func NewReduceAggregator(config values.Map) (types.Aggregator, error) { + parsedConfig, err := ParseConfigReduceAggregator(config) + if err != nil { + return nil, fmt.Errorf("failed to parse config (%+v): %w", config, err) + } + return &reduceAggregator{ + config: parsedConfig, + }, nil +} + +func ParseConfigReduceAggregator(config values.Map) (ReduceAggConfig, error) { + parsedConfig := ReduceAggConfig{} + if err := config.UnwrapTo(&parsedConfig); err != nil { + return ReduceAggConfig{}, err + } + + // validations & fill defaults + if len(parsedConfig.Fields) == 0 { + return ReduceAggConfig{}, errors.New("reduce aggregator must contain config for Fields to aggregate") + } + if len(parsedConfig.OutputFieldName) == 0 { + parsedConfig.OutputFieldName = DEFAULT_OUTPUT_FIELD_NAME + } + if len(parsedConfig.ReportFormat) == 0 { + parsedConfig.ReportFormat = DEFAULT_REPORT_FORMAT + } + if len(parsedConfig.Fields) > 1 && parsedConfig.ReportFormat == REPORT_FORMAT_VALUE { + return ReduceAggConfig{}, errors.New("report type of value can only have one field") + } + hasSubMapField := false + outputKeyCount := make(map[any]bool) + for i, field := range parsedConfig.Fields { + if (parsedConfig.ReportFormat == REPORT_FORMAT_ARRAY || parsedConfig.ReportFormat == REPORT_FORMAT_MAP) && len(field.OutputKey) == 0 { + return ReduceAggConfig{}, fmt.Errorf("report type %s or %s must have an OutputKey to put the result under", REPORT_FORMAT_ARRAY, REPORT_FORMAT_MAP) + } + if len(field.DeviationType) == 0 { + field.DeviationType = DEVIATION_TYPE_NONE + parsedConfig.Fields[i].DeviationType = DEVIATION_TYPE_NONE + } + if !isOneOf(field.DeviationType, []string{DEVIATION_TYPE_ABSOLUTE, DEVIATION_TYPE_PERCENT, DEVIATION_TYPE_NONE}) { + return ReduceAggConfig{}, fmt.Errorf("invalid config DeviationType. received: %s. options: [%s, %s, %s]", field.DeviationType, DEVIATION_TYPE_ABSOLUTE, DEVIATION_TYPE_PERCENT, DEVIATION_TYPE_NONE) + } + if field.DeviationType != DEVIATION_TYPE_NONE && len(field.DeviationString) == 0 { + return ReduceAggConfig{}, errors.New("aggregation field deviation must contain DeviationString amount") + } + if field.DeviationType != DEVIATION_TYPE_NONE && len(field.DeviationString) > 0 { + deci, err := decimal.NewFromString(field.DeviationString) + if err != nil { + return ReduceAggConfig{}, fmt.Errorf("reduce aggregator could not parse deviation decimal from string %s", field.DeviationString) + } + parsedConfig.Fields[i].Deviation = deci + } + if len(field.Method) == 0 || !isOneOf(field.Method, []string{AGGREGATION_METHOD_MEDIAN, AGGREGATION_METHOD_MODE}) { + return ReduceAggConfig{}, fmt.Errorf("aggregation field must contain a method. 
options: [%s, %s]", AGGREGATION_METHOD_MEDIAN, AGGREGATION_METHOD_MODE) + } + if len(field.DeviationString) > 0 && field.DeviationType == DEVIATION_TYPE_NONE { + return ReduceAggConfig{}, fmt.Errorf("aggregation field cannot have deviation with a deviation type of %s", DEVIATION_TYPE_NONE) + } + if field.SubMapField { + hasSubMapField = true + } + if outputKeyCount[field.OutputKey] { + return ReduceAggConfig{}, errors.New("multiple fields have the same outputkey, which will overwrite each other") + } + outputKeyCount[field.OutputKey] = true + } + if len(parsedConfig.SubMapKey) > 0 && !hasSubMapField { + return ReduceAggConfig{}, fmt.Errorf("sub Map key %s given, but no fields are marked as sub map fields", parsedConfig.SubMapKey) + } + if hasSubMapField && len(parsedConfig.SubMapKey) == 0 { + return ReduceAggConfig{}, errors.New("fields are marked as sub Map fields, but no sub map key given") + } + if !isOneOf(parsedConfig.ReportFormat, []string{REPORT_FORMAT_ARRAY, REPORT_FORMAT_MAP, REPORT_FORMAT_VALUE}) { + return ReduceAggConfig{}, fmt.Errorf("invalid config ReportFormat. received: %s. options: %s, %s, %s", parsedConfig.ReportFormat, REPORT_FORMAT_ARRAY, REPORT_FORMAT_MAP, REPORT_FORMAT_VALUE) + } + + return parsedConfig, nil +} diff --git a/pkg/capabilities/consensus/ocr3/aggregators/reduce_test.go b/pkg/capabilities/consensus/ocr3/aggregators/reduce_test.go new file mode 100644 index 000000000..66467dd62 --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/aggregators/reduce_test.go @@ -0,0 +1,1131 @@ +package aggregators_test + +import ( + "math/big" + "testing" + "time" + + "github.com/shopspring/decimal" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/libocr/commontypes" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/aggregators" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/types" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/datastreams" + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink-common/pkg/values/pb" +) + +var ( + feedIDA = datastreams.FeedID("0x0001013ebd4ed3f5889fb5a8a52b42675c60c1a8c42bc79eaa72dcd922ac4292") + idABytes = feedIDA.Bytes() + feedIDB = datastreams.FeedID("0x0003c317fec7fad514c67aacc6366bf2f007ce37100e3cddcacd0ccaa1f3746d") + idBBytes = feedIDB.Bytes() + now = time.Now() +) + +func TestReduceAggregator_Aggregate(t *testing.T) { + t.Run("happy path", func(t *testing.T) { + cases := []struct { + name string + fields []aggregators.AggregationField + extraConfig map[string]any + observationsFactory func() map[commontypes.OracleID][]values.Value + shouldReport bool + expectedState any + expectedOutcome map[string]any + }{ + { + name: "aggregate on int64 median", + fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "100", + DeviationType: "absolute", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "BenchmarkPrice": int64(100), + "Timestamp": 12341414929, + }) + require.NoError(t, 
err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + }, + expectedState: map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + { + name: "aggregate on decimal median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": decimal.NewFromInt(32), + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": decimal.NewFromInt(32), + }, + }, + }, + expectedState: map[string]any{ + "Price": decimal.NewFromInt(32), + }, + }, + { + name: "aggregate on float64 median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": float64(1.2), + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": float64(1.2), + }, + }, + }, + expectedState: map[string]any{ + "Price": float64(1.2), + }, + }, + { + name: "aggregate on time median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": now, + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": time.Time(now).UTC(), + }, + }, + }, + expectedState: map[string]any{ + "Price": now.UTC(), + }, + }, + { + name: "aggregate on big int median", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": big.NewInt(100), + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": big.NewInt(100), + }, + }, + }, + expectedState: map[string]any{ + "Price": big.NewInt(100), + }, + }, + { + name: "aggregate on bytes mode", + fields: []aggregators.AggregationField{ + { + InputKey: 
"FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue1, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + }) + require.NoError(t, err) + mockValue2, err := values.WrapMap(map[string]any{ + "FeedID": idBBytes[:], + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue1}, 2: {mockValue1}, 3: {mockValue2}, 4: {mockValue1}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + }, + }, + }, + expectedState: map[string]any{ + "FeedID": idABytes[:], + }, + }, + { + name: "aggregate on string mode", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "mode", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue1, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": "1", + }) + require.NoError(t, err) + mockValue2, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": "2", + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue1}, 2: {mockValue1}, 3: {mockValue2}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": "1", + }, + }, + }, + expectedState: map[string]any{ + "Price": "1", + }, + }, + { + name: "aggregate on bool mode", + fields: []aggregators.AggregationField{ + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "mode", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue1, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": true, + }) + require.NoError(t, err) + mockValue2, err := values.WrapMap(map[string]any{ + "BenchmarkPrice": false, + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue1}, 2: {mockValue1}, 3: {mockValue2}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": true, + }, + }, + }, + expectedState: map[string]any{ + "Price": true, + }, + }, + { + name: "aggregate on non-indexable type", + fields: []aggregators.AggregationField{ + { + // Omitting "InputKey" + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(1) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": int64(1), + }, + }, + }, + expectedState: map[string]any{"Price": int64(1)}, + }, + { + name: "aggregate on list type", + fields: []aggregators.AggregationField{ + { + InputKey: "1", + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.NewList([]any{"1", "2", "3"}) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "Price": "2", + }, + }, + }, + expectedState: map[string]any{ + "Price": "2", + }, + }, + { + name: "submap", + fields: 
[]aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + SubMapField: true, + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "100", + DeviationType: "absolute", + }, + }, + extraConfig: map[string]any{ + "SubMapKey": "Report", + }, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "BenchmarkPrice": int64(100), + "Timestamp": 12341414929, + }) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Report": map[string]any{ + "Price": int64(100), + }, + }, + }, + }, + expectedState: map[string]any{ + "FeedID": idABytes[:], + "Price": int64(100), + "Timestamp": int64(12341414929), + }, + }, + { + name: "report format value", + fields: []aggregators.AggregationField{ + { + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{ + "reportFormat": "value", + }, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(1) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": int64(1), + }, + expectedState: map[string]any{"Price": int64(1)}, + }, + { + name: "report format array", + fields: []aggregators.AggregationField{ + { + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{ + "reportFormat": "array", + }, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(1) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + shouldReport: true, + expectedOutcome: map[string]any{ + "Reports": []any{map[string]any{"Price": int64(1)}}, + }, + expectedState: map[string]any{"Price": int64(1)}, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + config := getConfigReduceAggregator(t, tt.fields, tt.extraConfig) + agg, err := aggregators.NewReduceAggregator(*config) + require.NoError(t, err) + + pb := &pb.Map{} + outcome, err := agg.Aggregate(logger.Nop(), nil, tt.observationsFactory(), 1) + require.NoError(t, err) + require.Equal(t, tt.shouldReport, outcome.ShouldReport) + + // validate metadata + proto.Unmarshal(outcome.Metadata, pb) + vmap, err := values.FromMapValueProto(pb) + require.NoError(t, err) + state, err := vmap.Unwrap() + require.NoError(t, err) + require.Equal(t, tt.expectedState, state) + + // validate encodable outcome + val, err := values.FromMapValueProto(outcome.EncodableOutcome) + require.NoError(t, err) + topLevelMap, err := val.Unwrap() + require.NoError(t, err) + mm, ok := topLevelMap.(map[string]any) + require.True(t, ok) + + require.NoError(t, err) + + require.Equal(t, tt.expectedOutcome, mm) + }) + } + }) + + t.Run("error path", func(t *testing.T) { + cases := []struct { + name string + previousOutcome *types.AggregationOutcome + fields []aggregators.AggregationField + extraConfig map[string]any + observationsFactory func() 
map[commontypes.OracleID][]values.Value + }{ + { + name: "not enough observations", + previousOutcome: nil, + fields: []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + return map[commontypes.OracleID][]values.Value{} + }, + }, + { + name: "empty previous outcome", + previousOutcome: &types.AggregationOutcome{}, + fields: []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(int64(100)) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + }, + { + name: "invalid previous outcome not pb", + previousOutcome: &types.AggregationOutcome{ + Metadata: []byte{1, 2, 3}, + }, + fields: []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(int64(100)) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + }, + { + name: "not enough extracted values", + previousOutcome: nil, + fields: []aggregators.AggregationField{ + { + InputKey: "Price", + OutputKey: "Price", + Method: "median", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.WrapMap(map[string]any{"Price": int64(100)}) + require.NoError(t, err) + mockValueEmpty := values.EmptyMap() + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValueEmpty}} + }, + }, + { + name: "reduce error median", + previousOutcome: nil, + fields: []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + extraConfig: map[string]any{}, + observationsFactory: func() map[commontypes.OracleID][]values.Value { + mockValue, err := values.Wrap(true) + require.NoError(t, err) + return map[commontypes.OracleID][]values.Value{1: {mockValue}, 2: {mockValue}, 3: {mockValue}} + }, + }, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + config := getConfigReduceAggregator(t, tt.fields, tt.extraConfig) + agg, err := aggregators.NewReduceAggregator(*config) + require.NoError(t, err) + + _, err = agg.Aggregate(logger.Nop(), tt.previousOutcome, tt.observationsFactory(), 1) + require.Error(t, err) + }) + } + }) +} + +func TestInputChanges(t *testing.T) { + fields := []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "BenchmarkPrice", + OutputKey: "Price", + Method: "median", + DeviationString: "10", + DeviationType: "percent", + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "100", + DeviationType: "absolute", + }, + } + config := getConfigReduceAggregator(t, fields, map[string]any{}) + agg, err := aggregators.NewReduceAggregator(*config) + require.NoError(t, err) + + // First Round + mockValue1, err := values.WrapMap(map[string]any{ + "FeedID": idABytes[:], + "BenchmarkPrice": int64(100), + "Timestamp": 12341414929, + }) + require.NoError(t, err) + pb := &pb.Map{} + outcome, err := agg.Aggregate(logger.Nop(), nil, map[commontypes.OracleID][]values.Value{1: 
{mockValue1}, 2: {mockValue1}, 3: {mockValue1}}, 1) + require.NoError(t, err) + shouldReport := true + require.Equal(t, shouldReport, outcome.ShouldReport) + + // validate metadata + proto.Unmarshal(outcome.Metadata, pb) + vmap, err := values.FromMapValueProto(pb) + require.NoError(t, err) + state, err := vmap.Unwrap() + require.NoError(t, err) + expectedState1 := map[string]any{ + "FeedID": idABytes[:], + "Price": int64(100), + "Timestamp": int64(12341414929), + } + require.Equal(t, expectedState1, state) + + // validate encodable outcome + val, err := values.FromMapValueProto(outcome.EncodableOutcome) + require.NoError(t, err) + topLevelMap, err := val.Unwrap() + require.NoError(t, err) + mm, ok := topLevelMap.(map[string]any) + require.True(t, ok) + + require.NoError(t, err) + expectedOutcome1 := map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": idABytes[:], + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + } + require.Equal(t, expectedOutcome1, mm) + + // Second Round + mockValue2, err := values.WrapMap(map[string]any{ + "FeedID": true, + "Timestamp": int64(12341414929), + "BenchmarkPrice": int64(100), + }) + require.NoError(t, err) + outcome, err = agg.Aggregate(logger.Nop(), nil, map[commontypes.OracleID][]values.Value{1: {mockValue2}, 2: {mockValue2}, 3: {mockValue2}}, 1) + require.NoError(t, err) + require.Equal(t, shouldReport, outcome.ShouldReport) + + // validate metadata + proto.Unmarshal(outcome.Metadata, pb) + vmap, err = values.FromMapValueProto(pb) + require.NoError(t, err) + state, err = vmap.Unwrap() + require.NoError(t, err) + expectedState2 := map[string]any{ + "FeedID": true, + "Price": int64(100), + "Timestamp": int64(12341414929), + } + require.Equal(t, expectedState2, state) + + // validate encodable outcome + val, err = values.FromMapValueProto(outcome.EncodableOutcome) + require.NoError(t, err) + topLevelMap, err = val.Unwrap() + require.NoError(t, err) + mm, ok = topLevelMap.(map[string]any) + require.True(t, ok) + + require.NoError(t, err) + expectedOutcome2 := map[string]any{ + "Reports": []any{ + map[string]any{ + "FeedID": true, + "Timestamp": int64(12341414929), + "Price": int64(100), + }, + }, + } + + require.Equal(t, expectedOutcome2, mm) + +} + +func TestMedianAggregator_ParseConfig(t *testing.T) { + t.Run("happy path", func(t *testing.T) { + cases := []struct { + name string + inputFactory func() map[string]any + outputFactory func() aggregators.ReduceAggConfig + }{ + { + name: "no inputkey", + inputFactory: func() map[string]any { + return map[string]any{ + "fields": []aggregators.AggregationField{ + { + Method: "median", + OutputKey: "Price", + }, + }, + } + }, + outputFactory: func() aggregators.ReduceAggConfig { + return aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "", + OutputKey: "Price", + Method: "median", + DeviationString: "", + Deviation: decimal.Decimal{}, + DeviationType: "none", + }, + }, + OutputFieldName: "Reports", + ReportFormat: "map", + } + }, + }, + { + name: "reportFormat map, aggregation method mode, deviation", + inputFactory: func() map[string]any { + return map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "mode", + DeviationString: "1.1", + DeviationType: "absolute", + }, + }, + } + }, + outputFactory: func() aggregators.ReduceAggConfig { + return aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "mode", 
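+							// the parser is expected to keep the raw deviation string
+							// and to also populate the parsed decimal below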
+ DeviationString: "1.1", + Deviation: decimal.NewFromFloat(1.1), + DeviationType: "absolute", + }, + }, + OutputFieldName: "Reports", + ReportFormat: "map", + } + }, + }, + { + name: "reportFormat array, aggregation method median, no deviation", + inputFactory: func() map[string]any { + return map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "median", + }, + }, + "outputFieldName": "Reports", + "reportFormat": "array", + } + }, + outputFactory: func() aggregators.ReduceAggConfig { + return aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedId", + Method: "median", + DeviationString: "", + Deviation: decimal.Decimal{}, + DeviationType: "none", + }, + }, + OutputFieldName: "Reports", + ReportFormat: "array", + } + }, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + vMap, err := values.NewMap(tt.inputFactory()) + require.NoError(t, err) + parsedConfig, err := aggregators.ParseConfigReduceAggregator(*vMap) + require.NoError(t, err) + require.Equal(t, tt.outputFactory(), parsedConfig) + }) + } + }) + + t.Run("unhappy path", func(t *testing.T) { + cases := []struct { + name string + configFactory func() *values.Map + }{ + { + name: "empty", + configFactory: func() *values.Map { + return values.EmptyMap() + }, + }, + { + name: "invalid report format", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + }, + "reportFormat": "invalid", + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with no method", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with empty method", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with invalid method", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "invalid", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation string but no deviation type", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationString: "1", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation string but empty deviation type", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationString: "1", + DeviationType: "", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with invalid deviation type", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: 
"median", + DeviationString: "1", + DeviationType: "invalid", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation type but no deviation string", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationType: "absolute", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with deviation type but empty deviation string", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationType: "absolute", + DeviationString: "", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with invalid deviation string", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + DeviationType: "absolute", + DeviationString: "1-1", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "field with sub report, but no sub report key", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + SubMapField: true, + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "sub report key, but no sub report fields", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "subMapKey": "Report", + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "clashing output keys", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "median", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "map/array type, no output key", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + Method: "median", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + { + name: "report type value with multiple fields", + configFactory: func() *values.Map { + vMap, err := values.NewMap(map[string]any{ + "reportFormat": "value", + "fields": []aggregators.AggregationField{ + { + InputKey: "FeedID", + Method: "median", + OutputKey: "FeedID", + }, + { + InputKey: "Price", + Method: "median", + OutputKey: "Price", + }, + }, + }) + require.NoError(t, err) + return vMap + }, + }, + } + + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + _, err := aggregators.ParseConfigReduceAggregator(*tt.configFactory()) + require.Error(t, err) + }, + ) + } + }) +} + +func getConfigReduceAggregator(t *testing.T, fields []aggregators.AggregationField, override map[string]any) *values.Map { + unwrappedConfig := map[string]any{ + "fields": fields, + "outputFieldName": "Reports", + "reportFormat": "array", + } + for key, val := range override { + unwrappedConfig[key] = val + } + config, err := values.NewMap(unwrappedConfig) + 
require.NoError(t, err) + return config +} diff --git a/pkg/capabilities/consensus/ocr3/capability.go b/pkg/capabilities/consensus/ocr3/capability.go index 6c4c8f1a8..ed62faa43 100644 --- a/pkg/capabilities/consensus/ocr3/capability.go +++ b/pkg/capabilities/consensus/ocr3/capability.go @@ -146,7 +146,7 @@ func (o *capability) getAggregator(workflowID string) (types.Aggregator, error) func (o *capability) getEncoderByWorkflowID(workflowID string) (types.Encoder, error) { enc, ok := o.encoders[workflowID] if !ok { - return nil, fmt.Errorf("no aggregator found for workflowID %s", workflowID) + return nil, fmt.Errorf("no encoder found for workflowID %s", workflowID) } return enc, nil diff --git a/pkg/capabilities/consensus/ocr3/capability_test.go b/pkg/capabilities/consensus/ocr3/capability_test.go index e383a487c..f1a8480ed 100644 --- a/pkg/capabilities/consensus/ocr3/capability_test.go +++ b/pkg/capabilities/consensus/ocr3/capability_test.go @@ -68,62 +68,79 @@ func TestOCR3Capability_Schema(t *testing.T) { } func TestOCR3Capability(t *testing.T) { - n := time.Now() - fc := clockwork.NewFakeClockAt(n) - lggr := logger.Test(t) - - ctx := tests.Context(t) - - s := requests.NewStore() - - cp := newCapability(s, fc, 1*time.Second, mockAggregatorFactory, mockEncoderFactory, lggr, 10) - require.NoError(t, cp.Start(ctx)) - - config, err := values.NewMap( - map[string]any{ - "aggregation_method": "data_feeds", - "aggregation_config": map[string]any{}, - "encoder_config": map[string]any{}, - "encoder": "evm", - "report_id": "ffff", + cases := []struct { + name string + aggregationMethod string + }{ + { + name: "success - aggregation_method data_feeds", + aggregationMethod: "data_feeds", }, - ) - require.NoError(t, err) - - ethUsdValStr := "1.123456" - ethUsdValue, err := decimal.NewFromString(ethUsdValStr) - require.NoError(t, err) - observationKey := "ETH_USD" - obs := []any{map[string]any{observationKey: ethUsdValue}} - inputs, err := values.NewMap(map[string]any{"observations": obs}) - require.NoError(t, err) - - executeReq := capabilities.CapabilityRequest{ - Metadata: capabilities.RequestMetadata{ - WorkflowID: workflowTestID, - WorkflowExecutionID: workflowExecutionTestID, + { + name: "success - aggregation_method reduce", + aggregationMethod: "reduce", }, - Config: config, - Inputs: inputs, } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + n := time.Now() + fc := clockwork.NewFakeClockAt(n) + lggr := logger.Test(t) + + ctx := tests.Context(t) + + s := requests.NewStore() + + cp := newCapability(s, fc, 1*time.Second, mockAggregatorFactory, mockEncoderFactory, lggr, 10) + require.NoError(t, cp.Start(ctx)) + + config, err := values.NewMap( + map[string]any{ + "aggregation_method": tt.aggregationMethod, + "aggregation_config": map[string]any{}, + "encoder_config": map[string]any{}, + "encoder": "evm", + "report_id": "ffff", + }, + ) + require.NoError(t, err) + + ethUsdValStr := "1.123456" + ethUsdValue, err := decimal.NewFromString(ethUsdValStr) + require.NoError(t, err) + observationKey := "ETH_USD" + obs := []any{map[string]any{observationKey: ethUsdValue}} + inputs, err := values.NewMap(map[string]any{"observations": obs}) + require.NoError(t, err) + + executeReq := capabilities.CapabilityRequest{ + Metadata: capabilities.RequestMetadata{ + WorkflowID: workflowTestID, + WorkflowExecutionID: workflowExecutionTestID, + }, + Config: config, + Inputs: inputs, + } - respCh := executeAsync(ctx, executeReq, cp.Execute) + respCh := executeAsync(ctx, executeReq, cp.Execute) - obsv, 
err := values.NewList(obs) - require.NoError(t, err) + obsv, err := values.NewList(obs) + require.NoError(t, err) - // Mock the oracle returning a response - mresp, err := values.NewMap(map[string]any{"observations": obsv}) - cp.reqHandler.SendResponse(ctx, requests.Response{ - Value: mresp, - WorkflowExecutionID: workflowExecutionTestID, - }) - require.NoError(t, err) + // Mock the oracle returning a response + mresp, err := values.NewMap(map[string]any{"observations": obsv}) + cp.reqHandler.SendResponse(ctx, requests.Response{ + Value: mresp, + WorkflowExecutionID: workflowExecutionTestID, + }) + require.NoError(t, err) - resp := <-respCh - assert.Nil(t, resp.Err) + resp := <-respCh + assert.Nil(t, resp.Err) - assert.Equal(t, mresp, resp.Value) + assert.Equal(t, mresp, resp.Value) + }) + } } func TestOCR3Capability_Eviction(t *testing.T) { diff --git a/pkg/capabilities/consensus/ocr3/models.go b/pkg/capabilities/consensus/ocr3/models.go index 86662dc31..9e7887685 100644 --- a/pkg/capabilities/consensus/ocr3/models.go +++ b/pkg/capabilities/consensus/ocr3/models.go @@ -5,7 +5,7 @@ import ( ) type config struct { - AggregationMethod string `mapstructure:"aggregation_method" json:"aggregation_method" jsonschema:"enum=data_feeds"` + AggregationMethod string `mapstructure:"aggregation_method" json:"aggregation_method" jsonschema:"enum=data_feeds,enum=reduce"` AggregationConfig *values.Map `mapstructure:"aggregation_config" json:"aggregation_config"` Encoder string `mapstructure:"encoder" json:"encoder"` EncoderConfig *values.Map `mapstructure:"encoder_config" json:"encoder_config"` diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go b/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go index 1132816b6..b124bb6d2 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/common_builders_generated.go @@ -6,12 +6,48 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) +// EncoderWrapper allows access to field from an sdk.CapDefinition[Encoder] +func EncoderWrapper(raw sdk.CapDefinition[Encoder]) EncoderCap { + wrapped, ok := raw.(EncoderCap) + if ok { + return wrapped + } + return EncoderCap(raw) +} + type EncoderCap sdk.CapDefinition[Encoder] +// EncoderConfigWrapper allows access to field from an sdk.CapDefinition[EncoderConfig] +func EncoderConfigWrapper(raw sdk.CapDefinition[EncoderConfig]) EncoderConfigCap { + wrapped, ok := raw.(EncoderConfigCap) + if ok { + return wrapped + } + return EncoderConfigCap(raw) +} + type EncoderConfigCap sdk.CapDefinition[EncoderConfig] +// ReportIdWrapper allows access to field from an sdk.CapDefinition[ReportId] +func ReportIdWrapper(raw sdk.CapDefinition[ReportId]) ReportIdCap { + wrapped, ok := raw.(ReportIdCap) + if ok { + return wrapped + } + return ReportIdCap(raw) +} + type ReportIdCap sdk.CapDefinition[ReportId] +// SignedReportWrapper allows access to field from an sdk.CapDefinition[SignedReport] +func SignedReportWrapper(raw sdk.CapDefinition[SignedReport]) SignedReportCap { + wrapped, ok := raw.(SignedReportCap) + if ok { + return wrapped + } + return &signedReportCap{CapDefinition: raw} +} + type SignedReportCap interface { sdk.CapDefinition[SignedReport] Context() sdk.CapDefinition[[]uint8] @@ -21,30 +57,28 @@ type SignedReportCap interface { private() } -// SignedReportCapFromStep should only be called from generated code to assure type safety -func SignedReportCapFromStep(w *sdk.WorkflowSpecFactory, step 
sdk.Step[SignedReport]) SignedReportCap { - raw := step.AddTo(w) - return &signedReport{CapDefinition: raw} -} - -type signedReport struct { +type signedReportCap struct { sdk.CapDefinition[SignedReport] } -func (*signedReport) private() {} -func (c *signedReport) Context() sdk.CapDefinition[[]uint8] { +func (*signedReportCap) private() {} +func (c *signedReportCap) Context() sdk.CapDefinition[[]uint8] { return sdk.AccessField[SignedReport, []uint8](c.CapDefinition, "Context") } -func (c *signedReport) ID() sdk.CapDefinition[[]uint8] { +func (c *signedReportCap) ID() sdk.CapDefinition[[]uint8] { return sdk.AccessField[SignedReport, []uint8](c.CapDefinition, "ID") } -func (c *signedReport) Report() sdk.CapDefinition[[]uint8] { +func (c *signedReportCap) Report() sdk.CapDefinition[[]uint8] { return sdk.AccessField[SignedReport, []uint8](c.CapDefinition, "Report") } -func (c *signedReport) Signatures() sdk.CapDefinition[[][]uint8] { +func (c *signedReportCap) Signatures() sdk.CapDefinition[[][]uint8] { return sdk.AccessField[SignedReport, [][]uint8](c.CapDefinition, "Signatures") } +func ConstantSignedReport(value SignedReport) SignedReportCap { + return &signedReportCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewSignedReportFromFields( context sdk.CapDefinition[[]uint8], iD sdk.CapDefinition[[]uint8], diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go b/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go index 1fbcb7534..be1f19d13 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/data_feeds_consensus_builders_generated.go @@ -4,7 +4,7 @@ package ocr3cap import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" - streams "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/streams" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/streams" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) @@ -24,7 +24,17 @@ func (cfg DataFeedsConsensusConfig) New(w *sdk.WorkflowSpecFactory, ref string, } step := sdk.Step[SignedReport]{Definition: def} - return SignedReportCapFromStep(w, step) + raw := step.AddTo(w) + return SignedReportWrapper(raw) +} + +// FeedValueWrapper allows access to field from an sdk.CapDefinition[FeedValue] +func FeedValueWrapper(raw sdk.CapDefinition[FeedValue]) FeedValueCap { + wrapped, ok := raw.(FeedValueCap) + if ok { + return wrapped + } + return &feedValueCap{CapDefinition: raw} } type FeedValueCap interface { @@ -35,27 +45,25 @@ type FeedValueCap interface { private() } -// FeedValueCapFromStep should only be called from generated code to assure type safety -func FeedValueCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[FeedValue]) FeedValueCap { - raw := step.AddTo(w) - return &feedValue{CapDefinition: raw} -} - -type feedValue struct { +type feedValueCap struct { sdk.CapDefinition[FeedValue] } -func (*feedValue) private() {} -func (c *feedValue) Deviation() sdk.CapDefinition[string] { +func (*feedValueCap) private() {} +func (c *feedValueCap) Deviation() sdk.CapDefinition[string] { return sdk.AccessField[FeedValue, string](c.CapDefinition, "deviation") } -func (c *feedValue) Heartbeat() sdk.CapDefinition[uint64] { +func (c *feedValueCap) Heartbeat() sdk.CapDefinition[uint64] { return sdk.AccessField[FeedValue, uint64](c.CapDefinition, "heartbeat") } -func (c *feedValue) RemappedID() sdk.CapDefinition[string] { 
+func (c *feedValueCap) RemappedID() sdk.CapDefinition[string] { return sdk.AccessField[FeedValue, string](c.CapDefinition, "remappedID") } +func ConstantFeedValue(value FeedValue) FeedValueCap { + return &feedValueCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedValueFromFields( deviation sdk.CapDefinition[string], heartbeat sdk.CapDefinition[uint64], diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go b/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go index 12913126c..1394df019 100644 --- a/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/identical_consensus.go @@ -27,8 +27,8 @@ func (c IdenticalConsensusConfig[T]) New(w *sdk.WorkflowSpecFactory, ref string, CapabilityType: capabilities.CapabilityTypeConsensus, } - step := sdk.Step[SignedReport]{Definition: def} - return SignedReportCapFromStep(w, step) + step := &sdk.Step[SignedReport]{Definition: def} + return SignedReportWrapper(step.AddTo(w)) } type IdenticalConsensusInput[T any] struct { diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus.go b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus.go new file mode 100644 index 000000000..9bff885b0 --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus.go @@ -0,0 +1,51 @@ +package ocr3cap + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/aggregators" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +// Note this isn't generated because generics isn't supported in json schema + +type ReduceConsensusConfig[T any] struct { + Encoder Encoder + EncoderConfig EncoderConfig + ReportID ReportId + AggregationConfig aggregators.ReduceAggConfig +} + +func (c ReduceConsensusConfig[T]) New(w *sdk.WorkflowSpecFactory, ref string, input ReduceConsensusInput[T]) SignedReportCap { + def := sdk.StepDefinition{ + ID: "offchain_reporting@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: map[string]any{ + "aggregation_method": "reduce", + "aggregation_config": c.AggregationConfig, + "encoder": c.Encoder, + "encoder_config": c.EncoderConfig, + "report_id": c.ReportID, + }, + CapabilityType: capabilities.CapabilityTypeConsensus, + } + + step := sdk.Step[SignedReport]{Definition: def} + return SignedReportWrapper(step.AddTo(w)) +} + +type ReduceConsensusInput[T any] struct { + Observation sdk.CapDefinition[T] + Encoder Encoder + EncoderConfig EncoderConfig +} + +func (input ReduceConsensusInput[T]) ToSteps() sdk.StepInputs { + return sdk.StepInputs{ + Mapping: map[string]any{ + "observations": sdk.ListOf(input.Observation).Ref(), + "encoder": input.Encoder, + "encoderConfig": input.EncoderConfig, + }, + } +} diff --git a/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus_test.go b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus_test.go new file mode 100644 index 000000000..a74123d4f --- /dev/null +++ b/pkg/capabilities/consensus/ocr3/ocr3cap/reduce_consensus_test.go @@ -0,0 +1,154 @@ +package ocr3cap_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/aggregators" + ocr3 
"github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/targets/chainwriter" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" +) + +func TestReduceConsensus(t *testing.T) { + t.Parallel() + workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{ + Owner: "0x1234", + Name: "Test", + }) + + trigger := basictrigger.TriggerConfig{Name: "1234", Number: 1}.New(workflow) + + consensus := ocr3.ReduceConsensusConfig[basictrigger.TriggerOutputs]{ + Encoder: ocr3.EncoderEVM, + EncoderConfig: ocr3.EncoderConfig{}, + ReportID: "0001", + AggregationConfig: aggregators.ReduceAggConfig{ + Fields: []aggregators.AggregationField{ + { + InputKey: "FeedID", + OutputKey: "FeedID", + Method: "mode", + }, + { + InputKey: "Timestamp", + OutputKey: "Timestamp", + Method: "median", + DeviationString: "3600", // 1 hour in seconds + DeviationType: "absolute", + }, + { + InputKey: "Price", + OutputKey: "Price", + Method: "median", + DeviationString: "0.05", // 5% + DeviationType: "percent", + SubMapField: true, + }, + }, + OutputFieldName: "Reports", + ReportFormat: "array", + SubMapKey: "Report", + }, + }.New(workflow, "consensus", ocr3.ReduceConsensusInput[basictrigger.TriggerOutputs]{ + Observation: trigger, + Encoder: "evm", + EncoderConfig: ocr3.EncoderConfig(map[string]any{ + "abi": "(bytes32 FeedID, bytes Report, uint32 Timestamp)[] Reports", + }), + }) + + chainwriter.TargetConfig{ + Address: "0x1235", + DeltaStage: "45s", + Schedule: "oneAtATime", + }.New(workflow, "chainwriter@1.0.0", chainwriter.TargetInput{SignedReport: consensus}) + + actual, err := workflow.Spec() + require.NoError(t, err) + + expected := sdk.WorkflowSpec{ + Name: "Test", + Owner: "0x1234", + Triggers: []sdk.StepDefinition{ + { + ID: "basic-test-trigger@1.0.0", + Ref: "trigger", + Inputs: sdk.StepInputs{}, + Config: map[string]any{ + "name": "1234", + "number": 1, + }, + CapabilityType: capabilities.CapabilityTypeTrigger, + }, + }, + Actions: []sdk.StepDefinition{}, + Consensus: []sdk.StepDefinition{ + { + ID: "offchain_reporting@1.0.0", + Ref: "consensus", + Inputs: sdk.StepInputs{Mapping: map[string]any{ + "observations": []any{"$(trigger.outputs)"}, + "encoder": "evm", + "encoderConfig": map[string]any{ + "abi": "(bytes32 FeedID, bytes Report, uint32 Timestamp)[] Reports", + }, + }}, + Config: map[string]any{ + "encoder": "EVM", + "encoder_config": map[string]any{}, + "report_id": "0001", + "aggregation_method": "reduce", + "aggregation_config": map[string]any{ + "outputFieldName": "Reports", + "reportFormat": "array", + "subMapKey": "Report", + "Fields": []map[string]any{ + { + "inputKey": "FeedID", + "outputKey": "FeedID", + "method": "mode", + }, + { + "inputKey": "Timestamp", + "outputKey": "Timestamp", + "method": "median", + "deviation": "3600", + "deviationType": "absolute", + }, + { + "inputKey": "Price", + "outputKey": "Price", + "method": "median", + "deviation": "0.05", + "deviationType": "percent", + "subMapField": true, + }, + }, + }, + }, + CapabilityType: capabilities.CapabilityTypeConsensus, + }, + }, + Targets: []sdk.StepDefinition{ + { + ID: "chainwriter@1.0.0", + Inputs: sdk.StepInputs{ + Mapping: map[string]any{"signed_report": "$(consensus.outputs)"}, + }, + Config: map[string]any{ + "address": "0x1235", + "deltaStage": "45s", + "schedule": "oneAtATime", + }, + CapabilityType: capabilities.CapabilityTypeTarget, + }, + }, 
+ } + + testutils.AssertWorkflowSpec(t, expected, actual) +} diff --git a/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json b/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json index ebdabb38d..a50ff7d88 100644 --- a/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json +++ b/pkg/capabilities/consensus/ocr3/testdata/fixtures/capability/schema.json @@ -7,7 +7,8 @@ "aggregation_method": { "type": "string", "enum": [ - "data_feeds" + "data_feeds", + "reduce" ] }, "aggregation_config": { diff --git a/pkg/capabilities/events/events.go b/pkg/capabilities/events/events.go index 81503b42b..444f45705 100644 --- a/pkg/capabilities/events/events.go +++ b/pkg/capabilities/events/events.go @@ -8,20 +8,19 @@ import ( "google.golang.org/protobuf/proto" "github.com/smartcontractkit/chainlink-common/pkg/beholder" - "github.com/smartcontractkit/chainlink-common/pkg/capabilities/events/pb" - "github.com/smartcontractkit/chainlink-common/pkg/values" + "github.com/smartcontractkit/chainlink-common/pkg/beholder/pb" ) +// Duplicates the attributes in beholder/message.go::Metadata const ( - // Duplicates the attributes in beholder/message.go::Metadata - labelWorkflowOwner = "workflow_owner_address" - labelWorkflowID = "workflow_id" - labelWorkflowExecutionID = "workflow_execution_id" - labelWorkflowName = "workflow_name" - labelCapabilityContractAddress = "capability_contract_address" - labelCapabilityID = "capability_id" - labelCapabilityVersion = "capability_version" - labelCapabilityName = "capability_name" + LabelWorkflowOwner = "workflow_owner_address" + LabelWorkflowID = "workflow_id" + LabelWorkflowExecutionID = "workflow_execution_id" + LabelWorkflowName = "workflow_name" + LabelCapabilityContractAddress = "capability_contract_address" + LabelCapabilityID = "capability_id" + LabelCapabilityVersion = "capability_version" + LabelCapabilityName = "capability_name" ) type EmitMetadata struct { @@ -93,35 +92,35 @@ func (e EmitMetadata) attrs() []any { a := []any{} if e.WorkflowOwner != "" { - a = append(a, labelWorkflowOwner, e.WorkflowOwner) + a = append(a, LabelWorkflowOwner, e.WorkflowOwner) } if e.WorkflowID != "" { - a = append(a, labelWorkflowID, e.WorkflowID) + a = append(a, LabelWorkflowID, e.WorkflowID) } if e.WorkflowExecutionID != "" { - a = append(a, labelWorkflowExecutionID, e.WorkflowExecutionID) + a = append(a, LabelWorkflowExecutionID, e.WorkflowExecutionID) } if e.WorkflowName != "" { - a = append(a, labelWorkflowName, e.WorkflowName) + a = append(a, LabelWorkflowName, e.WorkflowName) } if e.CapabilityContractAddress != "" { - a = append(a, labelCapabilityContractAddress, e.CapabilityContractAddress) + a = append(a, LabelCapabilityContractAddress, e.CapabilityContractAddress) } if e.CapabilityID != "" { - a = append(a, labelCapabilityID, e.CapabilityID) + a = append(a, LabelCapabilityID, e.CapabilityID) } if e.CapabilityVersion != "" { - a = append(a, labelCapabilityVersion, e.CapabilityVersion) + a = append(a, LabelCapabilityVersion, e.CapabilityVersion) } if e.CapabilityName != "" { - a = append(a, labelCapabilityName, e.CapabilityName) + a = append(a, LabelCapabilityName, e.CapabilityName) } return a @@ -167,16 +166,27 @@ func (e *Emitter) Emit(ctx context.Context, msg Message) error { return errors.New("must provide workflow name to emit event") } - wm, err := values.WrapMap(msg.Labels) - if err != nil { - return fmt.Errorf("could not wrap map: %w", err) - } - - pm := values.ProtoMap(wm) - - bytes, err := 
proto.Marshal(&pb.OperationalEvent{ - Labels: pm, - Message: msg.Msg, + // TODO un-comment after INFOPLAT-1386 + //wm, err := values.WrapMap(msg.Labels) + //if err != nil { + // return fmt.Errorf("could not wrap map: %w", err) + //} + // + //pm := values.ProtoMap(wm) + + bytes, err := proto.Marshal(&pb.BaseMessage{ + // any empty values will not be serialized (including the key) + Labels: map[string]string{ + LabelWorkflowID: nmd.WorkflowID, + LabelWorkflowName: nmd.WorkflowName, + LabelWorkflowOwner: nmd.WorkflowOwner, + LabelCapabilityContractAddress: nmd.CapabilityContractAddress, + LabelCapabilityID: nmd.CapabilityID, + LabelCapabilityVersion: nmd.CapabilityVersion, + LabelCapabilityName: nmd.CapabilityName, + LabelWorkflowExecutionID: nmd.WorkflowExecutionID, + }, + Msg: msg.Msg, }) if err != nil { return fmt.Errorf("could not marshal operational event: %w", err) diff --git a/pkg/capabilities/events/events_test.go b/pkg/capabilities/events/events_test.go index c08975b6e..709296ec7 100644 --- a/pkg/capabilities/events/events_test.go +++ b/pkg/capabilities/events/events_test.go @@ -9,7 +9,7 @@ import ( "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" - "github.com/smartcontractkit/chainlink-common/pkg/capabilities/events/pb" + "github.com/smartcontractkit/chainlink-common/pkg/beholder/pb" "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" ) @@ -47,11 +47,11 @@ func TestEmitter(t *testing.T) { err = emitter.Emit(tests.Context(t), message) require.NoError(t, err) - event := &pb.OperationalEvent{} + event := &pb.BaseMessage{} err = proto.Unmarshal(client.payload, event) require.NoError(t, err) - assert.Equal(t, event.Message, msg) + assert.Equal(t, event.Msg, msg) } func assertHasKey(t *testing.T, attrs []any, keyName, keyValue string) { diff --git a/pkg/capabilities/events/pb/events.pb.go b/pkg/capabilities/events/pb/events.pb.go deleted file mode 100644 index 411b79b1b..000000000 --- a/pkg/capabilities/events/pb/events.pb.go +++ /dev/null @@ -1,227 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.31.0 -// protoc v4.25.1 -// source: capabilities/events/pb/events.proto - -package pb - -import ( - pb "github.com/smartcontractkit/chainlink-common/pkg/values/pb" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type OperationalEvent struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` - Labels *pb.Map `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` -} - -func (x *OperationalEvent) Reset() { - *x = OperationalEvent{} - if protoimpl.UnsafeEnabled { - mi := &file_capabilities_events_pb_events_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *OperationalEvent) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*OperationalEvent) ProtoMessage() {} - -func (x *OperationalEvent) ProtoReflect() protoreflect.Message { - mi := &file_capabilities_events_pb_events_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use OperationalEvent.ProtoReflect.Descriptor instead. -func (*OperationalEvent) Descriptor() ([]byte, []int) { - return file_capabilities_events_pb_events_proto_rawDescGZIP(), []int{0} -} - -func (x *OperationalEvent) GetMessage() string { - if x != nil { - return x.Message - } - return "" -} - -func (x *OperationalEvent) GetLabels() *pb.Map { - if x != nil { - return x.Labels - } - return nil -} - -// Used by custom compute to send any beholder errors -// back. -type OperationalEventResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - ErrMsg string `protobuf:"bytes,1,opt,name=errMsg,proto3" json:"errMsg,omitempty"` -} - -func (x *OperationalEventResponse) Reset() { - *x = OperationalEventResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_capabilities_events_pb_events_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *OperationalEventResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*OperationalEventResponse) ProtoMessage() {} - -func (x *OperationalEventResponse) ProtoReflect() protoreflect.Message { - mi := &file_capabilities_events_pb_events_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use OperationalEventResponse.ProtoReflect.Descriptor instead. 
-func (*OperationalEventResponse) Descriptor() ([]byte, []int) { - return file_capabilities_events_pb_events_proto_rawDescGZIP(), []int{1} -} - -func (x *OperationalEventResponse) GetErrMsg() string { - if x != nil { - return x.ErrMsg - } - return "" -} - -var File_capabilities_events_pb_events_proto protoreflect.FileDescriptor - -var file_capabilities_events_pb_events_proto_rawDesc = []byte{ - 0x0a, 0x23, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2f, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x70, 0x62, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x16, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2f, 0x70, 0x62, 0x2f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x51, 0x0a, 0x10, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x61, 0x6c, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x12, 0x23, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, - 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x22, 0x32, 0x0a, 0x18, 0x4f, 0x70, 0x65, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x72, 0x72, 0x4d, 0x73, 0x67, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x65, 0x72, 0x72, 0x4d, 0x73, 0x67, 0x42, 0x49, 0x5a, 0x47, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, - 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, - 0x2f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2f, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_capabilities_events_pb_events_proto_rawDescOnce sync.Once - file_capabilities_events_pb_events_proto_rawDescData = file_capabilities_events_pb_events_proto_rawDesc -) - -func file_capabilities_events_pb_events_proto_rawDescGZIP() []byte { - file_capabilities_events_pb_events_proto_rawDescOnce.Do(func() { - file_capabilities_events_pb_events_proto_rawDescData = protoimpl.X.CompressGZIP(file_capabilities_events_pb_events_proto_rawDescData) - }) - return file_capabilities_events_pb_events_proto_rawDescData -} - -var file_capabilities_events_pb_events_proto_msgTypes = make([]protoimpl.MessageInfo, 2) -var file_capabilities_events_pb_events_proto_goTypes = []interface{}{ - (*OperationalEvent)(nil), // 0: events.OperationalEvent - (*OperationalEventResponse)(nil), // 1: events.OperationalEventResponse - (*pb.Map)(nil), // 2: values.Map -} -var file_capabilities_events_pb_events_proto_depIdxs = []int32{ - 2, // 0: events.OperationalEvent.labels:type_name -> values.Map - 1, // [1:1] is the sub-list for method output_type - 1, // [1:1] is the sub-list for method input_type - 1, // [1:1] is the sub-list for extension type_name - 1, // [1:1] is the sub-list for extension extendee - 0, // [0:1] is the sub-list for field type_name -} - -func init() { file_capabilities_events_pb_events_proto_init() } -func file_capabilities_events_pb_events_proto_init() { - if 
File_capabilities_events_pb_events_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_capabilities_events_pb_events_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*OperationalEvent); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_capabilities_events_pb_events_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*OperationalEventResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_capabilities_events_pb_events_proto_rawDesc, - NumEnums: 0, - NumMessages: 2, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_capabilities_events_pb_events_proto_goTypes, - DependencyIndexes: file_capabilities_events_pb_events_proto_depIdxs, - MessageInfos: file_capabilities_events_pb_events_proto_msgTypes, - }.Build() - File_capabilities_events_pb_events_proto = out.File - file_capabilities_events_pb_events_proto_rawDesc = nil - file_capabilities_events_pb_events_proto_goTypes = nil - file_capabilities_events_pb_events_proto_depIdxs = nil -} diff --git a/pkg/capabilities/events/pb/events.proto b/pkg/capabilities/events/pb/events.proto deleted file mode 100644 index 7d19ca585..000000000 --- a/pkg/capabilities/events/pb/events.proto +++ /dev/null @@ -1,18 +0,0 @@ -syntax = "proto3"; - -option go_package = "github.com/smartcontractkit/chainlink-common/pkg/capabilities/events/pb"; - -package events; - -import "values/pb/values.proto"; - -message OperationalEvent { - string message = 1; - values.Map labels = 2; -} - -// Used by custom compute to send any beholder errors -// back. 
-message OperationalEventResponse { - string errMsg = 1; -} diff --git a/pkg/capabilities/events/pb/generate.go b/pkg/capabilities/events/pb/generate.go deleted file mode 100644 index 8d1c8ee25..000000000 --- a/pkg/capabilities/events/pb/generate.go +++ /dev/null @@ -1,2 +0,0 @@ -//go:generate protoc --go_out=../../../ --go_opt=paths=source_relative --go-grpc_out=../../../ --go-grpc_opt=paths=source_relative --proto_path=../../../ capabilities/events/pb/events.proto values/pb/values.proto -package pb diff --git a/pkg/capabilities/targets/chainwriter/target_builders_generated.go b/pkg/capabilities/targets/chainwriter/target_builders_generated.go index 71f7db1dd..d2c6602c7 100644 --- a/pkg/capabilities/targets/chainwriter/target_builders_generated.go +++ b/pkg/capabilities/targets/chainwriter/target_builders_generated.go @@ -4,7 +4,7 @@ package chainwriter import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities" - ocr3cap "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" ) diff --git a/pkg/capabilities/triggers/mercury_trigger.go b/pkg/capabilities/triggers/mercury_trigger.go index cc456d863..3e9ab1efe 100644 --- a/pkg/capabilities/triggers/mercury_trigger.go +++ b/pkg/capabilities/triggers/mercury_trigger.go @@ -246,5 +246,5 @@ func (o *MercuryTriggerService) HealthReport() map[string]error { } func (o *MercuryTriggerService) Name() string { - return "MercuryTriggerService" + return o.lggr.Name() } diff --git a/pkg/capabilities/triggers/streams/trigger_builders_generated.go b/pkg/capabilities/triggers/streams/trigger_builders_generated.go index 677c2a6f6..2a8692f9f 100644 --- a/pkg/capabilities/triggers/streams/trigger_builders_generated.go +++ b/pkg/capabilities/triggers/streams/trigger_builders_generated.go @@ -20,7 +20,17 @@ func (cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) FeedCap { } step := sdk.Step[Feed]{Definition: def} - return FeedCapFromStep(w, step) + raw := step.AddTo(w) + return FeedWrapper(raw) +} + +// FeedWrapper allows access to field from an sdk.CapDefinition[Feed] +func FeedWrapper(raw sdk.CapDefinition[Feed]) FeedCap { + wrapped, ok := raw.(FeedCap) + if ok { + return wrapped + } + return &feedCap{CapDefinition: raw} } type FeedCap interface { @@ -31,27 +41,25 @@ type FeedCap interface { private() } -// FeedCapFromStep should only be called from generated code to assure type safety -func FeedCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[Feed]) FeedCap { - raw := step.AddTo(w) - return &feed{CapDefinition: raw} -} - -type feed struct { +type feedCap struct { sdk.CapDefinition[Feed] } -func (*feed) private() {} -func (c *feed) Metadata() SignersMetadataCap { - return &signersMetadata{CapDefinition: sdk.AccessField[Feed, SignersMetadata](c.CapDefinition, "Metadata")} +func (*feedCap) private() {} +func (c *feedCap) Metadata() SignersMetadataCap { + return SignersMetadataWrapper(sdk.AccessField[Feed, SignersMetadata](c.CapDefinition, "Metadata")) } -func (c *feed) Payload() sdk.CapDefinition[[]FeedReport] { +func (c *feedCap) Payload() sdk.CapDefinition[[]FeedReport] { return sdk.AccessField[Feed, []FeedReport](c.CapDefinition, "Payload") } -func (c *feed) Timestamp() sdk.CapDefinition[int64] { +func (c *feedCap) Timestamp() sdk.CapDefinition[int64] { return sdk.AccessField[Feed, int64](c.CapDefinition, "Timestamp") } +func ConstantFeed(value Feed) FeedCap 
{ + return &feedCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedFromFields( metadata SignersMetadataCap, payload sdk.CapDefinition[[]FeedReport], @@ -87,8 +95,26 @@ func (c *simpleFeed) Timestamp() sdk.CapDefinition[int64] { func (c *simpleFeed) private() {} +// FeedIdWrapper allows access to field from an sdk.CapDefinition[FeedId] +func FeedIdWrapper(raw sdk.CapDefinition[FeedId]) FeedIdCap { + wrapped, ok := raw.(FeedIdCap) + if ok { + return wrapped + } + return FeedIdCap(raw) +} + type FeedIdCap sdk.CapDefinition[FeedId] +// FeedReportWrapper allows access to field from an sdk.CapDefinition[FeedReport] +func FeedReportWrapper(raw sdk.CapDefinition[FeedReport]) FeedReportCap { + wrapped, ok := raw.(FeedReportCap) + if ok { + return wrapped + } + return &feedReportCap{CapDefinition: raw} +} + type FeedReportCap interface { sdk.CapDefinition[FeedReport] BenchmarkPrice() sdk.CapDefinition[[]uint8] @@ -100,36 +126,34 @@ type FeedReportCap interface { private() } -// FeedReportCapFromStep should only be called from generated code to assure type safety -func FeedReportCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[FeedReport]) FeedReportCap { - raw := step.AddTo(w) - return &feedReport{CapDefinition: raw} -} - -type feedReport struct { +type feedReportCap struct { sdk.CapDefinition[FeedReport] } -func (*feedReport) private() {} -func (c *feedReport) BenchmarkPrice() sdk.CapDefinition[[]uint8] { +func (*feedReportCap) private() {} +func (c *feedReportCap) BenchmarkPrice() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "BenchmarkPrice") } -func (c *feedReport) FeedID() FeedIdCap { - return FeedIdCap(sdk.AccessField[FeedReport, FeedId](c.CapDefinition, "FeedID")) +func (c *feedReportCap) FeedID() FeedIdCap { + return FeedIdWrapper(sdk.AccessField[FeedReport, FeedId](c.CapDefinition, "FeedID")) } -func (c *feedReport) FullReport() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) FullReport() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "FullReport") } -func (c *feedReport) ObservationTimestamp() sdk.CapDefinition[int64] { +func (c *feedReportCap) ObservationTimestamp() sdk.CapDefinition[int64] { return sdk.AccessField[FeedReport, int64](c.CapDefinition, "ObservationTimestamp") } -func (c *feedReport) ReportContext() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) ReportContext() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "ReportContext") } -func (c *feedReport) Signatures() sdk.CapDefinition[[][]uint8] { +func (c *feedReportCap) Signatures() sdk.CapDefinition[[][]uint8] { return sdk.AccessField[FeedReport, [][]uint8](c.CapDefinition, "Signatures") } +func ConstantFeedReport(value FeedReport) FeedReportCap { + return &feedReportCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedReportFromFields( benchmarkPrice sdk.CapDefinition[[]uint8], feedID FeedIdCap, @@ -186,6 +210,15 @@ func (c *simpleFeedReport) Signatures() sdk.CapDefinition[[][]uint8] { func (c *simpleFeedReport) private() {} +// SignersMetadataWrapper allows access to field from an sdk.CapDefinition[SignersMetadata] +func SignersMetadataWrapper(raw sdk.CapDefinition[SignersMetadata]) SignersMetadataCap { + wrapped, ok := raw.(SignersMetadataCap) + if ok { + return wrapped + } + return &signersMetadataCap{CapDefinition: raw} +} + type SignersMetadataCap interface { sdk.CapDefinition[SignersMetadata] MinRequiredSignatures() sdk.CapDefinition[int64] @@ 
-193,24 +226,22 @@
 	private()
 }
 
-// SignersMetadataCapFromStep should only be called from generated code to assure type safety
-func SignersMetadataCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[SignersMetadata]) SignersMetadataCap {
-	raw := step.AddTo(w)
-	return &signersMetadata{CapDefinition: raw}
-}
-
-type signersMetadata struct {
+type signersMetadataCap struct {
 	sdk.CapDefinition[SignersMetadata]
 }
 
-func (*signersMetadata) private() {}
-func (c *signersMetadata) MinRequiredSignatures() sdk.CapDefinition[int64] {
+func (*signersMetadataCap) private() {}
+func (c *signersMetadataCap) MinRequiredSignatures() sdk.CapDefinition[int64] {
 	return sdk.AccessField[SignersMetadata, int64](c.CapDefinition, "MinRequiredSignatures")
 }
 
-func (c *signersMetadata) Signers() sdk.CapDefinition[[]string] {
+func (c *signersMetadataCap) Signers() sdk.CapDefinition[[]string] {
 	return sdk.AccessField[SignersMetadata, []string](c.CapDefinition, "Signers")
 }
 
+func ConstantSignersMetadata(value SignersMetadata) SignersMetadataCap {
+	return &signersMetadataCap{CapDefinition: sdk.ConstantDefinition(value)}
+}
+
 func NewSignersMetadataFromFields(
 	minRequiredSignatures sdk.CapDefinition[int64],
 	signers sdk.CapDefinition[[]string]) SignersMetadataCap {
diff --git a/pkg/codec/byte_string_modifier.go b/pkg/codec/byte_string_modifier.go
new file mode 100644
index 000000000..153cc6e20
--- /dev/null
+++ b/pkg/codec/byte_string_modifier.go
@@ -0,0 +1,256 @@
+package codec
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/smartcontractkit/chainlink-common/pkg/types"
+)
+
+// AddressModifier defines the interface for encoding, decoding, and handling addresses.
+// This interface allows for chain-specific logic to be injected into the modifier without
+// modifying the common repository.
+type AddressModifier interface {
+	// EncodeAddress converts a byte array representing an address into its string form using chain-specific logic.
+	EncodeAddress([]byte) (string, error)
+	// DecodeAddress converts a string representation of an address back into its byte array form using chain-specific logic.
+	DecodeAddress(string) ([]byte, error)
+	// Length returns the expected byte length of the address for the specific chain.
+	Length() int
+}
+
+// NewAddressBytesToStringModifier creates and returns a new modifier that transforms address byte
+// arrays to their corresponding string representation (or vice versa) based on the provided
+// AddressModifier.
+//
+// The fields parameter specifies which fields within a struct should be modified. The AddressModifier
+// is injected to handle the chain-specific logic and is supplied during contract reader relayer configuration.
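+//
+// For illustration only, a minimal AddressModifier for 20-byte, hex-encoded
+// addresses and its use might look like this (hexAddressModifier is a
+// hypothetical example, not part of this package):
+//
+//	type hexAddressModifier struct{}
+//
+//	func (hexAddressModifier) EncodeAddress(b []byte) (string, error) {
+//		return "0x" + hex.EncodeToString(b), nil
+//	}
+//
+//	func (hexAddressModifier) DecodeAddress(s string) ([]byte, error) {
+//		return hex.DecodeString(strings.TrimPrefix(s, "0x"))
+//	}
+//
+//	func (hexAddressModifier) Length() int { return 20 }
+//
+//	mod := NewAddressBytesToStringModifier([]string{"Addr"}, hexAddressModifier{})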
+func NewAddressBytesToStringModifier(fields []string, modifier AddressModifier) Modifier { + // bool is a placeholder value + fieldMap := map[string]bool{} + for _, field := range fields { + fieldMap[field] = true + } + + m := &bytesToStringModifier{ + modifier: modifier, + modifierBase: modifierBase[bool]{ + fields: fieldMap, + onToOffChainType: map[reflect.Type]reflect.Type{}, + offToOnChainType: map[reflect.Type]reflect.Type{}, + }, + } + + // Modify field for input using the modifier to convert the byte array to string + m.modifyFieldForInput = func(_ string, field *reflect.StructField, _ string, _ bool) error { + t, err := createStringTypeForBytes(field.Type, field.Name, modifier.Length()) + if err != nil { + return err + } + field.Type = t + return nil + } + + return m +} + +type bytesToStringModifier struct { + // Injected modifier that contains chain-specific logic + modifier AddressModifier + modifierBase[bool] +} + +func (t *bytesToStringModifier) RetypeToOffChain(onChainType reflect.Type, _ string) (tpe reflect.Type, err error) { + defer func() { + // StructOf can panic if the fields are not valid + if r := recover(); r != nil { + tpe = nil + err = fmt.Errorf("%w: %v", types.ErrInvalidType, r) + } + }() + + // Attempt to retype using the shared functionality in modifierBase + offChainType, err := t.modifierBase.RetypeToOffChain(onChainType, "") + if err != nil { + // Handle additional cases specific to bytesToStringModifier + if onChainType.Kind() == reflect.Array { + addrType := reflect.ArrayOf(t.modifier.Length(), reflect.TypeOf(byte(0))) + // Check for nested byte arrays (e.g., [n][20]byte) + if onChainType.Elem() == addrType.Elem() { + return reflect.ArrayOf(onChainType.Len(), reflect.TypeOf("")), nil + } + } + } + + return offChainType, err +} + +// TransformToOnChain uses the AddressModifier for string-to-address conversion. +func (t *bytesToStringModifier) TransformToOnChain(offChainValue any, _ string) (any, error) { + return transformWithMaps(offChainValue, t.offToOnChainType, t.fields, noop, stringToAddressHookForOnChain(t.modifier)) +} + +// TransformToOffChain uses the AddressModifier for address-to-string conversion. +func (t *bytesToStringModifier) TransformToOffChain(onChainValue any, _ string) (any, error) { + return transformWithMaps(onChainValue, t.onToOffChainType, t.fields, + addressTransformationAction(t.modifier.Length()), + addressToStringHookForOffChain(t.modifier), + ) +} + +// addressTransformationAction performs conversions over the fields we want to modify. +// It handles byte arrays, ensuring they are convertible to the expected length. +// It then replaces the field in the map with the transformed value. 
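+//
+// For example, given a hypothetical named type Bytes20AddressType [20]byte and a
+// field "Addr" selected for modification, the action normalizes the value to a
+// plain byte array (a sketch, not a test from this change):
+//
+//	em := map[string]any{"Addr": Bytes20AddressType{1, 2, 3}}
+//	err := addressTransformationAction(20)(em, "Addr", false)
+//	// em["Addr"] now holds a plain [20]byte, ready for addressToStringHookForOffChain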
+func addressTransformationAction(length int) func(extractMap map[string]any, key string, _ bool) error { + return func(em map[string]any, fieldName string, _ bool) error { + if val, ok := em[fieldName]; ok { + rVal := reflect.ValueOf(val) + + if !rVal.IsValid() { + return fmt.Errorf("invalid value for field %s", fieldName) + } + + if rVal.Kind() == reflect.Ptr && !rVal.IsNil() { + rVal = reflect.Indirect(rVal) + } + + expectedType := reflect.ArrayOf(length, reflect.TypeOf(byte(0))) + if rVal.Type().ConvertibleTo(expectedType) { + if !rVal.CanConvert(expectedType) { + return fmt.Errorf("cannot convert type %v to expected type %v for field %s", rVal.Type(), expectedType, fieldName) + } + rVal = rVal.Convert(expectedType) + } + + switch rVal.Kind() { + case reflect.Array: + // Handle outer arrays (e.g., [n][length]byte) + if rVal.Type().Elem().Kind() == reflect.Array && rVal.Type().Elem().Len() == length { + addressArray := reflect.New(reflect.ArrayOf(rVal.Len(), expectedType)).Elem() + for i := 0; i < rVal.Len(); i++ { + elem := rVal.Index(i) + if elem.Len() != length { + return fmt.Errorf("expected [%d]byte but got length %d for element %d in field %s", length, elem.Len(), i, fieldName) + } + reflect.Copy(addressArray.Index(i), elem) + } + em[fieldName] = addressArray.Interface() + } else if rVal.Type() == expectedType { + // Handle a single array (e.g., [length]byte) + addressVal := reflect.New(expectedType).Elem() + reflect.Copy(addressVal, rVal) + em[fieldName] = addressVal.Interface() + } else { + return fmt.Errorf("expected [%d]byte but got %v for field %s", length, rVal.Type(), fieldName) + } + case reflect.Slice: + // Handle slices of byte arrays (e.g., [][length]byte) + if rVal.Len() > 0 && rVal.Index(0).Type() == expectedType { + addressSlice := reflect.MakeSlice(reflect.SliceOf(expectedType), rVal.Len(), rVal.Len()) + for i := 0; i < rVal.Len(); i++ { + elem := rVal.Index(i) + if elem.Len() != length { + return fmt.Errorf("expected element of [%d]byte but got length %d at index %d for field %s", length, elem.Len(), i, fieldName) + } + reflect.Copy(addressSlice.Index(i), elem) + } + em[fieldName] = addressSlice.Interface() + } else { + return fmt.Errorf("expected slice of [%d]byte but got %v for field %s", length, rVal.Type(), fieldName) + } + default: + return fmt.Errorf("unexpected type %v for field %s", rVal.Kind(), fieldName) + } + } + return nil + } +} + +// createStringTypeForBytes converts a byte array, pointer, or slice type to a string type for a given field. +// This function inspects the kind of the input type (array, pointer, slice) and performs the conversion +// if the element type matches the specified byte array length. Returns an error if the conversion is not possible. 
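+//
+// For length 20, the retyping works out as follows (illustrative sketch):
+//
+//	[20]byte    -> string
+//	*[20]byte   -> string
+//	[2][20]byte -> [2]string
+//	[][20]byte  -> []string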
+func createStringTypeForBytes(t reflect.Type, field string, length int) (reflect.Type, error) { + switch t.Kind() { + case reflect.Pointer: + return createStringTypeForBytes(t.Elem(), field, length) + + case reflect.Array: + // Handle arrays, convert array of bytes to array of strings + if t.Elem().Kind() == reflect.Uint8 && t.Len() == length { + return reflect.TypeOf(""), nil + } else if t.Elem().Kind() == reflect.Array && t.Elem().Len() == length { + // Handle nested arrays (e.g., [2][20]byte to [2]string) + return reflect.ArrayOf(t.Len(), reflect.TypeOf("")), nil + } + return nil, fmt.Errorf("%w: cannot convert bytes for field %s", types.ErrInvalidType, field) + + case reflect.Slice: + // Handle slices of byte arrays, convert to slice of strings + if t.Elem().Kind() == reflect.Array && t.Elem().Len() == length { + return reflect.SliceOf(reflect.TypeOf("")), nil + } + return nil, fmt.Errorf("%w: cannot convert bytes for field %s", types.ErrInvalidType, field) + + default: + return nil, fmt.Errorf("%w: cannot convert bytes for field %s", types.ErrInvalidType, field) + } +} + +// stringToAddressHookForOnChain converts a string representation of an address back into a byte array for on-chain use. +func stringToAddressHookForOnChain(modifier AddressModifier) func(from reflect.Type, to reflect.Type, data any) (any, error) { + return func(from reflect.Type, to reflect.Type, data any) (any, error) { + byteArrTyp := reflect.ArrayOf(modifier.Length(), reflect.TypeOf(byte(0))) + strTyp := reflect.TypeOf("") + + // Convert from string to byte array (e.g., string -> [20]byte) + if from == strTyp && (to == byteArrTyp || to.ConvertibleTo(byteArrTyp)) { + addr, ok := data.(string) + if !ok { + return nil, fmt.Errorf("invalid type: expected string but got %T", data) + } + + bts, err := modifier.DecodeAddress(addr) + if err != nil { + return nil, err + } + + if len(bts) != modifier.Length() { + return nil, fmt.Errorf("length mismatch: expected %d bytes, got %d", modifier.Length(), len(bts)) + } + + val := reflect.New(byteArrTyp).Elem() + reflect.Copy(val, reflect.ValueOf(bts)) + return val.Interface(), nil + } + return data, nil + } +} + +// addressToStringHookForOffChain converts byte arrays to their string representation for off-chain use. 
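+//
+// For example, reusing the hypothetical hexAddressModifier sketched above:
+//
+//	hook := addressToStringHookForOffChain(hexAddressModifier{})
+//	out, err := hook(reflect.TypeOf([20]byte{}), reflect.TypeOf(""), [20]byte{0xab})
+//	// out is "0xab00…00" (the 20 bytes hex-encoded) and err is nil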
+func addressToStringHookForOffChain(modifier AddressModifier) func(from reflect.Type, to reflect.Type, data any) (any, error) { + return func(from reflect.Type, to reflect.Type, data any) (any, error) { + byteArrTyp := reflect.ArrayOf(modifier.Length(), reflect.TypeOf(byte(0))) + strTyp := reflect.TypeOf("") + rVal := reflect.ValueOf(data) + + if !reflect.ValueOf(data).IsValid() { + return nil, fmt.Errorf("invalid value for conversion: got %T", data) + } + + // Convert from byte array to string (e.g., [20]byte -> string) + if from.ConvertibleTo(byteArrTyp) && to == strTyp { + bts := make([]byte, rVal.Len()) + for i := 0; i < rVal.Len(); i++ { + bts[i] = byte(rVal.Index(i).Uint()) + } + + encoded, err := modifier.EncodeAddress(bts) + if err != nil { + return nil, fmt.Errorf("failed to encode address: %w", err) + } + + return encoded, nil + } + return data, nil + } +} diff --git a/pkg/codec/byte_string_modifier_test.go b/pkg/codec/byte_string_modifier_test.go new file mode 100644 index 000000000..f6fdafed0 --- /dev/null +++ b/pkg/codec/byte_string_modifier_test.go @@ -0,0 +1,341 @@ +package codec_test + +import ( + "encoding/hex" + "errors" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/codec" +) + +// MockAddressModifier is a mock implementation of the AddressModifier interface. +type MockAddressModifier struct { + length int +} + +func (m MockAddressModifier) EncodeAddress(bytes []byte) (string, error) { + return "0x" + hex.EncodeToString(bytes), nil +} + +func (m MockAddressModifier) DecodeAddress(str string) ([]byte, error) { + if len(str) == 0 { + return nil, errors.New("empty address") + } + return hex.DecodeString(str[2:]) // Skip the "0x" prefix for hex encoding +} + +func (m MockAddressModifier) Length() int { + return m.length +} + +func TestAddressBytesToString(t *testing.T) { + // Mocking AddressModifier for 20-byte addresses + mockModifier := MockAddressModifier{length: 20} + + type concreteStruct struct { + A string + T [20]byte + } + + type concreteStructWithLargeAddress struct { + A string + T [20]byte + } + + type pointerStruct struct { + A string + T *[20]byte + } + + type arrayStruct struct { + A string + T [2][20]byte + } + + type sliceStruct struct { + A string + T [][20]byte + } + + concretest := reflect.TypeOf(&concreteStruct{}) + concreteLargest := reflect.TypeOf(&concreteStructWithLargeAddress{}) + pointertst := reflect.TypeOf(&pointerStruct{}) + arrayst := reflect.TypeOf(&arrayStruct{}) + slicest := reflect.TypeOf(&sliceStruct{}) + + type Bytes20AddressType [20]byte + + type otherIntegerType struct { + A string + T Bytes20AddressType + } + + type pointerOtherIntegerType struct { + A string + T *Bytes20AddressType + } + oit := reflect.TypeOf(&otherIntegerType{}) + oitpt := reflect.TypeOf(&pointerOtherIntegerType{}) + + testAddrBytes := [20]byte{} + testAddrStr := "0x" + hex.EncodeToString(testAddrBytes[:]) + anyString := "test" + + t.Run("RetypeToOffChain converts fixed length bytes to string", func(t *testing.T) { + for _, test := range []struct { + name string + tp reflect.Type + }{ + {"[20]byte", concretest}, + {"typed address", oit}, + {"[20]byte pointer", pointertst}, + {"*typed address", oitpt}, + } { + t.Run(test.name, func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + convertedType, err := converter.RetypeToOffChain(test.tp, "") + + require.NoError(t, err) + assert.Equal(t, reflect.Pointer, 
convertedType.Kind()) + convertedType = convertedType.Elem() + + require.Equal(t, 2, convertedType.NumField()) + assert.Equal(t, test.tp.Elem().Field(0), convertedType.Field(0)) + assert.Equal(t, test.tp.Elem().Field(1).Name, convertedType.Field(1).Name) + assert.Equal(t, reflect.TypeOf(""), convertedType.Field(1).Type) + }) + } + }) + + t.Run("RetypeToOffChain converts arrays of fixed length bytes to array of string", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(arrayst, "") + require.NoError(t, err) + assert.Equal(t, reflect.Pointer, convertedType.Kind()) + convertedType = convertedType.Elem() + + require.Equal(t, 2, convertedType.NumField()) + assert.Equal(t, arrayst.Elem().Field(0), convertedType.Field(0)) + assert.Equal(t, reflect.TypeOf([2]string{}), convertedType.Field(1).Type) + }) + + t.Run("RetypeToOffChain converts slices of fixed length bytes to slices of string", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(slicest, "") + require.NoError(t, err) + assert.Equal(t, reflect.Pointer, convertedType.Kind()) + convertedType = convertedType.Elem() + + require.Equal(t, 2, convertedType.NumField()) + assert.Equal(t, slicest.Elem().Field(0), convertedType.Field(0)) + assert.Equal(t, reflect.TypeOf([]string{}), convertedType.Field(1).Type) + }) + + t.Run("TransformToOnChain converts string to bytes", func(t *testing.T) { + for _, test := range []struct { + name string + t reflect.Type + expected any + }{ + {"[20]byte", concretest, &concreteStruct{A: anyString, T: [20]byte{}}}, + {"*[20]byte", pointertst, &pointerStruct{A: anyString, T: &[20]byte{}}}, + {"typed address", oit, &otherIntegerType{A: anyString, T: Bytes20AddressType{}}}, + {"*typed address", oitpt, &pointerOtherIntegerType{A: anyString, T: &Bytes20AddressType{}}}, + } { + t.Run(test.name, func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + convertedType, err := converter.RetypeToOffChain(test.t, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + iOffChain.FieldByName("A").SetString(anyString) + iOffChain.FieldByName("T").Set(reflect.ValueOf(testAddrStr)) + + actual, err := converter.TransformToOnChain(rOffchain.Interface(), "") + require.NoError(t, err) + + assert.Equal(t, test.expected, actual) + }) + } + }) + + t.Run("TransformToOnChain converts string array to array of fixed length bytes", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(arrayst, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + + arrayValue := [2]string{testAddrStr, testAddrStr} + + iOffChain.FieldByName("T").Set(reflect.ValueOf(arrayValue)) + + actual, err := converter.TransformToOnChain(rOffchain.Interface(), "") + require.NoError(t, err) + + expected := &arrayStruct{A: "", T: [2][20]byte{}} + assert.Equal(t, expected, actual) + }) + + t.Run("TransformToOnChain converts string slice to slice of [length]byte", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(slicest, "") + require.NoError(t, err) + + rOffchain := 
reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + + iOffChain.FieldByName("T").Set(reflect.ValueOf([]string{testAddrStr, testAddrStr})) + + actual, err := converter.TransformToOnChain(rOffchain.Interface(), "") + require.NoError(t, err) + + expected := &sliceStruct{ + A: "", + T: [][20]byte{ + testAddrBytes, + testAddrBytes, + }, + } + + assert.Equal(t, expected, actual) + }) + + t.Run("TransformToOnChain returns error on invalid inputs", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + tests := []struct { + name string + addrStr string + structType reflect.Type + }{ + { + name: "Invalid length input", + addrStr: "0x" + hex.EncodeToString([]byte{1, 2, 3}), + structType: concretest, + }, + { + name: "Larger than expected input", + addrStr: "0x" + hex.EncodeToString(make([]byte, 40)), + structType: concreteLargest, + }, + { + name: "Empty string input", + addrStr: "", + structType: concretest, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + convertedType, err := converter.RetypeToOffChain(tt.structType, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + iOffChain.FieldByName("A").SetString(anyString) + iOffChain.FieldByName("T").Set(reflect.ValueOf(tt.addrStr)) + + _, err = converter.TransformToOnChain(rOffchain.Interface(), "") + require.Error(t, err) + }) + } + }) + + t.Run("TransformToOffChain converts bytes to string", func(t *testing.T) { + for _, test := range []struct { + name string + t reflect.Type + offChain any + }{ + {"[20]byte", concretest, &concreteStruct{A: anyString, T: [20]byte{}}}, + {"*[20]byte", pointertst, &pointerStruct{A: anyString, T: &[20]byte{}}}, + {"typed address", oit, &otherIntegerType{A: anyString, T: Bytes20AddressType{}}}, + {"*typed address", oitpt, &pointerOtherIntegerType{A: anyString, T: &Bytes20AddressType{}}}, + } { + t.Run(test.name, func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + convertedType, err := converter.RetypeToOffChain(test.t, "") + require.NoError(t, err) + + actual, err := converter.TransformToOffChain(test.offChain, "") + require.NoError(t, err) + + expected := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(expected) + iOffChain.FieldByName("A").SetString(anyString) + iOffChain.FieldByName("T").Set(reflect.ValueOf(testAddrStr)) + assert.Equal(t, expected.Interface(), actual) + }) + } + }) + + t.Run("TransformToOffChain converts array of bytes to string array", func(t *testing.T) { + converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier) + + convertedType, err := converter.RetypeToOffChain(arrayst, "") + require.NoError(t, err) + + rOffchain := reflect.New(convertedType.Elem()) + iOffChain := reflect.Indirect(rOffchain) + expectedAddrs := [2]string{testAddrStr, testAddrStr} + iOffChain.FieldByName("T").Set(reflect.ValueOf(expectedAddrs)) + + actual, err := converter.TransformToOffChain(&arrayStruct{A: anyString, T: [2][20]byte{}}, "") + require.NoError(t, err) + + expected := reflect.New(convertedType.Elem()) + iExpected := reflect.Indirect(expected) + iExpected.FieldByName("A").SetString(anyString) + iExpected.FieldByName("T").Set(reflect.ValueOf(expectedAddrs)) + assert.Equal(t, expected.Interface(), actual) + }) + + t.Run("TransformToOffChain converts slice bytes to string slice", func(t *testing.T) { + converter := 
codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier)
+
+		convertedType, err := converter.RetypeToOffChain(slicest, "")
+		require.NoError(t, err)
+
+		rOffchain := reflect.New(convertedType.Elem())
+		iOffChain := reflect.Indirect(rOffchain)
+		expectedAddrs := []string{testAddrStr, testAddrStr}
+		iOffChain.FieldByName("T").Set(reflect.ValueOf(expectedAddrs))
+
+		actual, err := converter.TransformToOffChain(&sliceStruct{
+			A: anyString,
+			T: [][20]byte{testAddrBytes, testAddrBytes},
+		}, "")
+		require.NoError(t, err)
+
+		expected := reflect.New(convertedType.Elem())
+		iExpected := reflect.Indirect(expected)
+		iExpected.FieldByName("A").SetString(anyString)
+		iExpected.FieldByName("T").Set(reflect.ValueOf(expectedAddrs))
+		assert.Equal(t, expected.Interface(), actual)
+	})
+
+	t.Run("Unsupported field type returns error", func(t *testing.T) {
+		converter := codec.NewAddressBytesToStringModifier([]string{"T"}, mockModifier)
+
+		unsupportedStruct := struct {
+			A string
+			T int // Unsupported type
+		}{}
+
+		// We expect RetypeToOffChain to return an error because 'T' is not a supported type.
+		_, err := converter.RetypeToOffChain(reflect.TypeOf(&unsupportedStruct), "")
+		require.Error(t, err)
+		assert.Contains(t, err.Error(), "cannot convert bytes for field T")
+	})
+}
diff --git a/pkg/codec/config.go b/pkg/codec/config.go
index d7581fcb7..a2fe37101 100644
--- a/pkg/codec/config.go
+++ b/pkg/codec/config.go
@@ -22,6 +22,8 @@ import (
 // - hard code -> [HardCodeModifierConfig]
 // - extract element -> [ElementExtractorModifierConfig]
 // - epoch to time -> [EpochToTimeModifierConfig]
+// - address to string -> [AddressBytesToStringModifierConfig]
+// - field wrapper -> [WrapperModifierConfig]
 type ModifiersConfig []ModifierConfig
 
 func (m *ModifiersConfig) UnmarshalJSON(data []byte) error {
@@ -52,6 +54,10 @@ func (m *ModifiersConfig) UnmarshalJSON(data []byte) error {
 			(*m)[i] = &EpochToTimeModifierConfig{}
 		case ModifierExtractProperty:
 			(*m)[i] = &PropertyExtractorConfig{}
+		case ModifierAddressToString:
+			(*m)[i] = &AddressBytesToStringModifierConfig{}
+		case ModifierWrapper:
+			(*m)[i] = &WrapperModifierConfig{}
 		default:
 			return fmt.Errorf("%w: unknown modifier type: %s", types.ErrInvalidConfig, mType)
 		}
@@ -84,6 +90,8 @@ const (
 	ModifierExtractElement  ModifierType = "extract element"
 	ModifierEpochToTime     ModifierType = "epoch to time"
 	ModifierExtractProperty ModifierType = "extract property"
+	ModifierAddressToString ModifierType = "address to string"
+	ModifierWrapper         ModifierType = "wrapper"
 )
 
 type ModifierConfig interface {
@@ -225,6 +233,99 @@ func (c *PropertyExtractorConfig) MarshalJSON() ([]byte, error) {
 	})
 }
 
+// AddressBytesToStringModifierConfig is used to transform address byte fields into string fields.
+// It holds the list of fields that should be modified and the chain-specific logic used to modify them.
+type AddressBytesToStringModifierConfig struct {
+	Fields []string
+	// Modifier is skipped in JSON serialization, will be injected later.
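+	//
+	// A JSON config selecting this modifier for a hypothetical field "Addr"
+	// might look like (illustrative sketch):
+	//
+	//	[{"Type": "address to string", "Fields": ["Addr"]}]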
+	Modifier AddressModifier `json:"-"`
+}
+
+func (c *AddressBytesToStringModifierConfig) ToModifier(_ ...mapstructure.DecodeHookFunc) (Modifier, error) {
+	return NewAddressBytesToStringModifier(c.Fields, c.Modifier), nil
+}
+
+func (c *AddressBytesToStringModifierConfig) MarshalJSON() ([]byte, error) {
+	return json.Marshal(&modifierMarshaller[AddressBytesToStringModifierConfig]{
+		Type: ModifierAddressToString,
+		T:    c,
+	})
+}
+
+// WrapperModifierConfig wraps each configured field in a struct with a single subfield:
+// the config map key selects the field to wrap, and the map value names the subfield
+// that holds the original value.
+// The wrapper modifier does not maintain the original pointers.
+// The config shouldn't wrap fields that affect each other, since the results are not deterministic.
+//
+// Example #1:
+//
+// Based on this input struct:
+//
+//	type example struct {
+//		A string
+//	}
+//
+// and the wrapper config defined as:
+//
+//	{"A": "W"}
+//
+// the field A is retyped to a struct that contains the original value under the name W:
+//
+//	type example struct {
+//		A struct {
+//			W string
+//		}
+//	}
+//
+// Example #2:
+// The wrapper modifier works on any type of field, including nested fields and fields nested in slices.
+//
+// Based on this input struct:
+//
+//	type example struct {
+//		A []B
+//	}
+//
+//	type B struct {
+//		C string
+//		D string
+//	}
+//
+// and the wrapper config defined as:
+//
+//	{"A.C": "E", "A.D": "F"}
+//
+// the fields of B are retyped to:
+//
+//	type B struct {
+//		C struct{ E string }
+//		D struct{ F string }
+//	}
+//
+// where each element of slice A retains the values of the input fields A.C and A.D
+// under C.E and D.F respectively.
+type WrapperModifierConfig struct {
+	// Fields maps each field to be wrapped to the name of its wrapper subfield.
+	// The original field becomes a subfield of the wrapper struct, named after the map value.
+	Fields map[string]string
+}
+
+func (r *WrapperModifierConfig) ToModifier(_ ...mapstructure.DecodeHookFunc) (Modifier, error) {
+	return NewWrapperModifier(r.Fields), nil
+}
+
+func (r *WrapperModifierConfig) MarshalJSON() ([]byte, error) {
+	return json.Marshal(&modifierMarshaller[WrapperModifierConfig]{
+		Type: ModifierWrapper,
+		T:    r,
+	})
+}
+
 type typer struct {
 	Type string
 }
diff --git a/pkg/codec/encodings/type_codec_test.go b/pkg/codec/encodings/type_codec_test.go
index d7762fe61..19dadff2c 100644
--- a/pkg/codec/encodings/type_codec_test.go
+++ b/pkg/codec/encodings/type_codec_test.go
@@ -133,6 +133,10 @@ func (*interfaceTesterBase) GetAccountBytes(i int) []byte {
 	return []byte{ib, ib + 1, ib + 2, ib + 3, ib + 4, ib + 5, ib + 6, ib + 7}
 }
 
+func (t *interfaceTesterBase) GetAccountString(i int) string {
+	return string(t.GetAccountBytes(i))
+}
+
 type bigEndianInterfaceTester struct {
 	interfaceTesterBase
 	lenient bool
@@ -168,8 +172,10 @@ func (b *bigEndianInterfaceTester) encode(t *testing.T, bytes []byte, ts TestStr
 	for _, oid := range ts.OracleIDs {
 		bytes = append(bytes, byte(oid))
 	}
-	bytes = append(bytes, byte(len(ts.Account)))
-	bytes = append(bytes, ts.Account...)
+	bytes = append(bytes, byte(len(ts.AccountStruct.Account)))
+	bytes = append(bytes, ts.AccountStruct.Account...)
+ bytes = rawbin.BigEndian.AppendUint32(bytes, uint32(len(ts.AccountStruct.AccountStr))) + bytes = append(bytes, []byte(ts.AccountStruct.AccountStr)...) bytes = append(bytes, byte(len(ts.Accounts))) for _, account := range ts.Accounts { bytes = append(bytes, byte(len(account))) @@ -234,6 +240,12 @@ func newTestStructCodec(t *testing.T, builder encodings.Builder) encodings.TypeC }) require.NoError(t, err) + accountStructCodec, err := encodings.NewStructCodec([]encodings.NamedTypeCodec{ + {Name: "Account", Codec: acc}, + {Name: "AccountStr", Codec: sCodec}, + }) + require.NoError(t, err) + oIDs, err := encodings.NewArray(32, builder.OracleID()) require.NoError(t, err) @@ -248,7 +260,7 @@ func newTestStructCodec(t *testing.T, builder encodings.Builder) encodings.TypeC {Name: "DifferentField", Codec: sCodec}, {Name: "OracleID", Codec: builder.OracleID()}, {Name: "OracleIDs", Codec: oIDs}, - {Name: "Account", Codec: acc}, + {Name: "AccountStruct", Codec: accountStructCodec}, {Name: "Accounts", Codec: accs}, {Name: "BigField", Codec: bi}, {Name: "NestedDynamicStruct", Codec: midDynamicCodec}, @@ -288,8 +300,8 @@ func (b *bigEndianInterfaceTester) GetCodec(t *testing.T) types.Codec { } mod, err := codec.NewHardCoder(map[string]any{ - "BigField": ts.BigField.String(), - "Account": ts.Account, + "BigField": ts.BigField.String(), + "AccountStruct.Account": ts.AccountStruct.Account, }, map[string]any{"ExtraField": AnyExtraValue}, codec.BigIntHook) require.NoError(t, err) diff --git a/pkg/codec/example_test.go b/pkg/codec/example_test.go index 54f4f93e1..96a6fa032 100644 --- a/pkg/codec/example_test.go +++ b/pkg/codec/example_test.go @@ -36,7 +36,7 @@ func (ExampleStructJSONCodec) GetMaxEncodingSize(_ context.Context, n int, _ str func (ExampleStructJSONCodec) Decode(_ context.Context, raw []byte, into any, _ string) error { err := json.Unmarshal(raw, into) if err != nil { - return fmt.Errorf("%w: %s", types.ErrInvalidType, err) + return fmt.Errorf("%w: %w", types.ErrInvalidType, err) } return nil } diff --git a/pkg/codec/modifier_base.go b/pkg/codec/modifier_base.go index 6c7285e9b..8a092fe9b 100644 --- a/pkg/codec/modifier_base.go +++ b/pkg/codec/modifier_base.go @@ -37,6 +37,7 @@ func (m *modifierBase[T]) RetypeToOffChain(onChainType reflect.Type, itemType st return cached, nil } + var offChainType reflect.Type switch onChainType.Kind() { case reflect.Pointer: elm, err := m.RetypeToOffChain(onChainType.Elem(), "") @@ -44,35 +45,30 @@ func (m *modifierBase[T]) RetypeToOffChain(onChainType reflect.Type, itemType st return nil, err } - ptr := reflect.PointerTo(elm) - m.onToOffChainType[onChainType] = ptr - m.offToOnChainType[ptr] = onChainType - return ptr, nil + offChainType = reflect.PointerTo(elm) case reflect.Slice: elm, err := m.RetypeToOffChain(onChainType.Elem(), "") if err != nil { return nil, err } - sliceType := reflect.SliceOf(elm) - m.onToOffChainType[onChainType] = sliceType - m.offToOnChainType[sliceType] = onChainType - return sliceType, nil + offChainType = reflect.SliceOf(elm) case reflect.Array: elm, err := m.RetypeToOffChain(onChainType.Elem(), "") if err != nil { return nil, err } - arrayType := reflect.ArrayOf(onChainType.Len(), elm) - m.onToOffChainType[onChainType] = arrayType - m.offToOnChainType[arrayType] = onChainType - return arrayType, nil + offChainType = reflect.ArrayOf(onChainType.Len(), elm) case reflect.Struct: return m.getStructType(onChainType) default: return nil, fmt.Errorf("%w: cannot retype the kind %v", types.ErrInvalidType, onChainType.Kind()) } + + 
m.onToOffChainType[onChainType] = offChainType + m.offToOnChainType[offChainType] = onChainType + return offChainType, nil } func (m *modifierBase[T]) getStructType(outputType reflect.Type) (reflect.Type, error) { diff --git a/pkg/codec/wrapper.go b/pkg/codec/wrapper.go new file mode 100644 index 000000000..dd1061244 --- /dev/null +++ b/pkg/codec/wrapper.go @@ -0,0 +1,62 @@ +package codec + +import ( + "fmt" + "reflect" +) + +func NewWrapperModifier(fields map[string]string) Modifier { + m := &wrapperModifier{ + modifierBase: modifierBase[string]{ + fields: fields, + onToOffChainType: map[reflect.Type]reflect.Type{}, + offToOnChainType: map[reflect.Type]reflect.Type{}, + }, + } + + m.modifyFieldForInput = func(_ string, field *reflect.StructField, _ string, fieldName string) error { + field.Type = reflect.StructOf([]reflect.StructField{{ + Name: fieldName, + Type: field.Type, + }}) + return nil + } + + return m +} + +type wrapperModifier struct { + modifierBase[string] +} + +func (t *wrapperModifier) TransformToOnChain(offChainValue any, _ string) (any, error) { + return transformWithMaps(offChainValue, t.offToOnChainType, t.fields, unwrapFieldMapAction) +} + +func (t *wrapperModifier) TransformToOffChain(onChainValue any, _ string) (any, error) { + return transformWithMaps(onChainValue, t.onToOffChainType, t.fields, wrapFieldMapAction) +} + +func wrapFieldMapAction(typesMap map[string]any, fieldName string, wrappedFieldName string) error { + field, exists := typesMap[fieldName] + if !exists { + return fmt.Errorf("field %s does not exist", fieldName) + } + + typesMap[fieldName] = map[string]any{wrappedFieldName: field} + return nil +} + +func unwrapFieldMapAction(typesMap map[string]any, fieldName string, wrappedFieldName string) error { + _, exists := typesMap[fieldName] + if !exists { + return fmt.Errorf("field %s does not exist", fieldName) + } + val, isOk := typesMap[fieldName].(map[string]any)[wrappedFieldName] + if !isOk { + return fmt.Errorf("field %s.%s does not exist", fieldName, wrappedFieldName) + } + + typesMap[fieldName] = val + return nil +} diff --git a/pkg/codec/wrapper_test.go b/pkg/codec/wrapper_test.go new file mode 100644 index 000000000..11bf148b6 --- /dev/null +++ b/pkg/codec/wrapper_test.go @@ -0,0 +1,390 @@ +package codec_test + +import ( + "errors" + "reflect" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-common/pkg/codec" + "github.com/smartcontractkit/chainlink-common/pkg/types" +) + +func TestWrapper(t *testing.T) { + t.Parallel() + + type testStruct struct { + A string + B int64 + C int64 + } + + type nestedTestStruct struct { + A string + B testStruct + C []testStruct + D string + } + + wrapper := codec.NewWrapperModifier(map[string]string{"A": "X", "C": "Z"}) + invalidWrapper := codec.NewWrapperModifier(map[string]string{"W": "X", "C": "Z"}) + nestedWrapper := codec.NewWrapperModifier(map[string]string{"A": "X", "B.A": "X", "B.C": "Z", "C.A": "X", "C.C": "Z"}) + t.Run("RetypeToOffChain works on slices", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([]testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Slice, offChainType.Kind()) + assertBasicWrapperTransform(t, offChainType.Elem()) + }) + + t.Run("RetypeToOffChain works on pointers", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Pointer, 
offChainType.Kind()) + assertBasicWrapperTransform(t, offChainType.Elem()) + }) + + t.Run("RetypeToOffChain works on pointers to non structs", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&[]testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Pointer, offChainType.Kind()) + assert.Equal(t, reflect.Slice, offChainType.Elem().Kind()) + assertBasicWrapperTransform(t, offChainType.Elem().Elem()) + }) + + t.Run("RetypeToOffChain works on arrays", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([2]testStruct{}), "") + require.NoError(t, err) + + assert.Equal(t, reflect.Array, offChainType.Kind()) + assert.Equal(t, 2, offChainType.Len()) + assertBasicWrapperTransform(t, offChainType.Elem()) + }) + + t.Run("RetypeToOffChain returns exception if a field is not on the type", func(t *testing.T) { + _, err := invalidWrapper.RetypeToOffChain(reflect.TypeOf(testStruct{}), "") + assert.True(t, errors.Is(err, types.ErrInvalidType)) + }) + + t.Run("RetypeToOffChain works on nested fields", func(t *testing.T) { + offChainType, err := nestedWrapper.RetypeToOffChain(reflect.TypeOf(nestedTestStruct{}), "") + require.NoError(t, err) + assert.Equal(t, 4, offChainType.NumField()) + + f0 := offChainType.Field(0) + f0PreRetype := reflect.TypeOf(nestedTestStruct{}).Field(0) + assert.Equal(t, wrapType("X", f0PreRetype.Type).String(), f0.Type.String()) + assert.Equal(t, "struct { A struct { X string }; B int64; C struct { Z int64 } }", offChainType.Field(1).Type.String()) + + f2 := offChainType.Field(2) + assert.Equal(t, reflect.Slice, f2.Type.Kind()) + assertBasicWrapperTransform(t, f2.Type.Elem()) + f3 := offChainType.Field(3) + assert.Equal(t, reflect.TypeOf(""), f3.Type) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on structs", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(testStruct{}), "") + require.NoError(t, err) + iOffchain := reflect.Indirect(reflect.New(offChainType)) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + + output, err := wrapper.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + expected := testStruct{ + A: "foo", + B: 10, + C: 20, + } + assert.Equal(t, expected, output) + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain returns error if input type was not from TransformToOnChain", func(t *testing.T) { + _, err := invalidWrapper.TransformToOnChain(testStruct{}, "") + assert.True(t, errors.Is(err, types.ErrInvalidType)) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on pointers, but doesn't maintain same addresses", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&testStruct{}), "") + require.NoError(t, err) + rInput := reflect.New(offChainType.Elem()) + iOffchain := reflect.Indirect(rInput) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + require.NoError(t, err) + + expected := &testStruct{ + A: "foo", + B: 10, + C: 20, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(output, "") + 
require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + + }) + + t.Run("TransformToOnChain and TransformToOffChain works on slices", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.MakeSlice(offChainType, 2, 2) + iOffchain := rInput.Index(0) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + iOffchain = rInput.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("baz") + iOffchain.FieldByName("B").SetInt(15) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(25) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + require.NoError(t, err) + + expected := []testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on nested slices", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([][]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.MakeSlice(offChainType, 2, 2) + rOuter := rInput.Index(0) + rOuter.Set(reflect.MakeSlice(rOuter.Type(), 2, 2)) + iOffchain := rOuter.Index(0) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + iOffchain = rOuter.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("baz") + iOffchain.FieldByName("B").SetInt(15) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(25) + rOuter = rInput.Index(1) + rOuter.Set(reflect.MakeSlice(rOuter.Type(), 2, 2)) + iOffchain = rOuter.Index(0) + iOffchain.FieldByName("A").FieldByName("X").SetString("fooz") + iOffchain.FieldByName("B").SetInt(100) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(200) + iOffchain = rOuter.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("bazz") + iOffchain.FieldByName("B").SetInt(150) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(250) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + require.NoError(t, err) + + expected := [][]testStruct{ + { + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + }, + { + { + A: "fooz", + B: 100, + C: 200, + }, + { + A: "bazz", + B: 150, + C: 250, + }, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on pointers to non structs", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf(&[]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.New(offChainType.Elem()) + rElm := reflect.MakeSlice(offChainType.Elem(), 2, 2) + iElm := rElm.Index(0) + iElm.FieldByName("A").FieldByName("X").SetString("foo") + iElm.FieldByName("B").SetInt(10) + iElm.FieldByName("C").FieldByName("Z").SetInt(20) + iElm = rElm.Index(1) + iElm.FieldByName("A").FieldByName("X").SetString("baz") + iElm.FieldByName("B").SetInt(15) + iElm.FieldByName("C").FieldByName("Z").SetInt(25) + reflect.Indirect(rInput).Set(rElm) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + 
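// the on-chain result should be a pointer to the plain, unwrapped slice +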
require.NoError(t, err) + + expected := &[]testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on arrays", func(t *testing.T) { + offChainType, err := wrapper.RetypeToOffChain(reflect.TypeOf([2]testStruct{}), "") + require.NoError(t, err) + rInput := reflect.New(offChainType).Elem() + iOffchain := rInput.Index(0) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + iOffchain.FieldByName("B").SetInt(10) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(20) + iOffchain = rInput.Index(1) + iOffchain.FieldByName("A").FieldByName("X").SetString("baz") + iOffchain.FieldByName("B").SetInt(15) + iOffchain.FieldByName("C").FieldByName("Z").SetInt(25) + + output, err := wrapper.TransformToOnChain(rInput.Interface(), "") + + require.NoError(t, err) + + expected := [2]testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + } + assert.Equal(t, expected, output) + + newInput, err := wrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, rInput.Interface(), newInput) + }) + + t.Run("TransformToOnChain and TransformToOffChain works on nested fields", func(t *testing.T) { + offChainType, err := nestedWrapper.RetypeToOffChain(reflect.TypeOf(nestedTestStruct{}), "") + require.NoError(t, err) + + iOffchain := reflect.Indirect(reflect.New(offChainType)) + iOffchain.FieldByName("A").FieldByName("X").SetString("foo") + rB := iOffchain.FieldByName("B") + assert.Equal(t, "struct { A struct { X string }; B int64; C struct { Z int64 } }", offChainType.Field(1).Type.String()) + + rB.FieldByName("A").FieldByName("X").SetString("foo") + rB.FieldByName("B").SetInt(10) + rB.FieldByName("C").FieldByName("Z").SetInt(20) + + rC := iOffchain.FieldByName("C") + rC.Set(reflect.MakeSlice(rC.Type(), 2, 2)) + iElm := rC.Index(0) + iElm.FieldByName("A").FieldByName("X").SetString("foo") + iElm.FieldByName("B").SetInt(10) + iElm.FieldByName("C").FieldByName("Z").SetInt(20) + iElm = rC.Index(1) + iElm.FieldByName("A").FieldByName("X").SetString("baz") + iElm.FieldByName("B").SetInt(15) + iElm.FieldByName("C").FieldByName("Z").SetInt(25) + + iOffchain.FieldByName("D").SetString("bar") + + output, err := nestedWrapper.TransformToOnChain(iOffchain.Interface(), "") + require.NoError(t, err) + + expected := nestedTestStruct{ + A: "foo", + B: testStruct{ + A: "foo", + B: 10, + C: 20, + }, + C: []testStruct{ + { + A: "foo", + B: 10, + C: 20, + }, + { + A: "baz", + B: 15, + C: 25, + }, + }, + D: "bar", + } + assert.Equal(t, expected, output) + newInput, err := nestedWrapper.TransformToOffChain(expected, "") + require.NoError(t, err) + assert.Equal(t, iOffchain.Interface(), newInput) + }) +} + +func assertBasicWrapperTransform(t *testing.T, offChainType reflect.Type) { + require.Equal(t, 3, offChainType.NumField()) + + f0 := offChainType.Field(0).Type.Field(0) + assert.Equal(t, wrapType(f0.Name, f0.Type).String(), offChainType.Field(0).Type.String()) + + f1 := offChainType.Field(1) + assert.Equal(t, reflect.TypeOf(int64(0)), f1.Type) + + f2 := offChainType.Field(2).Type.Field(0) + assert.Equal(t, wrapType(f2.Name, f2.Type).String(), offChainType.Field(2).Type.String()) +} + +func wrapType(name string, typ reflect.Type) reflect.Type { + wrapped := reflect.StructOf([]reflect.StructField{{ 
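+ // a single-field struct mirroring the modifier's wrapping: one field `name` of type `typ`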
+ Name: name, + Type: typ, + }}) + return wrapped +} diff --git a/pkg/config/validate.go b/pkg/config/validate.go index 5ba476a15..6fd9b09f9 100644 --- a/pkg/config/validate.go +++ b/pkg/config/validate.go @@ -78,7 +78,7 @@ func validate(v reflect.Value, checkInterface bool) (err error) { for iter.Next() { mk := iter.Key() mv := iter.Value() - if !v.CanInterface() { + if !mv.CanInterface() { continue } if mv.Kind() == reflect.Ptr && mv.IsNil() { @@ -92,7 +92,7 @@ func validate(v reflect.Value, checkInterface bool) (err error) { case reflect.Slice, reflect.Array: for i := 0; i < v.Len(); i++ { iv := v.Index(i) - if !v.CanInterface() { + if !iv.CanInterface() { continue } if iv.Kind() == reflect.Ptr && iv.IsNil() { diff --git a/pkg/custmsg/custom_message.go b/pkg/custmsg/custom_message.go new file mode 100644 index 000000000..da2595555 --- /dev/null +++ b/pkg/custmsg/custom_message.go @@ -0,0 +1,128 @@ +package custmsg + +import ( + "context" + "fmt" + + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/chainlink-common/pkg/beholder" + "github.com/smartcontractkit/chainlink-common/pkg/beholder/pb" +) + +type MessageEmitter interface { + // Emit sends a message to the labeler's destination. + Emit(context.Context, string) error + + // WithMapLabels sets the labels for the message to be emitted. Labels are cumulative. + WithMapLabels(map[string]string) MessageEmitter + + // With adds multiple key-value pairs to the emission. + With(keyValues ...string) MessageEmitter + + // Labels returns a view of the current labels. + Labels() map[string]string +} + +type Labeler struct { + labels map[string]string +} + +func NewLabeler() Labeler { + return Labeler{labels: make(map[string]string)} +} + +// WithMapLabels adds multiple key-value pairs to the Labeler for transmission +// with SendLogAsCustomMessage. +func (l Labeler) WithMapLabels(labels map[string]string) MessageEmitter { + newCustomMessageLabeler := NewLabeler() + + // Copy existing labels from the current labeler + for k, v := range l.labels { + newCustomMessageLabeler.labels[k] = v + } + + // Add new key-value pairs + for k, v := range labels { + newCustomMessageLabeler.labels[k] = v + } + + return newCustomMessageLabeler +} + +// With adds multiple key-value pairs to the Labeler for transmission with SendLogAsCustomMessage. +func (l Labeler) With(keyValues ...string) MessageEmitter { + newCustomMessageLabeler := NewLabeler() + + if len(keyValues)%2 != 0 { + // If an odd number of key-value arguments is passed, return the original Labeler unchanged + return l + } + + // Copy existing labels from the current labeler + for k, v := range l.labels { + newCustomMessageLabeler.labels[k] = v + } + + // Add new key-value pairs + for i := 0; i < len(keyValues); i += 2 { + key := keyValues[i] + value := keyValues[i+1] + newCustomMessageLabeler.labels[key] = value + } + + return newCustomMessageLabeler +} + +func (l Labeler) Emit(ctx context.Context, msg string) error { + return sendLogAsCustomMessageW(ctx, msg, l.labels) +} + +func (l Labeler) Labels() map[string]string { + copied := make(map[string]string, len(l.labels)) + for k, v := range l.labels { + copied[k] = v + } + return copied +} + +// SendLogAsCustomMessage emits a BaseMessage with msg and labels as data.
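+// A minimal, hypothetical call site (the label key/value and message text are illustrative only, and a configured beholder emitter is assumed; Emit routes through the same path): +// +// err := NewLabeler().With("app_version", "v1.2.3").Emit(ctx, "workflow step finished")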
+// All labels currently set on the labeler are transmitted with the message. +func (l Labeler) SendLogAsCustomMessage(ctx context.Context, msg string) error { + return sendLogAsCustomMessageW(ctx, msg, l.labels) +} + +func sendLogAsCustomMessageW(ctx context.Context, msg string, labels map[string]string) error { + // TODO un-comment after INFOPLAT-1386 + // cast to map[string]any + //newLabels := map[string]any{} + //for k, v := range labels { + // newLabels[k] = v + //} + + //m, err := values.NewMap(newLabels) + //if err != nil { + // return fmt.Errorf("could not wrap labels to map: %w", err) + //} + + // Define a custom protobuf payload to emit + payload := &pb.BaseMessage{ + Msg: msg, + Labels: labels, + } + payloadBytes, err := proto.Marshal(payload) + if err != nil { + return fmt.Errorf("sending custom message failed to marshal protobuf: %w", err) + } + + err = beholder.GetEmitter().Emit(ctx, payloadBytes, + "beholder_data_schema", "/beholder-base-message/versions/1", // required + "beholder_domain", "platform", // required + "beholder_entity", "BaseMessage", // required + ) + if err != nil { + return fmt.Errorf("sending custom message failed on emit: %w", err) + } + + return nil +} diff --git a/pkg/custmsg/custom_message_test.go b/pkg/custmsg/custom_message_test.go new file mode 100644 index 000000000..d6881f1c1 --- /dev/null +++ b/pkg/custmsg/custom_message_test.go @@ -0,0 +1,26 @@ +package custmsg + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// tests that the Labeler does not share state across new instances created by `With` +func Test_CustomMessageAgent(t *testing.T) { + cma := NewLabeler() + cma1 := cma.With("key1", "value1") + cma2 := cma1.With("key2", "value2") + + assert.NotEqual(t, cma1.Labels(), cma2.Labels()) +} + +func Test_CustomMessageAgent_With(t *testing.T) { + cma := NewLabeler().With("key1", "value1") + assert.Equal(t, cma.Labels(), map[string]string{"key1": "value1"}) +} + +func Test_CustomMessageAgent_WithMapLabels(t *testing.T) { + cma := NewLabeler().WithMapLabels(map[string]string{"key1": "value1"}) + assert.Equal(t, cma.Labels(), map[string]string{"key1": "value1"}) +} diff --git a/pkg/loop/config.go b/pkg/loop/config.go index 7250c94b4..16fa906ec 100644 --- a/pkg/loop/config.go +++ b/pkg/loop/config.go @@ -87,7 +87,7 @@ func (e *EnvConfig) parse() error { var err error e.DatabaseURL, err = getDatabaseURL() if err != nil { - return fmt.Errorf("failed to parse %s: %q", envDatabaseURL, err) + return fmt.Errorf("failed to parse %s: %w", envDatabaseURL, err) } e.PrometheusPort, err = strconv.Atoi(promPortStr) @@ -105,7 +105,7 @@ func (e *EnvConfig) parse() error { if err != nil { return err } - e.TracingAttributes = getAttributes(envTracingAttribute) + e.TracingAttributes = getMap(envTracingAttribute) e.TracingSamplingRatio = getFloat64OrZero(envTracingSamplingRatio) e.TracingTLSCertPath = os.Getenv(envTracingTLSCertPath) } @@ -122,7 +122,7 @@ func (e *EnvConfig) parse() error { return fmt.Errorf("failed to parse %s: %w", envTelemetryEndpoint, err) } e.TelemetryCACertFile = os.Getenv(envTelemetryCACertFile) - e.TelemetryAttributes = getAttributes(envTelemetryAttribute) + e.TelemetryAttributes = getMap(envTelemetryAttribute) e.TelemetryTraceSampleRatio = getFloat64OrZero(envTelemetryTraceSampleRatio) } return nil @@ -158,14 +158,18 @@ func getValidCollectorTarget() (string, error) { return tracingCollectorTarget, nil } -func getAttributes(envKeyPrefix string) map[string]string { - tracingAttributes := make(map[string]string)
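+// getMap collects every environment variable whose name starts with envKeyPrefix into a map, with the prefix stripped from each key. +func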
getMap(envKeyPrefix string) map[string]string { + m := make(map[string]string) for _, env := range os.Environ() { if strings.HasPrefix(env, envKeyPrefix) { - tracingAttributes[strings.TrimPrefix(env, envKeyPrefix)] = os.Getenv(env) + key, value, found := strings.Cut(env, "=") + if found { + key = strings.TrimPrefix(key, envKeyPrefix) + m[key] = value + } } } - return tracingAttributes + return m } // Any errors in parsing result in a sampling ratio of 0.0. diff --git a/pkg/loop/config_test.go b/pkg/loop/config_test.go index e0bcc1d5e..f719ae566 100644 --- a/pkg/loop/config_test.go +++ b/pkg/loop/config_test.go @@ -2,6 +2,7 @@ package loop import ( "net/url" + "os" "strconv" "strings" "testing" @@ -153,6 +154,35 @@ func TestEnvConfig_AsCmdEnv(t *testing.T) { assert.Equal(t, "42", got[envTelemetryAttribute+"baz"]) } +func TestGetMap(t *testing.T) { + os.Setenv("TEST_PREFIX_KEY1", "value1") + os.Setenv("TEST_PREFIX_KEY2", "value2") + os.Setenv("OTHER_KEY", "othervalue") + + defer func() { + os.Unsetenv("TEST_PREFIX_KEY1") + os.Unsetenv("TEST_PREFIX_KEY2") + os.Unsetenv("OTHER_KEY") + }() + + result := getMap("TEST_PREFIX_") + + expected := map[string]string{ + "KEY1": "value1", + "KEY2": "value2", + } + + if len(result) != len(expected) { + t.Errorf("Expected map length %d, got %d", len(expected), len(result)) + } + + for k, v := range expected { + if result[k] != v { + t.Errorf("Expected key %s to have value %s, but got %s", k, v, result[k]) + } + } +} + func TestManagedGRPCClientConfig(t *testing.T) { t.Parallel() diff --git a/pkg/loop/internal/keystore/keystore.go b/pkg/loop/internal/keystore/keystore.go new file mode 100644 index 000000000..903b3dd5a --- /dev/null +++ b/pkg/loop/internal/keystore/keystore.go @@ -0,0 +1,306 @@ +package keystore + +import ( + "context" + + "google.golang.org/grpc" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/goplugin" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/net" + keystorepb "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb/keystore" + "github.com/smartcontractkit/chainlink-common/pkg/services" + "github.com/smartcontractkit/chainlink-common/pkg/types/keystore" +) + +var _ keystore.Keystore = (*Client)(nil) + +type Client struct { + services.Service + *goplugin.PluginClient + + grpc keystorepb.KeystoreClient +} + +func NewKeystoreClient(broker net.Broker, brokerCfg net.BrokerConfig, conn *grpc.ClientConn) *Client { + brokerCfg.Logger = logger.Named(brokerCfg.Logger, "KeystoreClient") + pc := goplugin.NewPluginClient(broker, brokerCfg, conn) + return &Client{PluginClient: pc, grpc: keystorepb.NewKeystoreClient(pc)} +} + +func (c *Client) Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) { + reply, err := c.grpc.Sign(ctx, &keystorepb.SignRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) { + reply, err := c.grpc.SignBatch(ctx, &keystorepb.SignBatchRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) { + reply, err := c.grpc.Verify(ctx, &keystorepb.VerifyRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return false, err + } + return reply.Valid, nil +} + +func (c *Client) VerifyBatch(ctx 
context.Context, keyID []byte, data [][]byte) ([]bool, error) { + reply, err := c.grpc.VerifyBatch(ctx, &keystorepb.VerifyBatchRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Valid, nil +} + +func (c *Client) ListKeys(ctx context.Context, tags []string) ([][]byte, error) { + reply, err := c.grpc.ListKeys(ctx, &keystorepb.ListKeysRequest{ + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyIDs, nil +} + +func (c *Client) RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) { + reply, err := c.grpc.RunUDF(ctx, &keystorepb.RunUDFRequest{ + Name: name, + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) { + reply, err := c.grpc.ImportKey(ctx, &keystorepb.ImportKeyRequest{ + KeyType: keyType, + Data: data, + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyID, nil +} + +func (c *Client) ExportKey(ctx context.Context, keyID []byte) ([]byte, error) { + reply, err := c.grpc.ExportKey(ctx, &keystorepb.ExportKeyRequest{ + KeyID: keyID, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) { + reply, err := c.grpc.CreateKey(ctx, &keystorepb.CreateKeyRequest{ + KeyType: keyType, + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyID, nil +} + +func (c *Client) DeleteKey(ctx context.Context, keyID []byte) error { + _, err := c.grpc.DeleteKey(ctx, &keystorepb.DeleteKeyRequest{ + KeyID: keyID, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) AddTag(ctx context.Context, keyID []byte, tag string) error { + _, err := c.grpc.AddTag(ctx, &keystorepb.AddTagRequest{ + KeyID: keyID, + Tag: tag, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) RemoveTag(ctx context.Context, keyID []byte, tag string) error { + _, err := c.grpc.RemoveTag(ctx, &keystorepb.RemoveTagRequest{ + KeyID: keyID, + Tag: tag, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) ListTags(ctx context.Context, keyID []byte) ([]string, error) { + reply, err := c.grpc.ListTags(ctx, &keystorepb.ListTagsRequest{ + KeyID: keyID, + }) + + if err != nil { + return nil, err + } + return reply.Tags, nil +} + +var _ keystorepb.KeystoreServer = (*server)(nil) + +type server struct { + *net.BrokerExt + keystorepb.UnimplementedKeystoreServer + + impl GRPCService +} + +func RegisterKeystoreServer(server *grpc.Server, broker net.Broker, brokerCfg net.BrokerConfig, impl GRPCService) error { + keystorepb.RegisterKeystoreServer(server, newKeystoreServer(broker, brokerCfg, impl)) + return nil +} + +func newKeystoreServer(broker net.Broker, brokerCfg net.BrokerConfig, impl GRPCService) *server { + brokerCfg.Logger = logger.Named(brokerCfg.Logger, "KeystoreServer") + return &server{BrokerExt: &net.BrokerExt{Broker: broker, BrokerConfig: brokerCfg}, impl: impl} +} + +func (s *server) Sign(ctx context.Context, request *keystorepb.SignRequest) (*keystorepb.SignResponse, error) { + data, err := s.impl.Sign(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.SignResponse{Data: data}, err +} + +func (s *server) SignBatch(ctx context.Context, request *keystorepb.SignBatchRequest) 
(*keystorepb.SignBatchResponse, error) { + data, err := s.impl.SignBatch(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.SignBatchResponse{Data: data}, err +} + +func (s *server) Verify(ctx context.Context, request *keystorepb.VerifyRequest) (*keystorepb.VerifyResponse, error) { + valid, err := s.impl.Verify(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.VerifyResponse{Valid: valid}, err +} + +func (s *server) VerifyBatch(ctx context.Context, request *keystorepb.VerifyBatchRequest) (*keystorepb.VerifyBatchResponse, error) { + valid, err := s.impl.VerifyBatch(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.VerifyBatchResponse{Valid: valid}, err +} + +func (s *server) ListKeys(ctx context.Context, request *keystorepb.ListKeysRequest) (*keystorepb.ListKeysResponse, error) { + keyIDs, err := s.impl.ListKeys(ctx, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.ListKeysResponse{KeyIDs: keyIDs}, err +} + +func (s *server) RunUDF(ctx context.Context, request *keystorepb.RunUDFRequest) (*keystorepb.RunUDFResponse, error) { + data, err := s.impl.RunUDF(ctx, request.Name, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.RunUDFResponse{Data: data}, err +} + +func (s *server) ImportKey(ctx context.Context, request *keystorepb.ImportKeyRequest) (*keystorepb.ImportKeyResponse, error) { + keyIDs, err := s.impl.ImportKey(ctx, request.KeyType, request.Data, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.ImportKeyResponse{KeyID: keyIDs}, err +} + +func (s *server) ExportKey(ctx context.Context, request *keystorepb.ExportKeyRequest) (*keystorepb.ExportKeyResponse, error) { + data, err := s.impl.ExportKey(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.ExportKeyResponse{Data: data}, err +} + +func (s *server) CreateKey(ctx context.Context, request *keystorepb.CreateKeyRequest) (*keystorepb.CreateKeyResponse, error) { + keyIDs, err := s.impl.CreateKey(ctx, request.KeyType, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.CreateKeyResponse{KeyID: keyIDs}, err +} + +func (s *server) DeleteKey(ctx context.Context, request *keystorepb.DeleteKeyRequest) (*keystorepb.DeleteKeyResponse, error) { + err := s.impl.DeleteKey(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.DeleteKeyResponse{}, err +} + +func (s *server) AddTag(ctx context.Context, request *keystorepb.AddTagRequest) (*keystorepb.AddTagResponse, error) { + err := s.impl.AddTag(ctx, request.KeyID, request.Tag) + if err != nil { + return nil, err + } + return &keystorepb.AddTagResponse{}, err +} + +func (s *server) RemoveTag(ctx context.Context, request *keystorepb.RemoveTagRequest) (*keystorepb.RemoveTagResponse, error) { + err := s.impl.RemoveTag(ctx, request.KeyID, request.Tag) + if err != nil { + return nil, err + } + return &keystorepb.RemoveTagResponse{}, err +} + +func (s *server) ListTags(ctx context.Context, request *keystorepb.ListTagsRequest) (*keystorepb.ListTagsResponse, error) { + tags, err := s.impl.ListTags(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.ListTagsResponse{Tags: tags}, nil +} diff --git a/pkg/loop/internal/keystore/keystore_test.go b/pkg/loop/internal/keystore/keystore_test.go new file mode 100644 index 000000000..391d82168 --- /dev/null +++ 
b/pkg/loop/internal/keystore/keystore_test.go @@ -0,0 +1,255 @@ +package keystore + +import ( + "bytes" + "context" + "errors" + "fmt" + "reflect" + "testing" + + "github.com/hashicorp/go-plugin" + "github.com/stretchr/testify/require" + "google.golang.org/grpc" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/net" + "github.com/smartcontractkit/chainlink-common/pkg/services" + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" +) + +func TestKeystore(t *testing.T) { + ctx := tests.Context(t) + stopCh := make(chan struct{}) + log := logger.Test(t) + + pluginName := "keystore-test" + client, server := plugin.TestPluginGRPCConn( + t, + true, + map[string]plugin.Plugin{ + pluginName: &testKeystorePlugin{ + log: log, + impl: &testKeystore{}, + brokerExt: &net.BrokerExt{ + BrokerConfig: net.BrokerConfig{ + StopCh: stopCh, + Logger: log, + }, + }, + }, + }, + ) + + defer client.Close() + defer server.Stop() + + keystoreClient, err := client.Dispense(pluginName) + require.NoError(t, err) + + ks, ok := keystoreClient.(*Client) + require.True(t, ok) + + r, err := ks.Sign(ctx, keyID, data) + require.NoError(t, err) + require.Equal(t, r, sign) + + r2, err := ks.SignBatch(ctx, keyID, dataList) + require.NoError(t, err) + require.Equal(t, r2, signBatch) + + r3, err := ks.Verify(ctx, keyID, data) + require.NoError(t, err) + require.Equal(t, r3, verify) + + r4, err := ks.VerifyBatch(ctx, keyID, dataList) + require.NoError(t, err) + require.Equal(t, r4, verifyBatch) + + r5, err := ks.ListKeys(ctx, tags) + require.NoError(t, err) + require.Equal(t, r5, list) + + r6, err := ks.RunUDF(ctx, udfName, keyID, data) + require.NoError(t, err) + require.Equal(t, r6, runUDF) + + r7, err := ks.ImportKey(ctx, keyType, data, tags) + require.NoError(t, err) + require.Equal(t, r7, importResponse) + + r8, err := ks.ExportKey(ctx, keyID) + require.NoError(t, err) + require.Equal(t, r8, export) + + r9, err := ks.CreateKey(ctx, keyType, tags) + require.NoError(t, err) + require.Equal(t, r9, create) + + err = ks.DeleteKey(ctx, keyID) + require.ErrorContains(t, err, errDelete.Error()) + + err = ks.AddTag(ctx, keyID, tag) + require.ErrorContains(t, err, errAddTag.Error()) + + err = ks.RemoveTag(ctx, keyID, tag) + require.ErrorContains(t, err, errRemoveTag.Error()) + + r10, err := ks.ListTags(ctx, keyID) + require.NoError(t, err) + require.Equal(t, r10, listTag) +} + +var ( + //Inputs + keyID = []byte("this-is-a-keyID") + data = []byte("some-data") + dataList = [][]byte{[]byte("some-data-in-a-list"), []byte("some-more-data-in-a-list")} + tags = []string{"tag1", "tag2"} + tag = "just-one-tag" + udfName = "i-am-a-udf-method-name" + keyType = "some-keyType" + + //Outputs + sign = []byte("signed") + signBatch = [][]byte{[]byte("signed1"), []byte("signed2")} + verify = true + verifyBatch = []bool{true, false} + list = [][]byte{[]byte("item1"), []byte("item2")} + runUDF = []byte("udf-response") + importResponse = []byte("imported") + export = []byte("exported") + create = []byte("created") + listTag = []string{"tag1", "tag2"} + errDelete = errors.New("delete-err") + errAddTag = errors.New("add-tag-err") + errRemoveTag = errors.New("remove-tag-err") +) + +type testKeystorePlugin struct { + log logger.Logger + plugin.NetRPCUnsupportedPlugin + brokerExt *net.BrokerExt + impl GRPCService +} + +func (r *testKeystorePlugin) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, client *grpc.ClientConn) (any, error) { + r.brokerExt.Broker = broker 
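+ // Hand the shared broker to the client half as well; the returned Client speaks gRPC over the test connection, exercising the same wire path as a real LOOPP plugin.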
+ + return NewKeystoreClient(r.brokerExt.Broker, r.brokerExt.BrokerConfig, client), nil +} + +func (r *testKeystorePlugin) GRPCServer(broker *plugin.GRPCBroker, server *grpc.Server) error { + r.brokerExt.Broker = broker + + err := RegisterKeystoreServer(server, r.brokerExt.Broker, r.brokerExt.BrokerConfig, r.impl) + if err != nil { + return err + } + return nil +} + +type testKeystore struct { + services.Service +} + +func checkKeyID(target []byte) error { + if !bytes.Equal(target, keyID) { + return fmt.Errorf("checkKeyID: expected %v but got %v", keyID, target) + } + return nil +} + +func checkData(target []byte) error { + if !bytes.Equal(target, data) { + return fmt.Errorf("checkData: expected %v but got %v", data, target) + } + return nil +} + +func checkDataList(target [][]byte) error { + if !reflect.DeepEqual(target, dataList) { + return fmt.Errorf("checkDataList: expected %v but got %v", dataList, target) + } + return nil +} + +func checkTags(target []string) error { + if !reflect.DeepEqual(target, tags) { + return fmt.Errorf("checkTags: expected %v but got %v", tags, target) + } + return nil +} + +func checkUdfName(target string) error { + if target != udfName { + return fmt.Errorf("checkUdfName: expected %v but got %v", udfName, target) + } + return nil +} + +func checkKeyType(target string) error { + if target != keyType { + return fmt.Errorf("checkKeyType: expected %q but got %q", keyType, target) + } + return nil +} + +func checkTag(target string) error { + if target != tag { + return fmt.Errorf("checkTag: expected %q but got %q", tag, target) + } + return nil +} + +func (t testKeystore) Sign(ctx context.Context, _keyID []byte, _data []byte) ([]byte, error) { + return sign, errors.Join(checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) SignBatch(ctx context.Context, _keyID []byte, _dataList [][]byte) ([][]byte, error) { + return signBatch, errors.Join(checkKeyID(_keyID), checkDataList(_dataList)) +} + +func (t testKeystore) Verify(ctx context.Context, _keyID []byte, _data []byte) (bool, error) { + return verify, errors.Join(checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) VerifyBatch(ctx context.Context, _keyID []byte, _dataList [][]byte) ([]bool, error) { + return verifyBatch, errors.Join(checkKeyID(_keyID), checkDataList(_dataList)) +} + +func (t testKeystore) ListKeys(ctx context.Context, _tags []string) ([][]byte, error) { + return list, checkTags(_tags) +} + +func (t testKeystore) RunUDF(ctx context.Context, _udfName string, _keyID []byte, _data []byte) ([]byte, error) { + return runUDF, errors.Join(checkUdfName(_udfName), checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) ImportKey(ctx context.Context, _keyType string, _data []byte, _tags []string) ([]byte, error) { + return importResponse, errors.Join(checkKeyType(_keyType), checkData(_data), checkTags(_tags)) +} + +func (t testKeystore) ExportKey(ctx context.Context, _keyID []byte) ([]byte, error) { + return export, checkKeyID(_keyID) +} + +func (t testKeystore) CreateKey(ctx context.Context, _keyType string, _tags []string) ([]byte, error) { + return create, errors.Join(checkKeyType(_keyType), checkTags(_tags)) +} + +func (t testKeystore) DeleteKey(ctx context.Context, _keyID []byte) error { + return errors.Join(errDelete, checkKeyID(_keyID)) +} + +func (t testKeystore) AddTag(ctx context.Context, _keyID []byte, _tag string) error { + return errors.Join(errAddTag, checkKeyID(_keyID), checkTag(_tag)) +} + +func (t testKeystore) RemoveTag(ctx context.Context, _keyID []byte, _tag
string) error { + return errors.Join(errRemoveTag, checkKeyID(_keyID), checkTag(_tag)) +} + +func (t testKeystore) ListTags(ctx context.Context, _keyID []byte) ([]string, error) { + return listTag, checkKeyID(_keyID) +} diff --git a/pkg/loop/internal/keystore/types.go b/pkg/loop/internal/keystore/types.go new file mode 100644 index 000000000..145b1b28e --- /dev/null +++ b/pkg/loop/internal/keystore/types.go @@ -0,0 +1,30 @@ +package keystore + +import ( + "context" + + "github.com/smartcontractkit/chainlink-common/pkg/services" +) + +// GRPCService covers the full functionality of the gRPC layer of the LOOPP keystore. +type GRPCService interface { + services.Service + Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) + SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) + Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) + VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) + + ListKeys(ctx context.Context, tags []string) ([][]byte, error) + + RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) + + ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) + ExportKey(ctx context.Context, keyID []byte) ([]byte, error) + + CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) + DeleteKey(ctx context.Context, keyID []byte) error + + AddTag(ctx context.Context, keyID []byte, tag string) error + RemoveTag(ctx context.Context, keyID []byte, tag string) error + ListTags(ctx context.Context, keyID []byte) ([]string, error) +} diff --git a/pkg/loop/internal/pb/keystore/generate.go b/pkg/loop/internal/pb/keystore/generate.go new file mode 100644 index 000000000..e6cc69dcc --- /dev/null +++ b/pkg/loop/internal/pb/keystore/generate.go @@ -0,0 +1,2 @@ +//go:generate protoc --proto_path=.:..:. --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative keystore.proto +package keystorepb diff --git a/pkg/loop/internal/pb/keystore/keystore.pb.go b/pkg/loop/internal/pb/keystore/keystore.pb.go new file mode 100644 index 000000000..f385c3de4 --- /dev/null +++ b/pkg/loop/internal/pb/keystore/keystore.pb.go @@ -0,0 +1,1891 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.31.0 +// protoc v4.25.1 +// source: keystore.proto + +package keystorepb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date.
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SignRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignRequest) Reset() { + *x = SignRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignRequest) ProtoMessage() {} + +func (x *SignRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignRequest.ProtoReflect.Descriptor instead. +func (*SignRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{0} +} + +func (x *SignRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *SignRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type SignResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignResponse) Reset() { + *x = SignResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignResponse) ProtoMessage() {} + +func (x *SignResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignResponse.ProtoReflect.Descriptor instead. 
+func (*SignResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{1} +} + +func (x *SignResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type SignBatchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data [][]byte `protobuf:"bytes,2,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignBatchRequest) Reset() { + *x = SignBatchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignBatchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignBatchRequest) ProtoMessage() {} + +func (x *SignBatchRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignBatchRequest.ProtoReflect.Descriptor instead. +func (*SignBatchRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{2} +} + +func (x *SignBatchRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *SignBatchRequest) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type SignBatchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data [][]byte `protobuf:"bytes,1,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignBatchResponse) Reset() { + *x = SignBatchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignBatchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignBatchResponse) ProtoMessage() {} + +func (x *SignBatchResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignBatchResponse.ProtoReflect.Descriptor instead. 
+func (*SignBatchResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{3} +} + +func (x *SignBatchResponse) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *VerifyRequest) Reset() { + *x = VerifyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyRequest) ProtoMessage() {} + +func (x *VerifyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyRequest.ProtoReflect.Descriptor instead. +func (*VerifyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{4} +} + +func (x *VerifyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *VerifyRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Valid bool `protobuf:"varint,1,opt,name=valid,proto3" json:"valid,omitempty"` +} + +func (x *VerifyResponse) Reset() { + *x = VerifyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyResponse) ProtoMessage() {} + +func (x *VerifyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyResponse.ProtoReflect.Descriptor instead. 
+func (*VerifyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{5} +} + +func (x *VerifyResponse) GetValid() bool { + if x != nil { + return x.Valid + } + return false +} + +type VerifyBatchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data [][]byte `protobuf:"bytes,2,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *VerifyBatchRequest) Reset() { + *x = VerifyBatchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyBatchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyBatchRequest) ProtoMessage() {} + +func (x *VerifyBatchRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyBatchRequest.ProtoReflect.Descriptor instead. +func (*VerifyBatchRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{6} +} + +func (x *VerifyBatchRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *VerifyBatchRequest) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyBatchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Valid []bool `protobuf:"varint,1,rep,packed,name=valid,proto3" json:"valid,omitempty"` +} + +func (x *VerifyBatchResponse) Reset() { + *x = VerifyBatchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyBatchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyBatchResponse) ProtoMessage() {} + +func (x *VerifyBatchResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyBatchResponse.ProtoReflect.Descriptor instead. 
+func (*VerifyBatchResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{7} +} + +func (x *VerifyBatchResponse) GetValid() []bool { + if x != nil { + return x.Valid + } + return nil +} + +type ListKeysRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tags []string `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ListKeysRequest) Reset() { + *x = ListKeysRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListKeysRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListKeysRequest) ProtoMessage() {} + +func (x *ListKeysRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListKeysRequest.ProtoReflect.Descriptor instead. +func (*ListKeysRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{8} +} + +func (x *ListKeysRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type ListKeysResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyIDs [][]byte `protobuf:"bytes,1,rep,name=keyIDs,proto3" json:"keyIDs,omitempty"` +} + +func (x *ListKeysResponse) Reset() { + *x = ListKeysResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListKeysResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListKeysResponse) ProtoMessage() {} + +func (x *ListKeysResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListKeysResponse.ProtoReflect.Descriptor instead. 
+func (*ListKeysResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{9} +} + +func (x *ListKeysResponse) GetKeyIDs() [][]byte { + if x != nil { + return x.KeyIDs + } + return nil +} + +type RunUDFRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + KeyID []byte `protobuf:"bytes,2,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,3,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *RunUDFRequest) Reset() { + *x = RunUDFRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RunUDFRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RunUDFRequest) ProtoMessage() {} + +func (x *RunUDFRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RunUDFRequest.ProtoReflect.Descriptor instead. +func (*RunUDFRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{10} +} + +func (x *RunUDFRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *RunUDFRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *RunUDFRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type RunUDFResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *RunUDFResponse) Reset() { + *x = RunUDFResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RunUDFResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RunUDFResponse) ProtoMessage() {} + +func (x *RunUDFResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RunUDFResponse.ProtoReflect.Descriptor instead. 
+func (*RunUDFResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{11} +} + +func (x *RunUDFResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type ImportKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyType string `protobuf:"bytes,1,opt,name=keyType,proto3" json:"keyType,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Tags []string `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ImportKeyRequest) Reset() { + *x = ImportKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ImportKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ImportKeyRequest) ProtoMessage() {} + +func (x *ImportKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ImportKeyRequest.ProtoReflect.Descriptor instead. +func (*ImportKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{12} +} + +func (x *ImportKeyRequest) GetKeyType() string { + if x != nil { + return x.KeyType + } + return "" +} + +func (x *ImportKeyRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +func (x *ImportKeyRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type ImportKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ImportKeyResponse) Reset() { + *x = ImportKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ImportKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ImportKeyResponse) ProtoMessage() {} + +func (x *ImportKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ImportKeyResponse.ProtoReflect.Descriptor instead. 
+func (*ImportKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{13} +} + +func (x *ImportKeyResponse) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ExportKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ExportKeyRequest) Reset() { + *x = ExportKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExportKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExportKeyRequest) ProtoMessage() {} + +func (x *ExportKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExportKeyRequest.ProtoReflect.Descriptor instead. +func (*ExportKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{14} +} + +func (x *ExportKeyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ExportKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *ExportKeyResponse) Reset() { + *x = ExportKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExportKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExportKeyResponse) ProtoMessage() {} + +func (x *ExportKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExportKeyResponse.ProtoReflect.Descriptor instead. 
+func (*ExportKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{15} +} + +func (x *ExportKeyResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type CreateKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyType string `protobuf:"bytes,1,opt,name=keyType,proto3" json:"keyType,omitempty"` + Tags []string `protobuf:"bytes,2,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *CreateKeyRequest) Reset() { + *x = CreateKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateKeyRequest) ProtoMessage() {} + +func (x *CreateKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateKeyRequest.ProtoReflect.Descriptor instead. +func (*CreateKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{16} +} + +func (x *CreateKeyRequest) GetKeyType() string { + if x != nil { + return x.KeyType + } + return "" +} + +func (x *CreateKeyRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type CreateKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *CreateKeyResponse) Reset() { + *x = CreateKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateKeyResponse) ProtoMessage() {} + +func (x *CreateKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateKeyResponse.ProtoReflect.Descriptor instead. 
+func (*CreateKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{17} +} + +func (x *CreateKeyResponse) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type DeleteKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *DeleteKeyRequest) Reset() { + *x = DeleteKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteKeyRequest) ProtoMessage() {} + +func (x *DeleteKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteKeyRequest.ProtoReflect.Descriptor instead. +func (*DeleteKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{18} +} + +func (x *DeleteKeyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type DeleteKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteKeyResponse) Reset() { + *x = DeleteKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteKeyResponse) ProtoMessage() {} + +func (x *DeleteKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteKeyResponse.ProtoReflect.Descriptor instead. +func (*DeleteKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{19} +} + +type AddTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Tag string `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *AddTagRequest) Reset() { + *x = AddTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTagRequest) ProtoMessage() {} + +func (x *AddTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTagRequest.ProtoReflect.Descriptor instead. 
+func (*AddTagRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{20} +} + +func (x *AddTagRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *AddTagRequest) GetTag() string { + if x != nil { + return x.Tag + } + return "" +} + +type AddTagResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *AddTagResponse) Reset() { + *x = AddTagResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddTagResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTagResponse) ProtoMessage() {} + +func (x *AddTagResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTagResponse.ProtoReflect.Descriptor instead. +func (*AddTagResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{21} +} + +type RemoveTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Tag string `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *RemoveTagRequest) Reset() { + *x = RemoveTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoveTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoveTagRequest) ProtoMessage() {} + +func (x *RemoveTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoveTagRequest.ProtoReflect.Descriptor instead. 
+func (*RemoveTagRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{22} +} + +func (x *RemoveTagRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *RemoveTagRequest) GetTag() string { + if x != nil { + return x.Tag + } + return "" +} + +type RemoveTagResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RemoveTagResponse) Reset() { + *x = RemoveTagResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoveTagResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoveTagResponse) ProtoMessage() {} + +func (x *RemoveTagResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoveTagResponse.ProtoReflect.Descriptor instead. +func (*RemoveTagResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{23} +} + +type ListTagsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ListTagsRequest) Reset() { + *x = ListTagsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTagsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTagsRequest) ProtoMessage() {} + +func (x *ListTagsRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTagsRequest.ProtoReflect.Descriptor instead. +func (*ListTagsRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{24} +} + +func (x *ListTagsRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ListTagsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tags []string `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ListTagsResponse) Reset() { + *x = ListTagsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTagsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTagsResponse) ProtoMessage() {} + +func (x *ListTagsResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTagsResponse.ProtoReflect.Descriptor instead. 
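One detail that is easy to miss in the tag-management messages above: key identifiers travel as raw bytes on the wire, while tags are plain UTF-8 strings. A small sketch of populating these requests; the helper and its literal values are placeholders, not part of the generated code:

package keystorepb

// tagRequestSketch shows how the tag messages above are populated.
// KeyID carries raw bytes; Tag is a string. All values are placeholders.
func tagRequestSketch() (*AddTagRequest, *RemoveTagRequest) {
	add := &AddTagRequest{
		KeyID: []byte{0x01, 0x02}, // placeholder key identifier
		Tag:   "evm",              // placeholder tag
	}
	// Removal reuses the same keyID/tag pair via the nil-safe getters.
	remove := &RemoveTagRequest{KeyID: add.GetKeyID(), Tag: add.GetTag()}
	return add, remove
}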
+func (*ListTagsResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{25} +} + +func (x *ListTagsResponse) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +var File_keystore_proto protoreflect.FileDescriptor + +var file_keystore_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x19, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, + 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0x37, 0x0a, 0x0b, 0x53, + 0x69, 0x67, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, + 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, + 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x22, 0x0a, 0x0c, 0x53, 0x69, 0x67, 0x6e, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x3c, 0x0a, 0x10, 0x53, 0x69, 0x67, 0x6e, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x27, 0x0a, 0x11, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, + 0x39, 0x0a, 0x0d, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x26, 0x0a, 0x0e, 0x56, 0x65, + 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x22, 0x3e, 0x0a, 0x12, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, + 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, + 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, + 0x74, 0x61, 0x22, 0x2b, 0x0a, 0x13, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x03, 0x28, 0x08, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x22, + 0x25, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x2a, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, + 0x79, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6b, 0x65, + 0x79, 0x49, 0x44, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x06, 0x6b, 0x65, 0x79, 0x49, + 
0x44, 0x73, 0x22, 0x4d, 0x0a, 0x0d, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, 0x0a, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, + 0x61, 0x22, 0x24, 0x0a, 0x0e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x54, 0x0a, 0x10, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6b, + 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6b, 0x65, + 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x29, 0x0a, + 0x11, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x28, 0x0a, 0x10, 0x45, 0x78, 0x70, 0x6f, + 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x22, 0x27, 0x0a, 0x11, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x40, 0x0a, 0x10, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x18, 0x0a, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x29, 0x0a, + 0x11, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x28, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x22, 0x13, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x37, 0x0a, 0x0d, 0x41, 0x64, 0x64, 0x54, 0x61, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, + 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x10, + 0x0a, 0x03, 0x74, 0x61, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x74, 0x61, 0x67, + 0x22, 0x10, 0x0a, 0x0e, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x3a, 
0x0a, 0x10, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x10, 0x0a, 0x03, + 0x74, 0x61, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x74, 0x61, 0x67, 0x22, 0x13, + 0x0a, 0x11, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x27, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x26, 0x0a, 0x10, + 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, + 0x74, 0x61, 0x67, 0x73, 0x32, 0xa8, 0x0a, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x12, 0x57, 0x0a, 0x04, 0x53, 0x69, 0x67, 0x6e, 0x12, 0x26, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x53, 0x69, + 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x06, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x12, 0x28, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, + 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x6c, 0x0a, 0x0b, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x12, 0x2d, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, + 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x2e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, + 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, + 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x63, 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x4b, 
0x65, 0x79, 0x73, 0x12, 0x2a, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, + 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, + 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, + 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, + 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, + 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x06, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x12, 0x28, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x64, 0x64, 0x54, 0x61, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 
0x6b, 0x65, 0x79, 0x73, + 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, + 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x6d, + 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, + 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, + 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x08, 0x4c, + 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x12, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x5d, 0x0a, 0x06, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x12, 0x28, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, + 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x2e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, + 0x57, 0x5a, 0x55, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, + 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, + 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2f, 0x70, 0x62, 0x2f, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3b, 0x6b, 0x65, + 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_keystore_proto_rawDescOnce sync.Once + file_keystore_proto_rawDescData = file_keystore_proto_rawDesc +) + +func file_keystore_proto_rawDescGZIP() []byte { + file_keystore_proto_rawDescOnce.Do(func() { + file_keystore_proto_rawDescData = protoimpl.X.CompressGZIP(file_keystore_proto_rawDescData) + }) + return file_keystore_proto_rawDescData +} + +var file_keystore_proto_msgTypes = make([]protoimpl.MessageInfo, 26) +var file_keystore_proto_goTypes = []interface{}{ + (*SignRequest)(nil), // 0: loop.internal.pb.keystore.SignRequest + (*SignResponse)(nil), // 1: loop.internal.pb.keystore.SignResponse + (*SignBatchRequest)(nil), // 2: loop.internal.pb.keystore.SignBatchRequest + (*SignBatchResponse)(nil), // 3: loop.internal.pb.keystore.SignBatchResponse + (*VerifyRequest)(nil), // 4: loop.internal.pb.keystore.VerifyRequest + (*VerifyResponse)(nil), // 5: loop.internal.pb.keystore.VerifyResponse + (*VerifyBatchRequest)(nil), // 6: 
loop.internal.pb.keystore.VerifyBatchRequest + (*VerifyBatchResponse)(nil), // 7: loop.internal.pb.keystore.VerifyBatchResponse + (*ListKeysRequest)(nil), // 8: loop.internal.pb.keystore.ListKeysRequest + (*ListKeysResponse)(nil), // 9: loop.internal.pb.keystore.ListKeysResponse + (*RunUDFRequest)(nil), // 10: loop.internal.pb.keystore.RunUDFRequest + (*RunUDFResponse)(nil), // 11: loop.internal.pb.keystore.RunUDFResponse + (*ImportKeyRequest)(nil), // 12: loop.internal.pb.keystore.ImportKeyRequest + (*ImportKeyResponse)(nil), // 13: loop.internal.pb.keystore.ImportKeyResponse + (*ExportKeyRequest)(nil), // 14: loop.internal.pb.keystore.ExportKeyRequest + (*ExportKeyResponse)(nil), // 15: loop.internal.pb.keystore.ExportKeyResponse + (*CreateKeyRequest)(nil), // 16: loop.internal.pb.keystore.CreateKeyRequest + (*CreateKeyResponse)(nil), // 17: loop.internal.pb.keystore.CreateKeyResponse + (*DeleteKeyRequest)(nil), // 18: loop.internal.pb.keystore.DeleteKeyRequest + (*DeleteKeyResponse)(nil), // 19: loop.internal.pb.keystore.DeleteKeyResponse + (*AddTagRequest)(nil), // 20: loop.internal.pb.keystore.AddTagRequest + (*AddTagResponse)(nil), // 21: loop.internal.pb.keystore.AddTagResponse + (*RemoveTagRequest)(nil), // 22: loop.internal.pb.keystore.RemoveTagRequest + (*RemoveTagResponse)(nil), // 23: loop.internal.pb.keystore.RemoveTagResponse + (*ListTagsRequest)(nil), // 24: loop.internal.pb.keystore.ListTagsRequest + (*ListTagsResponse)(nil), // 25: loop.internal.pb.keystore.ListTagsResponse +} +var file_keystore_proto_depIdxs = []int32{ + 0, // 0: loop.internal.pb.keystore.Keystore.Sign:input_type -> loop.internal.pb.keystore.SignRequest + 2, // 1: loop.internal.pb.keystore.Keystore.SignBatch:input_type -> loop.internal.pb.keystore.SignBatchRequest + 4, // 2: loop.internal.pb.keystore.Keystore.Verify:input_type -> loop.internal.pb.keystore.VerifyRequest + 6, // 3: loop.internal.pb.keystore.Keystore.VerifyBatch:input_type -> loop.internal.pb.keystore.VerifyBatchRequest + 8, // 4: loop.internal.pb.keystore.Keystore.ListKeys:input_type -> loop.internal.pb.keystore.ListKeysRequest + 12, // 5: loop.internal.pb.keystore.Keystore.ImportKey:input_type -> loop.internal.pb.keystore.ImportKeyRequest + 14, // 6: loop.internal.pb.keystore.Keystore.ExportKey:input_type -> loop.internal.pb.keystore.ExportKeyRequest + 16, // 7: loop.internal.pb.keystore.Keystore.CreateKey:input_type -> loop.internal.pb.keystore.CreateKeyRequest + 18, // 8: loop.internal.pb.keystore.Keystore.DeleteKey:input_type -> loop.internal.pb.keystore.DeleteKeyRequest + 20, // 9: loop.internal.pb.keystore.Keystore.AddTag:input_type -> loop.internal.pb.keystore.AddTagRequest + 22, // 10: loop.internal.pb.keystore.Keystore.RemoveTag:input_type -> loop.internal.pb.keystore.RemoveTagRequest + 24, // 11: loop.internal.pb.keystore.Keystore.ListTags:input_type -> loop.internal.pb.keystore.ListTagsRequest + 10, // 12: loop.internal.pb.keystore.Keystore.RunUDF:input_type -> loop.internal.pb.keystore.RunUDFRequest + 1, // 13: loop.internal.pb.keystore.Keystore.Sign:output_type -> loop.internal.pb.keystore.SignResponse + 3, // 14: loop.internal.pb.keystore.Keystore.SignBatch:output_type -> loop.internal.pb.keystore.SignBatchResponse + 5, // 15: loop.internal.pb.keystore.Keystore.Verify:output_type -> loop.internal.pb.keystore.VerifyResponse + 7, // 16: loop.internal.pb.keystore.Keystore.VerifyBatch:output_type -> loop.internal.pb.keystore.VerifyBatchResponse + 9, // 17: loop.internal.pb.keystore.Keystore.ListKeys:output_type -> 
loop.internal.pb.keystore.ListKeysResponse + 13, // 18: loop.internal.pb.keystore.Keystore.ImportKey:output_type -> loop.internal.pb.keystore.ImportKeyResponse + 15, // 19: loop.internal.pb.keystore.Keystore.ExportKey:output_type -> loop.internal.pb.keystore.ExportKeyResponse + 17, // 20: loop.internal.pb.keystore.Keystore.CreateKey:output_type -> loop.internal.pb.keystore.CreateKeyResponse + 19, // 21: loop.internal.pb.keystore.Keystore.DeleteKey:output_type -> loop.internal.pb.keystore.DeleteKeyResponse + 21, // 22: loop.internal.pb.keystore.Keystore.AddTag:output_type -> loop.internal.pb.keystore.AddTagResponse + 23, // 23: loop.internal.pb.keystore.Keystore.RemoveTag:output_type -> loop.internal.pb.keystore.RemoveTagResponse + 25, // 24: loop.internal.pb.keystore.Keystore.ListTags:output_type -> loop.internal.pb.keystore.ListTagsResponse + 11, // 25: loop.internal.pb.keystore.Keystore.RunUDF:output_type -> loop.internal.pb.keystore.RunUDFResponse + 13, // [13:26] is the sub-list for method output_type + 0, // [0:13] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_keystore_proto_init() } +func file_keystore_proto_init() { + if File_keystore_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_keystore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignBatchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignBatchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyBatchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyBatchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListKeysRequest); i { + 
case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListKeysResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RunUDFRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RunUDFResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ImportKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ImportKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExportKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExportKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddTagRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddTagResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*RemoveTagRequest); i {
+		case 0:
+			return &v.state
+		case 1:
+			return &v.sizeCache
+		case 2:
+			return &v.unknownFields
+		default:
+			return nil
+		}
+	}
+	file_keystore_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} {
+		switch v := v.(*RemoveTagResponse); i {
+		case 0:
+			return &v.state
+		case 1:
+			return &v.sizeCache
+		case 2:
+			return &v.unknownFields
+		default:
+			return nil
+		}
+	}
+	file_keystore_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} {
+		switch v := v.(*ListTagsRequest); i {
+		case 0:
+			return &v.state
+		case 1:
+			return &v.sizeCache
+		case 2:
+			return &v.unknownFields
+		default:
+			return nil
+		}
+	}
+	file_keystore_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} {
+		switch v := v.(*ListTagsResponse); i {
+		case 0:
+			return &v.state
+		case 1:
+			return &v.sizeCache
+		case 2:
+			return &v.unknownFields
+		default:
+			return nil
+		}
+	}
+	}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_keystore_proto_rawDesc,
+			NumEnums:      0,
+			NumMessages:   26,
+			NumExtensions: 0,
+			NumServices:   1,
+		},
+		GoTypes:           file_keystore_proto_goTypes,
+		DependencyIndexes: file_keystore_proto_depIdxs,
+		MessageInfos:      file_keystore_proto_msgTypes,
+	}.Build()
+	File_keystore_proto = out.File
+	file_keystore_proto_rawDesc = nil
+	file_keystore_proto_goTypes = nil
+	file_keystore_proto_depIdxs = nil
+}
diff --git a/pkg/loop/internal/pb/keystore/keystore.proto b/pkg/loop/internal/pb/keystore/keystore.proto
new file mode 100644
index 000000000..ba6013b44
--- /dev/null
+++ b/pkg/loop/internal/pb/keystore/keystore.proto
@@ -0,0 +1,134 @@
+syntax = "proto3";
+
+option go_package = "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb/keystore;keystorepb";
+
+package loop.internal.pb.keystore;
+
+service Keystore {
+  rpc Sign(SignRequest) returns (SignResponse);
+  rpc SignBatch(SignBatchRequest) returns (SignBatchResponse);
+  rpc Verify(VerifyRequest) returns (VerifyResponse);
+  rpc VerifyBatch(VerifyBatchRequest) returns (VerifyBatchResponse);
+
+  rpc ListKeys(ListKeysRequest) returns (ListKeysResponse);
+  rpc ImportKey(ImportKeyRequest) returns (ImportKeyResponse);
+  rpc ExportKey(ExportKeyRequest) returns (ExportKeyResponse);
+
+  rpc CreateKey(CreateKeyRequest) returns (CreateKeyResponse);
+  rpc DeleteKey(DeleteKeyRequest) returns (DeleteKeyResponse);
+
+  rpc AddTag(AddTagRequest) returns (AddTagResponse);
+  rpc RemoveTag(RemoveTagRequest) returns (RemoveTagResponse);
+  rpc ListTags(ListTagsRequest) returns (ListTagsResponse);
+
+  rpc RunUDF(RunUDFRequest) returns (RunUDFResponse);
+}
+
+message SignRequest {
+  bytes keyID = 1;
+  bytes data = 2;
+}
+
+message SignResponse {
+  bytes data = 1;
+}
+
+message SignBatchRequest {
+  bytes keyID = 1;
+  repeated bytes data = 2;
+}
+
+message SignBatchResponse {
+  repeated bytes data = 1;
+}
+
+message VerifyRequest {
+  bytes keyID = 1;
+  bytes data = 2;
+}
+
+message VerifyResponse {
+  bool valid = 1;
+}
+
+message VerifyBatchRequest {
+  bytes keyID = 1;
+  repeated bytes data = 2;
+}
+
+message VerifyBatchResponse {
+  repeated bool valid = 1;
+}
+
+message ListKeysRequest {
+  repeated string tags = 1;
+}
+
+message ListKeysResponse {
+  repeated bytes keyIDs = 1;
+}
+
+message RunUDFRequest {
+  string name = 1;
+  bytes keyID = 2;
+  bytes data = 3;
+}
+
+message RunUDFResponse {
+  bytes data = 1;
+}
+
+message ImportKeyRequest {
+  string keyType = 1;
+  bytes data = 2;
+  repeated string tags = 3;
+}
+
+message ImportKeyResponse {
+  bytes keyID = 1;
+}
+
+message ExportKeyRequest {
+  bytes keyID = 1;
+}
+
+message ExportKeyResponse {
+  bytes data = 1;
+}
+
+message CreateKeyRequest {
+  string keyType = 1;
+  repeated string tags = 2;
+}
+
+message CreateKeyResponse {
+  bytes keyID = 1;
+}
+
+message DeleteKeyRequest {
+  bytes keyID = 1;
+}
+
+message DeleteKeyResponse {}
+
+message AddTagRequest {
+  bytes keyID = 1;
+  string tag = 2;
+}
+
+message AddTagResponse {}
+
+message RemoveTagRequest {
+  bytes keyID = 1;
+  string tag = 2;
+}
+
+message RemoveTagResponse {}
+
+message ListTagsRequest {
+  bytes keyID = 1;
+}
+
+message ListTagsResponse {
+  repeated string tags = 1;
+}
diff --git a/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go b/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go
new file mode 100644
index 000000000..6752c21ee
--- /dev/null
+++ b/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go
@@ -0,0 +1,553 @@
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions:
+// - protoc-gen-go-grpc v1.3.0
+// - protoc             v4.25.1
+// source: keystore.proto
+
+package keystorepb
+
+import (
+	context "context"
+	grpc "google.golang.org/grpc"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+)
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+// Requires gRPC-Go v1.32.0 or later.
+const _ = grpc.SupportPackageIsVersion7
+
+const (
+	Keystore_Sign_FullMethodName        = "/loop.internal.pb.keystore.Keystore/Sign"
+	Keystore_SignBatch_FullMethodName   = "/loop.internal.pb.keystore.Keystore/SignBatch"
+	Keystore_Verify_FullMethodName      = "/loop.internal.pb.keystore.Keystore/Verify"
+	Keystore_VerifyBatch_FullMethodName = "/loop.internal.pb.keystore.Keystore/VerifyBatch"
+	Keystore_ListKeys_FullMethodName    = "/loop.internal.pb.keystore.Keystore/ListKeys"
+	Keystore_ImportKey_FullMethodName   = "/loop.internal.pb.keystore.Keystore/ImportKey"
+	Keystore_ExportKey_FullMethodName   = "/loop.internal.pb.keystore.Keystore/ExportKey"
+	Keystore_CreateKey_FullMethodName   = "/loop.internal.pb.keystore.Keystore/CreateKey"
+	Keystore_DeleteKey_FullMethodName   = "/loop.internal.pb.keystore.Keystore/DeleteKey"
+	Keystore_AddTag_FullMethodName      = "/loop.internal.pb.keystore.Keystore/AddTag"
+	Keystore_RemoveTag_FullMethodName   = "/loop.internal.pb.keystore.Keystore/RemoveTag"
+	Keystore_ListTags_FullMethodName    = "/loop.internal.pb.keystore.Keystore/ListTags"
+	Keystore_RunUDF_FullMethodName      = "/loop.internal.pb.keystore.Keystore/RunUDF"
+)
+
+// KeystoreClient is the client API for Keystore service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
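Before the KeystoreClient interface itself, a sketch of how a caller would exercise this API end to end. The dial target and insecure credentials are placeholder assumptions for illustration only; in practice this package is internal to chainlink-common, and a LOOP plugin would receive its grpc.ClientConn from the plugin broker rather than dialing directly:

package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	// Illustrative import: this path is internal to chainlink-common
	// and not importable from outside that module.
	keystorepb "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/pb/keystore"
)

func main() {
	// Placeholder target; a real LOOP setup obtains the conn from the broker.
	conn, err := grpc.Dial("localhost:50051",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := keystorepb.NewKeystoreClient(conn)
	resp, err := client.Sign(context.Background(), &keystorepb.SignRequest{
		KeyID: []byte("placeholder-key-id"), // placeholder identifier
		Data:  []byte("payload to sign"),
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("signature: %x", resp.GetData())
}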
+type KeystoreClient interface { + Sign(ctx context.Context, in *SignRequest, opts ...grpc.CallOption) (*SignResponse, error) + SignBatch(ctx context.Context, in *SignBatchRequest, opts ...grpc.CallOption) (*SignBatchResponse, error) + Verify(ctx context.Context, in *VerifyRequest, opts ...grpc.CallOption) (*VerifyResponse, error) + VerifyBatch(ctx context.Context, in *VerifyBatchRequest, opts ...grpc.CallOption) (*VerifyBatchResponse, error) + ListKeys(ctx context.Context, in *ListKeysRequest, opts ...grpc.CallOption) (*ListKeysResponse, error) + ImportKey(ctx context.Context, in *ImportKeyRequest, opts ...grpc.CallOption) (*ImportKeyResponse, error) + ExportKey(ctx context.Context, in *ExportKeyRequest, opts ...grpc.CallOption) (*ExportKeyResponse, error) + CreateKey(ctx context.Context, in *CreateKeyRequest, opts ...grpc.CallOption) (*CreateKeyResponse, error) + DeleteKey(ctx context.Context, in *DeleteKeyRequest, opts ...grpc.CallOption) (*DeleteKeyResponse, error) + AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) + RemoveTag(ctx context.Context, in *RemoveTagRequest, opts ...grpc.CallOption) (*RemoveTagResponse, error) + ListTags(ctx context.Context, in *ListTagsRequest, opts ...grpc.CallOption) (*ListTagsResponse, error) + RunUDF(ctx context.Context, in *RunUDFRequest, opts ...grpc.CallOption) (*RunUDFResponse, error) +} + +type keystoreClient struct { + cc grpc.ClientConnInterface +} + +func NewKeystoreClient(cc grpc.ClientConnInterface) KeystoreClient { + return &keystoreClient{cc} +} + +func (c *keystoreClient) Sign(ctx context.Context, in *SignRequest, opts ...grpc.CallOption) (*SignResponse, error) { + out := new(SignResponse) + err := c.cc.Invoke(ctx, Keystore_Sign_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) SignBatch(ctx context.Context, in *SignBatchRequest, opts ...grpc.CallOption) (*SignBatchResponse, error) { + out := new(SignBatchResponse) + err := c.cc.Invoke(ctx, Keystore_SignBatch_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) Verify(ctx context.Context, in *VerifyRequest, opts ...grpc.CallOption) (*VerifyResponse, error) { + out := new(VerifyResponse) + err := c.cc.Invoke(ctx, Keystore_Verify_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) VerifyBatch(ctx context.Context, in *VerifyBatchRequest, opts ...grpc.CallOption) (*VerifyBatchResponse, error) { + out := new(VerifyBatchResponse) + err := c.cc.Invoke(ctx, Keystore_VerifyBatch_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ListKeys(ctx context.Context, in *ListKeysRequest, opts ...grpc.CallOption) (*ListKeysResponse, error) { + out := new(ListKeysResponse) + err := c.cc.Invoke(ctx, Keystore_ListKeys_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ImportKey(ctx context.Context, in *ImportKeyRequest, opts ...grpc.CallOption) (*ImportKeyResponse, error) { + out := new(ImportKeyResponse) + err := c.cc.Invoke(ctx, Keystore_ImportKey_FullMethodName, in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ExportKey(ctx context.Context, in *ExportKeyRequest, opts ...grpc.CallOption) (*ExportKeyResponse, error) { + out := new(ExportKeyResponse) + err := c.cc.Invoke(ctx, Keystore_ExportKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) CreateKey(ctx context.Context, in *CreateKeyRequest, opts ...grpc.CallOption) (*CreateKeyResponse, error) { + out := new(CreateKeyResponse) + err := c.cc.Invoke(ctx, Keystore_CreateKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) DeleteKey(ctx context.Context, in *DeleteKeyRequest, opts ...grpc.CallOption) (*DeleteKeyResponse, error) { + out := new(DeleteKeyResponse) + err := c.cc.Invoke(ctx, Keystore_DeleteKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) { + out := new(AddTagResponse) + err := c.cc.Invoke(ctx, Keystore_AddTag_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) RemoveTag(ctx context.Context, in *RemoveTagRequest, opts ...grpc.CallOption) (*RemoveTagResponse, error) { + out := new(RemoveTagResponse) + err := c.cc.Invoke(ctx, Keystore_RemoveTag_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ListTags(ctx context.Context, in *ListTagsRequest, opts ...grpc.CallOption) (*ListTagsResponse, error) { + out := new(ListTagsResponse) + err := c.cc.Invoke(ctx, Keystore_ListTags_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) RunUDF(ctx context.Context, in *RunUDFRequest, opts ...grpc.CallOption) (*RunUDFResponse, error) { + out := new(RunUDFResponse) + err := c.cc.Invoke(ctx, Keystore_RunUDF_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeystoreServer is the server API for Keystore service. +// All implementations must embed UnimplementedKeystoreServer +// for forward compatibility +type KeystoreServer interface { + Sign(context.Context, *SignRequest) (*SignResponse, error) + SignBatch(context.Context, *SignBatchRequest) (*SignBatchResponse, error) + Verify(context.Context, *VerifyRequest) (*VerifyResponse, error) + VerifyBatch(context.Context, *VerifyBatchRequest) (*VerifyBatchResponse, error) + ListKeys(context.Context, *ListKeysRequest) (*ListKeysResponse, error) + ImportKey(context.Context, *ImportKeyRequest) (*ImportKeyResponse, error) + ExportKey(context.Context, *ExportKeyRequest) (*ExportKeyResponse, error) + CreateKey(context.Context, *CreateKeyRequest) (*CreateKeyResponse, error) + DeleteKey(context.Context, *DeleteKeyRequest) (*DeleteKeyResponse, error) + AddTag(context.Context, *AddTagRequest) (*AddTagResponse, error) + RemoveTag(context.Context, *RemoveTagRequest) (*RemoveTagResponse, error) + ListTags(context.Context, *ListTagsRequest) (*ListTagsResponse, error) + RunUDF(context.Context, *RunUDFRequest) (*RunUDFResponse, error) + mustEmbedUnimplementedKeystoreServer() +} + +// UnimplementedKeystoreServer must be embedded to have forward compatible implementations. 
+type UnimplementedKeystoreServer struct { +} + +func (UnimplementedKeystoreServer) Sign(context.Context, *SignRequest) (*SignResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Sign not implemented") +} +func (UnimplementedKeystoreServer) SignBatch(context.Context, *SignBatchRequest) (*SignBatchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SignBatch not implemented") +} +func (UnimplementedKeystoreServer) Verify(context.Context, *VerifyRequest) (*VerifyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Verify not implemented") +} +func (UnimplementedKeystoreServer) VerifyBatch(context.Context, *VerifyBatchRequest) (*VerifyBatchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method VerifyBatch not implemented") +} +func (UnimplementedKeystoreServer) ListKeys(context.Context, *ListKeysRequest) (*ListKeysResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListKeys not implemented") +} +func (UnimplementedKeystoreServer) ImportKey(context.Context, *ImportKeyRequest) (*ImportKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ImportKey not implemented") +} +func (UnimplementedKeystoreServer) ExportKey(context.Context, *ExportKeyRequest) (*ExportKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ExportKey not implemented") +} +func (UnimplementedKeystoreServer) CreateKey(context.Context, *CreateKeyRequest) (*CreateKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateKey not implemented") +} +func (UnimplementedKeystoreServer) DeleteKey(context.Context, *DeleteKeyRequest) (*DeleteKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteKey not implemented") +} +func (UnimplementedKeystoreServer) AddTag(context.Context, *AddTagRequest) (*AddTagResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddTag not implemented") +} +func (UnimplementedKeystoreServer) RemoveTag(context.Context, *RemoveTagRequest) (*RemoveTagResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RemoveTag not implemented") +} +func (UnimplementedKeystoreServer) ListTags(context.Context, *ListTagsRequest) (*ListTagsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListTags not implemented") +} +func (UnimplementedKeystoreServer) RunUDF(context.Context, *RunUDFRequest) (*RunUDFResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RunUDF not implemented") +} +func (UnimplementedKeystoreServer) mustEmbedUnimplementedKeystoreServer() {} + +// UnsafeKeystoreServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to KeystoreServer will +// result in compilation errors. 
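To round out the server side: a hedged sketch of a partial implementation. Embedding UnimplementedKeystoreServer satisfies mustEmbedUnimplementedKeystoreServer and answers every non-overridden RPC with codes.Unimplemented, so only the methods of interest need implementing. The in-memory tag map and the listener address are placeholders, not the real keystore backing store:

package keystorepb

import (
	"context"
	"net"

	"google.golang.org/grpc"
)

// demoServer is a hypothetical partial implementation: every RPC it
// does not override is handled by the embedded UnimplementedKeystoreServer.
type demoServer struct {
	UnimplementedKeystoreServer
	tags map[string][]string // placeholder store: keyID -> tags
}

func (s *demoServer) ListTags(ctx context.Context, req *ListTagsRequest) (*ListTagsResponse, error) {
	return &ListTagsResponse{Tags: s.tags[string(req.GetKeyID())]}, nil
}

// serveSketch registers the partial server on an ephemeral local port.
func serveSketch() error {
	lis, err := net.Listen("tcp", "localhost:0") // placeholder listener
	if err != nil {
		return err
	}
	s := grpc.NewServer()
	RegisterKeystoreServer(s, &demoServer{tags: map[string][]string{}})
	return s.Serve(lis)
}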
+type UnsafeKeystoreServer interface { + mustEmbedUnimplementedKeystoreServer() +} + +func RegisterKeystoreServer(s grpc.ServiceRegistrar, srv KeystoreServer) { + s.RegisterService(&Keystore_ServiceDesc, srv) +} + +func _Keystore_Sign_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).Sign(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_Sign_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).Sign(ctx, req.(*SignRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_SignBatch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignBatchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).SignBatch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_SignBatch_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).SignBatch(ctx, req.(*SignBatchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_Verify_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(VerifyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).Verify(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_Verify_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).Verify(ctx, req.(*VerifyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_VerifyBatch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(VerifyBatchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).VerifyBatch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_VerifyBatch_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).VerifyBatch(ctx, req.(*VerifyBatchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ListKeys_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListKeysRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ListKeys(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ListKeys_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ListKeys(ctx, req.(*ListKeysRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ImportKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportKeyRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ImportKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ImportKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ImportKey(ctx, req.(*ImportKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ExportKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ExportKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ExportKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ExportKey(ctx, req.(*ExportKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_CreateKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).CreateKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_CreateKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).CreateKey(ctx, req.(*CreateKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_DeleteKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).DeleteKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_DeleteKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).DeleteKey(ctx, req.(*DeleteKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_AddTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).AddTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_AddTag_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).AddTag(ctx, req.(*AddTagRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_RemoveTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).RemoveTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_RemoveTag_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).RemoveTag(ctx, req.(*RemoveTagRequest)) + } + return interceptor(ctx, 
in, info, handler) +} + +func _Keystore_ListTags_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTagsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ListTags(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ListTags_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ListTags(ctx, req.(*ListTagsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_RunUDF_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunUDFRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).RunUDF(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_RunUDF_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).RunUDF(ctx, req.(*RunUDFRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Keystore_ServiceDesc is the grpc.ServiceDesc for Keystore service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Keystore_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "loop.internal.pb.keystore.Keystore", + HandlerType: (*KeystoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Sign", + Handler: _Keystore_Sign_Handler, + }, + { + MethodName: "SignBatch", + Handler: _Keystore_SignBatch_Handler, + }, + { + MethodName: "Verify", + Handler: _Keystore_Verify_Handler, + }, + { + MethodName: "VerifyBatch", + Handler: _Keystore_VerifyBatch_Handler, + }, + { + MethodName: "ListKeys", + Handler: _Keystore_ListKeys_Handler, + }, + { + MethodName: "ImportKey", + Handler: _Keystore_ImportKey_Handler, + }, + { + MethodName: "ExportKey", + Handler: _Keystore_ExportKey_Handler, + }, + { + MethodName: "CreateKey", + Handler: _Keystore_CreateKey_Handler, + }, + { + MethodName: "DeleteKey", + Handler: _Keystore_DeleteKey_Handler, + }, + { + MethodName: "AddTag", + Handler: _Keystore_AddTag_Handler, + }, + { + MethodName: "RemoveTag", + Handler: _Keystore_RemoveTag_Handler, + }, + { + MethodName: "ListTags", + Handler: _Keystore_ListTags_Handler, + }, + { + MethodName: "RunUDF", + Handler: _Keystore_RunUDF_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "keystore.proto", +} diff --git a/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go index c22ed15f4..c54696871 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go @@ -144,7 +144,10 @@ func (f *fakeCodec) Encode(_ context.Context, item any, itemType string) ([]byte return []byte{}, nil case interfacetests.TestItemWithConfigExtra: ts := item.(*interfacetests.TestStruct) - ts.Account = anyAccountBytes + ts.AccountStruct = interfacetests.AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } ts.BigField = big.NewInt(2) return encoder.Marshal(ts) case interfacetests.TestItemType, interfacetests.TestItemSliceType, interfacetests.TestItemArray2Type, interfacetests.TestItemArray1Type: diff 
--git a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go index 667131020..d5496add8 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go @@ -484,7 +484,10 @@ func (f *fakeContractReader) GetLatestValue(_ context.Context, readIdentifier st rv := returnVal.(*TestStructWithExtraField) rv.TestStruct = *pv rv.ExtraField = AnyExtraValue - rv.Account = anyAccountBytes + rv.AccountStruct = AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } rv.BigField = big.NewInt(2) return nil } else if strings.HasSuffix(readIdentifier, EventName) { @@ -569,7 +572,10 @@ func (f *fakeContractReader) BatchGetLatestValues(_ context.Context, request typ *returnVal.(*[]uint64) = AnySliceToReadWithoutAnArgument } else if req.ReadName == MethodReturningSeenStruct { ts := *req.Params.(*TestStruct) - ts.Account = anyAccountBytes + ts.AccountStruct = AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } ts.BigField = big.NewInt(2) returnVal = &TestStructWithExtraField{ TestStruct: ts, diff --git a/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go index 00c8adae7..9d87f581c 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/helper_test.go @@ -44,11 +44,16 @@ func (*cannotEncode) UnmarshalText() error { type interfaceTesterBase struct{} var anyAccountBytes = []byte{1, 2, 3} +var anyAccountString = string(anyAccountBytes) func (it *interfaceTesterBase) GetAccountBytes(_ int) []byte { return anyAccountBytes } +func (it *interfaceTesterBase) GetAccountString(_ int) string { + return anyAccountString +} + func (it *interfaceTesterBase) Name() string { return "relay client" } diff --git a/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go b/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go index 9282a48fd..167bbe5a6 100644 --- a/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go +++ b/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go @@ -74,8 +74,8 @@ func (p *ProviderClient) OnchainConfigCodec() median.OnchainConfigCodec { return p.onchainConfigCodec } -func (m *ProviderClient) ContractReader() types.ContractReader { - return m.contractReader +func (p *ProviderClient) ContractReader() types.ContractReader { + return p.contractReader } func (p *ProviderClient) Codec() types.Codec { diff --git a/pkg/loop/internal/types/types.go b/pkg/loop/internal/types/types.go index e97225313..9e3f50e39 100644 --- a/pkg/loop/internal/types/types.go +++ b/pkg/loop/internal/types/types.go @@ -3,6 +3,8 @@ package internal import ( "context" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/keystore" + "github.com/smartcontractkit/chainlink-common/pkg/services" "github.com/smartcontractkit/chainlink-common/pkg/types" "github.com/smartcontractkit/chainlink-common/pkg/types/core" ) @@ -54,3 +56,9 @@ type Relayer interface { NewPluginProvider(context.Context, types.RelayArgs, types.PluginArgs) (types.PluginProvider, error) NewLLOProvider(context.Context, types.RelayArgs, types.PluginArgs) (types.LLOProvider, error) } + +// Keystore This interface contains all the keystore GRPC functionality, keystore.Keystore is meant 
to be exposed to consumers and the keystore.Management interface is exposed only to the core node +type Keystore interface { + services.Service + keystore.GRPCService +} diff --git a/pkg/loop/keystore_service.go b/pkg/loop/keystore_service.go new file mode 100644 index 000000000..1cd93dad7 --- /dev/null +++ b/pkg/loop/keystore_service.go @@ -0,0 +1,32 @@ +package loop + +import ( + "context" + "fmt" + "os/exec" + + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/goplugin" + "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/keystore" +) + +// KeystoreService is a [types.Service] that maintains an internal [keystore.Keystore]. +type KeystoreService struct { + goplugin.PluginService[*GRPCPluginKeystore, keystore.GRPCService] +} + +func NewKeystoreService(lggr logger.Logger, grpcOpts GRPCOpts, cmd func() *exec.Cmd, config []byte) *KeystoreService { + newService := func(ctx context.Context, instance any) (keystore.GRPCService, error) { + plug, ok := instance.(*keystore.Client) + if !ok { + return nil, fmt.Errorf("expected PluginKeystore but got %T", instance) + } + return plug, nil + } + stopCh := make(chan struct{}) + lggr = logger.Named(lggr, "KeystoreService") + var rs KeystoreService + broker := BrokerConfig{StopCh: stopCh, Logger: lggr, GRPCOpts: grpcOpts} + rs.Init(PluginKeystoreName, &GRPCPluginKeystore{BrokerConfig: broker}, newService, lggr, cmd, stopCh) + return &rs +} diff --git a/pkg/loop/plugin_keystore.go b/pkg/loop/plugin_keystore.go new file mode 100644 index 000000000..a5b3b68bf --- /dev/null +++ b/pkg/loop/plugin_keystore.go @@ -0,0 +1,53 @@ +package loop + +import ( + "context" + + "github.com/hashicorp/go-plugin" + "google.golang.org/grpc" + + keystorepb "github.com/smartcontractkit/chainlink-common/pkg/loop/internal/keystore" + "github.com/smartcontractkit/chainlink-common/pkg/types/keystore" +) + +// PluginKeystoreName is the name for keystore.Keystore +const PluginKeystoreName = "keystore" + +func PluginKeystoreHandshakeConfig() plugin.HandshakeConfig { + return plugin.HandshakeConfig{ + MagicCookieKey: "CL_PLUGIN_KEYSTORE_MAGIC_COOKIE", + MagicCookieValue: "fe81b132-0d3d-4c16-9f13-c2f7bfd3c361", + } +} + +type GRPCPluginKeystore struct { + plugin.NetRPCUnsupportedPlugin + + BrokerConfig + + PluginServer keystorepb.GRPCService + + pluginClient *keystorepb.Client +} + +func (p *GRPCPluginKeystore) GRPCServer(broker *plugin.GRPCBroker, server *grpc.Server) error { + return keystorepb.RegisterKeystoreServer(server, broker, p.BrokerConfig, p.PluginServer) +} + +func (p *GRPCPluginKeystore) GRPCClient(_ context.Context, broker *plugin.GRPCBroker, conn *grpc.ClientConn) (interface{}, error) { + if p.pluginClient == nil { + p.pluginClient = keystorepb.NewKeystoreClient(broker, p.BrokerConfig, conn) + } else { + p.pluginClient.Refresh(broker, conn) + } + + return keystore.Keystore(p.pluginClient), nil +} + +func (p *GRPCPluginKeystore) ClientConfig() *plugin.ClientConfig { + c := &plugin.ClientConfig{ + HandshakeConfig: PluginKeystoreHandshakeConfig(), + Plugins: map[string]plugin.Plugin{PluginKeystoreName: p}, + } + return ManagedGRPCClientConfig(c, p.BrokerConfig) +} diff --git a/pkg/loop/reportingplugins/loopp_service_test.go b/pkg/loop/reportingplugins/loopp_service_test.go index e17c0a231..e4ba1bd31 100644 --- a/pkg/loop/reportingplugins/loopp_service_test.go +++ b/pkg/loop/reportingplugins/loopp_service_test.go @@ -50,39 +50,42 @@ func TestLOOPPService(t *testing.T) { {Plugin:
reportingplugins.PluginServiceName}, } for _, ts := range tests { - looppSvc := reportingplugins.NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { - return NewHelperProcessCommand(ts.Plugin) - }, - core.ReportingPluginServiceConfig{}, - nettest.MockConn{}, - pipelinetest.PipelineRunner, - telemetrytest.Telemetry, - errorlogtest.ErrorLog, - keyvaluestoretest.KeyValueStore{}, - relayersettest.RelayerSet{}) - hook := looppSvc.XXXTestHook() - servicetest.Run(t, looppSvc) - - t.Run("control", func(t *testing.T) { - reportingplugintest.RunFactory(t, looppSvc) - }) - - t.Run("Kill", func(t *testing.T) { - hook.Kill() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - reportingplugintest.RunFactory(t, looppSvc) - }) - - t.Run("Reset", func(t *testing.T) { - hook.Reset() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - reportingplugintest.RunFactory(t, looppSvc) + t.Run(ts.Plugin, func(t *testing.T) { + t.Parallel() + looppSvc := reportingplugins.NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { + return NewHelperProcessCommand(ts.Plugin) + }, + core.ReportingPluginServiceConfig{}, + nettest.MockConn{}, + pipelinetest.PipelineRunner, + telemetrytest.Telemetry, + errorlogtest.ErrorLog, + keyvaluestoretest.KeyValueStore{}, + relayersettest.RelayerSet{}) + hook := looppSvc.XXXTestHook() + servicetest.Run(t, looppSvc) + + t.Run("control", func(t *testing.T) { + reportingplugintest.RunFactory(t, looppSvc) + }) + + t.Run("Kill", func(t *testing.T) { + hook.Kill() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + reportingplugintest.RunFactory(t, looppSvc) + }) + + t.Run("Reset", func(t *testing.T) { + hook.Reset() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + reportingplugintest.RunFactory(t, looppSvc) + }) }) } } diff --git a/pkg/loop/reportingplugins/ocr3/loopp_service_test.go b/pkg/loop/reportingplugins/ocr3/loopp_service_test.go index 5b17c263f..b15531cc5 100644 --- a/pkg/loop/reportingplugins/ocr3/loopp_service_test.go +++ b/pkg/loop/reportingplugins/ocr3/loopp_service_test.go @@ -54,40 +54,43 @@ func TestLOOPPService(t *testing.T) { }, } for _, ts := range tests { - looppSvc := NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { - return NewHelperProcessCommand(ts.Plugin) - }, - core.ReportingPluginServiceConfig{}, - nettest.MockConn{}, - pipelinetest.PipelineRunner, - telemetrytest.Telemetry, - errorlogtest.ErrorLog, - core.CapabilitiesRegistry(nil), - keyvaluestoretest.KeyValueStore{}, - relayersettest.RelayerSet{}) - hook := looppSvc.XXXTestHook() - servicetest.Run(t, looppSvc) - - t.Run("control", func(t *testing.T) { - ocr3test.OCR3ReportingPluginFactory(t, looppSvc) - }) - - t.Run("Kill", func(t *testing.T) { - hook.Kill() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - ocr3test.OCR3ReportingPluginFactory(t, looppSvc) - }) - - t.Run("Reset", func(t *testing.T) { - hook.Reset() - - // wait for relaunch - time.Sleep(2 * goplugin.KeepAliveTickDuration) - - ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + t.Run(ts.Plugin, func(t *testing.T) { + t.Parallel() + looppSvc := NewLOOPPService(logger.Test(t), loop.GRPCOpts{}, func() *exec.Cmd { + return NewHelperProcessCommand(ts.Plugin) + }, + core.ReportingPluginServiceConfig{}, + nettest.MockConn{}, + pipelinetest.PipelineRunner, + telemetrytest.Telemetry, + errorlogtest.ErrorLog, + core.CapabilitiesRegistry(nil), + keyvaluestoretest.KeyValueStore{}, + 
relayersettest.RelayerSet{}) + hook := looppSvc.XXXTestHook() + servicetest.Run(t, looppSvc) + + t.Run("control", func(t *testing.T) { + ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + }) + + t.Run("Kill", func(t *testing.T) { + hook.Kill() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + }) + + t.Run("Reset", func(t *testing.T) { + hook.Reset() + + // wait for relaunch + time.Sleep(2 * goplugin.KeepAliveTickDuration) + + ocr3test.OCR3ReportingPluginFactory(t, looppSvc) + }) }) } } diff --git a/pkg/loop/server.go b/pkg/loop/server.go index 3741e4b1d..fd2b02b7f 100644 --- a/pkg/loop/server.go +++ b/pkg/loop/server.go @@ -58,7 +58,7 @@ func newServer(loggerName string) (*Server, error) { lggr, err := NewLogger() if err != nil { - return nil, fmt.Errorf("error creating logger: %s", err) + return nil, fmt.Errorf("error creating logger: %w", err) } lggr = logger.Named(lggr, loggerName) s.Logger = logger.Sugared(lggr) diff --git a/pkg/metrics/metrics_labeler.go b/pkg/metrics/metrics_labeler.go new file mode 100644 index 000000000..ade67d780 --- /dev/null +++ b/pkg/metrics/metrics_labeler.go @@ -0,0 +1,33 @@ +package metrics + +type Labeler struct { + Labels map[string]string +} + +func NewLabeler() Labeler { + return Labeler{Labels: make(map[string]string)} +} + +// With adds multiple key-value pairs to the Labeler to eventually be consumed by a Beholder metrics resource +func (c Labeler) With(keyValues ...string) Labeler { + newCustomMetricsLabeler := NewLabeler() + + if len(keyValues)%2 != 0 { + // If an odd number of key-value arguments is passed, return the original Labeler unchanged + return c + } + + // Copy existing labels from the current Labeler + for k, v := range c.Labels { + newCustomMetricsLabeler.Labels[k] = v + } + + // Add new key-value pairs + for i := 0; i < len(keyValues); i += 2 { + key := keyValues[i] + value := keyValues[i+1] + newCustomMetricsLabeler.Labels[key] = value + } + + return newCustomMetricsLabeler +} diff --git a/pkg/metrics/metrics_labeler_test.go b/pkg/metrics/metrics_labeler_test.go new file mode 100644 index 000000000..25d5e51d7 --- /dev/null +++ b/pkg/metrics/metrics_labeler_test.go @@ -0,0 +1,16 @@ +package metrics + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// tests that Labeler does not share state across new instances created by `With` +func Test_CustomMessageAgent(t *testing.T) { + cma := NewLabeler() + cma1 := cma.With("key1", "value1") + cma2 := cma1.With("key2", "value2") + + assert.NotEqual(t, cma1.Labels, cma2.Labels) +} diff --git a/pkg/services/service.go b/pkg/services/service.go index 893f470be..d0294681a 100644 --- a/pkg/services/service.go +++ b/pkg/services/service.go @@ -8,7 +8,6 @@ import ( "github.com/google/uuid" "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/trace" "golang.org/x/exp/maps" @@ -44,6 +43,8 @@ type Engine struct { StopChan logger.SugaredLogger + tracer trace.Tracer + wg sync.WaitGroup emitHealthErr func(error) @@ -52,6 +53,14 @@ type Engine struct { } // Go runs fn in a tracked goroutine that will block closing the service. +// +// If this operation runs continuously in the background, then do not trace it.
+// If this operation will terminate, consider tracing via Tracer: +// +// v.e.Go(func(ctx context.Context) { +// ctx, span := v.e.Tracer().Start(ctx, "MyOperationName") +// defer span.End() +// }) func (e *Engine) Go(fn func(context.Context)) { e.wg.Add(1) go func() { @@ -65,6 +74,13 @@ func (e *Engine) Go(fn func(context.Context)) { // GoTick is like Go but calls fn for each tick. // // v.e.GoTick(services.NewTicker(time.Minute), v.method) +// +// Consider tracing each tick via Tracer: +// +// v.e.GoTick(services.NewTicker(time.Minute), func(ctx context.Context) { +// ctx, span := v.e.Tracer().Start(ctx, "MyOperationName") +// defer span.End() +// }) func (e *Engine) GoTick(ticker *timeutil.Ticker, fn func(context.Context)) { e.Go(func(ctx context.Context) { defer ticker.Stop() @@ -79,6 +95,11 @@ func (e *Engine) GoTick(ticker *timeutil.Ticker, fn func(context.Context)) { }) } +// Tracer returns the otel tracer with service attributes included. +func (e *Engine) Tracer() trace.Tracer { + return e.tracer +} + // EmitHealthErr records an error to be reported via the next call to Healthy(). func (e *Engine) EmitHealthErr(err error) { e.emitHealthErr(err) } @@ -87,7 +108,7 @@ func (e *Engine) EmitHealthErr(err error) { e.emitHealthErr(err) } func (e *Engine) SetHealthCond(condition string, err error) { e.condsMu.Lock() defer e.condsMu.Unlock() - e.conds[condition] = fmt.Errorf("%s: %e", condition, err) + e.conds[condition] = fmt.Errorf("%s: %w", condition, err) } // ClearHealthCond removes a condition and error recorded by SetHealthCond. @@ -157,16 +178,14 @@ func (c Config) NewService(lggr logger.Logger) Service { return c.new(logger.Sugared(lggr)) } -const scopeName = "github.com/smartcontractkit/chainlink-common/pkg/services" - func (c Config) new(lggr logger.SugaredLogger) *service { lggr = lggr.Named(c.Name) s := &service{ - tracer: otel.GetTracerProvider().Tracer(scopeName), - cfg: c, + cfg: c, eng: Engine{ StopChan: make(StopChan), SugaredLogger: lggr, + tracer: otel.GetTracerProvider().Tracer(lggr.Name()), conds: make(map[string]error), }, } @@ -179,10 +198,9 @@ func (c Config) new(lggr logger.SugaredLogger) *service { type service struct { StateMachine - tracer trace.Tracer - cfg Config - eng Engine - subs []Service + cfg Config + eng Engine + subs []Service } // Ready implements [HealthReporter.Ready] and overrides and extends [utils.StartStopOnce.Ready()] to include [Config.SubServices] @@ -218,10 +236,7 @@ func (s *service) Name() string { return s.eng.SugaredLogger.Name() } func (s *service) Start(ctx context.Context) error { return s.StartOnce(s.cfg.Name, func() error { var span trace.Span - ctx, span = s.tracer.Start(ctx, "Start", trace.WithAttributes( - attribute.String("service.name", s.cfg.Name), - attribute.String("service.instance", s.Name()), // full name from logger - )) + ctx, span = s.eng.tracer.Start(ctx, "Start") defer span.End() s.eng.Info("Starting") diff --git a/pkg/services/service_example_configured_test.go b/pkg/services/service_example_configured_test.go index 350440195..1765827af 100644 --- a/pkg/services/service_example_configured_test.go +++ b/pkg/services/service_example_configured_test.go @@ -5,6 +5,8 @@ import ( "fmt" "time" + "go.opentelemetry.io/otel/attribute" + . 
"github.com/smartcontractkit/chainlink-common/pkg/internal/example" // nolint "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/services" @@ -32,6 +34,13 @@ func (c *configured) close() error { // do processes all outstanding work func (c *configured) do(ctx context.Context) { + ctx, span := c.eng.Tracer().Start(ctx, "DoWork") + defer span.End() + var count, errs int + defer func() { + span.SetAttributes(attribute.Int("count", count)) + span.SetAttributes(attribute.Int("errs", errs)) + }() for { select { case <-ctx.Done(): @@ -40,8 +49,10 @@ func (c *configured) do(ctx context.Context) { if !ok { return } + count++ name, err := work() if err != nil { + errs++ c.eng.SetHealthCond(name, err) } else { c.eng.ClearHealthCond(name) diff --git a/pkg/services/stop.go b/pkg/services/stop.go index 900ec6c84..d7f95f658 100644 --- a/pkg/services/stop.go +++ b/pkg/services/stop.go @@ -1,6 +1,9 @@ package services -import "context" +import ( + "context" + "time" +) // A StopChan signals when some work should stop. // Use StopChanR if you already have a read only <-chan. @@ -16,6 +19,11 @@ func (s StopChan) Ctx(ctx context.Context) (context.Context, context.CancelFunc) return StopRChan((<-chan struct{})(s)).Ctx(ctx) } +// CtxWithTimeout cancels a [context.Context] when StopChan is closed. +func (s StopChan) CtxWithTimeout(timeout time.Duration) (context.Context, context.CancelFunc) { + return s.CtxCancel(context.WithTimeout(context.Background(), timeout)) +} + // CtxCancel cancels a [context.Context] when StopChan is closed. // Returns ctx and cancel unmodified, for convenience. func (s StopChan) CtxCancel(ctx context.Context, cancel context.CancelFunc) (context.Context, context.CancelFunc) { @@ -36,6 +44,11 @@ func (s StopRChan) Ctx(ctx context.Context) (context.Context, context.CancelFunc return s.CtxCancel(context.WithCancel(ctx)) } +// CtxWithTimeout cancels a [context.Context] when StopChan is closed. +func (s StopRChan) CtxWithTimeout(timeout time.Duration) (context.Context, context.CancelFunc) { + return s.CtxCancel(context.WithTimeout(context.Background(), timeout)) +} + // CtxCancel cancels a [context.Context] when StopChan is closed. // Returns ctx and cancel unmodified, for convenience. func (s StopRChan) CtxCancel(ctx context.Context, cancel context.CancelFunc) (context.Context, context.CancelFunc) { diff --git a/pkg/types/codec.go b/pkg/types/codec.go index 93ae8ce59..395610911 100644 --- a/pkg/types/codec.go +++ b/pkg/types/codec.go @@ -29,6 +29,66 @@ type Decoder interface { GetMaxDecodingSize(ctx context.Context, n int, itemType string) (int, error) } +/* +Codec is an interface that provides encoding and decoding functionality for a specific type identified by a name. +Because there are many types that a [ContractReader] or [ChainWriter] can either accept or return, all encoding +instructions provided by the codec are based on the type name. + +Starting from the lowest level, take for instance a [big.Int] encoder where we want the output to be big endian binary +encoded. + + typeCodec, _ := binary.BigEndian().BigInt(32, true) + +This allows us to encode and decode [big.Int] values with big endian encoding using the [encodings.TypeCodec] interface. 
+ + encodedBytes := []byte{} + + originalValue := big.NewInt(42) + encodedBytes, _ = typeCodec.Encode(originalValue, encodedBytes) // new encoded bytes are appended to existing + + value, _, _ := typeCodec.Decode(encodedBytes) + +The additional [encodings.TypeCodec] methods such as 'GetType() reflect.Type' allow composition. This is useful for +creating a struct codec such as the one defined in encodings/struct.go. + + tlCodec, _ := encodings.NewStructCodec([]encodings.NamedTypeCodec{{Name: "Value", Codec: typeCodec}}) + +This provides a [encodings.TopLevelCodec] which is a [encodings.TypeCodec] with a total size of all encoded elements. +Going up another level, we create a [Codec] from a map of [encodings.TypeCodec] instances using +[encodings.CodecFromTypeCodec]. + + codec := encodings.CodecFromTypeCodec{"SomeStruct": tlCodec} + + type SomeStruct struct { + Value *big.Int + } + + encodedStructBytes, _ := codec.Encode(ctx, SomeStruct{Value: big.NewInt(42)}, "SomeStruct") + + var someStruct SomeStruct + _ = codec.Decode(ctx, encodedStructBytes, &someStruct, "SomeStruct") + +Therefore 'itemType' passed to [Encode] and [Decode] references the key in the map of [encodings.TypeCodec] instances. +It is also worth noting that a `TopLevelCodec` can also be added to a `CodecFromTypeCodec` map. This allows for the +[encodings.SizeAtTopLevel] method to be referenced when [encodings.GetMaxEncodingSize] is called on the [Codec]. + +Also, when the type is unknown to the caller, the decoded type for an 'itemType' can be retrieved from the codec to be +used for decoding. The `CreateType` method returns an instance of the expected type using reflection under the hood and +the overall composition of `TypeCodec` instances. This allows proper types to be conveyed to the caller through the +GRPC interface where data may be JSON encoded, passed through GRPC, and JSON decoded on the other side. + + decodedStruct, _ := codec.CreateType("SomeStruct", false) + _ = codec.Decode(ctx, encodedStructBytes, &decodedStruct, "SomeStruct") + +The `encodings` package provides a `Builder` interface that allows for the creation of any encoding type. This is useful +for creating custom encodings such as the EVM ABI encoding. An encoder implements the `Builder` interface and plugs +directly into `TypeCodec`. + +From the perspective of a `ContractReader` instance, the `itemType` at the top level is the `readIdentifier` which +can be imagined as `contractName + methodName` given that a contract method call returns some configured value that +would need its own codec. Each implementation of `ContractReader` maps the names to codecs differently on the inside, +but from the level of the interface, the `itemType` is the `readIdentifier`. +*/ type Codec interface { Encoder Decoder diff --git a/pkg/types/contract_reader.go b/pkg/types/contract_reader.go index caf97925b..4ba2ec9a7 100644 --- a/pkg/types/contract_reader.go +++ b/pkg/types/contract_reader.go @@ -46,6 +46,9 @@ type ContractReader interface { // Passing in a *values.Value as the returnVal will encode the return value as an appropriate value.Value instance. GetLatestValue(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) error + + // GetLatestValueWithHeadData should be used in the same way as GetLatestValue, but also returns the head data.
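+ // For example, an illustrative sketch (cr, readIdentifier, and the uint64 return type are assumptions):
+ //
+ //	var latest uint64
+ //	head, err := cr.GetLatestValueWithHeadData(ctx, readIdentifier, primitives.Finalized, nil, &latest)
+ //
+ // where head describes the chain head at which the value was observed.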
+ GetLatestValueWithHeadData(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) (Head, error) + // BatchGetLatestValues batches get latest value calls based on request, which is grouped by contract names that each have a slice of BatchRead. // BatchGetLatestValuesRequest params and returnVal follow same rules as GetLatestValue params and returnVal arguments, with difference in how response is returned. // BatchGetLatestValuesResult response is grouped by contract names, which contain read results that maintain the order from the request. @@ -130,6 +133,10 @@ func (UnimplementedContractReader) GetLatestValue(ctx context.Context, readIdent return UnimplementedError("ContractReader.GetLatestValue unimplemented") } +func (UnimplementedContractReader) GetLatestValueWithHeadData(ctx context.Context, readIdentifier string, confidenceLevel primitives.ConfidenceLevel, params, returnVal any) (Head, error) { + return Head{}, UnimplementedError("ContractReader.GetLatestValueWithHeadData unimplemented") +} + func (UnimplementedContractReader) BatchGetLatestValues(ctx context.Context, request BatchGetLatestValuesRequest) (BatchGetLatestValuesResult, error) { return nil, UnimplementedError("ContractReader.BatchGetLatestValues unimplemented") } diff --git a/pkg/types/example_codec_test.go b/pkg/types/example_codec_test.go new file mode 100644 index 000000000..94dec8155 --- /dev/null +++ b/pkg/types/example_codec_test.go @@ -0,0 +1,46 @@ +package types_test + +import ( + "context" + "fmt" + "math/big" + + "github.com/smartcontractkit/chainlink-common/pkg/codec/encodings" + "github.com/smartcontractkit/chainlink-common/pkg/codec/encodings/binary" +) + +// ExampleCodec provides a minimal example of constructing and using a codec. 
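+// It walks the same steps as the [Codec] documentation in codec.go: build a big-endian
+// big.Int TypeCodec, wrap it in a struct codec, then round-trip a value through
+// Codec.Encode, Codec.Decode, and CreateType.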
+func ExampleCodec() { + ctx := context.Background() + typeCodec, _ := binary.BigEndian().BigInt(32, true) + + // start with empty encoded bytes + encodedBytes := []byte{} + originalValue := big.NewInt(42) + + encodedBytes, _ = typeCodec.Encode(originalValue, encodedBytes) // new encoded bytes are appended to existing + value, _, _ := typeCodec.Decode(encodedBytes) + + // originalValue is the same as value + fmt.Printf("%+v == %+v\n", originalValue, value) + + // TopLevelCodec is a TypeCodec that has a total size of all encoded elements + tlCodec, _ := encodings.NewStructCodec([]encodings.NamedTypeCodec{{Name: "Value", Codec: typeCodec}}) + codec := encodings.CodecFromTypeCodec{"SomeStruct": tlCodec} + + type SomeStruct struct { + Value *big.Int + } + + originalStruct := SomeStruct{Value: big.NewInt(42)} + encodedStructBytes, _ := codec.Encode(ctx, originalStruct, "SomeStruct") + + var someStruct SomeStruct + _ = codec.Decode(ctx, encodedStructBytes, &someStruct, "SomeStruct") + + decodedStruct, _ := codec.CreateType("SomeStruct", false) + _ = codec.Decode(ctx, encodedStructBytes, &decodedStruct, "SomeStruct") + + // encoded struct is equal to decoded struct using defined type and/or CreateType + fmt.Printf("%+v == %+v == %+v\n", originalStruct, someStruct, decodedStruct) +} diff --git a/pkg/types/interfacetests/chain_components_interface_tests.go b/pkg/types/interfacetests/chain_components_interface_tests.go index 06e622ee4..676ba4536 100644 --- a/pkg/types/interfacetests/chain_components_interface_tests.go +++ b/pkg/types/interfacetests/chain_components_interface_tests.go @@ -270,7 +270,7 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch ctx := tests.Context(t) testStruct := CreateTestStruct(0, tester) testStruct.BigField = nil - testStruct.Account = nil + testStruct.AccountStruct.Account = nil bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] @@ -529,7 +529,7 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes // setup call data testStruct := CreateTestStruct(0, tester) testStruct.BigField = nil - testStruct.Account = nil + testStruct.AccountStruct.Account = nil actual := &TestStructWithExtraField{} batchGetLatestValueRequest := make(types.BatchGetLatestValuesRequest) bindings := tester.GetBindings(t) diff --git a/pkg/types/interfacetests/codec_interface_fuzz_tests.go b/pkg/types/interfacetests/codec_interface_fuzz_tests.go index 2b8e2957a..38f845238 100644 --- a/pkg/types/interfacetests/codec_interface_fuzz_tests.go +++ b/pkg/types/interfacetests/codec_interface_fuzz_tests.go @@ -37,9 +37,12 @@ func RunCodecInterfaceFuzzTests(f *testing.F, tester CodecInterfaceTester) { DifferentField: differentField, OracleID: commontypes.OracleID(oracleId), OracleIDs: oids, - Account: tester.GetAccountBytes(accountSeed), - Accounts: [][]byte{tester.GetAccountBytes(accountsSeed + 1), tester.GetAccountBytes(accountsSeed + 2)}, - BigField: big.NewInt(bigField), + AccountStruct: AccountStruct{ + Account: tester.GetAccountBytes(accountSeed), + AccountStr: tester.GetAccountString(accountSeed), + }, + Accounts: [][]byte{tester.GetAccountBytes(accountsSeed + 1), tester.GetAccountBytes(accountsSeed + 2)}, + BigField: big.NewInt(bigField), NestedDynamicStruct: MidLevelDynamicTestStruct{ FixedBytes: fb, Inner: InnerDynamicTestStruct{ diff --git a/pkg/types/interfacetests/codec_interface_tests.go b/pkg/types/interfacetests/codec_interface_tests.go index 3f3c125d2..42cf5fd1d 100644 --- 
a/pkg/types/interfacetests/codec_interface_tests.go +++ b/pkg/types/interfacetests/codec_interface_tests.go @@ -65,7 +65,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { req := &EncodeRequest{TestStructs: []TestStruct{item}, TestOn: TestItemType} resp := tester.EncodeFields(t, req) compatibleItem := compatibleTestStruct{ - Account: item.Account, + AccountStruct: item.AccountStruct, Accounts: item.Accounts, BigField: item.BigField, DifferentField: item.DifferentField, @@ -94,7 +94,10 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { req := &EncodeRequest{TestStructs: []TestStruct{item}, TestOn: TestItemType} resp := tester.EncodeFields(t, req) compatibleMap := map[string]any{ - "Account": item.Account, + "AccountStruct": map[string]any{ + "Account": item.AccountStruct.Account, + "AccountStr": item.AccountStruct.AccountStr, + }, "Accounts": item.Accounts, "BigField": item.BigField, "DifferentField": item.DifferentField, @@ -138,7 +141,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { DifferentField: ts.DifferentField, OracleID: ts.OracleID, OracleIDs: ts.OracleIDs, - Account: ts.Account, + AccountStruct: ts.AccountStruct, Accounts: ts.Accounts, BigField: ts.BigField, NestedDynamicStruct: ts.NestedDynamicStruct, @@ -322,7 +325,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { cr := tester.GetCodec(t) modified := CreateTestStruct[*testing.T](0, tester) modified.BigField = nil - modified.Account = nil + modified.AccountStruct.Account = nil actual, err := cr.Encode(ctx, modified, TestItemWithConfigExtra) require.NoError(t, err) @@ -352,7 +355,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { DifferentField: "", OracleID: 0, OracleIDs: [32]commontypes.OracleID{}, - Account: nil, + AccountStruct: AccountStruct{}, Accounts: nil, BigField: nil, NestedDynamicStruct: MidLevelDynamicTestStruct{}, diff --git a/pkg/types/interfacetests/utils.go b/pkg/types/interfacetests/utils.go index a9f009eb8..0cb10a7a8 100644 --- a/pkg/types/interfacetests/utils.go +++ b/pkg/types/interfacetests/utils.go @@ -20,6 +20,7 @@ type BasicTester[T any] interface { Setup(t T) Name() string GetAccountBytes(i int) []byte + GetAccountString(i int) string } type testcase[T any] struct { @@ -152,11 +153,16 @@ type MidLevelStaticTestStruct struct { Inner InnerStaticTestStruct } +type AccountStruct struct { + Account []byte + AccountStr string +} + type TestStruct struct { Field *int32 OracleID commontypes.OracleID OracleIDs [32]commontypes.OracleID - Account []byte + AccountStruct AccountStruct Accounts [][]byte DifferentField string BigField *big.Int @@ -173,7 +179,7 @@ type TestStructMissingField struct { DifferentField string OracleID commontypes.OracleID OracleIDs [32]commontypes.OracleID - Account []byte + AccountStruct AccountStruct Accounts [][]byte BigField *big.Int NestedDynamicStruct MidLevelDynamicTestStruct @@ -182,7 +188,7 @@ type TestStructMissingField struct { // compatibleTestStruct has fields in a different order type compatibleTestStruct struct { - Account []byte + AccountStruct AccountStruct Accounts [][]byte BigField *big.Int DifferentField string @@ -213,10 +219,13 @@ func CreateTestStruct[T any](i int, tester BasicTester[T]) TestStruct { s := fmt.Sprintf("field%v", i) fv := int32(i) return TestStruct{ - Field: &fv, - OracleID: commontypes.OracleID(i + 1), - OracleIDs: [32]commontypes.OracleID{commontypes.OracleID(i + 2), commontypes.OracleID(i + 3)}, - Account: 
tester.GetAccountBytes(i + 3), + Field: &fv, + OracleID: commontypes.OracleID(i + 1), + OracleIDs: [32]commontypes.OracleID{commontypes.OracleID(i + 2), commontypes.OracleID(i + 3)}, + AccountStruct: AccountStruct{ + Account: tester.GetAccountBytes(i), + AccountStr: tester.GetAccountString(i), + }, Accounts: [][]byte{tester.GetAccountBytes(i + 4), tester.GetAccountBytes(i + 5)}, DifferentField: s, BigField: big.NewInt(int64((i + 1) * (i + 2))), diff --git a/pkg/types/keystore/types.go b/pkg/types/keystore/types.go new file mode 100644 index 000000000..b1ab498df --- /dev/null +++ b/pkg/types/keystore/types.go @@ -0,0 +1,37 @@ +package keystore + +import "context" + +// Keystore This interface is exposed to keystore consumers +type Keystore interface { + Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) + SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) + Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) + VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) + + ListKeys(ctx context.Context, tags []string) ([][]byte, error) + + // RunUDF executes a user-defined function (UDF) on the keystore. + // This method is designed to provide flexibility by allowing users to define custom + // logic that can be executed without breaking the existing interface. While it enables + // future extensibility, developers should ensure that UDF implementations are safe + // and do not compromise the security of the keystore or the integrity of the data. + RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) +} + +// Management is exposed only to the core node +type Management interface { + AddPolicy(ctx context.Context, policy []byte) (string, error) + RemovePolicy(ctx context.Context, policyID string) error + ListPolicy(ctx context.Context) []byte + + ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) + ExportKey(ctx context.Context, keyID []byte) ([]byte, error) + + CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) + DeleteKey(ctx context.Context, keyID []byte) error + + AddTag(ctx context.Context, keyID []byte, tag string) error + RemoveTag(ctx context.Context, keyID []byte, tag string) error + ListTags(ctx context.Context, keyID []byte) ([]string, error) +} diff --git a/pkg/types/llo/types.go b/pkg/types/llo/types.go index 8aec019d0..3951afa4a 100644 --- a/pkg/types/llo/types.go +++ b/pkg/types/llo/types.go @@ -8,6 +8,7 @@ import ( "math" "github.com/smartcontractkit/libocr/offchainreporting2plus/ocr3types" + ocr2types "github.com/smartcontractkit/libocr/offchainreporting2plus/types" "github.com/smartcontractkit/chainlink-common/pkg/services" ) @@ -35,8 +36,15 @@ const ( // NOTE: Only add something here if you actually need it, because it has to // be supported forever and can't be changed + + // ReportFormatEVMPremiumLegacy maintains compatibility with the legacy + // Mercury v0.3 report format ReportFormatEVMPremiumLegacy ReportFormat = 1 - ReportFormatJSON ReportFormat = 2 + // ReportFormatJSON is a simple JSON format for reference and debugging + ReportFormatJSON ReportFormat = 2 + // ReportFormatRetirement is a special "capstone" report format to indicate + // a retired OCR instance, and hand over crucial information to a new one + ReportFormatRetirement ReportFormat = 3 _ ReportFormat = math.MaxUint32 // reserved ) @@ -44,6 +52,7 @@ const ( var ReportFormats = []ReportFormat{ ReportFormatEVMPremiumLegacy, ReportFormatJSON, +
ReportFormatRetirement, } func (rf ReportFormat) String() string { @@ -52,6 +61,8 @@ func (rf ReportFormat) String() string { return "evm_premium_legacy" case ReportFormatJSON: return "json" + case ReportFormatRetirement: + return "retirement" default: return fmt.Sprintf("unknown(%d)", rf) } @@ -63,6 +74,8 @@ func ReportFormatFromString(s string) (ReportFormat, error) { return ReportFormatEVMPremiumLegacy, nil case "json": return ReportFormatJSON, nil + case "retirement": + return ReportFormatRetirement, nil default: return 0, fmt.Errorf("unknown report format: %q", s) } @@ -302,3 +315,7 @@ type ChannelDefinitionCache interface { Definitions() ChannelDefinitions services.Service } + +type ShouldRetireCache interface { + ShouldRetire(digest ocr2types.ConfigDigest) (bool, error) +} diff --git a/pkg/types/provider_llo.go b/pkg/types/provider_llo.go index db5817607..6fc8a5ccf 100644 --- a/pkg/types/provider_llo.go +++ b/pkg/types/provider_llo.go @@ -1,11 +1,21 @@ package types import ( + ocrtypes "github.com/smartcontractkit/libocr/offchainreporting2plus/types" + "github.com/smartcontractkit/chainlink-common/pkg/types/llo" ) +type LLOConfigProvider interface { + OffchainConfigDigester() ocrtypes.OffchainConfigDigester + // One instance will be run per config tracker + ContractConfigTrackers() []ocrtypes.ContractConfigTracker +} + type LLOProvider interface { - ConfigProvider + Service + LLOConfigProvider + ShouldRetireCache() llo.ShouldRetireCache ContractTransmitter() llo.Transmitter ChannelDefinitionCache() llo.ChannelDefinitionCache } diff --git a/pkg/utils/sleeper_task.go b/pkg/utils/sleeper_task.go index 0a65ea890..02dc970b3 100644 --- a/pkg/utils/sleeper_task.go +++ b/pkg/utils/sleeper_task.go @@ -1,6 +1,7 @@ package utils import ( + "context" "fmt" "time" @@ -13,12 +14,18 @@ type Worker interface { Name() string } +// WorkerCtx is like Worker but includes [context.Context]. +type WorkerCtx interface { + Work(ctx context.Context) + Name() string +} + // SleeperTask represents a task that waits in the background to process some work. type SleeperTask struct { services.StateMachine - worker Worker + worker WorkerCtx chQueue chan struct{} - chStop chan struct{} + chStop services.StopChan chDone chan struct{} chWorkDone chan struct{} } @@ -31,16 +38,27 @@ type SleeperTask struct { // immediately after it is finished. For this reason you should take care to // make sure that Worker is idempotent. // WakeUp does not block. -func NewSleeperTask(worker Worker) *SleeperTask { +func NewSleeperTask(w Worker) *SleeperTask { + return NewSleeperTaskCtx(&worker{w}) +} + +type worker struct { + Worker +} + +func (w *worker) Work(ctx context.Context) { w.Worker.Work() } + +// NewSleeperTaskCtx is like NewSleeperTask but accepts a WorkerCtx with a [context.Context]. 
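+//
+// An illustrative sketch (dbWorker is an assumed WorkerCtx implementation):
+//
+//	task := NewSleeperTaskCtx(dbWorker)
+//	task.WakeUp() // non-blocking; dbWorker.Work(ctx) runs with a ctx that is cancelled when the task is stopped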
+func NewSleeperTaskCtx(w WorkerCtx) *SleeperTask { s := &SleeperTask{ - worker: worker, + worker: w, chQueue: make(chan struct{}, 1), chStop: make(chan struct{}), chDone: make(chan struct{}), chWorkDone: make(chan struct{}, 10), } - _ = s.StartOnce("SleeperTask-"+worker.Name(), func() error { + _ = s.StartOnce("SleeperTask-"+w.Name(), func() error { go s.workerLoop() return nil }) @@ -98,10 +116,13 @@ func (s *SleeperTask) WorkDone() <-chan struct{} { func (s *SleeperTask) workerLoop() { defer close(s.chDone) + ctx, cancel := s.chStop.NewCtx() + defer cancel() + for { select { case <-s.chQueue: - s.worker.Work() + s.worker.Work(ctx) s.workDone() case <-s.chStop: return diff --git a/pkg/values/map.go b/pkg/values/map.go index 076831102..bfe5fb494 100644 --- a/pkg/values/map.go +++ b/pkg/values/map.go @@ -20,7 +20,7 @@ func EmptyMap() *Map { } } -func NewMap(m map[string]any) (*Map, error) { +func NewMap[T any](m map[string]T) (*Map, error) { mv := map[string]Value{} for k, v := range m { val, err := Wrap(v) diff --git a/pkg/values/value.go b/pkg/values/value.go index 7d9d75ff7..09f952c17 100644 --- a/pkg/values/value.go +++ b/pkg/values/value.go @@ -6,6 +6,7 @@ import ( "math" "math/big" "reflect" + "time" "github.com/go-viper/mapstructure/v2" "github.com/shopspring/decimal" @@ -76,6 +77,8 @@ func Wrap(v any) (Value, error) { return NewFloat64(float64(tv)), nil case *big.Int: return NewBigInt(tv), nil + case time.Time: + return NewTime(tv), nil case nil: return nil, nil @@ -95,6 +98,12 @@ func Wrap(v any) (Value, error) { return tv, nil case *Float64: return tv, nil + case *Bool: + return tv, nil + case *BigInt: + return tv, nil + case *Time: + return tv, nil } // Handle slices, structs, and pointers to structs diff --git a/pkg/values/value_test.go b/pkg/values/value_test.go index 23d02b728..9dbc9fb64 100644 --- a/pkg/values/value_test.go +++ b/pkg/values/value_test.go @@ -1,6 +1,7 @@ package values import ( + "bytes" "math" "math/big" "reflect" @@ -334,6 +335,58 @@ func Test_StructWrapUnwrap(t *testing.T) { assert.Equal(t, expected, unwrapped) } +func Test_NestedValueWrapUnwrap(t *testing.T) { + now := time.Now() + + wrapInt, err := Wrap(int64(100)) + require.NoError(t, err) + wrapDeci, err := Wrap(decimal.NewFromInt(32)) + require.NoError(t, err) + wrapFloat, err := Wrap(float64(1.2)) + require.NoError(t, err) + wrapBuffer, err := Wrap(bytes.NewBufferString("immabuffer").Bytes()) + require.NoError(t, err) + wrapString, err := Wrap("wrapme") + require.NoError(t, err) + wrapBool, err := Wrap(false) + require.NoError(t, err) + wrapBI, err := Wrap(big.NewInt(1)) + require.NoError(t, err) + wrapT, err := Wrap(now) + require.NoError(t, err) + + valuesMap, err := NewMap(map[string]any{ + "Int64": wrapInt, + "Decimal": wrapDeci, + "Float": wrapFloat, + "Buffer": wrapBuffer, + "String": wrapString, + "Bool": wrapBool, + "BI": wrapBI, + "T": wrapT, + }) + require.NoError(t, err) + + unwrappedMap, err := valuesMap.Unwrap() + require.NoError(t, err) + + expectedMap := map[string]any{ + "Int64": int64(100), + "Decimal": decimal.NewFromInt(32), + "Float": float64(1.2), + "Buffer": bytes.NewBufferString("immabuffer").Bytes(), + "String": "wrapme", + "Bool": false, + "BI": big.NewInt(1), + "T": now, + } + require.Equal( + t, + expectedMap, + unwrappedMap, + ) +} + func Test_SameUnderlyingTypes(t *testing.T) { type str string type i int diff --git a/pkg/workflows/exec/interpolation.go b/pkg/workflows/exec/interpolation.go index 27e6eb9a7..905036b41 100644 --- a/pkg/workflows/exec/interpolation.go +++ 
b/pkg/workflows/exec/interpolation.go @@ -104,8 +104,9 @@ func FindAndInterpolateAllKeys(input any, state Results) (any, error) { } type Env struct { - Binary []byte - Config []byte + Binary []byte + Config []byte + Secrets map[string]string } // FindAndInterpolateEnv takes a `config` any value, and recursively @@ -126,7 +127,7 @@ func FindAndInterpolateEnvVars(input any, env Env) (any, error) { } splitToken := strings.Split(matches[1], ".") - if len(splitToken) != 2 { + if len(splitToken) < 2 { return el, nil } @@ -139,8 +140,26 @@ func FindAndInterpolateEnvVars(input any, env Env) (any, error) { return env.Config, nil case "binary": return env.Binary, nil + case "secrets": + switch len(splitToken) { + // A token of the form: + // ENV.secrets.<name> + case 3: + got, ok := env.Secrets[splitToken[2]] + if !ok { + return "", fmt.Errorf("invalid env token: could not find %q in ENV.secrets", splitToken[2]) + } + + return got, nil + // A token of the form: + // ENV.secrets + case 2: + return env.Secrets, nil + } + + return nil, fmt.Errorf("invalid env token: must contain two or three elements, got %q", el.(string)) default: - return "", fmt.Errorf("invalid env token: must be of the form $(ENV.<config|binary>): got %s", el) + return "", fmt.Errorf("invalid env token: must be of the form $(ENV.<config|binary|secrets>): got %s", el) } }, ) diff --git a/pkg/workflows/exec/interpolation_test.go b/pkg/workflows/exec/interpolation_test.go index d497aa02b..ffc2e43fb 100644 --- a/pkg/workflows/exec/interpolation_test.go +++ b/pkg/workflows/exec/interpolation_test.go @@ -247,6 +247,50 @@ func TestInterpolateEnv(t *testing.T) { assert.NoError(t, err) } +func TestInterpolateEnv_Secrets(t *testing.T) { + c := map[string]any{ + "fidelityAPIKey": "$(ENV.secrets.fidelity)", + } + _, err := exec.FindAndInterpolateEnvVars(c, exec.Env{}) + assert.ErrorContains(t, err, `invalid env token: could not find "fidelity" in ENV.secrets`) + + c = map[string]any{ + "fidelityAPIKey": "$(ENV.secrets.fidelity.foo)", + } + _, err = exec.FindAndInterpolateEnvVars(c, exec.Env{}) + assert.ErrorContains(t, err, `invalid env token: must contain two or three elements`) + + c = map[string]any{ + "secrets": "$(ENV.secrets)", + } + secrets := map[string]string{ + "foo": "fooSecret", + "bar": "barSecret", + } + got, err := exec.FindAndInterpolateEnvVars( + c, + exec.Env{Secrets: secrets}) + require.NoError(t, err) + assert.Equal(t, got, map[string]any{ + "secrets": secrets, + }) + + c = map[string]any{ + "secrets": "$(ENV.secrets.foo)", + } + secrets = map[string]string{ + "foo": "fooSecret", + "bar": "barSecret", + } + got, err = exec.FindAndInterpolateEnvVars( + c, + exec.Env{Secrets: secrets}) + require.NoError(t, err) + assert.Equal(t, got, map[string]any{ + "secrets": "fooSecret", + }) +} + type fakeResults map[string]*exec.Result func (f fakeResults) ResultForStep(s string) (*exec.Result, bool) { diff --git a/pkg/workflows/sdk/builder.go b/pkg/workflows/sdk/builder.go index ad3e3e462..63bff5dce 100644 --- a/pkg/workflows/sdk/builder.go +++ b/pkg/workflows/sdk/builder.go @@ -1,6 +1,7 @@ package sdk import ( + "errors" "fmt" "reflect" "strconv" @@ -15,6 +16,7 @@ type WorkflowSpecFactory struct { duplicateNames map[string]bool emptyNames bool badCapTypes []string + errors []error fns map[string]func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) } @@ -142,6 +144,7 @@ func NewWorkflowSpecFactory( }, names: map[string]bool{}, duplicateNames: map[string]bool{}, + errors: []error{}, emptyNames: false, } } @@ -182,7 +185,15 @@ func
AccessField[I, O any](c CapDefinition[I], fieldName string) CapDefinition[O return &capDefinitionImpl[O]{ref: originalRef[:len(originalRef)-1] + "." + fieldName + ")"} } +func (w *WorkflowSpecFactory) AddErr(err error) { + w.errors = append(w.errors, err) +} + func (w *WorkflowSpecFactory) Spec() (WorkflowSpec, error) { + if len(w.errors) > 0 { + return WorkflowSpec{}, errors.Join(w.errors...) + } + if len(w.duplicateNames) > 0 { duplicates := make([]string, 0, len(w.duplicateNames)) for k := range w.duplicateNames { @@ -238,3 +249,23 @@ func AnyMap[M ~map[string]any](inputs CapMap) CapDefinition[M] { return components } + +type SecretValue string + +func (s SecretValue) Ref() any { + return s +} + +func (s SecretValue) private() {} + +func (s SecretValue) self() CapDefinition[string] { + return s +} + +func Secrets() SecretValue { + return "$(ENV.secrets)" +} + +func Secret(named string) SecretValue { + return SecretValue(fmt.Sprintf("$(ENV.secrets.%s)", named)) +} diff --git a/pkg/workflows/sdk/compute.go b/pkg/workflows/sdk/compute.go index e247984e2..fb762d356 100644 --- a/pkg/workflows/sdk/compute.go +++ b/pkg/workflows/sdk/compute.go @@ -1,5 +1,11 @@ package sdk +import ( + "errors" + + "github.com/smartcontractkit/chainlink-common/pkg/values" +) + //go:generate go run ./gen type ComputeOutput[T any] struct { @@ -20,3 +26,48 @@ func (c *computeOutputCap[T]) Value() CapDefinition[T] { } var _ ComputeOutputCap[struct{}] = &computeOutputCap[struct{}]{} + +type ComputeConfig[C any] struct { + Config C +} + +func (c *ComputeConfig[C]) ToMap() (map[string]any, error) { + var m map[string]any + switch cm := any(c.Config).(type) { + case map[string]any: + m = cm + default: + wc, err := values.WrapMap(c.Config) + if err != nil { + return nil, err + } + + uc, err := wc.Unwrap() + if err != nil { + return nil, err + } + + tm, ok := uc.(map[string]any) + if !ok { + return nil, errors.New("could not convert config into map") + } + + m = tm + } + + if _, ok := m["config"]; ok { + return nil, errors.New("`config` is a reserved keyword inside Compute config") + } + m["config"] = "$(ENV.config)" + + if _, ok := m["binary"]; ok { + return nil, errors.New("`binary` is a reserved keyword inside Compute config") + } + m["binary"] = "$(ENV.binary)" + + return m, nil +} + +func EmptyComputeConfig() *ComputeConfig[struct{}] { + return &ComputeConfig[struct{}]{Config: struct{}{}} +} diff --git a/pkg/workflows/sdk/compute_generated.go b/pkg/workflows/sdk/compute_generated.go index 01fc7cc7c..2b6b02b71 100644 --- a/pkg/workflows/sdk/compute_generated.go +++ b/pkg/workflows/sdk/compute_generated.go @@ -4,6 +4,7 @@ package sdk import ( "encoding/json" + "fmt" "github.com/smartcontractkit/chainlink-common/pkg/capabilities" "github.com/smartcontractkit/chainlink-common/pkg/values" @@ -26,14 +27,24 @@ func (input Compute1Inputs[I0]) ToSteps() StepInputs { } func Compute1[I0 any, O any](w *WorkflowSpecFactory, ref string, input Compute1Inputs[I0], compute func(Runtime, I0) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0) (O, error) { + return compute(r, i0) + } + return Compute1WithConfig[I0, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute1WithConfig[I0 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute1Inputs[I0], compute func(Runtime, C, I0) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: 
%w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -55,7 +66,15 @@ func Compute1[I0 any, O any](w *WorkflowSpecFactory, ref string, input Compute1I return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -148,14 +167,24 @@ func (input Compute2Inputs[I0, I1]) ToSteps() StepInputs { } func Compute2[I0 any, I1 any, O any](w *WorkflowSpecFactory, ref string, input Compute2Inputs[I0, I1], compute func(Runtime, I0, I1) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1) (O, error) { + return compute(r, i0, i1) + } + return Compute2WithConfig[I0, I1, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute2WithConfig[I0 any, I1 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute2Inputs[I0, I1], compute func(Runtime, C, I0, I1) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -177,7 +206,15 @@ func Compute2[I0 any, I1 any, O any](w *WorkflowSpecFactory, ref string, input C return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -221,14 +258,24 @@ func (input Compute3Inputs[I0, I1, I2]) ToSteps() StepInputs { } func Compute3[I0 any, I1 any, I2 any, O any](w *WorkflowSpecFactory, ref string, input Compute3Inputs[I0, I1, I2], compute func(Runtime, I0, I1, I2) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2) (O, error) { + return compute(r, i0, i1, i2) + } + return Compute3WithConfig[I0, I1, I2, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute3WithConfig[I0 any, I1 any, I2 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute3Inputs[I0, I1, I2], compute func(Runtime, C, I0, I1, I2) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: 
"custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -250,7 +297,15 @@ func Compute3[I0 any, I1 any, I2 any, O any](w *WorkflowSpecFactory, ref string, return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -297,14 +352,24 @@ func (input Compute4Inputs[I0, I1, I2, I3]) ToSteps() StepInputs { } func Compute4[I0 any, I1 any, I2 any, I3 any, O any](w *WorkflowSpecFactory, ref string, input Compute4Inputs[I0, I1, I2, I3], compute func(Runtime, I0, I1, I2, I3) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3) (O, error) { + return compute(r, i0, i1, i2, i3) + } + return Compute4WithConfig[I0, I1, I2, I3, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute4WithConfig[I0 any, I1 any, I2 any, I3 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute4Inputs[I0, I1, I2, I3], compute func(Runtime, C, I0, I1, I2, I3) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -326,7 +391,15 @@ func Compute4[I0 any, I1 any, I2 any, I3 any, O any](w *WorkflowSpecFactory, ref return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -376,14 +449,24 @@ func (input Compute5Inputs[I0, I1, I2, I3, I4]) ToSteps() StepInputs { } func Compute5[I0 any, I1 any, I2 any, I3 any, I4 any, O any](w *WorkflowSpecFactory, ref string, input Compute5Inputs[I0, I1, I2, I3, I4], compute func(Runtime, I0, I1, I2, I3, I4) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4) (O, error) { + return compute(r, i0, i1, i2, i3, i4) + } + return Compute5WithConfig[I0, I1, I2, I3, I4, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute5WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute5Inputs[I0, I1, I2, I3, I4], compute func(Runtime, C, I0, I1, I2, I3, I4) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: 
map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -405,7 +488,15 @@ func Compute5[I0 any, I1 any, I2 any, I3 any, I4 any, O any](w *WorkflowSpecFact return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -458,14 +549,24 @@ func (input Compute6Inputs[I0, I1, I2, I3, I4, I5]) ToSteps() StepInputs { } func Compute6[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, O any](w *WorkflowSpecFactory, ref string, input Compute6Inputs[I0, I1, I2, I3, I4, I5], compute func(Runtime, I0, I1, I2, I3, I4, I5) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5) + } + return Compute6WithConfig[I0, I1, I2, I3, I4, I5, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute6WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute6Inputs[I0, I1, I2, I3, I4, I5], compute func(Runtime, C, I0, I1, I2, I3, I4, I5) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -487,7 +588,15 @@ func Compute6[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, O any](w *Workflow return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -543,14 +652,24 @@ func (input Compute7Inputs[I0, I1, I2, I3, I4, I5, I6]) ToSteps() StepInputs { } func Compute7[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, O any](w *WorkflowSpecFactory, ref string, input Compute7Inputs[I0, I1, I2, I3, I4, I5, I6], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6) + } + return Compute7WithConfig[I0, I1, I2, I3, I4, I5, I6, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute7WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input 
Compute7Inputs[I0, I1, I2, I3, I4, I5, I6], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -572,7 +691,15 @@ func Compute7[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, O any](w * return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -631,14 +758,24 @@ func (input Compute8Inputs[I0, I1, I2, I3, I4, I5, I6, I7]) ToSteps() StepInputs } func Compute8[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, O any](w *WorkflowSpecFactory, ref string, input Compute8Inputs[I0, I1, I2, I3, I4, I5, I6, I7], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6, i7) + } + return Compute8WithConfig[I0, I1, I2, I3, I4, I5, I6, I7, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute8WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute8Inputs[I0, I1, I2, I3, I4, I5, I6, I7], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6, I7) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -660,7 +797,15 @@ func Compute8[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, O return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -722,14 +867,24 @@ func (input Compute9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8]) ToSteps() StepIn } func Compute9[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, O any](w 
*WorkflowSpecFactory, ref string, input Compute9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7, I8) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6, i7, i8) + } + return Compute9WithConfig[I0, I1, I2, I3, I4, I5, I6, I7, I8, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute9WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute9Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6, I7, I8) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -751,7 +906,15 @@ func Compute9[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8) if err != nil { return capabilities.CapabilityResponse{}, err } @@ -816,14 +979,24 @@ func (input Compute10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9]) ToSteps() S } func Compute10[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, I9 any, O any](w *WorkflowSpecFactory, ref string, input Compute10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9], compute func(Runtime, I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, i0 I0, i1 I1, i2 I2, i3 I3, i4 I4, i5 I5, i6 I6, i7 I7, i8 I8, i9 I9) (O, error) { + return compute(r, i0, i1, i2, i3, i4, i5, i6, i7, i8, i9) + } + return Compute10WithConfig[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9, O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute10WithConfig[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I8 any, I9 any, O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute10Inputs[I0, I1, I2, I3, I4, I5, I6, I7, I8, I9], compute func(Runtime, C, I0, I1, I2, I3, I4, I5, I6, I7, I8, I9) (O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ - ID: "custom_compute@1.0.0", - Ref: ref, - Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + ID: "custom_compute@1.0.0", + Ref: ref, + Inputs: input.ToSteps(), + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, } @@ -845,7 
+1018,15 @@ func Compute10[I0 any, I1 any, I2 any, I3 any, I4 any, I5 any, I6 any, I7 any, I return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8, inputs.Arg9) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, inputs.Arg0, inputs.Arg1, inputs.Arg2, inputs.Arg3, inputs.Arg4, inputs.Arg5, inputs.Arg6, inputs.Arg7, inputs.Arg8, inputs.Arg9) if err != nil { return capabilities.CapabilityResponse{}, err } diff --git a/pkg/workflows/sdk/compute_test.go b/pkg/workflows/sdk/compute_test.go index 9253b5b03..aa772ec78 100644 --- a/pkg/workflows/sdk/compute_test.go +++ b/pkg/workflows/sdk/compute_test.go @@ -1,6 +1,7 @@ package sdk_test import ( + "fmt" "testing" "github.com/stretchr/testify/assert" @@ -12,6 +13,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk/testutils" "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" ocr3 "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/ocr3cap" "github.com/smartcontractkit/chainlink-common/pkg/capabilities/targets/chainwriter" "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers/streams" @@ -128,6 +130,74 @@ func TestCompute(t *testing.T) { assert.Equal(t, expected, computed.Value) }) + + t.Run("compute supports passing in config via a struct", func(t *testing.T) { + computeFn := func(_ sdk.Runtime, config ComputeConfig, inputs basictrigger.TriggerOutputs) (ComputeOutput, error) { + return ComputeOutput{ + MySecret: string(config.Fidelity), + }, nil + } + conf := ComputeConfig{Fidelity: sdk.Secret("fidelity")} + workflow := createComputeWithConfigWorkflow( + conf, + computeFn, + ) + _, err := workflow.Spec() + require.NoError(t, err) + + fn := workflow.GetFn("Compute") + require.NotNil(t, fn) + + mc, err := values.WrapMap(conf) + require.NoError(t, err) + + req := capabilities.CapabilityRequest{Inputs: nsf, Config: mc} + actual, err := fn(&testutils.NoopRuntime{}, req) + require.NoError(t, err) + + expected, err := computeFn(nil, conf, basictrigger.TriggerOutputs{}) + require.NoError(t, err) + + uw, _ := actual.Value.Unwrap() + fmt.Printf("%+v", uw) + + computed := &sdk.ComputeOutput[ComputeOutput]{} + err = actual.Value.UnwrapTo(computed) + require.NoError(t, err) + + assert.Equal(t, expected, computed.Value) + }) +} + +type ComputeConfig struct { + Fidelity sdk.SecretValue +} + +type ComputeOutput struct { + MySecret string +} + +func createComputeWithConfigWorkflow(config ComputeConfig, fn func(_ sdk.Runtime, config ComputeConfig, input basictrigger.TriggerOutputs) (ComputeOutput, error)) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{ + Owner: "owner", + Name: "name", + }) + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + cc := &sdk.ComputeConfig[ComputeConfig]{ + Config: config, + } + sdk.Compute1WithConfig( + workflow, + "Compute", + cc, + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + fn, + ) + + return workflow } func createWorkflow(fn func(_ sdk.Runtime, inputFeed notstreams.Feed) ([]streams.Feed, error)) *sdk.WorkflowSpecFactory { diff 
--git a/pkg/workflows/sdk/gen/compute.go.tmpl b/pkg/workflows/sdk/gen/compute.go.tmpl index c944ca2fa..6d1e87460 100644 --- a/pkg/workflows/sdk/gen/compute.go.tmpl +++ b/pkg/workflows/sdk/gen/compute.go.tmpl @@ -31,16 +31,26 @@ func (input Compute{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}]) ToSteps() } func Compute{{.}}[{{range RangeNum .}}I{{.}} any, {{ end }}O any](w *WorkflowSpecFactory, ref string, input Compute{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}], compute func(Runtime, {{range RangeNum . }}I{{.}},{{ end }})(O, error)) ComputeOutputCap[O] { + adaptedComputeFunc := func(r Runtime, _ struct{}, {{range RangeNum .}}i{{.}} I{{.}},{{end}}) (O, error) { + return compute(r, {{range RangeNum .}}i{{.}},{{end}}) + } + return Compute{{.}}WithConfig[{{range RangeNum .}}I{{.}}, {{ end }}O](w, ref, EmptyComputeConfig(), input, adaptedComputeFunc) +} + +func Compute{{.}}WithConfig[{{range RangeNum .}}I{{.}} any, {{ end }}O any, C any](w *WorkflowSpecFactory, ref string, config *ComputeConfig[C], input Compute{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}], compute func(Runtime, C, {{range RangeNum . }}I{{.}},{{ end }})(O, error)) ComputeOutputCap[O] { + cm, err := config.ToMap() + if err != nil { + w.AddErr(fmt.Errorf("could not convert config for compute step %s to config: %w", ref, err)) + return nil + } + def := StepDefinition{ ID: "custom_compute@1.0.0", Ref: ref, Inputs: input.ToSteps(), - Config: map[string]any{ - "config": "$(ENV.config)", - "binary": "$(ENV.binary)", - }, + Config: cm, CapabilityType: capabilities.CapabilityTypeAction, - } + } capFn := func(runtime Runtime, request capabilities.CapabilityRequest) (capabilities.CapabilityResponse, error) { var inputs runtime{{.}}Inputs[{{range RangeNum . }}I{{.}},{{ end }}] @@ -60,7 +70,15 @@ func Compute{{.}}[{{range RangeNum .}}I{{.}} any, {{ end }}O any](w *WorkflowSp return capabilities.CapabilityResponse{}, err } - output, err := compute(runtime, {{range RangeNum . }}inputs.Arg{{.}},{{ end }}) + var conf C + if request.Config != nil { + err = request.Config.UnwrapTo(&conf) + if err != nil { + return capabilities.CapabilityResponse{}, err + } + } + + output, err := compute(runtime, conf, {{range RangeNum . }}inputs.Arg{{.}},{{ end }}) if err != nil { return capabilities.CapabilityResponse{}, err } diff --git a/pkg/workflows/sdk/runtime.go b/pkg/workflows/sdk/runtime.go index de254acaf..d6403717e 100644 --- a/pkg/workflows/sdk/runtime.go +++ b/pkg/workflows/sdk/runtime.go @@ -7,9 +7,22 @@ import ( var BreakErr = capabilities.ErrStopExecution +type MessageEmitter interface { + // Emit sends a message to the labeler's destination. + Emit(string) error + + // With sets the labels for the message to be emitted. Labels are passed as key-value pairs + // and are cumulative. + With(kvs ...string) MessageEmitter +} + +// Runtime is the interface available to workflow code running in the guest. type Runtime interface { Logger() logger.Logger Fetch(req FetchRequest) (FetchResponse, error) + + // Emitter returns a MessageEmitter that sends messages and labels to the configured collector.
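+ //
+ // An illustrative call from workflow code (labels and message are examples only, not part of this change):
+ //
+ //	runtime.Emitter().With("step", "fetch").Emit("starting fetch")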
+ Emitter() MessageEmitter } type FetchRequest struct { diff --git a/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go b/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go index c6a9d5be9..c9bececc6 100644 --- a/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go +++ b/pkg/workflows/sdk/testdata/fixtures/capabilities/listtrigger/trigger_builders_generated.go @@ -19,7 +19,17 @@ func (cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) TriggerOutputsCap { } step := sdk.Step[TriggerOutputs]{Definition: def} - return TriggerOutputsCapFromStep(w, step) + raw := step.AddTo(w) + return TriggerOutputsWrapper(raw) +} + +// TriggerOutputsWrapper allows access to field from an sdk.CapDefinition[TriggerOutputs] +func TriggerOutputsWrapper(raw sdk.CapDefinition[TriggerOutputs]) TriggerOutputsCap { + wrapped, ok := raw.(TriggerOutputsCap) + if ok { + return wrapped + } + return &triggerOutputsCap{CapDefinition: raw} } type TriggerOutputsCap interface { @@ -28,21 +38,19 @@ type TriggerOutputsCap interface { private() } -// TriggerOutputsCapFromStep should only be called from generated code to assure type safety -func TriggerOutputsCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[TriggerOutputs]) TriggerOutputsCap { - raw := step.AddTo(w) - return &triggerOutputs{CapDefinition: raw} -} - -type triggerOutputs struct { +type triggerOutputsCap struct { sdk.CapDefinition[TriggerOutputs] } -func (*triggerOutputs) private() {} -func (c *triggerOutputs) CoolOutput() sdk.CapDefinition[[]string] { +func (*triggerOutputsCap) private() {} +func (c *triggerOutputsCap) CoolOutput() sdk.CapDefinition[[]string] { return sdk.AccessField[TriggerOutputs, []string](c.CapDefinition, "cool_output") } +func ConstantTriggerOutputs(value TriggerOutputs) TriggerOutputsCap { + return &triggerOutputsCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewTriggerOutputsFromFields( coolOutput sdk.CapDefinition[[]string]) TriggerOutputsCap { return &simpleTriggerOutputs{ diff --git a/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go b/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go index 4244079d4..42998133c 100644 --- a/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go +++ b/pkg/workflows/sdk/testdata/fixtures/capabilities/notstreams/trigger_builders_generated.go @@ -19,7 +19,17 @@ func (cfg TriggerConfig) New(w *sdk.WorkflowSpecFactory) FeedCap { } step := sdk.Step[Feed]{Definition: def} - return FeedCapFromStep(w, step) + raw := step.AddTo(w) + return FeedWrapper(raw) +} + +// FeedWrapper allows access to field from an sdk.CapDefinition[Feed] +func FeedWrapper(raw sdk.CapDefinition[Feed]) FeedCap { + wrapped, ok := raw.(FeedCap) + if ok { + return wrapped + } + return &feedCap{CapDefinition: raw} } type FeedCap interface { @@ -30,27 +40,25 @@ type FeedCap interface { private() } -// FeedCapFromStep should only be called from generated code to assure type safety -func FeedCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[Feed]) FeedCap { - raw := step.AddTo(w) - return &feed{CapDefinition: raw} -} - -type feed struct { +type feedCap struct { sdk.CapDefinition[Feed] } -func (*feed) private() {} -func (c *feed) Metadata() SignerMetadataCap { - return &signerMetadata{CapDefinition: sdk.AccessField[Feed, SignerMetadata](c.CapDefinition, "Metadata")} +func (*feedCap) private() {} +func 
(c *feedCap) Metadata() SignerMetadataCap { + return SignerMetadataWrapper(sdk.AccessField[Feed, SignerMetadata](c.CapDefinition, "Metadata")) } -func (c *feed) Payload() FeedReportCap { - return &feedReport{CapDefinition: sdk.AccessField[Feed, FeedReport](c.CapDefinition, "Payload")} +func (c *feedCap) Payload() FeedReportCap { + return FeedReportWrapper(sdk.AccessField[Feed, FeedReport](c.CapDefinition, "Payload")) } -func (c *feed) Timestamp() sdk.CapDefinition[int64] { +func (c *feedCap) Timestamp() sdk.CapDefinition[int64] { return sdk.AccessField[Feed, int64](c.CapDefinition, "Timestamp") } +func ConstantFeed(value Feed) FeedCap { + return &feedCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedFromFields( metadata SignerMetadataCap, payload FeedReportCap, @@ -86,6 +94,15 @@ func (c *simpleFeed) Timestamp() sdk.CapDefinition[int64] { func (c *simpleFeed) private() {} +// FeedReportWrapper allows access to field from an sdk.CapDefinition[FeedReport] +func FeedReportWrapper(raw sdk.CapDefinition[FeedReport]) FeedReportCap { + wrapped, ok := raw.(FeedReportCap) + if ok { + return wrapped + } + return &feedReportCap{CapDefinition: raw} +} + type FeedReportCap interface { sdk.CapDefinition[FeedReport] BuyPrice() sdk.CapDefinition[[]uint8] @@ -97,36 +114,34 @@ type FeedReportCap interface { private() } -// FeedReportCapFromStep should only be called from generated code to assure type safety -func FeedReportCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[FeedReport]) FeedReportCap { - raw := step.AddTo(w) - return &feedReport{CapDefinition: raw} -} - -type feedReport struct { +type feedReportCap struct { sdk.CapDefinition[FeedReport] } -func (*feedReport) private() {} -func (c *feedReport) BuyPrice() sdk.CapDefinition[[]uint8] { +func (*feedReportCap) private() {} +func (c *feedReportCap) BuyPrice() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "BuyPrice") } -func (c *feedReport) FullReport() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) FullReport() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "FullReport") } -func (c *feedReport) ObservationTimestamp() sdk.CapDefinition[int64] { +func (c *feedReportCap) ObservationTimestamp() sdk.CapDefinition[int64] { return sdk.AccessField[FeedReport, int64](c.CapDefinition, "ObservationTimestamp") } -func (c *feedReport) ReportContext() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) ReportContext() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "ReportContext") } -func (c *feedReport) SellPrice() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) SellPrice() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "SellPrice") } -func (c *feedReport) Signature() sdk.CapDefinition[[]uint8] { +func (c *feedReportCap) Signature() sdk.CapDefinition[[]uint8] { return sdk.AccessField[FeedReport, []uint8](c.CapDefinition, "Signature") } +func ConstantFeedReport(value FeedReport) FeedReportCap { + return &feedReportCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewFeedReportFromFields( buyPrice sdk.CapDefinition[[]uint8], fullReport sdk.CapDefinition[[]uint8], @@ -183,27 +198,34 @@ func (c *simpleFeedReport) Signature() sdk.CapDefinition[[]uint8] { func (c *simpleFeedReport) private() {} +// SignerMetadataWrapper allows access to field from an sdk.CapDefinition[SignerMetadata] +func SignerMetadataWrapper(raw sdk.CapDefinition[SignerMetadata]) 
SignerMetadataCap { + wrapped, ok := raw.(SignerMetadataCap) + if ok { + return wrapped + } + return &signerMetadataCap{CapDefinition: raw} +} + type SignerMetadataCap interface { sdk.CapDefinition[SignerMetadata] Signer() sdk.CapDefinition[string] private() } -// SignerMetadataCapFromStep should only be called from generated code to assure type safety -func SignerMetadataCapFromStep(w *sdk.WorkflowSpecFactory, step sdk.Step[SignerMetadata]) SignerMetadataCap { - raw := step.AddTo(w) - return &signerMetadata{CapDefinition: raw} -} - -type signerMetadata struct { +type signerMetadataCap struct { sdk.CapDefinition[SignerMetadata] } -func (*signerMetadata) private() {} -func (c *signerMetadata) Signer() sdk.CapDefinition[string] { +func (*signerMetadataCap) private() {} +func (c *signerMetadataCap) Signer() sdk.CapDefinition[string] { return sdk.AccessField[SignerMetadata, string](c.CapDefinition, "Signer") } +func ConstantSignerMetadata(value SignerMetadata) SignerMetadataCap { + return &signerMetadataCap{CapDefinition: sdk.ConstantDefinition(value)} +} + func NewSignerMetadataFromFields( signer sdk.CapDefinition[string]) SignerMetadataCap { return &simpleSignerMetadata{ diff --git a/pkg/workflows/sdk/testutils/runner.go b/pkg/workflows/sdk/testutils/runner.go index aa3c7c35a..390ff233a 100644 --- a/pkg/workflows/sdk/testutils/runner.go +++ b/pkg/workflows/sdk/testutils/runner.go @@ -27,6 +27,7 @@ func NewRunner(ctx context.Context) *Runner { type Runner struct { RawConfig []byte + Secrets map[string]string // Context is held in this runner because it's for testing and capability calls are made by it. // The real SDK implementation will be for the WASM guest and will make host calls, and callbacks to the program. // nolint @@ -187,7 +188,17 @@ func (r *Runner) walk(spec sdk.WorkflowSpec, ref string) error { } func (r *Runner) buildRequest(spec sdk.WorkflowSpec, capability sdk.StepDefinition) (capabilities.CapabilityRequest, error) { - conf, err := values.NewMap(capability.Config) + env := exec.Env{ + Config: r.RawConfig, + Binary: []byte{}, + Secrets: r.Secrets, + } + config, err := exec.FindAndInterpolateEnvVars(capability.Config, env) + if err != nil { + return capabilities.CapabilityRequest{}, err + } + + conf, err := values.NewMap(config.(map[string]any)) if err != nil { return capabilities.CapabilityRequest{}, err } diff --git a/pkg/workflows/sdk/testutils/runner_test.go b/pkg/workflows/sdk/testutils/runner_test.go index 555e1f5c7..7c5bf9955 100644 --- a/pkg/workflows/sdk/testutils/runner_test.go +++ b/pkg/workflows/sdk/testutils/runner_test.go @@ -255,6 +255,10 @@ func TestRunner(t *testing.T) { }) } +type ComputeConfig struct { + Fidelity sdk.SecretValue +} + func TestCompute(t *testing.T) { t.Run("Inputs don't loose integer types when any is deserialized to", func(t *testing.T) { workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) @@ -286,6 +290,34 @@ func TestCompute(t *testing.T) { require.NoError(t, runner.Err()) }) + + t.Run("Config interpolates secrets", func(t *testing.T) { + workflow := sdk.NewWorkflowSpecFactory(sdk.NewWorkflowParams{Name: "name", Owner: "owner"}) + trigger := basictrigger.TriggerConfig{Name: "foo", Number: 100}.New(workflow) + + conf := ComputeConfig{ + Fidelity: sdk.Secret("fidelity"), + } + var gotC ComputeConfig + sdk.Compute1WithConfig(workflow, "tomap", &sdk.ComputeConfig[ComputeConfig]{Config: conf}, sdk.Compute1Inputs[string]{Arg0: trigger.CoolOutput()}, func(runtime sdk.Runtime, c ComputeConfig, i0 string) 
(ComputeConfig, error) { + gotC = c + return c, nil + }) + + runner := testutils.NewRunner(tests.Context(t)) + secretToken := "superSuperSecretToken" + runner.Secrets = map[string]string{ + "fidelity": secretToken, + } + basictriggertest.Trigger(runner, func() (basictrigger.TriggerOutputs, error) { + return basictrigger.TriggerOutputs{CoolOutput: "100"}, nil + }) + + runner.Run(workflow) + + require.NoError(t, runner.Err()) + assert.Equal(t, sdk.SecretValue(secretToken), gotC.Fidelity) + }) } func registrationWorkflow() (*sdk.WorkflowSpecFactory, map[string]any, map[string]any) { diff --git a/pkg/workflows/sdk/testutils/runtime.go b/pkg/workflows/sdk/testutils/runtime.go index 5ae962663..8234b77b1 100644 --- a/pkg/workflows/sdk/testutils/runtime.go +++ b/pkg/workflows/sdk/testutils/runtime.go @@ -17,3 +17,7 @@ func (nr *NoopRuntime) Logger() logger.Logger { l, _ := logger.New() return l } + +func (nr *NoopRuntime) Emitter() sdk.MessageEmitter { + return nil +} diff --git a/pkg/workflows/secrets/secrets.go b/pkg/workflows/secrets/secrets.go new file mode 100644 index 000000000..443e2821a --- /dev/null +++ b/pkg/workflows/secrets/secrets.go @@ -0,0 +1,195 @@ +package secrets + +import ( + "crypto/rand" + "encoding/base64" + "encoding/hex" + "encoding/json" + "fmt" + + "golang.org/x/crypto/nacl/box" +) + +// this matches the secrets config file provided by users; see the secretsConfig.yaml file +type SecretsConfig struct { + SecretsNames map[string][]string `yaml:"secretsNames"` +} + +// this is the payload that will be encrypted +type SecretPayloadToEncrypt struct { + WorkflowOwner string `json:"workflowOwner"` + Secrets map[string]string `json:"secrets"` +} + +// this holds the mapping of a secret name (e.g. API_KEY) to the local environment variable name which points to the raw secret +type AssignedSecrets struct { + WorkflowSecretName string `json:"workflowSecretName"` + LocalEnvVarName string `json:"localEnvVarName"` +} + +// this is the metadata that will be stored in the encrypted secrets file +type Metadata struct { + WorkflowOwner string `json:"workflowOwner"` + CapabilitiesRegistry string `json:"capabilitiesRegistry"` + DonId string `json:"donId"` + DateEncrypted string `json:"dateEncrypted"` + NodePublicEncryptionKeys map[string]string `json:"nodePublicEncryptionKeys"` + EnvVarsAssignedToNodes map[string][]AssignedSecrets `json:"envVarsAssignedToNodes"` +} + +// this is the result of the encryption; it will be used by the DON +type EncryptedSecretsResult struct { + EncryptedSecrets map[string]string `json:"encryptedSecrets"` + Metadata Metadata `json:"metadata"` +} + +func ContainsP2pId(p2pId [32]byte, p2pIds [][32]byte) bool { + for _, id := range p2pIds { + if id == p2pId { + return true + } + } + return false +} + +func EncryptSecretsForNodes( + workflowOwner string, + secrets map[string][]string, + encryptionPublicKeys map[string][32]byte, // map of p2pIds to the node's CSA (Ed25519) key.
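+ // config maps each secret name to its candidate env-var names; the loop below hands them out to nodes round-robin.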
+ config SecretsConfig, +) (map[string]string, map[string][]AssignedSecrets, error) { + encryptedSecrets := make(map[string]string) + secretsEnvVarsByNode := make(map[string][]AssignedSecrets) // Only used for metadata + i := 0 + + // Encrypt secrets for each node + for p2pId, encryptionPublicKey := range encryptionPublicKeys { + secretsPayload := SecretPayloadToEncrypt{ + WorkflowOwner: workflowOwner, + Secrets: make(map[string]string), + } + + for secretName, secretValues := range secrets { + // Assign secrets to nodes in a round-robin fashion + secretValue := secretValues[i%len(secretValues)] + secretsPayload.Secrets[secretName] = secretValue + } + + // Marshal the secrets payload into JSON + secretsJSON, err := json.Marshal(secretsPayload) + if err != nil { + return nil, nil, err + } + + // Encrypt secrets payload + encrypted, err := box.SealAnonymous(nil, secretsJSON, &encryptionPublicKey, rand.Reader) + if err != nil { + return nil, nil, err + } + encryptedSecrets[p2pId] = base64.StdEncoding.EncodeToString(encrypted) + + // Generate metadata showing which nodes were assigned which environment variables + for secretName, envVarNames := range config.SecretsNames { + secretsEnvVarsByNode[p2pId] = append(secretsEnvVarsByNode[p2pId], AssignedSecrets{ + WorkflowSecretName: secretName, + LocalEnvVarName: envVarNames[i%len(envVarNames)], + }) + } + + i++ + } + + return encryptedSecrets, secretsEnvVarsByNode, nil +} + +type X25519Key interface { + Decrypt(box []byte) ([]byte, error) + PublicKey() [32]byte + PublicKeyString() string +} + +func DecryptSecretsForNode( + result EncryptedSecretsResult, + key X25519Key, + workflowOwner string, +) (map[string]string, error) { + var foundP2pId string + for p2pId, pubKey := range result.Metadata.NodePublicEncryptionKeys { + if pubKey == key.PublicKeyString() { + foundP2pId = p2pId + break + } + } + + if foundP2pId == "" { + return nil, fmt.Errorf("cannot find public key %s in nodePublicEncryptionKeys list", key.PublicKeyString()) + } + + bundle, ok := result.EncryptedSecrets[foundP2pId] + if !ok { + return nil, fmt.Errorf("cannot find secrets blob for node with public key %s", key.PublicKeyString()) + } + + bundleBytes, err := base64.StdEncoding.DecodeString(bundle) + if err != nil { + return nil, fmt.Errorf("cannot base64 decode bundle into bytes: %w", err) + } + + payloadBytes, err := key.Decrypt(bundleBytes) + if err != nil { + return nil, fmt.Errorf("cannot decrypt box: %w", err) + } + + var payload SecretPayloadToEncrypt + err = json.Unmarshal(payloadBytes, &payload) + if err != nil { + return nil, err + } + + if payload.WorkflowOwner != workflowOwner { + return nil, fmt.Errorf("invalid secrets bundle: got owner %s, expected %s", payload.WorkflowOwner, workflowOwner) + } + + return payload.Secrets, nil +} + +func ValidateEncryptedSecrets(secretsData []byte, encryptionPublicKeys map[string][32]byte, workflowOwner string) error { + var encryptedSecrets EncryptedSecretsResult + err := json.Unmarshal(secretsData, &encryptedSecrets) + if err != nil { + return fmt.Errorf("failed to parse encrypted secrets JSON: %w", err) + } + + if encryptedSecrets.Metadata.WorkflowOwner != workflowOwner { + return fmt.Errorf("the workflow owner in the encrypted secrets metadata: %s does not match the input workflow owner: %s", encryptedSecrets.Metadata.WorkflowOwner, workflowOwner) + } + + // Verify that the encryptedSecrets values are all valid base64 strings + for _, encryptedSecret := range encryptedSecrets.EncryptedSecrets { + _, err := 
base64.StdEncoding.DecodeString(encryptedSecret) + if err != nil { + return fmt.Errorf("the encrypted secrets JSON payload contains encrypted secrets which are not in base64 format: %w", err) + } + } + + // Check that the p2pIds keys in encryptedSecrets.EncryptedSecrets match the keys in encryptionPublicKeys + for p2pId := range encryptedSecrets.Metadata.NodePublicEncryptionKeys { + if _, ok := encryptedSecrets.EncryptedSecrets[p2pId]; !ok { + return fmt.Errorf("no encrypted secret found for node with p2pId: %s. Ensure secrets have been correctly encrypted for this DON", p2pId) + } + } + + // Check that the encryptionPublicKey values in the encryptedSecrets metadata match the keys in encryptionPublicKeys + for p2pId, keyFromMetadata := range encryptedSecrets.Metadata.NodePublicEncryptionKeys { + encryptionPublicKey, ok := encryptionPublicKeys[p2pId] + if !ok { + return fmt.Errorf("encryption key not found for node with p2pId: %s. Ensure secrets have been correctly encrypted for this DON", p2pId) + } + + if keyFromMetadata != hex.EncodeToString(encryptionPublicKey[:]) { + return fmt.Errorf("the encryption public key in the encrypted secrets metadata does not match the one in the workflow registry. Ensure secrets have been correctly encrypted for this DON") + } + } + + return nil +} diff --git a/pkg/workflows/secrets/secrets_test.go b/pkg/workflows/secrets/secrets_test.go new file mode 100644 index 000000000..cf192b5b0 --- /dev/null +++ b/pkg/workflows/secrets/secrets_test.go @@ -0,0 +1,277 @@ +package secrets + +import ( + "crypto/rand" + "encoding/base64" + "encoding/hex" + "encoding/json" + "errors" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/crypto/nacl/box" +) + +// Mock data for testing, see JSON in https://gist.github.com/shileiwill/c077b31193f3f1a124bf4b046a464bf5 +var ( + encryptionPublicKeys = map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {17, 65, 221, 30, 70, 121, 124, 237, 155, 15, 186, 212, 145, 21, 241, 133, 7, 246, 246, 230, 227, 204, 134, 231, 229, 186, 22, 158, 88, 100, 90, 220}, + "147d5cc651819b093cd2fdff9760f0f0f77b7ef7798d9e24fc6a350b7300e5d9": {65, 45, 198, 254, 72, 234, 78, 52, 186, 170, 119, 218, 46, 59, 3, 45, 57, 185, 56, 89, 123, 111, 61, 97, 254, 126, 209, 131, 168, 39, 164, 49}, + "2934f31f278e5c60618f85861bd6add54a4525d79a642019bdc87d75d26372c3": {40, 185, 17, 67, 236, 145, 17, 121, 106, 125, 99, 225, 76, 28, 246, 187, 1, 180, 237, 89, 102, 122, 181, 79, 91, 199, 46, 190, 73, 200, 129, 190}, + "298834a041a056df58c839cb53d99b78558693042e54dff238f504f16d18d4b6": {72, 121, 1, 224, 192, 169, 211, 198, 110, 124, 252, 80, 243, 169, 227, 205, 191, 223, 27, 1, 7, 39, 61, 115, 217, 74, 145, 210, 120, 84, 85, 22}, + "5f247f61a6d5bfdd1d5064db0bd25fe443648133c6131975edb23481424e3d9c": {122, 22, 111, 188, 129, 110, 180, 164, 220, 182, 32, 209, 28, 60, 202, 197, 192, 133, 213, 107, 25, 114, 55, 65, 0, 17, 111, 135, 97, 157, 235, 184}, + "77224be9d052343b1d17156a1e463625c0d746468d4f5a44cddd452365b1d4ed": {7, 224, 255, 197, 123, 98, 99, 96, 77, 245, 23, 185, 75, 217, 134, 22, 148, 81, 163, 201, 6, 0, 168, 85, 187, 25, 33, 45, 197, 117, 222, 84}, + "adb6bf005cdb23f21e11b82d66b9f62628c2939640ed93028bf0dad3923c5a8b": {64, 59, 114, 240, 177, 179, 181, 245, 169, 27, 207, 237, 183, 242, 133, 153, 118, 117, 2, 160, 75, 91, 126, 6, 127, 207, 55, 130, 226, 62, 235, 156}, + "b96933429b1a81c811e1195389d7733e936b03e8086e75ea1fa92c61564b6c31": {117, 172, 99, 252, 151, 163, 30, 49, 
22, 128, 132, 224, 222, 140, 205, 43, 234, 144, 5, 155, 96, 157, 150, 47, 62, 67, 252, 41, 108, 219, 162, 141}, + "d7e9f2252b09edf0802a65b60bc9956691747894cb3ab9fefd072adf742eb9f1": {180, 115, 9, 31, 225, 212, 219, 188, 38, 173, 113, 198, 123, 68, 50, 248, 244, 40, 14, 6, 186, 181, 226, 18, 42, 146, 244, 171, 139, 111, 242, 245}, + "e38c9f2760db006f070e9cc1bc1c2269ad033751adaa85d022fb760cbc5b5ef6": {69, 66, 244, 253, 46, 209, 80, 200, 201, 118, 179, 152, 2, 254, 61, 153, 74, 236, 58, 201, 79, 209, 30, 120, 23, 246, 147, 177, 201, 161, 218, 187}, + } + secrets = map[string][]string{ + "SECRET_A": {"one", "two", "three", "four"}, + "SECRET_B": {"all"}, + } + workflowOwner = "0x9ed925d8206a4f88a2f643b28b3035b315753cd6" + config = SecretsConfig{ + SecretsNames: map[string][]string{ + "SECRET_A": {"ENV_VAR_A_FOR_NODE_ONE", "ENV_VAR_A_FOR_NODE_TWO", "ENV_VAR_A_FOR_NODE_THREE", "ENV_VAR_A_FOR_NODE_FOUR"}, + "SECRET_B": {"ENV_VAR_B_FOR_ALL_NODES"}, + }, + } +) + +func TestEncryptSecretsForNodes(t *testing.T) { + encryptedSecrets, secretsEnvVarsByNode, err := EncryptSecretsForNodes(workflowOwner, secrets, encryptionPublicKeys, config) + // Ensure no error occurred + assert.NoError(t, err) + + // Ensure all p2pKeys are in encryptedSecrets map + assert.Equal(t, len(encryptionPublicKeys), len(encryptedSecrets)) + for p2pId := range encryptionPublicKeys { + _, exists := encryptedSecrets[p2pId] + assert.True(t, exists, "p2pId %s not found in encryptedSecrets", p2pId) + } + + // In envVarsAssignedToNodes, ensure SECRET_B has ENV_VAR_B_FOR_ALL_NODES for all nodes + for _, assignedSecrets := range secretsEnvVarsByNode { + for _, assignedSecret := range assignedSecrets { + if assignedSecret.WorkflowSecretName == "SECRET_B" { + assert.Contains(t, assignedSecret.LocalEnvVarName, "ENV_VAR_B_FOR_ALL_NODES") + } + } + } + + // In envVarsAssignedToNodes, ensure ENV_VAR_A_FOR_NODE_ONE and ENV_VAR_A_FOR_NODE_TWO shows up in 3 nodes and others in 2 nodes + nodeCount := make(map[string]int) + + for _, assignedSecrets := range secretsEnvVarsByNode { + for _, assignedSecret := range assignedSecrets { + nodeCount[assignedSecret.LocalEnvVarName]++ + } + } + + assert.Equal(t, 3, nodeCount["ENV_VAR_A_FOR_NODE_ONE"], "ENV_VAR_A_FOR_NODE_ONE should be assigned to 3 nodes") + assert.Equal(t, 3, nodeCount["ENV_VAR_A_FOR_NODE_TWO"], "ENV_VAR_A_FOR_NODE_TWO should be assigned to 3 nodes") + assert.Equal(t, 2, nodeCount["ENV_VAR_A_FOR_NODE_THREE"], "ENV_VAR_A_FOR_NODE_THREE should be assigned to 2 nodes") + assert.Equal(t, 2, nodeCount["ENV_VAR_A_FOR_NODE_FOUR"], "ENV_VAR_A_FOR_NODE_FOUR should be assigned to 2 nodes") +} + +type key struct { + publicKey *[32]byte + privateKey *[32]byte +} + +func (k *key) PublicKey() [32]byte { + return *k.publicKey +} + +func (k *key) PublicKeyString() string { + return base64.StdEncoding.EncodeToString((*k.publicKey)[:]) +} + +func (k *key) Decrypt(sealedBox []byte) ([]byte, error) { + b, ok := box.OpenAnonymous(nil, sealedBox, k.publicKey, k.privateKey) + if !ok { + return nil, errors.New("failed to decrypt box") + } + + return b, nil +} + +func newKey() (*key, error) { + pk, sk, err := box.GenerateKey(rand.Reader) + if err != nil { + return nil, err + } + + return &key{publicKey: pk, privateKey: sk}, nil +} + +func TestEncryptDecrypt(t *testing.T) { + k, err := newKey() + require.NoError(t, err) + + k2, err := newKey() + require.NoError(t, err) + + expectedSecrets := map[string]string{ + "foo": "fooToken", + "bar": "barToken", + } + secrets := map[string][]string{ + "foo": 
[]string{expectedSecrets["foo"]}, + "bar": []string{expectedSecrets["bar"]}, + } + encryptionKeys := map[string][32]byte{ + "nodeAPeerID": k.PublicKey(), + "nodeBPeerID": k2.PublicKey(), + } + config := SecretsConfig{ + SecretsNames: map[string][]string{ + "foo": []string{"ENV_FOO"}, + "bar": []string{"ENV_BAR"}, + }, + } + + encryptedSecrets, _, err := EncryptSecretsForNodes(workflowOwner, secrets, encryptionKeys, config) + require.NoError(t, err) + + result := EncryptedSecretsResult{ + EncryptedSecrets: encryptedSecrets, + Metadata: Metadata{ + NodePublicEncryptionKeys: map[string]string{ + "nodeAPeerID": k.PublicKeyString(), + "nodeBPeerID": k2.PublicKeyString(), + }, + }, + } + t.Run("success", func(st *testing.T) { + gotSecrets, err := DecryptSecretsForNode(result, k, workflowOwner) + require.NoError(st, err) + + assert.Equal(st, expectedSecrets, gotSecrets) + + gotSecrets, err = DecryptSecretsForNode(result, k2, workflowOwner) + require.NoError(st, err) + + assert.Equal(st, expectedSecrets, gotSecrets) + }) + + t.Run("incorrect owner", func(st *testing.T) { + _, err = DecryptSecretsForNode(result, k, "wrong owner") + assert.ErrorContains(t, err, "invalid secrets bundle: got owner") + }) + + t.Run("key not in metadata", func(st *testing.T) { + overriddenResult := EncryptedSecretsResult{ + EncryptedSecrets: encryptedSecrets, + Metadata: Metadata{ + NodePublicEncryptionKeys: map[string]string{ + "nodeBPeerID": k2.PublicKeyString(), + }, + }, + } + _, err = DecryptSecretsForNode(overriddenResult, k, workflowOwner) + assert.ErrorContains(t, err, "cannot find public key") + }) + + t.Run("missing secrets blob", func(st *testing.T) { + overriddenSecrets := map[string]string{ + "nodeAPeerID": encryptedSecrets["nodeAPeerID"], + } + overriddenResult := EncryptedSecretsResult{ + EncryptedSecrets: overriddenSecrets, + Metadata: Metadata{ + NodePublicEncryptionKeys: map[string]string{ + "nodeAPeerID": k.PublicKeyString(), + "nodeBPeerID": k2.PublicKeyString(), + }, + }, + } + _, err = DecryptSecretsForNode(overriddenResult, k2, workflowOwner) + assert.ErrorContains(t, err, "cannot find secrets blob") + }) + +} + +func TestValidateEncryptedSecrets(t *testing.T) { + // Helper function to generate a valid base64 encoded string + validBase64 := func(input string) string { + return base64.StdEncoding.EncodeToString([]byte(input)) + } + + // Define a key for testing + keyFromMetadata := [32]byte{1, 2, 3} + + // Valid JSON input with matching workflow owner + validInput := map[string]interface{}{ + "encryptedSecrets": map[string]string{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": validBase64("secret1"), + }, + "metadata": map[string]interface{}{ + "workflowOwner": "correctOwner", + "nodePublicEncryptionKeys": map[string]string{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": hex.EncodeToString(keyFromMetadata[:]), + }, + }, + } + + // Serialize the valid input + validData, _ := json.Marshal(validInput) + + // Define test cases + tests := []struct { + name string + inputData []byte + encryptionPublicKeys map[string][32]byte + workflowOwner string + shouldError bool + }{ + { + name: "Valid input", + inputData: validData, + workflowOwner: "correctOwner", + encryptionPublicKeys: map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {1, 2, 3}, + }, + shouldError: false, + }, + { + name: "Invalid base64 encoded secret", + inputData: []byte(`{"encryptedSecrets": 
{"09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": "invalid-base64!"}}`), + workflowOwner: "correctOwner", + encryptionPublicKeys: map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {1, 2, 3}, + }, + shouldError: true, + }, + { + name: "Missing public key", + inputData: validData, + workflowOwner: "correctOwner", + encryptionPublicKeys: map[string][32]byte{ + "some-other-id": {1, 2, 3}, + }, + shouldError: true, + }, + { + name: "Mismatched workflow owner", + inputData: validData, + workflowOwner: "incorrectOwner", + encryptionPublicKeys: map[string][32]byte{ + "09ca39cd924653c72fbb0e458b629c3efebdad3e29e7cd0b5760754d919ed829": {1, 2, 3}, + }, + shouldError: true, + }, + } + + // Run the test cases + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := ValidateEncryptedSecrets(test.inputData, test.encryptionPublicKeys, test.workflowOwner) + if (err != nil) != test.shouldError { + t.Errorf("Expected error: %v, got: %v", test.shouldError, err != nil) + } + }) + } +} diff --git a/pkg/workflows/wasm/host/module.go b/pkg/workflows/wasm/host/module.go index 68efe41c3..b859404e7 100644 --- a/pkg/workflows/wasm/host/module.go +++ b/pkg/workflows/wasm/host/module.go @@ -2,6 +2,7 @@ package host import ( "bytes" + "context" "encoding/base64" "encoding/binary" "encoding/json" @@ -18,43 +19,25 @@ import ( "github.com/bytecodealliance/wasmtime-go/v23" "google.golang.org/protobuf/proto" + "github.com/smartcontractkit/chainlink-common/pkg/custmsg" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) -// safeMem returns a copy of the wasm module memory at the given pointer and size. -func safeMem(caller *wasmtime.Caller, ptr int32, size int32) ([]byte, error) { - mem := caller.GetExport("memory").Memory() - data := mem.UnsafeData(caller) - if ptr+size > int32(len(data)) { - return nil, errors.New("out of bounds memory access") - } - - cd := make([]byte, size) - copy(cd, data[ptr:ptr+size]) - return cd, nil -} - -// copyBuffer copies the given src byte slice into the wasm module memory at the given pointer and size. 
-func copyBuffer(caller *wasmtime.Caller, src []byte, ptr int32, size int32) int64 { - mem := caller.GetExport("memory").Memory() - rawData := mem.UnsafeData(caller) - if int32(len(rawData)) < ptr+size { - return -1 - } - buffer := rawData[ptr : ptr+size] - dataLen := int64(len(src)) - copy(buffer, src) - return dataLen +type RequestData struct { + fetchRequestsCounter int + response *wasmpb.Response + ctx func() context.Context } -type respStore struct { - m map[string]*wasmpb.Response +type store struct { + m map[string]*RequestData mu sync.RWMutex } -func (r *respStore) add(id string, resp *wasmpb.Response) error { +func (r *store) add(id string, req *RequestData) error { r.mu.Lock() defer r.mu.Unlock() @@ -63,42 +46,54 @@ func (r *respStore) add(id string, resp *wasmpb.Response) error { return fmt.Errorf("error storing response: response already exists for id: %s", id) } - r.m[id] = resp + r.m[id] = req return nil } -func (r *respStore) get(id string) (*wasmpb.Response, error) { +func (r *store) get(id string) (*RequestData, error) { r.mu.Lock() defer r.mu.Unlock() _, found := r.m[id] if !found { - return nil, fmt.Errorf("could not find response for id %s", id) + return nil, fmt.Errorf("could not find request data for id %s", id) } return r.m[id], nil } +func (r *store) delete(id string) { + r.mu.Lock() + defer r.mu.Unlock() + + delete(r.m, id) +} + var ( - defaultTickInterval = 100 * time.Millisecond - defaultTimeout = 300 * time.Millisecond - defaultMaxMemoryMBs = 128 - DefaultInitialFuel = uint64(100_000_000) + defaultTickInterval = 100 * time.Millisecond + defaultTimeout = 2 * time.Second + defaultMinMemoryMBs = 128 + DefaultInitialFuel = uint64(100_000_000) + defaultMaxFetchRequests = 5 ) type DeterminismConfig struct { // Seed is the seed used to generate cryptographically insecure random numbers in the module. Seed int64 } - type ModuleConfig struct { - TickInterval time.Duration - Timeout *time.Duration - MaxMemoryMBs int64 - InitialFuel uint64 - Logger logger.Logger - IsUncompressed bool - Fetch func(*wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) + TickInterval time.Duration + Timeout *time.Duration + MaxMemoryMBs int64 + MinMemoryMBs int64 + InitialFuel uint64 + Logger logger.Logger + IsUncompressed bool + Fetch func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) + MaxFetchRequests int + + // Labeler is used to emit messages from the module. + Labeler custmsg.MessageEmitter // If Determinism is set, the module will override the random_get function in the WASI API with // the provided seed to ensure deterministic behavior. 
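Taken together, these changes alter how a host drives a module: Fetch now receives the per-request context that Run stores, fetch calls are capped by MaxFetchRequests, and Run itself takes a context and a non-empty request id. A minimal sketch of the resulting call pattern follows, assuming it sits in this host package; runOnce, lggr, binary, and the request id are illustrative stand-ins, not part of this diff:

func runOnce(ctx context.Context, lggr logger.Logger, binary []byte) (*wasmpb.Response, error) {
	modCfg := &ModuleConfig{
		Logger: lggr,
		// Fetch receives the context captured for this request in the requestStore.
		Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) {
			return &wasmpb.FetchResponse{ExecutionError: true, ErrorMessage: "fetch disabled in this sketch"}, nil
		},
		MaxFetchRequests: 3, // per-request cap; zero falls back to defaultMaxFetchRequests
	}
	m, err := NewModule(modCfg, binary)
	if err != nil {
		return nil, err
	}
	m.Start() // assumed Module lifecycle: starts the tick goroutine guarded by stopCh/wg
	defer m.Close()
	// Run requires a non-nil request with a non-empty Id; the Id keys the requestStore entry.
	return m.Run(ctx, &wasmpb.Request{Id: "example-request-id"})
}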
@@ -106,14 +101,14 @@ type ModuleConfig struct { } type Module struct { - engine *wasmtime.Engine - module *wasmtime.Module - linker *wasmtime.Linker + engine *wasmtime.Engine + module *wasmtime.Module + linker *wasmtime.Linker + wconfig *wasmtime.Config - r *respStore + requestStore *store cfg *ModuleConfig - wasmCfg *wasmtime.Config wg sync.WaitGroup stopCh chan struct{} @@ -144,11 +139,19 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) } if modCfg.Fetch == nil { - modCfg.Fetch = func(*wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + modCfg.Fetch = func(context.Context, *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return nil, fmt.Errorf("fetch not implemented") } } + if modCfg.MaxFetchRequests == 0 { + modCfg.MaxFetchRequests = defaultMaxFetchRequests + } + + if modCfg.Labeler == nil { + modCfg.Labeler = &unimplementedMessageEmitter{} + } + logger := modCfg.Logger if modCfg.TickInterval == 0 { @@ -159,12 +162,15 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) modCfg.Timeout = &defaultTimeout } - // Take the max of the default and the configured max memory mbs. + if modCfg.MinMemoryMBs == 0 { + modCfg.MinMemoryMBs = int64(defaultMinMemoryMBs) + } + + // Take the max of the min and the configured max memory mbs. // We do this because Go requires a minimum of 16 megabytes to run, - // and local testing has shown that with less than 64 mbs, some - // binaries may error sporadically. At 64 MB max memory, we have - // some OOM issues with compute steps while using debugger. - modCfg.MaxMemoryMBs = int64(math.Max(float64(defaultMaxMemoryMBs), float64(modCfg.MaxMemoryMBs))) + // and local testing has shown that with less than the min, some + // binaries may error sporadically. 
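+ // For example, with MinMemoryMBs of 128 and a configured MaxMemoryMBs of 64,
+ // the effective MaxMemoryMBs below becomes 128.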
+ modCfg.MaxMemoryMBs = int64(math.Max(float64(modCfg.MinMemoryMBs), float64(modCfg.MaxMemoryMBs))) cfg := wasmtime.NewConfig() cfg.SetEpochInterruption(true) @@ -172,8 +178,10 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) cfg.SetConsumeFuel(true) } - engine := wasmtime.NewEngineWithConfig(cfg) + cfg.CacheConfigLoadDefault() + cfg.SetCraneliftOptLevel(wasmtime.OptLevelSpeedAndSize) + engine := wasmtime.NewEngineWithConfig(cfg) if !modCfg.IsUncompressed { rdr := brotli.NewReader(bytes.NewBuffer(binary)) decompedBinary, err := io.ReadAll(rdr) @@ -194,35 +202,14 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) return nil, fmt.Errorf("error creating wasi linker: %w", err) } - r := &respStore{ - m: map[string]*wasmpb.Response{}, + requestStore := &store{ + m: map[string]*RequestData{}, } err = linker.FuncWrap( "env", "sendResponse", - func(caller *wasmtime.Caller, ptr int32, ptrlen int32) int32 { - b, innerErr := safeMem(caller, ptr, ptrlen) - if innerErr != nil { - logger.Errorf("error calling sendResponse: %s", err) - return ErrnoFault - } - - var resp wasmpb.Response - innerErr = proto.Unmarshal(b, &resp) - if innerErr != nil { - logger.Errorf("error calling sendResponse: %s", err) - return ErrnoFault - } - - innerErr = r.add(resp.Id, &resp) - if innerErr != nil { - logger.Errorf("error calling sendResponse: %s", err) - return ErrnoFault - } - - return ErrnoSuccess - }, + createSendResponseFn(logger, requestStore), ) if err != nil { return nil, fmt.Errorf("error wrapping sendResponse func: %w", err) @@ -231,48 +218,7 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) err = linker.FuncWrap( "env", "log", - func(caller *wasmtime.Caller, ptr int32, ptrlen int32) { - b, innerErr := safeMem(caller, ptr, ptrlen) - if innerErr != nil { - logger.Errorf("error calling log: %s", err) - return - } - - var raw map[string]interface{} - innerErr = json.Unmarshal(b, &raw) - if innerErr != nil { - return - } - - level := raw["level"] - delete(raw, "level") - - msg := raw["msg"].(string) - delete(raw, "msg") - delete(raw, "ts") - - var args []interface{} - for k, v := range raw { - args = append(args, k, v) - } - - switch level { - case "debug": - logger.Debugw(msg, args...) - case "info": - logger.Infow(msg, args...) - case "warn": - logger.Warnw(msg, args...) - case "error": - logger.Errorw(msg, args...) - case "panic": - logger.Panicw(msg, args...) - case "fatal": - logger.Fatalw(msg, args...) - default: - logger.Infow(msg, args...) 
- } - }, + createLogFn(logger), ) if err != nil { return nil, fmt.Errorf("error wrapping log func: %w", err) @@ -281,21 +227,30 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) err = linker.FuncWrap( "env", "fetch", - fetchFn(logger, modCfg), + createFetchFn(logger, wasmRead, wasmWrite, wasmWriteUInt32, modCfg, requestStore), ) if err != nil { return nil, fmt.Errorf("error wrapping fetch func: %w", err) } + err = linker.FuncWrap( + "env", + "emit", + createEmitFn(logger, requestStore, modCfg.Labeler, wasmRead, wasmWrite, wasmWriteUInt32), + ) + if err != nil { + return nil, fmt.Errorf("error wrapping emit func: %w", err) + } + m := &Module{ - engine: engine, - module: mod, - linker: linker, + engine: engine, + module: mod, + linker: linker, + wconfig: cfg, - r: r, + requestStore: requestStore, cfg: modCfg, - wasmCfg: cfg, stopCh: make(chan struct{}), } @@ -327,10 +282,26 @@ func (m *Module) Close() { m.linker.Close() m.engine.Close() m.module.Close() - m.wasmCfg.Close() + m.wconfig.Close() } -func (m *Module) Run(request *wasmpb.Request) (*wasmpb.Response, error) { +func (m *Module) Run(ctx context.Context, request *wasmpb.Request) (*wasmpb.Response, error) { + if request == nil { + return nil, fmt.Errorf("invalid request: can't be nil") + } + + if request.Id == "" { + return nil, fmt.Errorf("invalid request: can't be empty") + } + + // we add the request context to the store to make it available to the Fetch fn + err := m.requestStore.add(request.Id, &RequestData{ctx: func() context.Context { return ctx }}) + if err != nil { + return nil, fmt.Errorf("error adding ctx to the store: %w", err) + } + // we delete the request data from the store when we're done + defer m.requestStore.delete(request.Id) + store := wasmtime.NewStore(m.engine) defer store.Close() @@ -343,6 +314,7 @@ func (m *Module) Run(request *wasmpb.Request) (*wasmpb.Response, error) { wasi := wasmtime.NewWasiConfig() defer wasi.Close() + wasi.SetArgv([]string{"wasi", reqstr}) store.SetWasi(wasi) @@ -379,22 +351,27 @@ func (m *Module) Run(request *wasmpb.Request) (*wasmpb.Response, error) { _, err = start.Call(store) switch { case containsCode(err, wasm.CodeSuccess): - resp, innerErr := m.r.get(request.Id) + storedRequest, innerErr := m.requestStore.get(request.Id) if innerErr != nil { return nil, innerErr } - return resp, nil + + if storedRequest.response == nil { + return nil, fmt.Errorf("could not find response for id %s", request.Id) + } + + return storedRequest.response, nil case containsCode(err, wasm.CodeInvalidResponse): return nil, fmt.Errorf("invariant violation: error marshaling response") case containsCode(err, wasm.CodeInvalidRequest): return nil, fmt.Errorf("invariant violation: invalid request to runner") case containsCode(err, wasm.CodeRunnerErr): - resp, innerErr := m.r.get(request.Id) + storedRequest, innerErr := m.requestStore.get(request.Id) if innerErr != nil { return nil, innerErr } - return nil, fmt.Errorf("error executing runner: %s: %w", resp.ErrMsg, innerErr) + return nil, fmt.Errorf("error executing runner: %s: %w", storedRequest.response.ErrMsg, err) case containsCode(err, wasm.CodeHostErr): return nil, fmt.Errorf("invariant violation: host errored during sendResponse") default: @@ -406,47 +383,353 @@ func containsCode(err error, code int) bool { return strings.Contains(err.Error(), fmt.Sprintf("exit status %d", code)) } -func fetchFn(logger logger.Logger, modCfg *ModuleConfig) func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen 
int32) int32 {
-	const fetchErrSfx = "error calling fetch"
-	return func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 {
-		b, innerErr := safeMem(caller, reqptr, reqptrlen)
+// createSendResponseFn injects the dependency required by a WASM guest to
+// send a response back to the host.
+func createSendResponseFn(logger logger.Logger, requestStore *store) func(caller *wasmtime.Caller, ptr int32, ptrlen int32) int32 {
+	return func(caller *wasmtime.Caller, ptr int32, ptrlen int32) int32 {
+		b, innerErr := wasmRead(caller, ptr, ptrlen)
 		if innerErr != nil {
-			logger.Errorf("%s: %s", fetchErrSfx, innerErr)
+			logger.Errorf("error calling sendResponse: %s", innerErr)
 			return ErrnoFault
 		}

-		req := &wasmpb.FetchRequest{}
-		innerErr = proto.Unmarshal(b, req)
+		var resp wasmpb.Response
+		innerErr = proto.Unmarshal(b, &resp)
 		if innerErr != nil {
-			logger.Errorf("%s: %s", fetchErrSfx, innerErr)
+			logger.Errorf("error calling sendResponse: %s", innerErr)
 			return ErrnoFault
 		}

-		fetchResp, innerErr := modCfg.Fetch(req)
+		storedReq, innerErr := requestStore.get(resp.Id)
 		if innerErr != nil {
-			logger.Errorf("%s: %s", fetchErrSfx, innerErr)
+			logger.Errorf("error calling sendResponse: %s", innerErr)
 			return ErrnoFault
 		}
+		storedReq.response = &resp
+
+		return ErrnoSuccess
+	}
+}
+
+// createFetchFn injects the dependencies required by a WASM guest to make
+// fetch calls through the host, enforcing the per-request fetch limit.
+func createFetchFn(
+	logger logger.Logger,
+	reader unsafeReaderFunc,
+	writer unsafeWriterFunc,
+	sizeWriter unsafeFixedLengthWriterFunc,
+	modCfg *ModuleConfig,
+	requestStore *store,
+) func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 {
+	return func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 {
+		const errFetchSfx = "error calling fetch"
+
+		// writeErr marshals and writes an error response to wasm
+		writeErr := func(err error) int32 {
+			resp := &wasmpb.FetchResponse{
+				ExecutionError: true,
+				ErrorMessage:   err.Error(),
+			}
+
+			respBytes, perr := proto.Marshal(resp)
+			if perr != nil {
+				logger.Errorf("%s: %s", errFetchSfx, perr)
+				return ErrnoFault
+			}
+
+			if size := writer(caller, respBytes, respptr, int32(len(respBytes))); size == -1 {
+				logger.Errorf("%s: %s", errFetchSfx, errors.New("failed to write error response"))
+				return ErrnoFault
+			}
+
+			if size := sizeWriter(caller, resplenptr, uint32(len(respBytes))); size == -1 {
+				logger.Errorf("%s: %s", errFetchSfx, errors.New("failed to write error response length"))
+				return ErrnoFault
+			}
+
+			return ErrnoSuccess
+		}
+
+		b, innerErr := reader(caller, reqptr, reqptrlen)
+		if innerErr != nil {
+			logger.Errorf("%s: %s", errFetchSfx, innerErr)
+			return writeErr(innerErr)
+		}
+
+		req := &wasmpb.FetchRequest{}
+		innerErr = proto.Unmarshal(b, req)
+		if innerErr != nil {
+			logger.Errorf("%s: %s", errFetchSfx, innerErr)
+			return writeErr(innerErr)
+		}
+
+		storedRequest, innerErr := requestStore.get(req.Id)
+		if innerErr != nil {
+			logger.Errorf("%s: %s", errFetchSfx, innerErr)
+			return writeErr(innerErr)
+		}
+
+		// limit the number of fetch calls we can make per request
+		if storedRequest.fetchRequestsCounter >= modCfg.MaxFetchRequests {
+			logger.Errorf("%s: max number of fetch requests %d exceeded", errFetchSfx, modCfg.MaxFetchRequests)
+			return writeErr(errors.New("max number of fetch requests exceeded"))
+		}
+		storedRequest.fetchRequestsCounter++
+
+		fetchResp, innerErr := modCfg.Fetch(storedRequest.ctx(), req)
+		if innerErr != nil {
+			logger.Errorf("%s: %s", errFetchSfx, innerErr)
+			return writeErr(innerErr)
+		}

 		respBytes, innerErr := proto.Marshal(fetchResp)
 		if innerErr != nil {
-			logger.Errorf("%s: %s", fetchErrSfx, innerErr)
-			return ErrnoFault
+			logger.Errorf("%s: %s", errFetchSfx, innerErr)
+			return writeErr(innerErr)
 		}

-		size := copyBuffer(caller, respBytes, respptr, int32(len(respBytes)))
-		if size == -1 {
-			return ErrnoFault
+		if size := writer(caller, respBytes, respptr, int32(len(respBytes))); size == -1 {
+			return writeErr(errors.New("failed to write response"))
 		}

-		uint32Size := int32(4)
-		resplenBytes := make([]byte, uint32Size)
-		binary.LittleEndian.PutUint32(resplenBytes, uint32(len(respBytes)))
-		size = copyBuffer(caller, resplenBytes, resplenptr, uint32Size)
-		if size == -1 {
-			return ErrnoFault
+		if size := sizeWriter(caller, resplenptr, uint32(len(respBytes))); size == -1 {
+			return writeErr(errors.New("failed to write response length"))
 		}

 		return ErrnoSuccess
 	}
 }
+
+// createEmitFn injects dependencies and builds the emit function exposed to the
+// WASM guest. Errors from Emit, if any, are returned to the guest in the error
+// message of the response.
+func createEmitFn(
+	l logger.Logger,
+	requestStore *store,
+	e custmsg.MessageEmitter,
+	reader unsafeReaderFunc,
+	writer unsafeWriterFunc,
+	sizeWriter unsafeFixedLengthWriterFunc,
+) func(caller *wasmtime.Caller, respptr, resplenptr, msgptr, msglen int32) int32 {
+	logErr := func(err error) {
+		l.Errorf("error emitting message: %s", err)
+	}
+
+	return func(caller *wasmtime.Caller, respptr, resplenptr, msgptr, msglen int32) int32 {
+		// writeErr marshals and writes an error response to wasm
+		writeErr := func(err error) int32 {
+			logErr(err)
+
+			resp := &wasmpb.EmitMessageResponse{
+				Error: &wasmpb.Error{
+					Message: err.Error(),
+				},
+			}
+
+			respBytes, perr := proto.Marshal(resp)
+			if perr != nil {
+				logErr(perr)
+				return ErrnoFault
+			}
+
+			if size := writer(caller, respBytes, respptr, int32(len(respBytes))); size == -1 {
+				logErr(errors.New("failed to write response"))
+				return ErrnoFault
+			}
+
+			if size := sizeWriter(caller, resplenptr, uint32(len(respBytes))); size == -1 {
+				logErr(errors.New("failed to write response length"))
+				return ErrnoFault
+			}
+
+			return ErrnoSuccess
+		}
+
+		b, err := reader(caller, msgptr, msglen)
+		if err != nil {
+			return writeErr(err)
+		}
+
+		reqID, msg, labels, err := toEmissible(b)
+		if err != nil {
+			return writeErr(err)
+		}
+
+		req, err := requestStore.get(reqID)
+		if err != nil {
+			logErr(fmt.Errorf("failed to get request from store: %s", err))
+			return writeErr(err)
+		}
+
+		if err := e.WithMapLabels(labels).Emit(req.ctx(), msg); err != nil {
+			return writeErr(err)
+		}
+
+		return ErrnoSuccess
+	}
+}
+
+// createLogFn injects dependencies and builds the log function exposed to the
+// WASM guest.
+func createLogFn(logger logger.Logger) func(caller *wasmtime.Caller, ptr int32, ptrlen int32) {
+	return func(caller *wasmtime.Caller, ptr int32, ptrlen int32) {
+		b, innerErr := wasmRead(caller, ptr, ptrlen)
+		if innerErr != nil {
+			logger.Errorf("error calling log: %s", innerErr)
+			return
+		}
+
+		var raw map[string]interface{}
+		innerErr = json.Unmarshal(b, &raw)
+		if innerErr != nil {
+			return
+		}
+
+		level := raw["level"]
+		delete(raw, "level")
+
+		msg := raw["msg"].(string)
+		delete(raw, "msg")
+		delete(raw, "ts")
+
+		var args []interface{}
+		for k, v := range raw {
+			args = append(args, k, v)
+		}
+
+		switch level {
+		case "debug":
+			logger.Debugw(msg, args...)
+		case "info":
+			logger.Infow(msg, args...)
+		case "warn":
+			logger.Warnw(msg, args...)
+		case "error":
+			logger.Errorw(msg, args...)
+		case "panic":
+			logger.Panicw(msg, args...)
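+		// note: with typical zap-backed loggers, Panicw panics and Fatalw exits
+		// the process after logging, so guest log levels are honored verbatim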
+ case "fatal": + logger.Fatalw(msg, args...) + default: + logger.Infow(msg, args...) + } + } +} + +type unimplementedMessageEmitter struct{} + +func (u *unimplementedMessageEmitter) Emit(context.Context, string) error { + return errors.New("unimplemented") +} + +func (u *unimplementedMessageEmitter) WithMapLabels(map[string]string) custmsg.MessageEmitter { + return u +} + +func (u *unimplementedMessageEmitter) With(kvs ...string) custmsg.MessageEmitter { + return u +} + +func (u *unimplementedMessageEmitter) Labels() map[string]string { + return nil +} + +func toEmissible(b []byte) (string, string, map[string]string, error) { + msg := &wasmpb.EmitMessageRequest{} + if err := proto.Unmarshal(b, msg); err != nil { + return "", "", nil, err + } + + validated, err := toValidatedLabels(msg) + if err != nil { + return "", "", nil, err + } + + return msg.RequestId, msg.Message, validated, nil +} + +func toValidatedLabels(msg *wasmpb.EmitMessageRequest) (map[string]string, error) { + vl, err := values.FromMapValueProto(msg.Labels) + if err != nil { + return nil, err + } + + // Handle the case of no labels before unwrapping. + if vl == nil { + vl = values.EmptyMap() + } + + var labels map[string]string + if err := vl.UnwrapTo(&labels); err != nil { + return nil, err + } + + return labels, nil +} + +// unsafeWriterFunc defines behavior for writing directly to wasm memory. A source slice of bytes +// is written to the location defined by the ptr. +type unsafeWriterFunc func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 + +// unsafeFixedLengthWriterFunc defines behavior for writing a uint32 value to wasm memory at the location defined +// by the ptr. +type unsafeFixedLengthWriterFunc func(c *wasmtime.Caller, ptr int32, val uint32) int64 + +// unsafeReaderFunc abstractly defines the behavior of reading from WASM memory. Returns a copy of +// the memory at the given pointer and size. +type unsafeReaderFunc func(c *wasmtime.Caller, ptr, len int32) ([]byte, error) + +// wasmMemoryAccessor is the default implementation for unsafely accessing the memory of the WASM module. +func wasmMemoryAccessor(caller *wasmtime.Caller) []byte { + return caller.GetExport("memory").Memory().UnsafeData(caller) +} + +// wasmRead returns a copy of the wasm module memory at the given pointer and size. +func wasmRead(caller *wasmtime.Caller, ptr int32, size int32) ([]byte, error) { + return read(wasmMemoryAccessor(caller), ptr, size) +} + +// Read acts on a byte slice that should represent an unsafely accessed slice of memory. It returns +// a copy of the memory at the given pointer and size. +func read(memory []byte, ptr int32, size int32) ([]byte, error) { + if size < 0 || ptr < 0 { + return nil, fmt.Errorf("invalid memory access: ptr: %d, size: %d", ptr, size) + } + + if ptr+size > int32(len(memory)) { + return nil, errors.New("out of bounds memory access") + } + + cd := make([]byte, size) + copy(cd, memory[ptr:ptr+size]) + return cd, nil +} + +// wasmWrite copies the given src byte slice into the wasm module memory at the given pointer and size. +func wasmWrite(caller *wasmtime.Caller, src []byte, ptr int32, size int32) int64 { + return write(wasmMemoryAccessor(caller), src, ptr, size) +} + +// wasmWriteUInt32 binary encodes and writes a uint32 to the wasm module memory at the given pointer. 
+func wasmWriteUInt32(caller *wasmtime.Caller, ptr int32, val uint32) int64 { + return writeUInt32(wasmMemoryAccessor(caller), ptr, val) +} + +// writeUInt32 binary encodes and writes a uint32 to the memory at the given pointer. +func writeUInt32(memory []byte, ptr int32, val uint32) int64 { + uint32Size := int32(4) + buffer := make([]byte, uint32Size) + binary.LittleEndian.PutUint32(buffer, val) + return write(memory, buffer, ptr, uint32Size) +} + +// write copies the given src byte slice into the memory at the given pointer and size. +func write(memory, src []byte, ptr, size int32) int64 { + if size < 0 || ptr < 0 { + return -1 + } + + if int32(len(memory)) < ptr+size { + return -1 + } + buffer := memory[ptr : ptr+size] + dataLen := int64(len(src)) + copy(buffer, src) + return dataLen +} diff --git a/pkg/workflows/wasm/host/module_test.go b/pkg/workflows/wasm/host/module_test.go new file mode 100644 index 000000000..a19c43fa2 --- /dev/null +++ b/pkg/workflows/wasm/host/module_test.go @@ -0,0 +1,671 @@ +package host + +import ( + "context" + "encoding/binary" + "sync" + "testing" + + "github.com/bytecodealliance/wasmtime-go/v23" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/chainlink-common/pkg/custmsg" + "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" + "github.com/smartcontractkit/chainlink-common/pkg/values/pb" + wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" +) + +type mockMessageEmitter struct { + e func(context.Context, string, map[string]string) error + labels map[string]string +} + +func (m *mockMessageEmitter) Emit(ctx context.Context, msg string) error { + return m.e(ctx, msg, m.labels) +} + +func (m *mockMessageEmitter) WithMapLabels(labels map[string]string) custmsg.MessageEmitter { + m.labels = labels + return m +} + +func (m *mockMessageEmitter) With(keyValues ...string) custmsg.MessageEmitter { + // do nothing + return m +} + +func (m *mockMessageEmitter) Labels() map[string]string { + return m.labels +} + +func newMockMessageEmitter(e func(context.Context, string, map[string]string) error) custmsg.MessageEmitter { + return &mockMessageEmitter{e: e} +} + +// Test_createEmitFn tests that the emit function used by the module is created correctly. Memory +// access functions are injected as mocks. 
+func Test_createEmitFn(t *testing.T) { + t.Run("success", func(t *testing.T) { + ctxKey := "key" + ctxValue := "test-value" + ctx := tests.Context(t) + ctx = context.WithValue(ctx, ctxKey, "test-value") + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + reqId := "random-id" + err := store.add( + reqId, + &RequestData{ctx: func() context.Context { return ctx }}) + require.NoError(t, err) + emitFn := createEmitFn( + logger.Test(t), + store, + newMockMessageEmitter(func(ctx context.Context, _ string, _ map[string]string) error { + v := ctx.Value(ctxKey) + assert.Equal(t, ctxValue, v) + return nil + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + RequestId: reqId, + Message: "hello, world", + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "foo": { + Value: &pb.Value_StringValue{ + StringValue: "bar", + }, + }, + }, + }, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("success without labels", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + emitFn := createEmitFn( + logger.Test(t), + store, + newMockMessageEmitter(func(_ context.Context, _ string, _ map[string]string) error { + return nil + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{}) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("successfully write error to memory on failure to read", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + store, + nil, + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return nil, assert.AnError + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, int32(len(respBytes)), 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode, "code mismatch") + }) + + t.Run("failure to emit writes error to memory", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + reqId := "random-id" + store.add(reqId, &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + }) + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + 
logger.Test(t), + store, + newMockMessageEmitter(func(_ context.Context, _ string, _ map[string]string) error { + return assert.AnError + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + RequestId: reqId, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("bad read failure to unmarshal protos", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + badData := []byte("not proto bufs") + msg := &wasmpb.EmitMessageRequest{} + marshallErr := proto.Unmarshal(badData, msg) + assert.Error(t, marshallErr) + + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: marshallErr.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + store, + nil, + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return badData, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) +} + +func TestCreateFetchFn(t *testing.T) { + const testID = "test-id" + t.Run("OK-success", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + MaxFetchRequests: 5, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_read_from_store", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return nil, assert.AnError + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := &wasmpb.FetchResponse{} + err := proto.Unmarshal(src, 
resp) + require.NoError(t, err) + assert.Equal(t, assert.AnError.Error(), resp.ErrorMessage) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_unmarshal_request", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return []byte("bad-request-payload"), nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := &wasmpb.FetchResponse{} + err := proto.Unmarshal(src, resp) + require.NoError(t, err) + expectedErr := "cannot parse invalid wire-format data" + assert.Contains(t, resp.ErrorMessage, expectedErr) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_find_id_in_store", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := &wasmpb.FetchResponse{} + err := proto.Unmarshal(src, resp) + require.NoError(t, err) + expectedErr := "could not find request data for id test-id" + assert.Equal(t, expectedErr, resp.ErrorMessage) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_returns_an_error", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + // the error is handled and written to the buffer + resp := &wasmpb.FetchResponse{} + err := proto.Unmarshal(src, resp) + require.NoError(t, err) + expectedErr := 
assert.AnError.Error() + assert.Equal(t, expectedErr, resp.ErrorMessage) + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return nil, assert.AnError + }, + MaxFetchRequests: 1, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("NOK-fetch_fails_to_write_response", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return -1 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoFault, gotCode) + }) + + t.Run("NOK-fetch_fails_to_write_response_size", func(t *testing.T) { + store := &store{ + m: make(map[string]*RequestData), + mu: sync.RWMutex{}, + } + + // we add the request data to the store so that the fetch function can find it + store.m[testID] = &RequestData{ + ctx: func() context.Context { return tests.Context(t) }, + } + + fetchFn := createFetchFn( + logger.Test(t), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: testID, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return -1 + }), + &ModuleConfig{ + Logger: logger.Test(t), + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{}, nil + }, + }, + store, + ) + + gotCode := fetchFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoFault, gotCode) + }) +} + +func Test_read(t *testing.T) { + t.Run("successfully read from slice", func(t *testing.T) { + memory := []byte("hello, world") + got, err := read(memory, 0, int32(len(memory))) + assert.NoError(t, err) + assert.Equal(t, []byte("hello, world"), got) + }) + + t.Run("fail to read because out of bounds request", func(t *testing.T) { + memory := []byte("hello, world") + _, err := read(memory, 0, int32(len(memory)+1)) + assert.Error(t, err) + }) + + t.Run("fails to read because of invalid pointer or length", func(t *testing.T) { + memory := []byte("hello, world") + _, err := read(memory, 0, -1) + assert.Error(t, err) + + _, err = read(memory, -1, 1) + assert.Error(t, err) + }) + + t.Run("validate that memory is read only once copied", func(t *testing.T) { + memory := []byte("hello, world") + copied, err := read(memory, 0, int32(len(memory))) + assert.NoError(t, err) + + // mutate copy + copied[0] = 
'H' + assert.Equal(t, []byte("Hello, world"), copied) + + // original memory is unchanged + assert.Equal(t, []byte("hello, world"), memory) + }) +} + +func Test_write(t *testing.T) { + t.Run("successfully write to slice", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, 12) + n := write(memory, giveSrc, 0, int32(len(giveSrc))) + assert.Equal(t, n, int64(len(giveSrc))) + assert.Equal(t, []byte("hello, world"), memory[:len(giveSrc)]) + }) + + t.Run("cannot write to slice because memory too small", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, len(giveSrc)-1) + n := write(memory, giveSrc, 0, int32(len(giveSrc))) + assert.Equal(t, n, int64(-1)) + }) + + t.Run("fails to write to invalid access", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, len(giveSrc)) + n := write(memory, giveSrc, 0, -1) + assert.Equal(t, n, int64(-1)) + + n = write(memory, giveSrc, -1, 1) + assert.Equal(t, n, int64(-1)) + }) +} + +// Test_writeUInt32 tests that a uint32 is written to memory correctly. +func Test_writeUInt32(t *testing.T) { + t.Run("success", func(t *testing.T) { + memory := make([]byte, 4) + n := writeUInt32(memory, 0, 42) + wantBuf := make([]byte, 4) + binary.LittleEndian.PutUint32(wantBuf, 42) + assert.Equal(t, n, int64(4)) + assert.Equal(t, wantBuf, memory) + }) +} + +func Test_toValidatedLabels(t *testing.T) { + t.Run("success", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{ + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_StringValue{ + StringValue: "value", + }, + }, + }, + }, + } + wantLabels := map[string]string{ + "test": "value", + } + gotLabels, err := toValidatedLabels(msg) + assert.NoError(t, err) + assert.Equal(t, wantLabels, gotLabels) + }) + + t.Run("success with empty labels", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{} + wantLabels := map[string]string{} + gotLabels, err := toValidatedLabels(msg) + assert.NoError(t, err) + assert.Equal(t, wantLabels, gotLabels) + }) + + t.Run("fails with non string", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{ + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_Int64Value{ + Int64Value: *proto.Int64(42), + }, + }, + }, + }, + } + _, err := toValidatedLabels(msg) + assert.Error(t, err) + }) +} + +func Test_toEmissible(t *testing.T) { + t.Run("success", func(t *testing.T) { + reqID := "random-id" + msg := &wasmpb.EmitMessageRequest{ + RequestId: reqID, + Message: "hello, world", + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_StringValue{ + StringValue: "value", + }, + }, + }, + }, + } + + b, err := proto.Marshal(msg) + assert.NoError(t, err) + + rid, gotMsg, gotLabels, err := toEmissible(b) + assert.NoError(t, err) + assert.Equal(t, "hello, world", gotMsg) + assert.Equal(t, map[string]string{"test": "value"}, gotLabels) + assert.Equal(t, reqID, rid) + }) + + t.Run("fails with bad message", func(t *testing.T) { + _, _, _, err := toEmissible([]byte("not proto bufs")) + assert.Error(t, err) + }) +} diff --git a/pkg/workflows/wasm/host/test/emit/cmd/main.go b/pkg/workflows/wasm/host/test/emit/cmd/main.go new file mode 100644 index 000000000..712b56e59 --- /dev/null +++ b/pkg/workflows/wasm/host/test/emit/cmd/main.go @@ -0,0 +1,40 @@ +//go:build wasip1 + +package main + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" + + 
"github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory( + sdk.NewWorkflowParams{}, + ) + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + sdk.Compute1[basictrigger.TriggerOutputs, bool]( + workflow, + "transform", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(rsdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { + if err := rsdk.Emitter(). + With("test-string-field-key", "this is a test field content"). + Emit("testing emit"); err != nil { + return false, err + } + return true, nil + }) + + return workflow +} + +func main() { + runner := wasm.NewRunner() + workflow := BuildWorkflow(runner.Config()) + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/fetchlimit/cmd/main.go b/pkg/workflows/wasm/host/test/fetchlimit/cmd/main.go new file mode 100644 index 000000000..0e7fd2998 --- /dev/null +++ b/pkg/workflows/wasm/host/test/fetchlimit/cmd/main.go @@ -0,0 +1,50 @@ +//go:build wasip1 + +package main + +import ( + "net/http" + + "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory( + sdk.NewWorkflowParams{ + Name: "tester", + Owner: "ryan", + }, + ) + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + sdk.Compute1[basictrigger.TriggerOutputs, bool]( + workflow, + "transform", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(rsdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { + + for i := 0; i < 6; i++ { + _, err := rsdk.Fetch(sdk.FetchRequest{ + Method: http.MethodGet, + URL: "https://min-api.cryptocompare.com/data/pricemultifull?fsyms=ETH&tsyms=BTC", + }) + if err != nil { + return false, err + } + } + + return true, nil + }) + + return workflow +} +func main() { + runner := wasm.NewRunner() + workflow := BuildWorkflow(runner.Config()) + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/oom/cmd/main.go b/pkg/workflows/wasm/host/test/oom/cmd/main.go index 21ecb6965..a14775cb5 100644 --- a/pkg/workflows/wasm/host/test/oom/cmd/main.go +++ b/pkg/workflows/wasm/host/test/oom/cmd/main.go @@ -6,5 +6,5 @@ import "math" func main() { // allocate more bytes than the binary should be able to access, 64 megs - _ = make([]byte, int64(128*math.Pow(10, 6))) + _ = make([]byte, int64(512*math.Pow(10, 6))) } diff --git a/pkg/workflows/wasm/host/test/runnerapi/cmd/main.go b/pkg/workflows/wasm/host/test/runnerapi/cmd/main.go new file mode 100644 index 000000000..2f0461bb4 --- /dev/null +++ b/pkg/workflows/wasm/host/test/runnerapi/cmd/main.go @@ -0,0 +1,28 @@ +package main + +import ( + "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" +) + +func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { + workflow := 
sdk.NewWorkflowSpecFactory( + sdk.NewWorkflowParams{ + Name: "tester", + Owner: "ryan", + }, + ) + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + _ = triggerCfg.New(workflow) + + return workflow +} + +func main() { + runner := wasm.NewRunner() + workflow := BuildWorkflow(runner.Config()) + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/runnerapi/cmd/main_test.go b/pkg/workflows/wasm/host/test/runnerapi/cmd/main_test.go new file mode 100644 index 000000000..ab6a1ac09 --- /dev/null +++ b/pkg/workflows/wasm/host/test/runnerapi/cmd/main_test.go @@ -0,0 +1,11 @@ +package main + +import ( + "testing" +) + +func TestSingleFileCanTestBuildWorkflow(t *testing.T) { + // No assertions, we're just checking that we don't get + // `BuildWorkflow` not found. + _ = BuildWorkflow([]byte("")) +} diff --git a/pkg/workflows/wasm/host/wasip1.go b/pkg/workflows/wasm/host/wasip1.go index 28950a16d..08235e23e 100644 --- a/pkg/workflows/wasm/host/wasip1.go +++ b/pkg/workflows/wasm/host/wasip1.go @@ -81,7 +81,7 @@ func clockTimeGet(caller *wasmtime.Caller, id int32, precision int64, resultTime uint64Size := int32(8) trg := make([]byte, uint64Size) binary.LittleEndian.PutUint64(trg, uint64(val)) - copyBuffer(caller, trg, resultTimestamp, uint64Size) + wasmWrite(caller, trg, resultTimestamp, uint64Size) return ErrnoSuccess } @@ -105,7 +105,7 @@ func pollOneoff(caller *wasmtime.Caller, subscriptionptr int32, eventsptr int32, return ErrnoInval } - subs, err := safeMem(caller, subscriptionptr, nsubscriptions*subscriptionLen) + subs, err := wasmRead(caller, subscriptionptr, nsubscriptions*subscriptionLen) if err != nil { return ErrnoFault } @@ -176,13 +176,13 @@ func pollOneoff(caller *wasmtime.Caller, subscriptionptr int32, eventsptr int32, binary.LittleEndian.PutUint32(rne, uint32(nsubscriptions)) // Write the number of events to `resultNevents` - size := copyBuffer(caller, rne, resultNevents, uint32Size) + size := wasmWrite(caller, rne, resultNevents, uint32Size) if size == -1 { return ErrnoFault } // Write the events to `events` - size = copyBuffer(caller, events, eventsptr, nsubscriptions*eventsLen) + size = wasmWrite(caller, events, eventsptr, nsubscriptions*eventsLen) if size == -1 { return ErrnoFault } @@ -221,7 +221,7 @@ func createRandomGet(cfg *ModuleConfig) func(caller *wasmtime.Caller, buf, bufLe } // Copy the random bytes into the wasm module memory - if n := copyBuffer(caller, randOutput, buf, bufLen); n != int64(len(randOutput)) { + if n := wasmWrite(caller, randOutput, buf, bufLen); n != int64(len(randOutput)) { return ErrnoFault } diff --git a/pkg/workflows/wasm/host/wasm.go b/pkg/workflows/wasm/host/wasm.go index d5d824460..2e0dea4d3 100644 --- a/pkg/workflows/wasm/host/wasm.go +++ b/pkg/workflows/wasm/host/wasm.go @@ -1,6 +1,7 @@ package host import ( + "context" "errors" "fmt" @@ -12,7 +13,7 @@ import ( wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) -func GetWorkflowSpec(modCfg *ModuleConfig, binary []byte, config []byte) (*sdk.WorkflowSpec, error) { +func GetWorkflowSpec(ctx context.Context, modCfg *ModuleConfig, binary []byte, config []byte) (*sdk.WorkflowSpec, error) { m, err := NewModule(modCfg, binary, WithDeterminism()) if err != nil { return nil, fmt.Errorf("could not instantiate module: %w", err) @@ -28,7 +29,7 @@ func GetWorkflowSpec(modCfg *ModuleConfig, binary []byte, config []byte) (*sdk.W SpecRequest: &emptypb.Empty{}, }, } - resp, err := m.Run(req) + resp, err := m.Run(ctx, req) if err != nil { return nil, err 
} diff --git a/pkg/workflows/wasm/host/wasm_test.go b/pkg/workflows/wasm/host/wasm_test.go index 4692ef96d..cccaf9443 100644 --- a/pkg/workflows/wasm/host/wasm_test.go +++ b/pkg/workflows/wasm/host/wasm_test.go @@ -2,6 +2,7 @@ package host import ( "bytes" + "context" _ "embed" "fmt" "io" @@ -21,37 +22,42 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities/pb" capabilitiespb "github.com/smartcontractkit/chainlink-common/pkg/capabilities/pb" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" valuespb "github.com/smartcontractkit/chainlink-common/pkg/values/pb" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) const ( - successBinaryLocation = "test/success/cmd/testmodule.wasm" - successBinaryCmd = "test/success/cmd" - failureBinaryLocation = "test/fail/cmd/testmodule.wasm" - failureBinaryCmd = "test/fail/cmd" - oomBinaryLocation = "test/oom/cmd/testmodule.wasm" - oomBinaryCmd = "test/oom/cmd" - sleepBinaryLocation = "test/sleep/cmd/testmodule.wasm" - sleepBinaryCmd = "test/sleep/cmd" - filesBinaryLocation = "test/files/cmd/testmodule.wasm" - filesBinaryCmd = "test/files/cmd" - dirsBinaryLocation = "test/dirs/cmd/testmodule.wasm" - dirsBinaryCmd = "test/dirs/cmd" - httpBinaryLocation = "test/http/cmd/testmodule.wasm" - httpBinaryCmd = "test/http/cmd" - envBinaryLocation = "test/env/cmd/testmodule.wasm" - envBinaryCmd = "test/env/cmd" - logBinaryLocation = "test/log/cmd/testmodule.wasm" - logBinaryCmd = "test/log/cmd" - fetchBinaryLocation = "test/fetch/cmd/testmodule.wasm" - fetchBinaryCmd = "test/fetch/cmd" - randBinaryLocation = "test/rand/cmd/testmodule.wasm" - randBinaryCmd = "test/rand/cmd" + successBinaryLocation = "test/success/cmd/testmodule.wasm" + successBinaryCmd = "test/success/cmd" + failureBinaryLocation = "test/fail/cmd/testmodule.wasm" + failureBinaryCmd = "test/fail/cmd" + oomBinaryLocation = "test/oom/cmd/testmodule.wasm" + oomBinaryCmd = "test/oom/cmd" + sleepBinaryLocation = "test/sleep/cmd/testmodule.wasm" + sleepBinaryCmd = "test/sleep/cmd" + filesBinaryLocation = "test/files/cmd/testmodule.wasm" + filesBinaryCmd = "test/files/cmd" + dirsBinaryLocation = "test/dirs/cmd/testmodule.wasm" + dirsBinaryCmd = "test/dirs/cmd" + httpBinaryLocation = "test/http/cmd/testmodule.wasm" + httpBinaryCmd = "test/http/cmd" + envBinaryLocation = "test/env/cmd/testmodule.wasm" + envBinaryCmd = "test/env/cmd" + logBinaryLocation = "test/log/cmd/testmodule.wasm" + logBinaryCmd = "test/log/cmd" + fetchBinaryLocation = "test/fetch/cmd/testmodule.wasm" + fetchBinaryCmd = "test/fetch/cmd" + fetchlimitBinaryLocation = "test/fetchlimit/cmd/testmodule.wasm" + fetchlimitBinaryCmd = "test/fetchlimit/cmd" + randBinaryLocation = "test/rand/cmd/testmodule.wasm" + randBinaryCmd = "test/rand/cmd" + emitBinaryLocation = "test/emit/cmd/testmodule.wasm" + emitBinaryCmd = "test/emit/cmd" ) -func createTestBinary(outputPath, path string, compress bool, t *testing.T) []byte { +func createTestBinary(outputPath, path string, uncompressed bool, t *testing.T) []byte { cmd := exec.Command("go", "build", "-o", path, fmt.Sprintf("github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/host/%s", outputPath)) // #nosec cmd.Env = append(os.Environ(), "GOOS=wasip1", "GOARCH=wasm") @@ -61,7 +67,7 @@ func createTestBinary(outputPath, path string, compress bool, t *testing.T) []by binary, err := os.ReadFile(path) require.NoError(t, 
err) - if !compress { + if uncompressed { return binary } @@ -77,11 +83,15 @@ func createTestBinary(outputPath, path string, compress bool, t *testing.T) []by } func Test_GetWorkflowSpec(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, true, t) spec, err := GetWorkflowSpec( + ctx, &ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, }, binary, []byte(""), @@ -93,12 +103,15 @@ func Test_GetWorkflowSpec(t *testing.T) { } func Test_GetWorkflowSpec_UncompressedBinary(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, false, t) spec, err := GetWorkflowSpec( + ctx, &ModuleConfig{ Logger: logger.Test(t), - IsUncompressed: true, + IsUncompressed: false, }, binary, []byte(""), @@ -110,11 +123,14 @@ func Test_GetWorkflowSpec_UncompressedBinary(t *testing.T) { } func Test_GetWorkflowSpec_BinaryErrors(t *testing.T) { + ctx := tests.Context(t) failBinary := createTestBinary(failureBinaryCmd, failureBinaryLocation, true, t) _, err := GetWorkflowSpec( + ctx, &ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, }, failBinary, []byte(""), @@ -124,13 +140,17 @@ func Test_GetWorkflowSpec_BinaryErrors(t *testing.T) { } func Test_GetWorkflowSpec_Timeout(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, true, t) d := time.Duration(0) _, err := GetWorkflowSpec( + ctx, &ModuleConfig{ - Timeout: &d, - Logger: logger.Test(t), + Timeout: &d, + Logger: logger.Test(t), + IsUncompressed: true, }, binary, // use the success binary with a zero timeout []byte(""), @@ -140,12 +160,15 @@ func Test_GetWorkflowSpec_Timeout(t *testing.T) { } func Test_Compute_Logs(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(logBinaryCmd, logBinaryLocation, true, t) logger, logs := logger.TestObserved(t, zapcore.InfoLevel) m, err := NewModule(&ModuleConfig{ - Logger: logger, - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger, + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return nil, nil }, }, binary) @@ -167,7 +190,7 @@ func Test_Compute_Logs(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Nil(t, err) require.Len(t, logs.AllUntimed(), 1) @@ -187,10 +210,148 @@ func Test_Compute_Logs(t *testing.T) { } } +func Test_Compute_Emit(t *testing.T) { + t.Parallel() + binary := createTestBinary(emitBinaryCmd, emitBinaryLocation, true, t) + + lggr := logger.Test(t) + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + WorkflowId: "workflow-id", + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + WorkflowExecutionId: "workflow-execution-id", + }, + }, + }, + }, + } + + fetchFunc := func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return nil, nil + } + + t.Run("successfully call emit with metadata in labels", func(t *testing.T) { + ctx := tests.Context(t) + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + IsUncompressed: true, + Labeler: 
newMockMessageEmitter(func(gotCtx context.Context, msg string, kvs map[string]string) error { + t.Helper() + + assert.Equal(t, ctx, gotCtx) + assert.Equal(t, "testing emit", msg) + assert.Equal(t, "this is a test field content", kvs["test-string-field-key"]) + assert.Equal(t, "workflow-id", kvs["workflow_id"]) + assert.Equal(t, "workflow-name", kvs["workflow_name"]) + assert.Equal(t, "workflow-owner", kvs["workflow_owner_address"]) + assert.Equal(t, "workflow-execution-id", kvs["workflow_execution_id"]) + return nil + }), + }, binary) + require.NoError(t, err) + + m.Start() + + _, err = m.Run(ctx, req) + assert.Nil(t, err) + }) + + t.Run("failure on emit writes to error chain and logs", func(t *testing.T) { + lggr, logs := logger.TestObserved(t, zapcore.InfoLevel) + + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + IsUncompressed: true, + Labeler: newMockMessageEmitter(func(_ context.Context, msg string, kvs map[string]string) error { + t.Helper() + + assert.Equal(t, "testing emit", msg) + assert.Equal(t, "this is a test field content", kvs["test-string-field-key"]) + assert.Equal(t, "workflow-id", kvs["workflow_id"]) + assert.Equal(t, "workflow-name", kvs["workflow_name"]) + assert.Equal(t, "workflow-owner", kvs["workflow_owner_address"]) + assert.Equal(t, "workflow-execution-id", kvs["workflow_execution_id"]) + + return assert.AnError + }), + }, binary) + require.NoError(t, err) + + m.Start() + + ctx := tests.Context(t) + _, err = m.Run(ctx, req) + assert.Error(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) + + require.Len(t, logs.AllUntimed(), 1) + + expectedEntries := []Entry{ + { + Log: zapcore.Entry{Level: zapcore.ErrorLevel, Message: fmt.Sprintf("error emitting message: %s", assert.AnError)}, + }, + } + for i := range expectedEntries { + assert.Equal(t, expectedEntries[i].Log.Level, logs.AllUntimed()[i].Entry.Level) + assert.Equal(t, expectedEntries[i].Log.Message, logs.AllUntimed()[i].Entry.Message) + } + }) + + t.Run("failure on emit due to missing workflow identifying metadata", func(t *testing.T) { + lggr := logger.Test(t) + + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + IsUncompressed: true, + Labeler: newMockMessageEmitter(func(_ context.Context, msg string, labels map[string]string) error { + return nil + }), // never called + }, binary) + require.NoError(t, err) + + m.Start() + + req = &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + + ctx := tests.Context(t) + _, err = m.Run(ctx, req) + assert.Error(t, err) + assert.ErrorContains(t, err, "failed to create emission") + }) +} + func Test_Compute_Fetch(t *testing.T) { + t.Parallel() binary := createTestBinary(fetchBinaryCmd, fetchBinaryLocation, true, t) t.Run("OK_default_runtime_cfg", func(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) expected := sdk.FetchResponse{ ExecutionError: false, Body: []byte("valid-response"), @@ -199,8 +360,9 @@ func Test_Compute_Fetch(t *testing.T) { } m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return 
&wasmpb.FetchResponse{ ExecutionError: expected.ExecutionError, Body: expected.Body, @@ -226,7 +388,7 @@ func Test_Compute_Fetch(t *testing.T) { }, }, } - response, err := m.Run(req) + response, err := m.Run(ctx, req) assert.Nil(t, err) actual := sdk.FetchResponse{} @@ -239,6 +401,8 @@ func Test_Compute_Fetch(t *testing.T) { }) t.Run("OK_custom_runtime_cfg", func(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) expected := sdk.FetchResponse{ ExecutionError: false, Body: []byte("valid-response"), @@ -247,8 +411,9 @@ func Test_Compute_Fetch(t *testing.T) { } m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return &wasmpb.FetchResponse{ ExecutionError: expected.ExecutionError, Body: expected.Body, @@ -277,7 +442,7 @@ func Test_Compute_Fetch(t *testing.T) { }, }, } - response, err := m.Run(req) + response, err := m.Run(ctx, req) assert.Nil(t, err) actual := sdk.FetchResponse{} @@ -290,11 +455,14 @@ func Test_Compute_Fetch(t *testing.T) { }) t.Run("NOK_fetch_error_returned", func(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) logger, logs := logger.TestObserved(t, zapcore.InfoLevel) m, err := NewModule(&ModuleConfig{ - Logger: logger, - Fetch: func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + Logger: logger, + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { return nil, assert.AnError }, }, binary) @@ -316,8 +484,9 @@ func Test_Compute_Fetch(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.NotNil(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) require.Len(t, logs.AllUntimed(), 1) expectedEntries := []Entry{ @@ -330,28 +499,322 @@ func Test_Compute_Fetch(t *testing.T) { assert.Equal(t, expectedEntries[i].Log.Message, logs.AllUntimed()[i].Entry.Message) } }) + + t.Run("OK_context_propagation", func(t *testing.T) { + t.Parallel() + type testkey string + var key testkey = "test-key" + var expectedValue string = "test-value" + + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte(expectedValue), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: []byte(ctx.Value(key).(string)), + StatusCode: uint32(expected.StatusCode), + }, nil + }, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + RuntimeConfig: &wasmpb.RuntimeConfig{ + MaxFetchResponseSizeBytes: 2 * 1024, + }, + }, + }, + } + + ctx := context.WithValue(tests.Context(t), key, expectedValue) + response, err := m.Run(ctx, req) + assert.Nil(t, err) + + actual := sdk.FetchResponse{} + r, err := pb.CapabilityResponseFromProto(response.GetComputeResponse().GetResponse()) + require.NoError(t, err) + err = r.Value.Underlying["Value"].UnwrapTo(&actual) + require.NoError(t, 
err)
+
+		assert.Equal(t, expected, actual)
+	})
+
+	t.Run("OK_context_cancelation", func(t *testing.T) {
+		t.Parallel()
+		m, err := NewModule(&ModuleConfig{
+			Logger:         logger.Test(t),
+			IsUncompressed: true,
+			Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) {
+				select {
+				case <-ctx.Done():
+					return nil, assert.AnError
+				default:
+					return &wasmpb.FetchResponse{}, nil
+				}
+			},
+		}, binary)
+		require.NoError(t, err)
+
+		m.Start()
+
+		req := &wasmpb.Request{
+			Id: uuid.New().String(),
+			Message: &wasmpb.Request_ComputeRequest{
+				ComputeRequest: &wasmpb.ComputeRequest{
+					Request: &capabilitiespb.CapabilityRequest{
+						Inputs: &valuespb.Map{},
+						Config: &valuespb.Map{},
+						Metadata: &capabilitiespb.RequestMetadata{
+							ReferenceId: "transform",
+						},
+					},
+					RuntimeConfig: &wasmpb.RuntimeConfig{
+						MaxFetchResponseSizeBytes: 2 * 1024,
+					},
+				},
+			},
+		}
+
+		ctx, cancel := context.WithCancel(tests.Context(t))
+		cancel()
+		_, err = m.Run(ctx, req)
+		require.NotNil(t, err)
+		assert.ErrorContains(t, err, fmt.Sprintf("error executing runner: error executing custom compute: %s", assert.AnError))
+	})
+
+	t.Run("NOK_exceed_amount_of_defined_max_fetch_calls", func(t *testing.T) {
+		t.Parallel()
+		binary := createTestBinary(fetchlimitBinaryCmd, fetchlimitBinaryLocation, true, t)
+		ctx := tests.Context(t)
+		expected := sdk.FetchResponse{
+			ExecutionError: false,
+			Body:           []byte("valid-response"),
+			StatusCode:     http.StatusOK,
+			Headers:        map[string]any{},
+		}
+
+		m, err := NewModule(&ModuleConfig{
+			Logger:         logger.Test(t),
+			IsUncompressed: true,
+			Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) {
+				return &wasmpb.FetchResponse{
+					ExecutionError: expected.ExecutionError,
+					Body:           expected.Body,
+					StatusCode:     uint32(expected.StatusCode),
+				}, nil
+			},
+			MaxFetchRequests: 1,
+		}, binary)
+		require.NoError(t, err)
+
+		m.Start()
+
+		req := &wasmpb.Request{
+			Id: uuid.New().String(),
+			Message: &wasmpb.Request_ComputeRequest{
+				ComputeRequest: &wasmpb.ComputeRequest{
+					Request: &capabilitiespb.CapabilityRequest{
+						Inputs: &valuespb.Map{},
+						Config: &valuespb.Map{},
+						Metadata: &capabilitiespb.RequestMetadata{
+							ReferenceId: "transform",
+						},
+					},
+				},
+			},
+		}
+		_, err = m.Run(ctx, req)
+		require.NotNil(t, err)
+	})
+
+	t.Run("NOK_exceed_amount_of_default_max_fetch_calls", func(t *testing.T) {
+		t.Parallel()
+		binary := createTestBinary(fetchlimitBinaryCmd, fetchlimitBinaryLocation, true, t)
+		ctx := tests.Context(t)
+		expected := sdk.FetchResponse{
+			ExecutionError: false,
+			Body:           []byte("valid-response"),
+			StatusCode:     http.StatusOK,
+			Headers:        map[string]any{},
+		}
+
+		m, err := NewModule(&ModuleConfig{
+			Logger:         logger.Test(t),
+			IsUncompressed: true,
+			Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) {
+				return &wasmpb.FetchResponse{
+					ExecutionError: expected.ExecutionError,
+					Body:           expected.Body,
+					StatusCode:     uint32(expected.StatusCode),
+				}, nil
+			},
+		}, binary)
+		require.NoError(t, err)
+
+		m.Start()
+
+		req := &wasmpb.Request{
+			Id: uuid.New().String(),
+			Message: &wasmpb.Request_ComputeRequest{
+				ComputeRequest: &wasmpb.ComputeRequest{
+					Request: &capabilitiespb.CapabilityRequest{
+						Inputs: &valuespb.Map{},
+						Config: &valuespb.Map{},
+						Metadata: &capabilitiespb.RequestMetadata{
+							ReferenceId: "transform",
+						},
+					},
+				},
+			},
+		}
+		_, err = m.Run(ctx, req)
+		require.NotNil(t, err)
+	})
+
+	t.Run("OK_making_up_to_max_fetch_calls", func(t *testing.T) {
+		t.Parallel()
+		binary :=
createTestBinary(fetchlimitBinaryCmd, fetchlimitBinaryLocation, true, t) + ctx := tests.Context(t) + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte("valid-response"), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: expected.Body, + StatusCode: uint32(expected.StatusCode), + }, nil + }, + MaxFetchRequests: 6, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + _, err = m.Run(ctx, req) + require.Nil(t, err) + }) + + t.Run("OK_multiple_request_reusing_module", func(t *testing.T) { + t.Parallel() + binary := createTestBinary(fetchlimitBinaryCmd, fetchlimitBinaryLocation, true, t) + ctx := tests.Context(t) + expected := sdk.FetchResponse{ + ExecutionError: false, + Body: []byte("valid-response"), + StatusCode: http.StatusOK, + Headers: map[string]any{}, + } + + m, err := NewModule(&ModuleConfig{ + Logger: logger.Test(t), + IsUncompressed: true, + Fetch: func(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return &wasmpb.FetchResponse{ + ExecutionError: expected.ExecutionError, + Body: expected.Body, + StatusCode: uint32(expected.StatusCode), + }, nil + }, + MaxFetchRequests: 6, + }, binary) + require.NoError(t, err) + + m.Start() + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + _, err = m.Run(ctx, req) + require.Nil(t, err) + + // we can reuse the request because after completion it gets deleted from the store + _, err = m.Run(ctx, req) + require.Nil(t, err) + }) + } func TestModule_Errors(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(successBinaryCmd, successBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) - _, err = m.Run(nil) - assert.ErrorContains(t, err, "invariant violation: invalid request to runner") + _, err = m.Run(ctx, nil) + assert.ErrorContains(t, err, "invalid request: can't be nil") req := &wasmpb.Request{ + Id: "", + } + _, err = m.Run(ctx, req) + assert.ErrorContains(t, err, "invalid request: can't be empty") + + req = &wasmpb.Request{ Id: uuid.New().String(), } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "invalid request: message must be SpecRequest or ComputeRequest") req = &wasmpb.Request{ Id: uuid.New().String(), Message: &wasmpb.Request_ComputeRequest{}, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "invalid compute request: nil request") m.Start() @@ -368,14 +831,15 @@ func TestModule_Errors(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, 
req) assert.ErrorContains(t, err, "invalid compute request: could not find compute function for id doesnt-exist") } func TestModule_Sandbox_Memory(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(oomBinaryCmd, oomBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -384,14 +848,16 @@ func TestModule_Sandbox_Memory(t *testing.T) { Id: uuid.New().String(), Message: &wasmpb.Request_SpecRequest{}, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "exit status 2") } func TestModule_Sandbox_SleepIsStubbedOut(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(sleepBinaryCmd, sleepBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -402,7 +868,7 @@ func TestModule_Sandbox_SleepIsStubbedOut(t *testing.T) { } start := time.Now() - _, err = m.Run(req) + _, err = m.Run(ctx, req) end := time.Now() // The binary sleeps for 1 hour, @@ -413,10 +879,11 @@ func TestModule_Sandbox_SleepIsStubbedOut(t *testing.T) { } func TestModule_Sandbox_Timeout(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(sleepBinaryCmd, sleepBinaryLocation, true, t) tmt := 10 * time.Millisecond - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t), Timeout: &tmt}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t), Timeout: &tmt}, binary) require.NoError(t, err) m.Start() @@ -426,15 +893,17 @@ func TestModule_Sandbox_Timeout(t *testing.T) { Message: &wasmpb.Request_SpecRequest{}, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "interrupt") } func TestModule_Sandbox_CantReadFiles(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(filesBinaryCmd, filesBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -453,14 +922,16 @@ func TestModule_Sandbox_CantReadFiles(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "open /tmp/file") } func TestModule_Sandbox_CantCreateDir(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(dirsBinaryCmd, dirsBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -479,14 +950,16 @@ func TestModule_Sandbox_CantCreateDir(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.ErrorContains(t, err, "mkdir") } func TestModule_Sandbox_HTTPRequest(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(httpBinaryCmd, httpBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -505,14 +978,16 @@ func TestModule_Sandbox_HTTPRequest(t *testing.T) { }, }, } - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.NotNil(t, err) } func 
TestModule_Sandbox_ReadEnv(t *testing.T) { + t.Parallel() + ctx := tests.Context(t) binary := createTestBinary(envBinaryCmd, envBinaryLocation, true, t) - m, err := NewModule(&ModuleConfig{Logger: logger.Test(t)}, binary) + m, err := NewModule(&ModuleConfig{IsUncompressed: true, Logger: logger.Test(t)}, binary) require.NoError(t, err) m.Start() @@ -535,11 +1010,12 @@ func TestModule_Sandbox_ReadEnv(t *testing.T) { }, } // This will return an error if FOO == BAR in the WASM binary - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Nil(t, err) } func TestModule_Sandbox_RandomGet(t *testing.T) { + t.Parallel() req := &wasmpb.Request{ Id: uuid.New().String(), Message: &wasmpb.Request_ComputeRequest{ @@ -555,10 +1031,12 @@ func TestModule_Sandbox_RandomGet(t *testing.T) { }, } t.Run("success: deterministic override via module config", func(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(randBinaryCmd, randBinaryLocation, true, t) m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, Determinism: &DeterminismConfig{ Seed: 42, }, @@ -567,21 +1045,23 @@ func TestModule_Sandbox_RandomGet(t *testing.T) { m.Start() - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Nil(t, err) }) t.Run("success: default module config is non deterministic", func(t *testing.T) { + ctx := tests.Context(t) binary := createTestBinary(randBinaryCmd, randBinaryLocation, true, t) m, err := NewModule(&ModuleConfig{ - Logger: logger.Test(t), + Logger: logger.Test(t), + IsUncompressed: true, }, binary) require.NoError(t, err) m.Start() - _, err = m.Run(req) + _, err = m.Run(ctx, req) assert.Error(t, err) assert.ErrorContains(t, err, "expected deterministic output") }) diff --git a/pkg/workflows/wasm/pb/wasm.pb.go b/pkg/workflows/wasm/pb/wasm.pb.go index 95a8839a0..315f7e7a0 100644 --- a/pkg/workflows/wasm/pb/wasm.pb.go +++ b/pkg/workflows/wasm/pb/wasm.pb.go @@ -587,22 +587,96 @@ func (*Response_ComputeResponse) isResponse_Message() {} func (*Response_SpecResponse) isResponse_Message() {} +// NOTE: This message was added because it is needed to be used as part of the request and for metrics. +type FetchRequestMetadata struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + WorkflowId string `protobuf:"bytes,1,opt,name=workflowId,proto3" json:"workflowId,omitempty"` + WorkflowName string `protobuf:"bytes,2,opt,name=workflowName,proto3" json:"workflowName,omitempty"` + WorkflowOwner string `protobuf:"bytes,3,opt,name=workflowOwner,proto3" json:"workflowOwner,omitempty"` + WorkflowExecutionId string `protobuf:"bytes,4,opt,name=workflowExecutionId,proto3" json:"workflowExecutionId,omitempty"` +} + +func (x *FetchRequestMetadata) Reset() { + *x = FetchRequestMetadata{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FetchRequestMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FetchRequestMetadata) ProtoMessage() {} + +func (x *FetchRequestMetadata) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FetchRequestMetadata.ProtoReflect.Descriptor instead. 
+func (*FetchRequestMetadata) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{8} +} + +func (x *FetchRequestMetadata) GetWorkflowId() string { + if x != nil { + return x.WorkflowId + } + return "" +} + +func (x *FetchRequestMetadata) GetWorkflowName() string { + if x != nil { + return x.WorkflowName + } + return "" +} + +func (x *FetchRequestMetadata) GetWorkflowOwner() string { + if x != nil { + return x.WorkflowOwner + } + return "" +} + +func (x *FetchRequestMetadata) GetWorkflowExecutionId() string { + if x != nil { + return x.WorkflowExecutionId + } + return "" +} + type FetchRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` - Method string `protobuf:"bytes,2,opt,name=method,proto3" json:"method,omitempty"` - Headers *pb1.Map `protobuf:"bytes,3,opt,name=headers,proto3" json:"headers,omitempty"` - Body []byte `protobuf:"bytes,4,opt,name=body,proto3" json:"body,omitempty"` - TimeoutMs uint32 `protobuf:"varint,5,opt,name=timeoutMs,proto3" json:"timeoutMs,omitempty"` + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + Method string `protobuf:"bytes,2,opt,name=method,proto3" json:"method,omitempty"` + Headers *pb1.Map `protobuf:"bytes,3,opt,name=headers,proto3" json:"headers,omitempty"` + Body []byte `protobuf:"bytes,4,opt,name=body,proto3" json:"body,omitempty"` + TimeoutMs uint32 `protobuf:"varint,5,opt,name=timeoutMs,proto3" json:"timeoutMs,omitempty"` + Id string `protobuf:"bytes,6,opt,name=id,proto3" json:"id,omitempty"` + Metadata *FetchRequestMetadata `protobuf:"bytes,7,opt,name=metadata,proto3" json:"metadata,omitempty"` } func (x *FetchRequest) Reset() { *x = FetchRequest{} if protoimpl.UnsafeEnabled { - mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[8] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -615,7 +689,7 @@ func (x *FetchRequest) String() string { func (*FetchRequest) ProtoMessage() {} func (x *FetchRequest) ProtoReflect() protoreflect.Message { - mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[8] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -628,7 +702,7 @@ func (x *FetchRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use FetchRequest.ProtoReflect.Descriptor instead. 
func (*FetchRequest) Descriptor() ([]byte, []int) { - return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{8} + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{9} } func (x *FetchRequest) GetUrl() string { @@ -666,22 +740,37 @@ func (x *FetchRequest) GetTimeoutMs() uint32 { return 0 } +func (x *FetchRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *FetchRequest) GetMetadata() *FetchRequestMetadata { + if x != nil { + return x.Metadata + } + return nil +} + type FetchResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ExecutionError bool `protobuf:"varint,1,opt,name=executionError,proto3" json:"executionError,omitempty"` - ErrorMessage string `protobuf:"bytes,2,opt,name=errorMessage,proto3" json:"errorMessage,omitempty"` - StatusCode uint32 `protobuf:"varint,3,opt,name=statusCode,proto3" json:"statusCode,omitempty"` // NOTE: this is actually a uint8, but proto doesn't support this. - Headers *pb1.Map `protobuf:"bytes,4,opt,name=headers,proto3" json:"headers,omitempty"` - Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + ExecutionError bool `protobuf:"varint,1,opt,name=executionError,proto3" json:"executionError,omitempty"` + ErrorMessage string `protobuf:"bytes,2,opt,name=errorMessage,proto3" json:"errorMessage,omitempty"` + // NOTE: this is actually a uint8, but proto doesn't support this. + StatusCode uint32 `protobuf:"varint,3,opt,name=statusCode,proto3" json:"statusCode,omitempty"` + Headers *pb1.Map `protobuf:"bytes,4,opt,name=headers,proto3" json:"headers,omitempty"` + Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` } func (x *FetchResponse) Reset() { *x = FetchResponse{} if protoimpl.UnsafeEnabled { - mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -694,7 +783,7 @@ func (x *FetchResponse) String() string { func (*FetchResponse) ProtoMessage() {} func (x *FetchResponse) ProtoReflect() protoreflect.Message { - mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[9] + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -707,7 +796,7 @@ func (x *FetchResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use FetchResponse.ProtoReflect.Descriptor instead. 
func (*FetchResponse) Descriptor() ([]byte, []int) { - return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{9} + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{10} } func (x *FetchResponse) GetExecutionError() bool { @@ -745,6 +834,163 @@ func (x *FetchResponse) GetBody() []byte { return nil } +type EmitMessageRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + Labels *pb1.Map `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` + RequestId string `protobuf:"bytes,3,opt,name=requestId,proto3" json:"requestId,omitempty"` +} + +func (x *EmitMessageRequest) Reset() { + *x = EmitMessageRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EmitMessageRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EmitMessageRequest) ProtoMessage() {} + +func (x *EmitMessageRequest) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EmitMessageRequest.ProtoReflect.Descriptor instead. +func (*EmitMessageRequest) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{11} +} + +func (x *EmitMessageRequest) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *EmitMessageRequest) GetLabels() *pb1.Map { + if x != nil { + return x.Labels + } + return nil +} + +func (x *EmitMessageRequest) GetRequestId() string { + if x != nil { + return x.RequestId + } + return "" +} + +type Error struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *Error) Reset() { + *x = Error{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Error) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Error) ProtoMessage() {} + +func (x *Error) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Error.ProtoReflect.Descriptor instead. 
+func (*Error) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{12} +} + +func (x *Error) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +type EmitMessageResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Error *Error `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *EmitMessageResponse) Reset() { + *x = EmitMessageResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EmitMessageResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EmitMessageResponse) ProtoMessage() {} + +func (x *EmitMessageResponse) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EmitMessageResponse.ProtoReflect.Descriptor instead. +func (*EmitMessageResponse) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{13} +} + +func (x *EmitMessageResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + var File_workflows_wasm_pb_wasm_proto protoreflect.FileDescriptor var file_workflows_wasm_pb_wasm_proto_rawDesc = []byte{ @@ -829,32 +1075,61 @@ var file_workflows_wasm_pb_wasm_proto_rawDesc = []byte{ 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x64, 0x6b, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x53, 0x70, 0x65, 0x63, 0x48, 0x00, 0x52, 0x0c, 0x73, 0x70, 0x65, 0x63, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x22, 0x91, 0x01, 0x0a, 0x0c, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, - 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x69, - 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x09, 0x74, - 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x22, 0xb6, 0x01, 0x0a, 0x0d, 0x46, 0x65, 0x74, - 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x65, 0x78, - 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x0e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, - 0x6f, 0x72, 0x12, 0x22, 0x0a, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x43, 0x6f, 
0x64, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, - 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, - 0x79, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, - 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, - 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x2f, - 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x73, 0x61, 0x67, 0x65, 0x22, 0xb2, 0x01, 0x0a, 0x14, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1e, 0x0a, + 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x49, 0x64, 0x12, 0x22, 0x0a, + 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4e, 0x61, 0x6d, + 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x4f, 0x77, 0x6e, + 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, + 0x6f, 0x77, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x12, 0x30, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x66, + 0x6c, 0x6f, 0x77, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xd8, 0x01, 0x0a, 0x0c, 0x46, 0x65, + 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, + 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x16, 0x0a, 0x06, + 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x65, + 0x74, 0x68, 0x6f, 0x64, 0x12, 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, + 0x61, 0x70, 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x62, + 0x6f, 0x64, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, + 0x1c, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x4d, 0x73, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x35, 0x0a, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x19, 0x2e, 0x73, 0x64, 0x6b, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x22, 0xb6, 0x01, 0x0a, 0x0d, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 
0x0a, 0x0e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, + 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, + 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x22, + 0x0a, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, 0x64, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x43, 0x6f, + 0x64, 0x65, 0x12, 0x25, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, + 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x62, 0x6f, 0x64, + 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x22, 0x71, 0x0a, + 0x12, 0x45, 0x6d, 0x69, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x23, 0x0a, + 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x49, 0x64, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x49, 0x64, + 0x22, 0x21, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x22, 0x37, 0x0a, 0x13, 0x45, 0x6d, 0x69, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x20, 0x0a, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x73, 0x64, 0x6b, 0x2e, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x43, 0x5a, 0x41, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, + 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x70, + 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -869,7 +1144,7 @@ func file_workflows_wasm_pb_wasm_proto_rawDescGZIP() []byte { return file_workflows_wasm_pb_wasm_proto_rawDescData } -var file_workflows_wasm_pb_wasm_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_workflows_wasm_pb_wasm_proto_msgTypes = make([]protoimpl.MessageInfo, 14) var file_workflows_wasm_pb_wasm_proto_goTypes = []interface{}{ (*RuntimeConfig)(nil), // 0: sdk.RuntimeConfig (*ComputeRequest)(nil), // 1: sdk.ComputeRequest @@ -879,35 +1154,42 @@ var file_workflows_wasm_pb_wasm_proto_goTypes = []interface{}{ (*StepDefinition)(nil), // 5: sdk.StepDefinition (*WorkflowSpec)(nil), // 6: sdk.WorkflowSpec (*Response)(nil), // 7: sdk.Response - (*FetchRequest)(nil), // 8: sdk.FetchRequest - (*FetchResponse)(nil), // 9: sdk.FetchResponse - 
(*pb.CapabilityRequest)(nil), // 10: capabilities.CapabilityRequest - (*emptypb.Empty)(nil), // 11: google.protobuf.Empty - (*pb.CapabilityResponse)(nil), // 12: capabilities.CapabilityResponse - (*pb1.Map)(nil), // 13: values.Map + (*FetchRequestMetadata)(nil), // 8: sdk.FetchRequestMetadata + (*FetchRequest)(nil), // 9: sdk.FetchRequest + (*FetchResponse)(nil), // 10: sdk.FetchResponse + (*EmitMessageRequest)(nil), // 11: sdk.EmitMessageRequest + (*Error)(nil), // 12: sdk.Error + (*EmitMessageResponse)(nil), // 13: sdk.EmitMessageResponse + (*pb.CapabilityRequest)(nil), // 14: capabilities.CapabilityRequest + (*emptypb.Empty)(nil), // 15: google.protobuf.Empty + (*pb.CapabilityResponse)(nil), // 16: capabilities.CapabilityResponse + (*pb1.Map)(nil), // 17: values.Map } var file_workflows_wasm_pb_wasm_proto_depIdxs = []int32{ - 10, // 0: sdk.ComputeRequest.request:type_name -> capabilities.CapabilityRequest + 14, // 0: sdk.ComputeRequest.request:type_name -> capabilities.CapabilityRequest 0, // 1: sdk.ComputeRequest.runtimeConfig:type_name -> sdk.RuntimeConfig 1, // 2: sdk.Request.computeRequest:type_name -> sdk.ComputeRequest - 11, // 3: sdk.Request.specRequest:type_name -> google.protobuf.Empty - 12, // 4: sdk.ComputeResponse.response:type_name -> capabilities.CapabilityResponse - 13, // 5: sdk.StepInputs.mapping:type_name -> values.Map + 15, // 3: sdk.Request.specRequest:type_name -> google.protobuf.Empty + 16, // 4: sdk.ComputeResponse.response:type_name -> capabilities.CapabilityResponse + 17, // 5: sdk.StepInputs.mapping:type_name -> values.Map 4, // 6: sdk.StepDefinition.inputs:type_name -> sdk.StepInputs - 13, // 7: sdk.StepDefinition.config:type_name -> values.Map + 17, // 7: sdk.StepDefinition.config:type_name -> values.Map 5, // 8: sdk.WorkflowSpec.triggers:type_name -> sdk.StepDefinition 5, // 9: sdk.WorkflowSpec.actions:type_name -> sdk.StepDefinition 5, // 10: sdk.WorkflowSpec.consensus:type_name -> sdk.StepDefinition 5, // 11: sdk.WorkflowSpec.targets:type_name -> sdk.StepDefinition 3, // 12: sdk.Response.computeResponse:type_name -> sdk.ComputeResponse 6, // 13: sdk.Response.specResponse:type_name -> sdk.WorkflowSpec - 13, // 14: sdk.FetchRequest.headers:type_name -> values.Map - 13, // 15: sdk.FetchResponse.headers:type_name -> values.Map - 16, // [16:16] is the sub-list for method output_type - 16, // [16:16] is the sub-list for method input_type - 16, // [16:16] is the sub-list for extension type_name - 16, // [16:16] is the sub-list for extension extendee - 0, // [0:16] is the sub-list for field type_name + 17, // 14: sdk.FetchRequest.headers:type_name -> values.Map + 8, // 15: sdk.FetchRequest.metadata:type_name -> sdk.FetchRequestMetadata + 17, // 16: sdk.FetchResponse.headers:type_name -> values.Map + 17, // 17: sdk.EmitMessageRequest.labels:type_name -> values.Map + 12, // 18: sdk.EmitMessageResponse.error:type_name -> sdk.Error + 19, // [19:19] is the sub-list for method output_type + 19, // [19:19] is the sub-list for method input_type + 19, // [19:19] is the sub-list for extension type_name + 19, // [19:19] is the sub-list for extension extendee + 0, // [0:19] is the sub-list for field type_name } func init() { file_workflows_wasm_pb_wasm_proto_init() } @@ -1013,7 +1295,7 @@ func file_workflows_wasm_pb_wasm_proto_init() { } } file_workflows_wasm_pb_wasm_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FetchRequest); i { + switch v := v.(*FetchRequestMetadata); i { case 0: return &v.state case 1: @@ -1025,6 +1307,18 @@ func 
file_workflows_wasm_pb_wasm_proto_init() { } } file_workflows_wasm_pb_wasm_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FetchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchResponse); i { case 0: return &v.state @@ -1036,6 +1330,42 @@ func file_workflows_wasm_pb_wasm_proto_init() { return nil } } + file_workflows_wasm_pb_wasm_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EmitMessageRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Error); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EmitMessageResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_workflows_wasm_pb_wasm_proto_msgTypes[2].OneofWrappers = []interface{}{ (*Request_ComputeRequest)(nil), @@ -1051,7 +1381,7 @@ func file_workflows_wasm_pb_wasm_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_workflows_wasm_pb_wasm_proto_rawDesc, NumEnums: 0, - NumMessages: 10, + NumMessages: 14, NumExtensions: 0, NumServices: 0, }, diff --git a/pkg/workflows/wasm/pb/wasm.proto b/pkg/workflows/wasm/pb/wasm.proto index 180b2cd12..524daa838 100644 --- a/pkg/workflows/wasm/pb/wasm.proto +++ b/pkg/workflows/wasm/pb/wasm.proto @@ -8,9 +8,7 @@ import "capabilities/pb/capabilities.proto"; import "values/pb/values.proto"; import "google/protobuf/empty.proto"; -message RuntimeConfig { - int64 maxFetchResponseSizeBytes = 1; -} +message RuntimeConfig { int64 maxFetchResponseSizeBytes = 1; } message ComputeRequest { capabilities.CapabilityRequest request = 1; @@ -27,9 +25,7 @@ message Request { } } -message ComputeResponse { - capabilities.CapabilityResponse response = 1; -} +message ComputeResponse { capabilities.CapabilityResponse response = 1; } message StepInputs { string outputRef = 1; @@ -63,18 +59,40 @@ message Response { } } +// NOTE: This message was added because it is needed to be used as part of the request and for metrics. +message FetchRequestMetadata { + string workflowId = 1; + string workflowName = 2; + string workflowOwner = 3; + string workflowExecutionId = 4; +} + message FetchRequest { string url = 1; string method = 2; values.Map headers = 3; bytes body = 4; uint32 timeoutMs = 5; + string id = 6; + FetchRequestMetadata metadata = 7; } message FetchResponse { bool executionError = 1; string errorMessage = 2; - uint32 statusCode = 3; // NOTE: this is actually a uint8, but proto doesn't support this. + + // NOTE: this is actually a uint8, but proto doesn't support this. 
+ uint32 statusCode = 3; values.Map headers = 4; bytes body = 5; } + +message EmitMessageRequest { + string message = 1; + values.Map labels = 2; + string requestId = 3; +} + +message Error { string message = 1; } + +message EmitMessageResponse { Error error = 1; } diff --git a/pkg/workflows/wasm/runner.go b/pkg/workflows/wasm/runner.go index 1372117fa..0d8ab006e 100644 --- a/pkg/workflows/wasm/runner.go +++ b/pkg/workflows/wasm/runner.go @@ -26,7 +26,7 @@ var _ sdk.Runner = (*Runner)(nil) type Runner struct { sendResponse func(payload *wasmpb.Response) - sdkFactory func(cfg *RuntimeConfig) *Runtime + sdkFactory func(cfg *RuntimeConfig, opts ...func(*RuntimeConfig)) *Runtime args []string req *wasmpb.Request } @@ -156,7 +156,7 @@ func (r *Runner) handleComputeRequest(factory *sdk.WorkflowSpecFactory, id strin } // Extract the config from the request - drc := defaultRuntimeConfig() + drc := defaultRuntimeConfig(id, &creq.Metadata) if rc := computeReq.GetRuntimeConfig(); rc != nil { if rc.MaxFetchResponseSizeBytes != 0 { drc.MaxFetchResponseSizeBytes = rc.MaxFetchResponseSizeBytes diff --git a/pkg/workflows/wasm/runner_notwasip1.go b/pkg/workflows/wasm/runner_notwasip1.go new file mode 100644 index 000000000..e751b353a --- /dev/null +++ b/pkg/workflows/wasm/runner_notwasip1.go @@ -0,0 +1,7 @@ +//go:build !wasip1 + +package wasm + +func NewRunner() *Runner { + panic("error: NewRunner() is only intended for use with `GOOS=wasip1 GOARCH=wasm`. For testing, use testutils.NewRunner() instead.") +} diff --git a/pkg/workflows/wasm/runner_test.go b/pkg/workflows/wasm/runner_test.go index e81ce8862..bed65e172 100644 --- a/pkg/workflows/wasm/runner_test.go +++ b/pkg/workflows/wasm/runner_test.go @@ -2,7 +2,9 @@ package wasm import ( "encoding/base64" + "encoding/binary" "testing" + "unsafe" "github.com/google/uuid" "github.com/stretchr/testify/assert" @@ -15,6 +17,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictarget" "github.com/smartcontractkit/chainlink-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" capabilitiespb "github.com/smartcontractkit/chainlink-common/pkg/capabilities/pb" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" @@ -133,7 +136,9 @@ func TestRunner_Run_ExecuteCompute(t *testing.T) { runner := &Runner{ args: []string{"wasm", str}, sendResponse: responseFn, - sdkFactory: func(cfg *RuntimeConfig) *Runtime { return nil }, + sdkFactory: func(cfg *RuntimeConfig, _ ...func(*RuntimeConfig)) *Runtime { + return nil + }, } runner.Run(workflow) @@ -212,3 +217,167 @@ func TestRunner_Run_GetWorkflowSpec(t *testing.T) { // Verify the target is included in the workflow spec assert.Equal(t, targetConfig.Number, uint64(gotSpec.Targets[0].Config["number"].(int64))) } + +// Test_createEmitFn validates the runtime's emit function implementation. Uses mocks of the +// imported wasip1 emit function. 
+func Test_createEmitFn(t *testing.T) { + var ( + l = logger.Test(t) + reqId = "random-id" + sdkConfig = &RuntimeConfig{ + MaxFetchResponseSizeBytes: 1_000, + Metadata: &capabilities.RequestMetadata{ + WorkflowID: "workflow_id", + WorkflowExecutionID: "workflow_execution_id", + WorkflowName: "workflow_name", + WorkflowOwner: "workflow_owner_address", + }, + RequestID: &reqId, + } + giveMsg = "testing guest" + giveLabels = map[string]string{ + "some-key": "some-value", + } + ) + + t.Run("success", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 0 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.NoError(t, err) + }) + + t.Run("successfully read error message when emit fails", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + // marshall the protobufs + b, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(b)) + copy(resp, b) + + // write the length of the response to memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(b))) + + return 0 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) + }) + + t.Run("fail to deserialize response from memory", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + // b is a non-protobuf byte slice + b := []byte(assert.AnError.Error()) + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(b)) + copy(resp, b) + + // write the length of the response to memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(b))) + + return 0 + } + + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, "invalid wire-format data") + }) + + t.Run("fail with nonzero code from emit", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 42 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, "emit failed with errno 42") + }) +} + +func Test_createFetchFn(t *testing.T) { + var ( + l = logger.Test(t) + requestID = uuid.New().String() + sdkConfig = &RuntimeConfig{ + RequestID: &requestID, + MaxFetchResponseSizeBytes: 1_000, + Metadata: &capabilities.RequestMetadata{ + WorkflowID: "workflow_id", + WorkflowExecutionID: "workflow_execution_id", + WorkflowName: "workflow_name", + WorkflowOwner: "workflow_owner_address", + }, + } + ) + + t.Run("OK-success", func(t *testing.T) { + hostFetch := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 0 + } + runtimeFetch := createFetchFn(sdkConfig, l, hostFetch) + response, err := runtimeFetch(sdk.FetchRequest{}) + assert.NoError(t, err) + assert.Equal(t, sdk.FetchResponse{ + Headers: map[string]any{}, + }, response) + }) + + t.Run("NOK-config_missing_request_id", func(t *testing.T) { + invalidConfig 
:= &RuntimeConfig{ + RequestID: nil, + MaxFetchResponseSizeBytes: 1_000, + Metadata: &capabilities.RequestMetadata{ + WorkflowID: "workflow_id", + WorkflowExecutionID: "workflow_execution_id", + WorkflowName: "workflow_name", + WorkflowOwner: "workflow_owner_address", + }, + } + hostFetch := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 0 + } + runtimeFetch := createFetchFn(invalidConfig, l, hostFetch) + _, err := runtimeFetch(sdk.FetchRequest{}) + assert.ErrorContains(t, err, "request ID is required to fetch") + }) + + t.Run("NOK-fetch_returns_handled_error", func(t *testing.T) { + hostFetch := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + fetchResponse := &wasmpb.FetchResponse{ + ExecutionError: true, + ErrorMessage: assert.AnError.Error(), + } + respBytes, perr := proto.Marshal(fetchResponse) + if perr != nil { + return 0 + } + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(respBytes)) + copy(resp, respBytes) + + // write the length of the response to memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(respBytes))) + + return 0 + } + runtimeFetch := createFetchFn(sdkConfig, l, hostFetch) + _, err := runtimeFetch(sdk.FetchRequest{}) + assert.ErrorContains(t, err, assert.AnError.Error()) + }) +} diff --git a/pkg/workflows/wasm/runner_wasip1.go b/pkg/workflows/wasm/runner_wasip1.go index 6a85a43db..c31aa7427 100644 --- a/pkg/workflows/wasm/runner_wasip1.go +++ b/pkg/workflows/wasm/runner_wasip1.go @@ -1,17 +1,12 @@ package wasm import ( - "encoding/binary" - "errors" - "fmt" "os" "unsafe" "google.golang.org/protobuf/proto" "github.com/smartcontractkit/chainlink-common/pkg/logger" - "github.com/smartcontractkit/chainlink-common/pkg/values" - "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) @@ -24,103 +19,59 @@ func log(respptr unsafe.Pointer, respptrlen int32) //go:wasmimport env fetch func fetch(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32 -const uint32Size = int32(4) - -func bufferToPointerLen(buf []byte) (unsafe.Pointer, int32) { - return unsafe.Pointer(&buf[0]), int32(len(buf)) -} +//go:wasmimport env emit +func emit(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32 func NewRunner() *Runner { l := logger.NewWithSync(&wasmWriteSyncer{}) return &Runner{ - sendResponse: func(response *wasmpb.Response) { - pb, err := proto.Marshal(response) - if err != nil { - // We somehow couldn't marshal the response, so let's - // exit with a special error code letting the host know - // what happened. - os.Exit(CodeInvalidResponse) - } - - // unknownID will only be set when we've failed to parse - // the request. Like before, let's bubble this up. 
- if response.Id == unknownID { - os.Exit(CodeInvalidRequest) - } - - ptr, ptrlen := bufferToPointerLen(pb) - errno := sendResponse(ptr, ptrlen) - if errno != 0 { - os.Exit(CodeHostErr) - } - - code := CodeSuccess - if response.ErrMsg != "" { - code = CodeRunnerErr + sendResponse: sendResponseFn, + sdkFactory: func(sdkConfig *RuntimeConfig, opts ...func(*RuntimeConfig)) *Runtime { + for _, opt := range opts { + opt(sdkConfig) } - os.Exit(code) - }, - sdkFactory: func(sdkConfig *RuntimeConfig) *Runtime { return &Runtime{ - logger: l, - fetchFn: func(req sdk.FetchRequest) (sdk.FetchResponse, error) { - headerspb, err := values.NewMap(req.Headers) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to create headers map: %w", err) - } - - b, err := proto.Marshal(&wasmpb.FetchRequest{ - Url: req.URL, - Method: req.Method, - Headers: values.ProtoMap(headerspb), - Body: req.Body, - TimeoutMs: req.TimeoutMs, - }) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to marshal fetch request: %w", err) - } - reqptr, reqptrlen := bufferToPointerLen(b) - - respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) - respptr, _ := bufferToPointerLen(respBuffer) - - resplenBuffer := make([]byte, uint32Size) - resplenptr, _ := bufferToPointerLen(resplenBuffer) - - errno := fetch(respptr, resplenptr, reqptr, reqptrlen) - if errno != 0 { - return sdk.FetchResponse{}, errors.New("failed to execute fetch") - } - - responseSize := binary.LittleEndian.Uint32(resplenBuffer) - response := &wasmpb.FetchResponse{} - err = proto.Unmarshal(respBuffer[:responseSize], response) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to unmarshal fetch response: %w", err) - } - - fields := response.Headers.GetFields() - headersResp := make(map[string]any, len(fields)) - for k, v := range fields { - headersResp[k] = v - } - - return sdk.FetchResponse{ - ExecutionError: response.ExecutionError, - ErrorMessage: response.ErrorMessage, - StatusCode: uint8(response.StatusCode), - Headers: headersResp, - Body: response.Body, - }, nil - }, + logger: l, + fetchFn: createFetchFn(sdkConfig, l, fetch), + emitFn: createEmitFn(sdkConfig, l, emit), } }, args: os.Args, } } +// sendResponseFn implements sendResponse for import into WASM. +func sendResponseFn(response *wasmpb.Response) { + pb, err := proto.Marshal(response) + if err != nil { + // We somehow couldn't marshal the response, so let's + // exit with a special error code letting the host know + // what happened. + os.Exit(CodeInvalidResponse) + } + + // unknownID will only be set when we've failed to parse + // the request. Like before, let's bubble this up. 
+ if response.Id == unknownID { + os.Exit(CodeInvalidRequest) + } + + ptr, ptrlen := bufferToPointerLen(pb) + errno := sendResponse(ptr, ptrlen) + if errno != 0 { + os.Exit(CodeHostErr) + } + + code := CodeSuccess + if response.ErrMsg != "" { + code = CodeRunnerErr + } + + os.Exit(code) +} + type wasmWriteSyncer struct{} // Write is used to proxy log requests from the WASM binary back to the host diff --git a/pkg/workflows/wasm/sdk.go b/pkg/workflows/wasm/sdk.go index d6c29a009..5c33a0fb1 100644 --- a/pkg/workflows/wasm/sdk.go +++ b/pkg/workflows/wasm/sdk.go @@ -1,26 +1,47 @@ package wasm import ( + "encoding/binary" + "errors" + "fmt" + "unsafe" + + "google.golang.org/protobuf/proto" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/events" + "github.com/smartcontractkit/chainlink-common/pkg/custmsg" "github.com/smartcontractkit/chainlink-common/pkg/logger" + "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk" + wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb" ) +// Length of responses are encoded into 4 byte buffers in little endian. uint32Size is the size +// of that buffer. +const uint32Size = int32(4) + type Runtime struct { fetchFn func(req sdk.FetchRequest) (sdk.FetchResponse, error) + emitFn func(msg string, labels map[string]string) error logger logger.Logger } type RuntimeConfig struct { MaxFetchResponseSizeBytes int64 + RequestID *string + Metadata *capabilities.RequestMetadata } const ( defaultMaxFetchResponseSizeBytes = 5 * 1024 ) -func defaultRuntimeConfig() *RuntimeConfig { +func defaultRuntimeConfig(id string, md *capabilities.RequestMetadata) *RuntimeConfig { return &RuntimeConfig{ MaxFetchResponseSizeBytes: defaultMaxFetchResponseSizeBytes, + RequestID: &id, + Metadata: md, } } @@ -33,3 +54,215 @@ func (r *Runtime) Fetch(req sdk.FetchRequest) (sdk.FetchResponse, error) { func (r *Runtime) Logger() logger.Logger { return r.logger } + +func (r *Runtime) Emitter() sdk.MessageEmitter { + return newWasmGuestEmitter(r.emitFn) +} + +type wasmGuestEmitter struct { + base custmsg.MessageEmitter + emitFn func(string, map[string]string) error + labels map[string]string +} + +func newWasmGuestEmitter(emitFn func(string, map[string]string) error) wasmGuestEmitter { + return wasmGuestEmitter{ + emitFn: emitFn, + labels: make(map[string]string), + base: custmsg.NewLabeler(), + } +} + +func (w wasmGuestEmitter) Emit(msg string) error { + return w.emitFn(msg, w.labels) +} + +func (w wasmGuestEmitter) With(keyValues ...string) sdk.MessageEmitter { + newEmitter := newWasmGuestEmitter(w.emitFn) + newEmitter.base = w.base.With(keyValues...) + newEmitter.labels = newEmitter.base.Labels() + return newEmitter +} + +// createEmitFn builds the runtime's emit function implementation, which is a function +// that handles marshalling and unmarshalling messages for the WASM to act on. 
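[Editor's note] For orientation before the implementation that follows: once createEmitFn has produced the runtime's emit function, guest code reaches it through the sdk.MessageEmitter returned by Runtime.Emitter(). A minimal sketch of that call pattern, assuming only the Emit and With methods shown on wasmGuestEmitter in this diff; the label key, message text, and helper name are illustrative:

```go
package main

import "github.com/smartcontractkit/chainlink-common/pkg/workflows/sdk"

// emitCheckpoint shows the intended guest-side call pattern: derive a scoped
// emitter with With (key/value pairs become labels on every message), then Emit.
func emitCheckpoint(em sdk.MessageEmitter) error {
	scoped := em.With("stage", "aggregation")
	return scoped.Emit("aggregated 3 price feeds")
}

func main() {}
```

In practice em would be the runtime's Emitter() inside a compute function; emission failures come back wrapped as *EmissionError, so callers can distinguish them from workflow logic errors.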
+func createEmitFn( + sdkConfig *RuntimeConfig, + l logger.Logger, + emit func(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32, +) func(string, map[string]string) error { + emitFn := func(msg string, labels map[string]string) error { + // Prepare the labels to be emitted + if sdkConfig.Metadata == nil { + return NewEmissionError(fmt.Errorf("metadata is required to emit")) + } + + labels, err := toEmitLabels(sdkConfig.Metadata, labels) + if err != nil { + return NewEmissionError(err) + } + + vm, err := values.NewMap(labels) + if err != nil { + return NewEmissionError(fmt.Errorf("could not wrap labels to map: %w", err)) + } + + // Marshal the message and labels into a protobuf message + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + RequestId: *sdkConfig.RequestID, + Message: msg, + Labels: values.ProtoMap(vm), + }) + if err != nil { + return err + } + + // Prepare the request to be sent to the host memory by allocating space for the + // response and response length buffers. + respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) + respptr, _ := bufferToPointerLen(respBuffer) + + resplenBuffer := make([]byte, uint32Size) + resplenptr, _ := bufferToPointerLen(resplenBuffer) + + // The request buffer is the wasm memory, get a pointer to the first element and the length + // of the protobuf message. + reqptr, reqptrlen := bufferToPointerLen(b) + + // Emit the message via the method imported from the host + errno := emit(respptr, resplenptr, reqptr, reqptrlen) + if errno != 0 { + return NewEmissionError(fmt.Errorf("emit failed with errno %d", errno)) + } + + // Attempt to read and handle the response from the host memory + responseSize := binary.LittleEndian.Uint32(resplenBuffer) + response := &wasmpb.EmitMessageResponse{} + if err := proto.Unmarshal(respBuffer[:responseSize], response); err != nil { + l.Errorw("failed to unmarshal emit response", "error", err.Error()) + return NewEmissionError(err) + } + + if response.Error != nil && response.Error.Message != "" { + return NewEmissionError(errors.New(response.Error.Message)) + } + + return nil + } + + return emitFn +} + +// createFetchFn injects dependencies and creates a fetch function that can be used by the WASM +// binary. 
+func createFetchFn( + sdkConfig *RuntimeConfig, + l logger.Logger, + fetch func(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32, +) func(sdk.FetchRequest) (sdk.FetchResponse, error) { + fetchFn := func(req sdk.FetchRequest) (sdk.FetchResponse, error) { + headerspb, err := values.NewMap(req.Headers) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to create headers map: %w", err) + } + + if sdkConfig.RequestID == nil { + return sdk.FetchResponse{}, fmt.Errorf("request ID is required to fetch") + } + + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Id: *sdkConfig.RequestID, + Url: req.URL, + Method: req.Method, + Headers: values.ProtoMap(headerspb), + Body: req.Body, + TimeoutMs: req.TimeoutMs, + + Metadata: &wasmpb.FetchRequestMetadata{ + WorkflowId: sdkConfig.Metadata.WorkflowID, + WorkflowName: sdkConfig.Metadata.WorkflowName, + WorkflowOwner: sdkConfig.Metadata.WorkflowOwner, + WorkflowExecutionId: sdkConfig.Metadata.WorkflowExecutionID, + }, + }) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to marshal fetch request: %w", err) + } + reqptr, reqptrlen := bufferToPointerLen(b) + + respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) + respptr, _ := bufferToPointerLen(respBuffer) + + resplenBuffer := make([]byte, uint32Size) + resplenptr, _ := bufferToPointerLen(resplenBuffer) + + errno := fetch(respptr, resplenptr, reqptr, reqptrlen) + if errno != 0 { + return sdk.FetchResponse{}, fmt.Errorf("fetch failed with errno %d", errno) + } + responseSize := binary.LittleEndian.Uint32(resplenBuffer) + response := &wasmpb.FetchResponse{} + err = proto.Unmarshal(respBuffer[:responseSize], response) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to unmarshal fetch response: %w", err) + } + if response.ExecutionError && response.ErrorMessage != "" { + return sdk.FetchResponse{ + ExecutionError: response.ExecutionError, + ErrorMessage: response.ErrorMessage, + }, errors.New(response.ErrorMessage) + } + + fields := response.Headers.GetFields() + headersResp := make(map[string]any, len(fields)) + for k, v := range fields { + headersResp[k] = v + } + + return sdk.FetchResponse{ + StatusCode: uint8(response.StatusCode), + Headers: headersResp, + Body: response.Body, + }, nil + } + return fetchFn +} + +// bufferToPointerLen returns a pointer to the first element of the buffer and the length of the buffer. +func bufferToPointerLen(buf []byte) (unsafe.Pointer, int32) { + return unsafe.Pointer(&buf[0]), int32(len(buf)) +} + +// toEmitLabels ensures that the required metadata is present in the labels map +func toEmitLabels(md *capabilities.RequestMetadata, labels map[string]string) (map[string]string, error) { + if md.WorkflowID == "" { + return nil, fmt.Errorf("must provide workflow id to emit event") + } + + if md.WorkflowName == "" { + return nil, fmt.Errorf("must provide workflow name to emit event") + } + + if md.WorkflowOwner == "" { + return nil, fmt.Errorf("must provide workflow owner to emit event") + } + + labels[events.LabelWorkflowExecutionID] = md.WorkflowExecutionID + labels[events.LabelWorkflowOwner] = md.WorkflowOwner + labels[events.LabelWorkflowID] = md.WorkflowID + labels[events.LabelWorkflowName] = md.WorkflowName + return labels, nil +} + +// EmissionError wraps all errors that occur during the emission process for the runtime to handle. 
+type EmissionError struct { + Wrapped error +} + +func NewEmissionError(err error) *EmissionError { + return &EmissionError{Wrapped: err} +} + +func (e *EmissionError) Error() string { + return fmt.Errorf("failed to create emission: %w", e.Wrapped).Error() +} diff --git a/pkg/workflows/wasm/sdk_test.go b/pkg/workflows/wasm/sdk_test.go new file mode 100644 index 000000000..312dba7c7 --- /dev/null +++ b/pkg/workflows/wasm/sdk_test.go @@ -0,0 +1,66 @@ +package wasm + +import ( + "testing" + + "github.com/smartcontractkit/chainlink-common/pkg/capabilities" + + "github.com/stretchr/testify/assert" +) + +func Test_toEmitLabels(t *testing.T) { + t.Run("successfully transforms metadata", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + gotLabels, err := toEmitLabels(md, empty) + assert.NoError(t, err) + + assert.Equal(t, map[string]string{ + "workflow_id": "workflow-id", + "workflow_name": "workflow-name", + "workflow_owner_address": "workflow-owner", + "workflow_execution_id": "", + }, gotLabels) + }) + + t.Run("fails on missing workflow id", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow id") + }) + + t.Run("fails on missing workflow name", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow name") + }) + + t.Run("fails on missing workflow owner", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowName: "workflow-name", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow owner") + }) +} diff --git a/sonar-project.properties b/sonar-project.properties index a29d74421..3310f7904 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -2,6 +2,7 @@ sonar.projectKey=smartcontractkit_chainlink-common sonar.sources=. sonar.sourceEncoding=UTF-8 +sonar.python.version=3.8 # Full exclusions from the static analysis sonar.exclusions=\ @@ -16,18 +17,23 @@ sonar.exclusions=\ **/*report.xml,\ **/*.txt,\ **/*.abi,\ -**/*.bin +**/*.bin,\ +**/generated_*,\ +**/*_generated.go,\ +**/mock_*.go # Coverage exclusions sonar.coverage.exclusions=\ **/test/**/*,\ **/*_test.go,\ observability-lib/**,\ -**/fuzz/**/* +**/fuzz/**/*,\ +**/capabilities/**/*test/**/* + # Tests' root folder, inclusions (tests to check and count) and exclusions sonar.tests=. sonar.test.inclusions=**/*_test.go # Duplication exclusions -sonar.cpd.exclusions=observability-lib/** \ No newline at end of file +sonar.cpd.exclusions=**/observability-lib/**/* \ No newline at end of file
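[Editor's note] Taken together, these changes mean a host embedding the module now supplies a context-aware fetch callback and can cap per-execution calls via MaxFetchRequests. A minimal sketch of such a callback, using only the wasmpb types added in this diff; the direct net/http forwarding is illustrative and stands in for whatever gateway the host actually routes fetches through:

```go
package main

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"net/http"

	wasmpb "github.com/smartcontractkit/chainlink-common/pkg/workflows/wasm/pb"
)

// hostFetch is an illustrative ModuleConfig.Fetch implementation: it honors
// context cancellation via NewRequestWithContext and reports transport
// failures through ExecutionError rather than a hard error, mirroring how
// the tests above surface handled fetch errors to the guest.
func hostFetch(ctx context.Context, req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) {
	httpReq, err := http.NewRequestWithContext(ctx, req.Method, req.Url, bytes.NewReader(req.Body))
	if err != nil {
		return nil, fmt.Errorf("building request %q: %w", req.Id, err)
	}
	resp, err := http.DefaultClient.Do(httpReq)
	if err != nil {
		return &wasmpb.FetchResponse{ExecutionError: true, ErrorMessage: err.Error()}, nil
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	return &wasmpb.FetchResponse{StatusCode: uint32(resp.StatusCode), Body: body}, nil
}

func main() {}
```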