diff --git a/.github/workflows/golangci_lint.yml b/.github/workflows/golangci_lint.yml index 2b78784..aeca662 100644 --- a/.github/workflows/golangci_lint.yml +++ b/.github/workflows/golangci_lint.yml @@ -5,23 +5,13 @@ on: [pull_request] jobs: golangci-lint: runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + actions: read steps: - - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2 - - name: Set up Go - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - with: - go-version-file: "go.mod" - - name: Build binary - shell: bash - run: go build ./... - name: golangci-lint - uses: golangci/golangci-lint-action@aaa42aa0628b4ae2578232a66b541047968fac86 # v6.1.0 + uses: goplugin/.github/actions/ci-lint-go@2ac9d97a83a5edded09af7fcf4ea5bce7a4473a4 # v0.2.6 with: - version: v1.60.1 - # only-new-issues is only applicable to PRs, otherwise it is always set to false - only-new-issues: true - args: --out-format colored-line-number,checkstyle:golangci-lint-report.xml - - name: Print lint report artifact - if: failure() - shell: bash - run: cat ./golangci-lint-report.xml + golangci-lint-version: v1.61.0 + \ No newline at end of file diff --git a/.github/workflows/llm-action-error-reporter.yml b/.github/workflows/llm-action-error-reporter.yml new file mode 100644 index 0000000..f8e8d3d --- /dev/null +++ b/.github/workflows/llm-action-error-reporter.yml @@ -0,0 +1,23 @@ +name: LLM Action Error Reporter +on: + workflow_run: + workflows: ["PKG Build and Test"] # As soon as one of the listed workflows is completed, reporter is triggered + types: + - completed + +jobs: + analyze_logs: + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + repository-projects: read + actions: read + steps: + - name: Analyze logs + uses: goplugin/.github/actions/llm-action-error-reporter@d125ca9fe5e3b410de7c6db4a4ce3ed7a0728cd6 # v0.3.0 + with: + parent-workflow-conclusion: ${{ github.event.workflow_run.conclusion }} + skip-on-success: true # Skip posting comment if no errors are found + gh-token: ${{ github.token }} + openai-api-key: ${{ secrets.OPENAI_API_KEY }} \ No newline at end of file diff --git a/.github/workflows/pkg.yml b/.github/workflows/pkg.yml index 4140ba0..e89eeaa 100644 --- a/.github/workflows/pkg.yml +++ b/.github/workflows/pkg.yml @@ -18,7 +18,7 @@ jobs: run: go build -v ./... - name: Unit Tests - run: GORACE="log_path=$PWD/race" go test -race ./... -coverpkg=./... -coverprofile=pkg_coverage.out + run: GORACE="log_path=$PWD/race" go test -race ./... -coverpkg=./... -coverprofile=coverage.txt - name: Print Races if: failure() @@ -42,20 +42,13 @@ jobs: if: failure() run: find . 
-type f|fgrep '/testdata/fuzz/'|while read f; do echo $f; cat $f; done - - name: Upload Fuzz Tests Failing Inputs - if: failure() - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 - with: - name: failing-fuzz-inputs - path: "**/testdata/fuzz/**" - - name: Upload Go test results if: always() uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 with: name: go-test-results path: | - ./pkg_coverage.out + ./coverage.txt ./race.* check-tidy: diff --git a/.github/workflows/sonar-scan.yml b/.github/workflows/sonar-scan.yml index 26ca5f8..efddd5d 100644 --- a/.github/workflows/sonar-scan.yml +++ b/.github/workflows/sonar-scan.yml @@ -14,11 +14,11 @@ jobs: ref: ${{ github.event.pull_request.head.sha || github.event.merge_group.head_sha }} - name: Wait for workflows - uses: goplugin/plugin-github-actions/utils/wait-for-workflows@e29366cdecfe6befff9ab8c3cfe4825218505d58 # v2.3.16 + uses: goplugin/.github/actions/wait-for-workflows@dca9ab89d734e82738b8aa52bd25d09b205ec6ee # v0.1.1 with: - max-timeout: "900" + max-timeout: "1200" polling-interval: "30" - exclude-workflow-names: "" + exclude-workflow-names: "Build External Repositories, Observability Lib Checks, Run Benchmarks, LLM Action Error Reporter" exclude-workflow-ids: "" github-token: ${{ secrets.GITHUB_TOKEN }} env: @@ -30,46 +30,12 @@ jobs: runs-on: ubuntu-latest if: always() steps: - - name: Checkout the repo - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 - with: - fetch-depth: 0 # fetches all history for all tags and branches to provide more metadata for sonar reports - - - name: Download Golangci-lint report - if: always() - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 - with: - workflow: golangci_lint.yml - workflow_conclusion: "" - name_is_regexp: true - name: golangci-lint-report - if_no_artifact_found: warn - - - name: Download Go PKG test reports - if: always() - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 - with: - workflow: pkg.yml - workflow_conclusion: "" - name_is_regexp: true - name: go-test-results - if_no_artifact_found: warn - - - name: Set SonarQube Report Paths - if: always() - id: sonarqube_report_paths - shell: bash - run: | - echo "sonarqube_coverage_report_paths=$(find -type f -name '*coverage.out' -printf "%p,")" >> $GITHUB_OUTPUT - echo "sonarqube_golangci_report_paths=$(find -type f -name 'golangci-lint-report.xml' -printf "%p,")" >> $GITHUB_OUTPUT - - name: SonarQube Scan - if: always() - uses: sonarsource/sonarqube-scan-action@53c3e3207fe4b8d52e2f1ac9d6eb1d2506f626c0 # v2.0.2 + uses: goplugin/.github/actions/ci-sonarqube-go@5f4a9c9c3407dd499a1ebbc658a45b9beb9bf675 # v0.3.0 with: - args: > - -Dsonar.go.coverage.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_coverage_report_paths }} - -Dsonar.go.golangci-lint.reportPaths=${{ steps.sonarqube_report_paths.outputs.sonarqube_golangci_report_paths }} - env: - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - SONAR_HOST_URL: ${{ secrets.SONAR_HOST_URL }} + # sonarqube inputs + include-lint: "true" + test-report-workflow: pkg.yml + lint-report-workflow: golangci_lint.yml + sonar-token: ${{ secrets.SONAR_TOKEN }} + sonar-host-url: ${{ secrets.SONAR_HOST_URL }} diff --git a/.gitignore b/.gitignore index 6d1bc40..493ae60 100644 --- a/.gitignore +++ b/.gitignore @@ -19,11 +19,13 @@ **/testdata/fuzz/* # Dependency directories (remove the comment below to include it) -# vendor/ +vendor/ # IntelliJ IDE .idea 
-vendor/ +# Visual Studio Code +.vscode +# Generated files *.wasm diff --git a/.golangci.yml b/.golangci.yml index 231db3e..04aaa34 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -2,23 +2,29 @@ run: timeout: 15m0s linters: enable: + - containedctx + - depguard + - errname + - errorlint - exhaustive - exportloopref - - revive + - fatcontext + - ginkgolinter - goimports - gosec + - loggercheck + - mirror - misspell + - noctx + - perfsprint + - prealloc + - revive - rowserrcheck - - errorlint - - unconvert + - spancheck - sqlclosecheck - - noctx + - testifylint + - unconvert - whitespace - - depguard - - containedctx - - fatcontext - - mirror - - loggercheck linters-settings: exhaustive: default-signifies-exhaustive: true diff --git a/.tool-versions b/.tool-versions index b82d197..6b6bb42 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,5 +1,5 @@ golang 1.22.7 protoc 25.1 protoc-gen-go-grpc 1.3.0 -golangci-lint 1.55.2 +golangci-lint 1.61.0 mockery 2.43.2 diff --git a/go.mod b/go.mod index 2d61049..482095c 100644 --- a/go.mod +++ b/go.mod @@ -38,26 +38,29 @@ require ( github.com/goplugin/plugin-libocr v0.1.1 //plugin update changes github.com/stretchr/testify v1.9.0 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 - go.opentelemetry.io/otel v1.28.0 + go.opentelemetry.io/otel v1.30.0 go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9 + go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0 go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0 go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0 - go.opentelemetry.io/otel/log v0.4.0 - go.opentelemetry.io/otel/metric v1.28.0 - go.opentelemetry.io/otel/sdk v1.28.0 - go.opentelemetry.io/otel/sdk/log v0.4.0 - go.opentelemetry.io/otel/sdk/metric v1.28.0 - go.opentelemetry.io/otel/trace v1.28.0 + go.opentelemetry.io/otel/log v0.6.0 + go.opentelemetry.io/otel/metric v1.30.0 + go.opentelemetry.io/otel/sdk v1.30.0 + go.opentelemetry.io/otel/sdk/log v0.6.0 + go.opentelemetry.io/otel/sdk/metric v1.30.0 + go.opentelemetry.io/otel/trace v1.30.0 go.uber.org/goleak v1.3.0 go.uber.org/zap v1.27.0 golang.org/x/crypto v0.27.0 golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 golang.org/x/tools v0.25.0 gonum.org/v1/gonum v0.15.0 - google.golang.org/grpc v1.65.0 + google.golang.org/grpc v1.66.1 google.golang.org/protobuf v1.34.2 sigs.k8s.io/yaml v1.4.0 ) @@ -97,7 +100,7 @@ require ( github.com/stretchr/objx v0.5.2 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/x448/float16 v0.8.4 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0 // indirect go.opentelemetry.io/proto/otlp v1.3.1 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/mod v0.21.0 // indirect @@ -106,7 +109,7 @@ require ( golang.org/x/sys v0.25.0 // indirect golang.org/x/text v0.18.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd // indirect - google.golang.org/genproto/googleapis/rpc 
v0.0.0-20240822170219-fc7c04adadcd // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index f550120..6d7b9ae 100644 --- a/go.sum +++ b/go.sum @@ -253,34 +253,40 @@ go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0 h1:vS1Ao/R55RNV4O7TA2Qopok8yN+X0LIP6RVWLFkprck= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.52.0/go.mod h1:BMsdeOxN04K0L5FNUBfjFdvwWGNe/rkmSwH4Aelu/X0= -go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo= -go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4= +go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts= +go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc= go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9 h1:UiRNKd1OgqsLbFwE+wkAWTdiAxXtCBqKIHeBIse4FUA= go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.0.0-20240823153156-2a54df7bffb9/go.mod h1:eqZlW3pJWhjyexnDPrdQxix1pn0wwhI4AO4GKpP/bMI= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 h1:QSKmLBzbFULSyHzOdO9JsN9lpE4zkrz1byYGmJecdVE= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0/go.mod h1:sTQ/NH8Yrirf0sJ5rWqVu+oT82i4zL9FaF6rWcqnptM= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0 h1:U2guen0GhqH8o/G2un8f/aG/y++OuW6MyCo6hT9prXk= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.28.0/go.mod h1:yeGZANgEcpdx/WK0IvvRFC+2oLiMS2u4L/0Rj2M2Qr0= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 h1:3Q/xZUyC1BBkualc9ROb4G8qkH90LXEIICcs5zv1OYY= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0/go.mod h1:s75jGIWA9OfCMzF0xr+ZgfrB5FEbbV7UuYo32ahUiFI= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0 h1:VrMAbeJz4gnVDg2zEzjHG4dEH86j4jO6VYB+NgtGD8s= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.30.0/go.mod h1:qqN/uFdpeitTvm+JDqqnjm517pmQRYxTORbETHq5tOc= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0 h1:lsInsfvhVIfOI6qHVyysXMNDnjO9Npvl7tlDPJFBVd4= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0/go.mod h1:KQsVNh4OjgjTG0G6EiNi1jVpnaeeKsKMRwbLN+f1+8M= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 h1:R3X6ZXmNPRR8ul6i3WgFURCHzaXjHdm0karRG/+dj3s= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0/go.mod h1:QWFXnDavXWwMx2EEcZsf3yxgEKAqsxQ+Syjp+seyInw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 h1:umZgi92IyxfXd/l4kaDhnKgY8rnN/cZcF1LKc6I8OQ8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0/go.mod h1:4lVs6obhSVRb1EW5FhOuBTyiQhtRtAnnva9vD3yRfq8= go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0 h1:0MH3f8lZrflbUWXVxyBg/zviDFdGE062uKh5+fu8Vv0= go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.4.0/go.mod h1:Vh68vYiHY5mPdekTr0ox0sALsqjoVy0w3Os278yX5SQ= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0 h1:BJee2iLkfRfl9lc7aFmBwkWxY/RI1RDdXepSF6y8TPE= go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.28.0/go.mod 
h1:DIzlHs3DRscCIBU3Y9YSzPfScwnYnzfnCd4g8zA7bZc= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0 h1:EVSnY9JbEEW92bEkIYOVMw4q1WJxIAGoFTrtYOzWuRQ= go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.28.0/go.mod h1:Ea1N1QQryNXpCD0I1fdLibBAIpQuBkznMmkdKrapk1Y= -go.opentelemetry.io/otel/log v0.4.0 h1:/vZ+3Utqh18e8TPjuc3ecg284078KWrR8BRz+PQAj3o= -go.opentelemetry.io/otel/log v0.4.0/go.mod h1:DhGnQvky7pHy82MIRV43iXh3FlKN8UUKftn0KbLOq6I= -go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q= -go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s= -go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= -go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= -go.opentelemetry.io/otel/sdk/log v0.4.0 h1:1mMI22L82zLqf6KtkjrRy5BbagOTWdJsqMY/HSqILAA= -go.opentelemetry.io/otel/sdk/log v0.4.0/go.mod h1:AYJ9FVF0hNOgAVzUG/ybg/QttnXhUePWAupmCqtdESo= -go.opentelemetry.io/otel/sdk/metric v1.28.0 h1:OkuaKgKrgAbYrrY0t92c+cC+2F6hsFNnCQArXCKlg08= -go.opentelemetry.io/otel/sdk/metric v1.28.0/go.mod h1:cWPjykihLAPvXKi4iZc1dpER3Jdq2Z0YLse3moQUCpg= -go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g= -go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI= +go.opentelemetry.io/otel/log v0.6.0 h1:nH66tr+dmEgW5y+F9LanGJUBYPrRgP4g2EkmPE3LeK8= +go.opentelemetry.io/otel/log v0.6.0/go.mod h1:KdySypjQHhP069JX0z/t26VHwa8vSwzgaKmXtIB3fJM= +go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w= +go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ= +go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE= +go.opentelemetry.io/otel/sdk v1.30.0/go.mod h1:p14X4Ok8S+sygzblytT1nqG98QG2KYKv++HE0LY/mhg= +go.opentelemetry.io/otel/sdk/log v0.6.0 h1:4J8BwXY4EeDE9Mowg+CyhWVBhTSLXVXodiXxS/+PGqI= +go.opentelemetry.io/otel/sdk/log v0.6.0/go.mod h1:L1DN8RMAduKkrwRAFDEX3E3TLOq46+XMGSbUfHU/+vE= +go.opentelemetry.io/otel/sdk/metric v1.30.0 h1:QJLT8Pe11jyHBHfSAgYH7kEmT24eX792jZO1bo4BXkM= +go.opentelemetry.io/otel/sdk/metric v1.30.0/go.mod h1:waS6P3YqFNzeP01kuo/MBBYqaoBJl7efRQHOaydhy1Y= +go.opentelemetry.io/otel/trace v1.30.0 h1:7UBkkYzeg3C7kQX8VAidWh2biiQbtAKjyIML8dQ9wmc= +go.opentelemetry.io/otel/trace v1.30.0/go.mod h1:5EyKqTzzmyqB9bwtCCq6pDLktPK6fmGf/Dph+8VI02o= go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= @@ -376,17 +382,17 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20210401141331-865547bb08e2/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd h1:BBOTEWLuuEGQy9n1y9MhVJ9Qt0BDu21X8qZs71/uPZo= -google.golang.org/genproto/googleapis/api v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:fO8wJzT2zbQbAjbIoos1285VfEIYKDDY+Dt+WpTkh6g= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd 
h1:6TEm2ZxXoQmFWFlt1vNxvVOa1Q0dXFQD1m/rYjXmS0E= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240822170219-fc7c04adadcd/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 h1:hjSy6tcFQZ171igDaN5QHOw2n6vx40juYbC/x67CEhc= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:qpvKtACPCQhAdu3PyQgV4l3LMXZEtft7y8QcarRsp9I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= -google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= +google.golang.org/grpc v1.66.1 h1:hO5qAXR19+/Z44hmvIM4dQFMSYX9XcWsByfoxutBpAM= +google.golang.org/grpc v1.66.1/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/pkg/assets/link_test.go b/pkg/assets/link_test.go index baed234..b5a85fc 100644 --- a/pkg/assets/link_test.go +++ b/pkg/assets/link_test.go @@ -187,20 +187,20 @@ func TestLink(t *testing.T) { {"1", "1"}, {"1 juels", "1"}, {"100000000000", "100000000000"}, - {"0.0000001 link", "100000000000"}, - {"1000000000000", "0.000001 link"}, - {"100000000000000", "0.0001 link"}, - {"0.0001 link", "0.0001 link"}, - {"10000000000000000", "0.01 link"}, - {"0.01 link", "0.01 link"}, - {"100000000000000000", "0.1 link"}, - {"0.1 link", "0.1 link"}, - {"1.0 link", "1 link"}, - {"1000000000000000000", "1 link"}, - {"1000000000000000000 juels", "1 link"}, - {"1100000000000000000", "1.1 link"}, - {"1.1link", "1.1 link"}, - {"1.1 link", "1.1 link"}, + {"0.0000001 pli", "100000000000"}, + {"1000000000000", "0.000001 pli"}, + {"100000000000000", "0.0001 pli"}, + {"0.0001 pli", "0.0001 pli"}, + {"10000000000000000", "0.01 pli"}, + {"0.01 pli", "0.01 pli"}, + {"100000000000000000", "0.1 pli"}, + {"0.1 pli", "0.1 pli"}, + {"1.0 pli", "1 pli"}, + {"1000000000000000000", "1 pli"}, + {"1000000000000000000 juels", "1 pli"}, + {"1100000000000000000", "1.1 pli"}, + {"1.1pli", "1.1 pli"}, + {"1.1 pli", "1.1 pli"}, } { t.Run(tt.input, func(t *testing.T) { var l assets.Link @@ -215,12 +215,12 @@ func TestLink(t *testing.T) { func FuzzLink(f *testing.F) { f.Add("1") - f.Add("1 link") - f.Add("1.1link") + f.Add("1 pli") + f.Add("1.1pli") f.Add("2.3") - f.Add("2.3 link") - f.Add("00005 link") - f.Add("0.0005link") + f.Add("2.3 pli") + f.Add("00005 pli") + f.Add("0.0005pli") f.Add("1100000000000000000000000000000") f.Add("1100000000000000000000000000000 juels") f.Fuzz(func(t *testing.T, v string) { diff --git a/pkg/beholder/client.go 
b/pkg/beholder/client.go index 63a9192..c1ee755 100644 --- a/pkg/beholder/client.go +++ b/pkg/beholder/client.go @@ -6,6 +6,7 @@ import ( "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc" "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc" otellog "go.opentelemetry.io/otel/log" @@ -53,27 +54,40 @@ type Client struct { // NewClient creates a new Client with initialized OpenTelemetry components // To handle OpenTelemetry errors use [otel.SetErrorHandler](https://pkg.go.dev/go.opentelemetry.io/otel#SetErrorHandler) func NewClient(cfg Config) (*Client, error) { + if cfg.OtelExporterGRPCEndpoint != "" && cfg.OtelExporterHTTPEndpoint != "" { + return nil, errors.New("only one exporter endpoint should be set") + } + if cfg.OtelExporterGRPCEndpoint == "" && cfg.OtelExporterHTTPEndpoint == "" { + return nil, errors.New("at least one exporter endpoint should be set") + } + if cfg.OtelExporterHTTPEndpoint != "" { + factory := func(options ...otlploghttp.Option) (sdklog.Exporter, error) { + // note: context is unused internally + return otlploghttp.New(context.Background(), options...) //nolint + } + return newHTTPClient(cfg, factory) + } + factory := func(options ...otlploggrpc.Option) (sdklog.Exporter, error) { // note: context is unused internally return otlploggrpc.New(context.Background(), options...) //nolint } - return newClient(cfg, factory) + return newGRPCClient(cfg, factory) } // Used for testing to override the default exporter type otlploggrpcFactory func(options ...otlploggrpc.Option) (sdklog.Exporter, error) -func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { +func newGRPCClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, err := newOtelResource(cfg) - noop := NewNoopClient() if err != nil { - return noop, err + return nil, err } creds := insecure.NewCredentials() if !cfg.InsecureConnection && cfg.CACertFile != "" { creds, err = credentials.NewClientTLSFromFile(cfg.CACertFile, "") if err != nil { - return noop, err + return nil, err } } sharedLogExporter, err := otlploggrpcNew( @@ -81,7 +95,7 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { otlploggrpc.WithEndpoint(cfg.OtelExporterGRPCEndpoint), ) if err != nil { - return noop, err + return nil, err } // Logger @@ -102,7 +116,7 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, ) if err != nil { - return noop, err + return nil, err } loggerProvider := sdklog.NewLoggerProvider( sdklog.WithResource(loggerResource), @@ -113,14 +127,14 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { // Tracer tracerProvider, err := newTracerProvider(cfg, baseResource, creds) if err != nil { - return noop, err + return nil, err } tracer := tracerProvider.Tracer(defaultPackageName) // Meter meterProvider, err := newMeterProvider(cfg, baseResource, creds) if err != nil { - return noop, err + return nil, err } meter := meterProvider.Meter(defaultPackageName) @@ -143,7 +157,7 @@ func newClient(cfg Config, otlploggrpcNew otlploggrpcFactory) (*Client, error) { baseResource, ) if err != nil { - return noop, err + return nil, err } messageLoggerProvider := sdklog.NewLoggerProvider( @@ -247,6 +261,7 @@ type shutdowner interface { func newTracerProvider(config Config, resource 
*sdkresource.Resource, creds credentials.TransportCredentials) (*sdktrace.TracerProvider, error) { ctx := context.Background() + // note: context is used internally exporter, err := otlptracegrpc.New(ctx, otlptracegrpc.WithTLSCredentials(creds), otlptracegrpc.WithEndpoint(config.OtelExporterGRPCEndpoint), @@ -273,6 +288,7 @@ func newTracerProvider(config Config, resource *sdkresource.Resource, creds cred func newMeterProvider(config Config, resource *sdkresource.Resource, creds credentials.TransportCredentials) (*sdkmetric.MeterProvider, error) { ctx := context.Background() + // note: context is unused internally exporter, err := otlpmetricgrpc.New( ctx, otlpmetricgrpc.WithTLSCredentials(creds), diff --git a/pkg/beholder/client_test.go b/pkg/beholder/client_test.go index 57fd54b..3a99672 100644 --- a/pkg/beholder/client_test.go +++ b/pkg/beholder/client_test.go @@ -10,6 +10,7 @@ import ( "github.com/stretchr/testify/mock" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" otellog "go.opentelemetry.io/otel/log" sdklog "go.opentelemetry.io/otel/sdk/log" @@ -49,11 +50,37 @@ func TestClient(t *testing.T) { "byte_key_1": []byte("byte_val_1"), "str_slice_key_1": []string{"str_val_1", "str_val_2"}, "nil_key_1": nil, + "beholder_domain": "TestDomain", // Required field + "beholder_entity": "TestEntity", // Required field "beholder_data_schema": "/schemas/ids/1001", // Required field, URI } } defaultMessageBody := []byte("body bytes") + mustNewGRPCClient := func(t *testing.T, exporterMock *mocks.OTLPExporter) *Client { + // Override exporter factory which is used by Client + exporterFactory := func(...otlploggrpc.Option) (sdklog.Exporter, error) { + return exporterMock, nil + } + client, err := newGRPCClient(TestDefaultConfig(), exporterFactory) + if err != nil { + t.Fatalf("Error creating beholder client: %v", err) + } + return client + } + + mustNewHTTPClient := func(t *testing.T, exporterMock *mocks.OTLPExporter) *Client { + // Override exporter factory which is used by Client + exporterFactory := func(...otlploghttp.Option) (sdklog.Exporter, error) { + return exporterMock, nil + } + client, err := newHTTPClient(TestDefaultConfigHTTPClient(), exporterFactory) + if err != nil { + t.Fatalf("Error creating beholder client: %v", err) + } + return client + } + testCases := []struct { name string makeCustomAttributes func() map[string]any @@ -62,9 +89,10 @@ func TestClient(t *testing.T) { exporterMockErrorCount int exporterOutputExpected bool messageGenerator func(client *Client, messageBody []byte, customAttributes map[string]any) + mustNewGrpcClient func(*testing.T, *mocks.OTLPExporter) *Client }{ { - name: "Test Emit", + name: "Test Emit (GRPC Client)", makeCustomAttributes: defaultCustomAttributes, messageBody: defaultMessageBody, messageCount: 10, @@ -74,6 +102,21 @@ func TestClient(t *testing.T) { err := client.Emitter.Emit(tests.Context(t), messageBody, customAttributes) assert.NoError(t, err) }, + mustNewGrpcClient: mustNewGRPCClient, + }, + + { + name: "Test Emit (HTTP Client)", + makeCustomAttributes: defaultCustomAttributes, + messageBody: defaultMessageBody, + messageCount: 10, + exporterMockErrorCount: 0, + exporterOutputExpected: true, + messageGenerator: func(client *Client, messageBody []byte, customAttributes map[string]any) { + err := client.Emitter.Emit(tests.Context(t), messageBody, customAttributes) + assert.NoError(t, err) + }, + mustNewGrpcClient: mustNewHTTPClient, }, } @@ 
-82,14 +125,8 @@ func TestClient(t *testing.T) { exporterMock := mocks.NewOTLPExporter(t) defer exporterMock.AssertExpectations(t) - // Override exporter factory which is used by Client - exporterFactory := func(...otlploggrpc.Option) (sdklog.Exporter, error) { - return exporterMock, nil - } - client, err := newClient(TestDefaultConfig(), exporterFactory) - if err != nil { - t.Fatalf("Error creating beholder client: %v", err) - } + client := tc.mustNewGrpcClient(t, exporterMock) + otel.SetErrorHandler(otelMustNotErr(t)) // Number of exported messages exportedMessageCount := 0 @@ -138,7 +175,7 @@ func TestClient(t *testing.T) { func TestEmitterMessageValidation(t *testing.T) { getEmitter := func(exporterMock *mocks.OTLPExporter) Emitter { - client, err := newClient( + client, err := newGRPCClient( TestDefaultConfig(), // Override exporter factory which is used by Client func(...otlploggrpc.Option) (sdklog.Exporter, error) { @@ -167,15 +204,69 @@ func TestEmitterMessageValidation(t *testing.T) { { name: "Invalid URI", attrs: Attributes{ + "beholder_domain": "TestDomain", + "beholder_entity": "TestEntity", "beholder_data_schema": "example-schema", }, exporterCalledTimes: 0, expectedError: "'Metadata.BeholderDataSchema' Error:Field validation for 'BeholderDataSchema' failed on the 'uri' tag", }, { - name: "Valid URI", + name: "Invalid Beholder domain (double underscore)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity", + "beholder_domain": "Test__Domain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderDomain' Error:Field validation for 'BeholderDomain' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder domain (special characters)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity", + "beholder_domain": "TestDomain*$", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderDomain' Error:Field validation for 'BeholderDomain' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder entity (double underscore)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "Test__Entity", + "beholder_domain": "TestDomain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderEntity' Error:Field validation for 'BeholderEntity' failed on the 'domain_entity' tag", + }, + { + name: "Invalid Beholder entity (special characters)", + attrs: Attributes{ + "beholder_data_schema": "/example-schema/versions/1", + "beholder_entity": "TestEntity*$", + "beholder_domain": "TestDomain", + }, + exporterCalledTimes: 0, + expectedError: "'Metadata.BeholderEntity' Error:Field validation for 'BeholderEntity' failed on the 'domain_entity' tag", + }, + { + name: "Valid Attributes", exporterCalledTimes: 1, attrs: Attributes{ + "beholder_domain": "TestDomain", + "beholder_entity": "TestEntity", + "beholder_data_schema": "/example-schema/versions/1", + }, + expectedError: "", + }, + { + name: "Valid Attributes (special characters)", + exporterCalledTimes: 1, + attrs: Attributes{ + "beholder_domain": "Test.Domain_42-1", + "beholder_entity": "Test.Entity_42-1", "beholder_data_schema": "/example-schema/versions/1", }, expectedError: "", @@ -252,3 +343,40 @@ func TestClient_ForPackage(t *testing.T) { func otelMustNotErr(t *testing.T) otel.ErrorHandlerFunc { return func(err error) { t.Fatalf("otel error: %v", err) } } + +func TestNewClient(t *testing.T) { + t.Run("both endpoints set", func(t 
*testing.T) { + client, err := NewClient(Config{ + OtelExporterGRPCEndpoint: "grpc-endpoint", + OtelExporterHTTPEndpoint: "http-endpoint", + }) + assert.Error(t, err) + assert.Nil(t, client) + assert.Equal(t, "only one exporter endpoint should be set", err.Error()) + }) + + t.Run("no endpoints set", func(t *testing.T) { + client, err := NewClient(Config{}) + assert.Error(t, err) + assert.Nil(t, client) + assert.Equal(t, "at least one exporter endpoint should be set", err.Error()) + }) + + t.Run("GRPC endpoint set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterGRPCEndpoint: "grpc-endpoint", + }) + assert.NoError(t, err) + assert.NotNil(t, client) + assert.IsType(t, &Client{}, client) + }) + + t.Run("HTTP endpoint set", func(t *testing.T) { + client, err := NewClient(Config{ + OtelExporterHTTPEndpoint: "http-endpoint", + }) + assert.NoError(t, err) + assert.NotNil(t, client) + assert.IsType(t, &Client{}, client) + }) +} diff --git a/pkg/beholder/config.go b/pkg/beholder/config.go index b80021b..713f8a9 100644 --- a/pkg/beholder/config.go +++ b/pkg/beholder/config.go @@ -11,6 +11,7 @@ type Config struct { InsecureConnection bool CACertFile string OtelExporterGRPCEndpoint string + OtelExporterHTTPEndpoint string // OTel Resource ResourceAttributes []otelattr.KeyValue @@ -68,3 +69,13 @@ func TestDefaultConfig() Config { config.LogBatchProcessor = false return config } + +func TestDefaultConfigHTTPClient() Config { + config := DefaultConfig() + // Should be only disabled for testing + config.EmitterBatchProcessor = false + config.LogBatchProcessor = false + config.OtelExporterGRPCEndpoint = "" + config.OtelExporterHTTPEndpoint = "localhost:4318" + return config +} diff --git a/pkg/beholder/config_test.go b/pkg/beholder/config_test.go index f32bcc8..d4cae6f 100644 --- a/pkg/beholder/config_test.go +++ b/pkg/beholder/config_test.go @@ -18,6 +18,7 @@ func ExampleConfig() { InsecureConnection: true, CACertFile: "", OtelExporterGRPCEndpoint: "localhost:4317", + OtelExporterHTTPEndpoint: "localhost:4318", // Resource ResourceAttributes: []otelattr.KeyValue{ otelattr.String("package_name", packageName), @@ -37,5 +38,5 @@ func ExampleConfig() { } fmt.Printf("%+v", config) // Output: - // {InsecureConnection:true CACertFile: OtelExporterGRPCEndpoint:localhost:4317 ResourceAttributes:[{Key:package_name Value:{vtype:4 numeric:0 stringly:beholder slice:}} {Key:sender Value:{vtype:4 numeric:0 stringly:beholderclient slice:}}] EmitterExportTimeout:1s EmitterBatchProcessor:true TraceSampleRatio:1 TraceBatchTimeout:1s TraceSpanExporter: MetricReaderInterval:1s LogExportTimeout:1s LogBatchProcessor:true} + // {InsecureConnection:true CACertFile: OtelExporterGRPCEndpoint:localhost:4317 OtelExporterHTTPEndpoint:localhost:4318 ResourceAttributes:[{Key:package_name Value:{vtype:4 numeric:0 stringly:beholder slice:}} {Key:sender Value:{vtype:4 numeric:0 stringly:beholderclient slice:}}] EmitterExportTimeout:1s EmitterBatchProcessor:true TraceSampleRatio:1 TraceBatchTimeout:1s TraceSpanExporter: MetricReaderInterval:1s LogExportTimeout:1s LogBatchProcessor:true} } diff --git a/pkg/beholder/example_test.go b/pkg/beholder/example_test.go index 1329981..b30c5fd 100644 --- a/pkg/beholder/example_test.go +++ b/pkg/beholder/example_test.go @@ -45,6 +45,8 @@ func ExampleNewClient() { for range 10 { err := beholder.GetEmitter().Emit(context.Background(), payloadBytes, "beholder_data_schema", "/custom-message/versions/1", // required + "beholder_domain", "ExampleDomain", // required + 
"beholder_entity", "ExampleEntity", // required "beholder_data_type", "custom_message", "foo", "bar", ) @@ -105,6 +107,8 @@ func ExampleNewNoopClient() { err := beholder.GetEmitter().Emit(context.Background(), []byte("test message"), "beholder_data_schema", "/custom-message/versions/1", // required + "beholder_domain", "ExampleDomain", // required + "beholder_entity", "ExampleEntity", // required ) if err != nil { log.Printf("Error emitting message: %v", err) diff --git a/pkg/beholder/httpclient.go b/pkg/beholder/httpclient.go new file mode 100644 index 0000000..97f1b42 --- /dev/null +++ b/pkg/beholder/httpclient.go @@ -0,0 +1,204 @@ +package beholder + +import ( + "context" + "crypto/tls" + "crypto/x509" + "errors" + "fmt" + "os" + + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" + "go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" + sdklog "go.opentelemetry.io/otel/sdk/log" + sdkmetric "go.opentelemetry.io/otel/sdk/metric" + sdkresource "go.opentelemetry.io/otel/sdk/resource" + "go.opentelemetry.io/otel/sdk/trace" + sdktrace "go.opentelemetry.io/otel/sdk/trace" +) + +// Used for testing to override the default exporter +type otlploghttpFactory func(options ...otlploghttp.Option) (sdklog.Exporter, error) + +func newCertFromFile(certFile string) (*x509.CertPool, error) { + b, err := os.ReadFile(certFile) + if err != nil { + return nil, err + } + cp := x509.NewCertPool() + if !cp.AppendCertsFromPEM(b) { + return nil, fmt.Errorf("credentials: failed to append certificates") + } + return cp, nil +} + +func newHTTPClient(cfg Config, otlploghttpNew otlploghttpFactory) (*Client, error) { + baseResource, err := newOtelResource(cfg) + if err != nil { + return nil, err + } + var tlsConfig *tls.Config + if !cfg.InsecureConnection { + tlsConfig = &tls.Config{ + MinVersion: tls.VersionTLS12, + } + if cfg.CACertFile != "" { + rootCAs, e := newCertFromFile(cfg.CACertFile) + if e != nil { + return nil, e + } + tlsConfig.RootCAs = rootCAs + } + } + tlsConfigOption := otlploghttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlploghttp.WithTLSClientConfig(tlsConfig) + } + sharedLogExporter, err := otlploghttpNew( + tlsConfigOption, + otlploghttp.WithEndpoint(cfg.OtelExporterHTTPEndpoint), + ) + if err != nil { + return nil, err + } + + // Logger + var loggerProcessor sdklog.Processor + if cfg.LogBatchProcessor { + loggerProcessor = sdklog.NewBatchProcessor( + sharedLogExporter, + sdklog.WithExportTimeout(cfg.LogExportTimeout), // Default is 30s + ) + } else { + loggerProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) + } + loggerAttributes := []attribute.KeyValue{ + attribute.String("beholder_data_type", "zap_log_message"), + } + loggerResource, err := sdkresource.Merge( + sdkresource.NewSchemaless(loggerAttributes...), + baseResource, + ) + if err != nil { + return nil, err + } + loggerProvider := sdklog.NewLoggerProvider( + sdklog.WithResource(loggerResource), + sdklog.WithProcessor(loggerProcessor), + ) + logger := loggerProvider.Logger(defaultPackageName) + + // Tracer + tracerProvider, err := newHTTPTracerProvider(cfg, baseResource, tlsConfig) + if err != nil { + return nil, err + } + tracer := tracerProvider.Tracer(defaultPackageName) + + // Meter + meterProvider, err := newHTTPMeterProvider(cfg, baseResource, tlsConfig) + if err != nil { + return nil, err + } + meter := meterProvider.Meter(defaultPackageName) + + // Message Emitter + var 
messageLogProcessor sdklog.Processor + if cfg.EmitterBatchProcessor { + messageLogProcessor = sdklog.NewBatchProcessor( + sharedLogExporter, + sdklog.WithExportTimeout(cfg.EmitterExportTimeout), // Default is 30s + ) + } else { + messageLogProcessor = sdklog.NewSimpleProcessor(sharedLogExporter) + } + + messageAttributes := []attribute.KeyValue{ + attribute.String("beholder_data_type", "custom_message"), + } + messageLoggerResource, err := sdkresource.Merge( + sdkresource.NewSchemaless(messageAttributes...), + baseResource, + ) + if err != nil { + return nil, err + } + + messageLoggerProvider := sdklog.NewLoggerProvider( + sdklog.WithResource(messageLoggerResource), + sdklog.WithProcessor(messageLogProcessor), + ) + messageLogger := messageLoggerProvider.Logger(defaultPackageName) + + emitter := messageEmitter{ + messageLogger: messageLogger, + } + + onClose := func() (err error) { + for _, provider := range []shutdowner{messageLoggerProvider, loggerProvider, tracerProvider, meterProvider, messageLoggerProvider} { + err = errors.Join(err, provider.Shutdown(context.Background())) + } + return + } + return &Client{cfg, logger, tracer, meter, emitter, loggerProvider, tracerProvider, meterProvider, messageLoggerProvider, onClose}, nil +} + +func newHTTPTracerProvider(config Config, resource *sdkresource.Resource, tlsConfig *tls.Config) (*sdktrace.TracerProvider, error) { + ctx := context.Background() + + tlsConfigOption := otlptracehttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlptracehttp.WithTLSClientConfig(tlsConfig) + } + // note: context is unused internally + exporter, err := otlptracehttp.New(ctx, + tlsConfigOption, + otlptracehttp.WithEndpoint(config.OtelExporterHTTPEndpoint), + ) + if err != nil { + return nil, err + } + + opts := []sdktrace.TracerProviderOption{ + sdktrace.WithBatcher(exporter, trace.WithBatchTimeout(config.TraceBatchTimeout)), // Default is 5s + sdktrace.WithResource(resource), + sdktrace.WithSampler( + sdktrace.ParentBased( + sdktrace.TraceIDRatioBased(config.TraceSampleRatio), + ), + ), + } + if config.TraceSpanExporter != nil { + opts = append(opts, sdktrace.WithBatcher(config.TraceSpanExporter)) + } + return sdktrace.NewTracerProvider(opts...), nil +} + +func newHTTPMeterProvider(config Config, resource *sdkresource.Resource, tlsConfig *tls.Config) (*sdkmetric.MeterProvider, error) { + ctx := context.Background() + + tlsConfigOption := otlpmetrichttp.WithInsecure() + if tlsConfig != nil { + tlsConfigOption = otlpmetrichttp.WithTLSClientConfig(tlsConfig) + } + // note: context is unused internally + exporter, err := otlpmetrichttp.New(ctx, + tlsConfigOption, + otlpmetrichttp.WithEndpoint(config.OtelExporterHTTPEndpoint), + ) + if err != nil { + return nil, err + } + + mp := sdkmetric.NewMeterProvider( + sdkmetric.WithReader( + sdkmetric.NewPeriodicReader( + exporter, + sdkmetric.WithInterval(config.MetricReaderInterval), // Default is 10s + )), + sdkmetric.WithResource(resource), + ) + return mp, nil +} diff --git a/pkg/beholder/internal/exporter.go b/pkg/beholder/internal/exporter.go index 271077a..033854d 100644 --- a/pkg/beholder/internal/exporter.go +++ b/pkg/beholder/internal/exporter.go @@ -4,12 +4,17 @@ import ( "context" "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc" + "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp" + sdklog "go.opentelemetry.io/otel/sdk/log" ) var _ sdklog.Exporter = (*otlploggrpc.Exporter)(nil) var _ OTLPExporter = (*otlploggrpc.Exporter)(nil) +var _ sdklog.Exporter = 
(*otlploghttp.Exporter)(nil) +var _ OTLPExporter = (*otlploghttp.Exporter)(nil) + // Copy of sdklog.Exporter interface, used for mocking type OTLPExporter interface { Export(ctx context.Context, records []sdklog.Record) error diff --git a/pkg/beholder/message.go b/pkg/beholder/message.go index 2b1d89d..7ea967f 100644 --- a/pkg/beholder/message.go +++ b/pkg/beholder/message.go @@ -2,6 +2,8 @@ package beholder import ( "fmt" + "regexp" + "strings" "github.com/go-playground/validator/v10" "go.opentelemetry.io/otel/attribute" @@ -16,6 +18,8 @@ type Message struct { type Metadata struct { // REQUIRED FIELDS // Schema Registry URI to fetch schema + BeholderDomain string `validate:"required,domain_entity"` + BeholderEntity string `validate:"required,domain_entity"` BeholderDataSchema string `validate:"required,uri"` // OPTIONAL FIELDS @@ -55,6 +59,8 @@ func (m Metadata) Attributes() Attributes { "workflow_owner_address": m.WorkflowOwnerAddress, "workflow_spec_id": m.WorkflowSpecID, "workflow_execution_id": m.WorkflowExecutionID, + "beholder_domain": m.BeholderDomain, + "beholder_entity": m.BeholderEntity, "beholder_data_schema": m.BeholderDataSchema, "capability_contract_address": m.CapabilityContractAddress, "capability_id": m.CapabilityID, @@ -199,6 +205,10 @@ func (m *Metadata) FromAttributes(attrs Attributes) *Metadata { m.WorkflowSpecID = v.(string) case "workflow_execution_id": m.WorkflowExecutionID = v.(string) + case "beholder_domain": + m.BeholderDomain = v.(string) + case "beholder_entity": + m.BeholderEntity = v.(string) case "beholder_data_schema": m.BeholderDataSchema = v.(string) case "capability_contract_address": @@ -222,8 +232,35 @@ func NewMetadata(attrs Attributes) *Metadata { return m } -func (m *Metadata) Validate() error { +// validDomainAndEntityRegex allows for alphanumeric characters and ._- +var validDomainAndEntityRegex = regexp.MustCompile(`^[a-zA-Z0-9._-]+$`) + +func NewMetadataValidator() (*validator.Validate, error) { validate := validator.New() + err := validate.RegisterValidation("domain_entity", func(fl validator.FieldLevel) bool { + str, isStr := fl.Field().Interface().(string) + if !isStr { + return false + } + if strings.Contains(str, "__") { + return false + } + if !validDomainAndEntityRegex.MatchString(str) { + return false + } + return true + }) + if err != nil { + return nil, err + } + return validate, nil +} + +func (m *Metadata) Validate() error { + validate, err := NewMetadataValidator() + if err != nil { + return err + } return validate.Struct(m) } diff --git a/pkg/beholder/message_test.go b/pkg/beholder/message_test.go index 755e544..7014cdb 100644 --- a/pkg/beholder/message_test.go +++ b/pkg/beholder/message_test.go @@ -6,7 +6,6 @@ import ( "strings" "testing" - "github.com/go-playground/validator/v10" "github.com/stretchr/testify/assert" otellog "go.opentelemetry.io/otel/log" @@ -110,6 +109,8 @@ func testMetadata() beholder.Metadata { WorkflowOwnerAddress: "test_owner_address", WorkflowSpecID: "test_spec_id", WorkflowExecutionID: "test_execution_id", + BeholderDomain: "TestDomain", // required field + BeholderEntity: "TestEntity", // required field BeholderDataSchema: "/schemas/ids/test_schema", // required field, URI CapabilityContractAddress: "test_contract_address", CapabilityID: "test_capability_id", @@ -123,14 +124,20 @@ func ExampleMetadata() { fmt.Printf("%#v\n", m) fmt.Println(m.Attributes()) // Output: - // beholder.Metadata{BeholderDataSchema:"/schemas/ids/test_schema", NodeVersion:"v1.0.0", NodeCsaKey:"test_key", 
NodeCsaSignature:"test_signature", DonID:"test_don_id", NetworkName:[]string{"test_network"}, WorkflowID:"test_workflow_id", WorkflowName:"test_workflow_name", WorkflowOwnerAddress:"test_owner_address", WorkflowSpecID:"test_spec_id", WorkflowExecutionID:"test_execution_id", CapabilityContractAddress:"test_contract_address", CapabilityID:"test_capability_id", CapabilityVersion:"test_capability_version", CapabilityName:"test_capability_name", NetworkChainID:"test_chain_id"} - // map[beholder_data_schema:/schemas/ids/test_schema capability_contract_address:test_contract_address capability_id:test_capability_id capability_name:test_capability_name capability_version:test_capability_version don_id:test_don_id network_chain_id:test_chain_id network_name:[test_network] node_csa_key:test_key node_csa_signature:test_signature node_version:v1.0.0 workflow_execution_id:test_execution_id workflow_id:test_workflow_id workflow_name:test_workflow_name workflow_owner_address:test_owner_address workflow_spec_id:test_spec_id] + // beholder.Metadata{BeholderDomain:"TestDomain", BeholderEntity:"TestEntity", BeholderDataSchema:"/schemas/ids/test_schema", NodeVersion:"v1.0.0", NodeCsaKey:"test_key", NodeCsaSignature:"test_signature", DonID:"test_don_id", NetworkName:[]string{"test_network"}, WorkflowID:"test_workflow_id", WorkflowName:"test_workflow_name", WorkflowOwnerAddress:"test_owner_address", WorkflowSpecID:"test_spec_id", WorkflowExecutionID:"test_execution_id", CapabilityContractAddress:"test_contract_address", CapabilityID:"test_capability_id", CapabilityVersion:"test_capability_version", CapabilityName:"test_capability_name", NetworkChainID:"test_chain_id"} + // map[beholder_data_schema:/schemas/ids/test_schema beholder_domain:TestDomain beholder_entity:TestEntity capability_contract_address:test_contract_address capability_id:test_capability_id capability_name:test_capability_name capability_version:test_capability_version don_id:test_don_id network_chain_id:test_chain_id network_name:[test_network] node_csa_key:test_key node_csa_signature:test_signature node_version:v1.0.0 workflow_execution_id:test_execution_id workflow_id:test_workflow_id workflow_name:test_workflow_name workflow_owner_address:test_owner_address workflow_spec_id:test_spec_id] } -func ExampleValidate() { - validate := validator.New() +func ExampleMetadata_Validate() { + validate, err := beholder.NewMetadataValidator() + if err != nil { + fmt.Println(err) + } - metadata := beholder.Metadata{} + metadata := beholder.Metadata{ + BeholderDomain: "TestDomain", + BeholderEntity: "TestEntity", + } if err := validate.Struct(metadata); err != nil { fmt.Println(err) } diff --git a/pkg/capabilities/cli/cmd/generate_types.go b/pkg/capabilities/cli/cmd/generate_types.go index 3820aa7..7e77993 100644 --- a/pkg/capabilities/cli/cmd/generate_types.go +++ b/pkg/capabilities/cli/cmd/generate_types.go @@ -102,7 +102,7 @@ func schemaFilesFromDir(dir string) ([]string, error) { schemaPaths = append(schemaPaths, path) return nil }); err != nil { - return nil, fmt.Errorf("error walking the directory %v: %v", dir, err) + return nil, fmt.Errorf("error walking the directory %v: %w", dir, err) } return schemaPaths, nil } diff --git a/pkg/capabilities/consensus/ocr3/aggregators/identical.go b/pkg/capabilities/consensus/ocr3/aggregators/identical.go index ffd5d7c..05dde69 100644 --- a/pkg/capabilities/consensus/ocr3/aggregators/identical.go +++ b/pkg/capabilities/consensus/ocr3/aggregators/identical.go @@ -15,7 +15,6 @@ import ( type identicalAggregator struct 
{ config aggregatorConfig - lggr logger.Logger } type aggregatorConfig struct { diff --git a/pkg/capabilities/events/events.go b/pkg/capabilities/events/events.go index 715a607..af35224 100644 --- a/pkg/capabilities/events/events.go +++ b/pkg/capabilities/events/events.go @@ -14,14 +14,14 @@ import ( const ( // Duplicates the attributes in beholder/message.go::Metadata - labelWorkflowOwner = "workflow_owner_address" - labelWorkflowID = "workflow_id" - labelWorkflowExecutionID = "workflow_execution_id" - labelWorkflowName = "workflow_name" - labelCapabilityContractAddress = "capability_contract_address" - labelCapabilityID = "capability_id" - labelCapabilityVersion = "capability_version" - labelCapabilityName = "capability_name" + LabelWorkflowOwner = "workflow_owner_address" + LabelWorkflowID = "workflow_id" + LabelWorkflowExecutionID = "workflow_execution_id" + LabelWorkflowName = "workflow_name" + LabelCapabilityContractAddress = "capability_contract_address" + LabelCapabilityID = "capability_id" + LabelCapabilityVersion = "capability_version" + LabelCapabilityName = "capability_name" ) type EmitMetadata struct { @@ -93,35 +93,35 @@ func (e EmitMetadata) attrs() []any { a := []any{} if e.WorkflowOwner != "" { - a = append(a, labelWorkflowOwner, e.WorkflowOwner) + a = append(a, LabelWorkflowOwner, e.WorkflowOwner) } if e.WorkflowID != "" { - a = append(a, labelWorkflowID, e.WorkflowID) + a = append(a, LabelWorkflowID, e.WorkflowID) } if e.WorkflowExecutionID != "" { - a = append(a, labelWorkflowExecutionID, e.WorkflowExecutionID) + a = append(a, LabelWorkflowExecutionID, e.WorkflowExecutionID) } if e.WorkflowName != "" { - a = append(a, labelWorkflowName, e.WorkflowName) + a = append(a, LabelWorkflowName, e.WorkflowName) } if e.CapabilityContractAddress != "" { - a = append(a, labelCapabilityContractAddress, e.CapabilityContractAddress) + a = append(a, LabelCapabilityContractAddress, e.CapabilityContractAddress) } if e.CapabilityID != "" { - a = append(a, labelCapabilityID, e.CapabilityID) + a = append(a, LabelCapabilityID, e.CapabilityID) } if e.CapabilityVersion != "" { - a = append(a, labelCapabilityVersion, e.CapabilityVersion) + a = append(a, LabelCapabilityVersion, e.CapabilityVersion) } if e.CapabilityName != "" { - a = append(a, labelCapabilityName, e.CapabilityName) + a = append(a, LabelCapabilityName, e.CapabilityName) } return a diff --git a/pkg/chains/label/label.go b/pkg/chains/label/label.go index 1488ea0..a8f84dc 100644 --- a/pkg/chains/label/label.go +++ b/pkg/chains/label/label.go @@ -1,8 +1,8 @@ package label const ( - MaxInFlightTransactionsWarning = `WARNING: If this happens a lot, you may need to increase Transactions.MaxInFlight to boost your node's transaction throughput, however you do this at your own risk. You MUST first ensure your node is configured not to ever evict local transactions that exceed this number otherwise the node can get permanently stuck. See the performance guide for more details: https://docs.goplugin.co/docs/evm-performance-configuration/` - MaxQueuedTransactionsWarning = `WARNING: Hitting Transactions.MaxQueued is a sanity limit and should never happen under normal operation. Unless you are operating with very high throughput, this error is unlikely to be a problem with your Plugin node configuration, and instead more likely to be caused by a problem with your node's connectivity. Check your node: it may not be broadcasting transactions to the network, or it might be overloaded and evicting Plugin's transactions from its mempool. 
It is recommended to run Plugin with multiple primary and sendonly nodes for redundancy and to ensure fast and reliable transaction propagation. Increasing Transactions.MaxQueued will allow Plugin to buffer more unsent transactions, but you should only do this if you need very high burst transmission rates. If you don't need very high burst throughput, increasing this limit is not the correct action to take here and will probably make things worse. See the performance guide for more details: https://docs.chain.link/docs/evm-performance-configuration/` + MaxInFlightTransactionsWarning = `WARNING: If this happens a lot, you may need to increase Transactions.MaxInFlight to boost your node's transaction throughput, however you do this at your own risk. You MUST first ensure your node is configured not to ever evict local transactions that exceed this number otherwise the node can get permanently stuck. See the performance guide for more details: https://docs.goplugin.co/docs/evm-performance-configuration/` + MaxQueuedTransactionsWarning = `WARNING: Hitting Transactions.MaxQueued is a sanity limit and should never happen under normal operation. Unless you are operating with very high throughput, this error is unlikely to be a problem with your Plugin node configuration, and instead more likely to be caused by a problem with your node's connectivity. Check your node: it may not be broadcasting transactions to the network, or it might be overloaded and evicting Plugin's transactions from its mempool. It is recommended to run Plugin with multiple primary and sendonly nodes for redundancy and to ensure fast and reliable transaction propagation. Increasing Transactions.MaxQueued will allow Plugin to buffer more unsent transactions, but you should only do this if you need very high burst transmission rates. If you don't need very high burst throughput, increasing this limit is not the correct action to take here and will probably make things worse. See the performance guide for more details: https://docs.goplugin.co/docs/evm-performance-configuration/` NodeConnectivityProblemWarning = `WARNING: If this happens a lot, it may be a sign that your node has a connectivity problem, and your transactions are not making it to any miners. It is recommended to run Plugin with multiple primary and sendonly nodes for redundancy and to ensure fast and reliable transaction propagation. See the performance guide for more details: https://docs.goplugin.co/docs/evm-performance-configuration/` RPCTxFeeCapConfiguredIncorrectlyWarning = `WARNING: Gas price was rejected by the node for being too high. By default, go-ethereum (and clones) have a built-in upper limit for gas price. It is preferable to disable this and rely Plugin's internal gas limits instead. Your RPC node's RPCTxFeeCap needs to be disabled or increased (recommended configuration: --rpc.gascap=0 --rpc.txfeecap=0). If you want to limit Plugin's max gas price, you may do so by setting GasEstimator.PriceMax on the Plugin node. Plugin will never send a transaction with a total cost higher than GasEstimator.PriceMax. 
See the performance guide for more details: https://docs.goplugin.co/docs/evm-performance-configuration/` ) diff --git a/pkg/codec/encodings/type_codec_test.go b/pkg/codec/encodings/type_codec_test.go index 545d713..405b4c1 100644 --- a/pkg/codec/encodings/type_codec_test.go +++ b/pkg/codec/encodings/type_codec_test.go @@ -172,10 +172,10 @@ func (b *bigEndianInterfaceTester) encode(t *testing.T, bytes []byte, ts TestStr for _, oid := range ts.OracleIDs { bytes = append(bytes, byte(oid)) } - bytes = append(bytes, byte(len(ts.Account))) - bytes = append(bytes, ts.Account...) - bytes = rawbin.BigEndian.AppendUint32(bytes, uint32(len(ts.AccountStr))) - bytes = append(bytes, []byte(ts.AccountStr)...) + bytes = append(bytes, byte(len(ts.AccountStruct.Account))) + bytes = append(bytes, ts.AccountStruct.Account...) + bytes = rawbin.BigEndian.AppendUint32(bytes, uint32(len(ts.AccountStruct.AccountStr))) + bytes = append(bytes, []byte(ts.AccountStruct.AccountStr)...) bytes = append(bytes, byte(len(ts.Accounts))) for _, account := range ts.Accounts { bytes = append(bytes, byte(len(account))) @@ -240,6 +240,12 @@ func newTestStructCodec(t *testing.T, builder encodings.Builder) encodings.TypeC }) require.NoError(t, err) + accountStructCodec, err := encodings.NewStructCodec([]encodings.NamedTypeCodec{ + {Name: "Account", Codec: acc}, + {Name: "AccountStr", Codec: sCodec}, + }) + require.NoError(t, err) + oIDs, err := encodings.NewArray(32, builder.OracleID()) require.NoError(t, err) @@ -254,8 +260,7 @@ func newTestStructCodec(t *testing.T, builder encodings.Builder) encodings.TypeC {Name: "DifferentField", Codec: sCodec}, {Name: "OracleID", Codec: builder.OracleID()}, {Name: "OracleIDs", Codec: oIDs}, - {Name: "Account", Codec: acc}, - {Name: "AccountStr", Codec: sCodec}, + {Name: "AccountStruct", Codec: accountStructCodec}, {Name: "Accounts", Codec: accs}, {Name: "BigField", Codec: bi}, {Name: "NestedDynamicStruct", Codec: midDynamicCodec}, @@ -295,8 +300,8 @@ func (b *bigEndianInterfaceTester) GetCodec(t *testing.T) types.Codec { } mod, err := codec.NewHardCoder(map[string]any{ - "BigField": ts.BigField.String(), - "Account": ts.Account, + "BigField": ts.BigField.String(), + "AccountStruct.Account": ts.AccountStruct.Account, }, map[string]any{"ExtraField": AnyExtraValue}, codec.BigIntHook) require.NoError(t, err) diff --git a/pkg/codec/example_test.go b/pkg/codec/example_test.go index d6ae9cd..d4ce420 100644 --- a/pkg/codec/example_test.go +++ b/pkg/codec/example_test.go @@ -36,7 +36,7 @@ func (ExampleStructJSONCodec) GetMaxEncodingSize(_ context.Context, n int, _ str func (ExampleStructJSONCodec) Decode(_ context.Context, raw []byte, into any, _ string) error { err := json.Unmarshal(raw, into) if err != nil { - return fmt.Errorf("%w: %s", types.ErrInvalidType, err) + return fmt.Errorf("%w: %w", types.ErrInvalidType, err) } return nil } diff --git a/pkg/custmsg/custom_message.go b/pkg/custmsg/custom_message.go index 54e1e2d..dce16cf 100644 --- a/pkg/custmsg/custom_message.go +++ b/pkg/custmsg/custom_message.go @@ -19,17 +19,35 @@ func NewLabeler() Labeler { return Labeler{labels: make(map[string]string)} } +// WithMapLabels adds multiple key-value pairs to the CustomMessageLabeler for transmission +// With SendLogAsCustomMessage +func (l Labeler) WithMapLabels(labels map[string]string) Labeler { + newCustomMessageLabeler := NewLabeler() + + // Copy existing labels from the current agent + for k, v := range l.labels { + newCustomMessageLabeler.labels[k] = v + } + + // Add new key-value pairs + for k, v 
:= range labels { + newCustomMessageLabeler.labels[k] = v + } + + return newCustomMessageLabeler +} + // With adds multiple key-value pairs to the CustomMessageLabeler for transmission With SendLogAsCustomMessage -func (c Labeler) With(keyValues ...string) Labeler { +func (l Labeler) With(keyValues ...string) Labeler { newCustomMessageLabeler := NewLabeler() if len(keyValues)%2 != 0 { // If an odd number of key-value arguments is passed, return the original CustomMessageLabeler unchanged - return c + return l } // Copy existing labels from the current agent - for k, v := range c.labels { + for k, v := range l.labels { newCustomMessageLabeler.labels[k] = v } @@ -43,10 +61,22 @@ func (c Labeler) With(keyValues ...string) Labeler { return newCustomMessageLabeler } +func (l Labeler) Emit(msg string) error { + return sendLogAsCustomMessageW(msg, l.labels) +} + +func (l Labeler) Labels() map[string]string { + copied := make(map[string]string, len(l.labels)) + for k, v := range l.labels { + copied[k] = v + } + return copied +} + // SendLogAsCustomMessage emits a BaseMessage With msg and labels as data. // any key in labels that is not part of orderedLabelKeys will not be transmitted -func (c Labeler) SendLogAsCustomMessage(msg string) error { - return sendLogAsCustomMessageW(msg, c.labels) +func (l Labeler) SendLogAsCustomMessage(msg string) error { + return sendLogAsCustomMessageW(msg, l.labels) } func sendLogAsCustomMessageW(msg string, labels map[string]string) error { diff --git a/pkg/custmsg/custom_message_test.go b/pkg/custmsg/custom_message_test.go index 4d41408..4ae8269 100644 --- a/pkg/custmsg/custom_message_test.go +++ b/pkg/custmsg/custom_message_test.go @@ -12,5 +12,19 @@ func Test_CustomMessageAgent(t *testing.T) { cma1 := cma.With("key1", "value1") cma2 := cma1.With("key2", "value2") - assert.NotEqual(t, cma1.labels, cma2.labels) + assert.NotEqual(t, cma1.Labels(), cma2.Labels()) +} + +func Test_CustomMessageAgent_With(t *testing.T) { + cma := NewLabeler() + cma = cma.With("key1", "value1") + + assert.Equal(t, cma.Labels(), map[string]string{"key1": "value1"}) +} + +func Test_CustomMessageAgent_WithMapLabels(t *testing.T) { + cma := NewLabeler() + cma = cma.WithMapLabels(map[string]string{"key1": "value1"}) + + assert.Equal(t, cma.Labels(), map[string]string{"key1": "value1"}) } diff --git a/pkg/loop/config.go b/pkg/loop/config.go index 7250c94..5165ff1 100644 --- a/pkg/loop/config.go +++ b/pkg/loop/config.go @@ -87,7 +87,7 @@ func (e *EnvConfig) parse() error { var err error e.DatabaseURL, err = getDatabaseURL() if err != nil { - return fmt.Errorf("failed to parse %s: %q", envDatabaseURL, err) + return fmt.Errorf("failed to parse %s: %w", envDatabaseURL, err) } e.PrometheusPort, err = strconv.Atoi(promPortStr) diff --git a/pkg/loop/internal/keystore/keystore.go b/pkg/loop/internal/keystore/keystore.go new file mode 100644 index 0000000..c49f2b0 --- /dev/null +++ b/pkg/loop/internal/keystore/keystore.go @@ -0,0 +1,306 @@ +package keystore + +import ( + "context" + + "google.golang.org/grpc" + + "github.com/goplugin/plugin-common/pkg/logger" + "github.com/goplugin/plugin-common/pkg/loop/internal/goplugin" + "github.com/goplugin/plugin-common/pkg/loop/internal/net" + keystorepb "github.com/goplugin/plugin-common/pkg/loop/internal/pb/keystore" + "github.com/goplugin/plugin-common/pkg/services" + "github.com/goplugin/plugin-common/pkg/types/keystore" +) + +var _ keystore.Keystore = (*Client)(nil) + +type Client struct { + services.Service + *goplugin.PluginClient + + grpc 
keystorepb.KeystoreClient +} + +func NewKeystoreClient(broker net.Broker, brokerCfg net.BrokerConfig, conn *grpc.ClientConn) *Client { + brokerCfg.Logger = logger.Named(brokerCfg.Logger, "KeystoreClient") + pc := goplugin.NewPluginClient(broker, brokerCfg, conn) + return &Client{PluginClient: pc, grpc: keystorepb.NewKeystoreClient(pc)} +} + +func (c *Client) Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) { + reply, err := c.grpc.Sign(ctx, &keystorepb.SignRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) { + reply, err := c.grpc.SignBatch(ctx, &keystorepb.SignBatchRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) { + reply, err := c.grpc.Verify(ctx, &keystorepb.VerifyRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return false, err + } + return reply.Valid, nil +} + +func (c *Client) VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) { + reply, err := c.grpc.VerifyBatch(ctx, &keystorepb.VerifyBatchRequest{ + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Valid, nil +} + +func (c *Client) ListKeys(ctx context.Context, tags []string) ([][]byte, error) { + reply, err := c.grpc.ListKeys(ctx, &keystorepb.ListKeysRequest{ + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyIDs, nil +} + +func (c *Client) RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) { + reply, err := c.grpc.RunUDF(ctx, &keystorepb.RunUDFRequest{ + Name: name, + KeyID: keyID, + Data: data, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) { + reply, err := c.grpc.ImportKey(ctx, &keystorepb.ImportKeyRequest{ + KeyType: keyType, + Data: data, + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyID, nil +} + +func (c *Client) ExportKey(ctx context.Context, keyID []byte) ([]byte, error) { + reply, err := c.grpc.ExportKey(ctx, &keystorepb.ExportKeyRequest{ + KeyID: keyID, + }) + + if err != nil { + return nil, err + } + return reply.Data, nil +} + +func (c *Client) CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) { + reply, err := c.grpc.CreateKey(ctx, &keystorepb.CreateKeyRequest{ + KeyType: keyType, + Tags: tags, + }) + + if err != nil { + return nil, err + } + return reply.KeyID, nil +} + +func (c *Client) DeleteKey(ctx context.Context, keyID []byte) error { + _, err := c.grpc.DeleteKey(ctx, &keystorepb.DeleteKeyRequest{ + KeyID: keyID, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) AddTag(ctx context.Context, keyID []byte, tag string) error { + _, err := c.grpc.AddTag(ctx, &keystorepb.AddTagRequest{ + KeyID: keyID, + Tag: tag, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) RemoveTag(ctx context.Context, keyID []byte, tag string) error { + _, err := c.grpc.RemoveTag(ctx, &keystorepb.RemoveTagRequest{ + KeyID: keyID, + Tag: tag, + }) + + if err != nil { + return err + } + return nil +} + +func (c *Client) ListTags(ctx context.Context, keyID []byte) ([]string, error) { + reply, err := c.grpc.ListTags(ctx, 
&keystorepb.ListTagsRequest{ + KeyID: keyID, + }) + + if err != nil { + return nil, err + } + return reply.Tags, nil +} + +var _ keystorepb.KeystoreServer = (*server)(nil) + +type server struct { + *net.BrokerExt + keystorepb.UnimplementedKeystoreServer + + impl GRPCService +} + +func RegisterKeystoreServer(server *grpc.Server, broker net.Broker, brokerCfg net.BrokerConfig, impl GRPCService) error { + keystorepb.RegisterKeystoreServer(server, newKeystoreServer(broker, brokerCfg, impl)) + return nil +} + +func newKeystoreServer(broker net.Broker, brokerCfg net.BrokerConfig, impl GRPCService) *server { + brokerCfg.Logger = logger.Named(brokerCfg.Logger, "KeystoreServer") + return &server{BrokerExt: &net.BrokerExt{Broker: broker, BrokerConfig: brokerCfg}, impl: impl} +} + +func (s *server) Sign(ctx context.Context, request *keystorepb.SignRequest) (*keystorepb.SignResponse, error) { + data, err := s.impl.Sign(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.SignResponse{Data: data}, err +} + +func (s *server) SignBatch(ctx context.Context, request *keystorepb.SignBatchRequest) (*keystorepb.SignBatchResponse, error) { + data, err := s.impl.SignBatch(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.SignBatchResponse{Data: data}, err +} + +func (s *server) Verify(ctx context.Context, request *keystorepb.VerifyRequest) (*keystorepb.VerifyResponse, error) { + valid, err := s.impl.Verify(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.VerifyResponse{Valid: valid}, err +} + +func (s *server) VerifyBatch(ctx context.Context, request *keystorepb.VerifyBatchRequest) (*keystorepb.VerifyBatchResponse, error) { + valid, err := s.impl.VerifyBatch(ctx, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.VerifyBatchResponse{Valid: valid}, err +} + +func (s *server) ListKeys(ctx context.Context, request *keystorepb.ListKeysRequest) (*keystorepb.ListKeysResponse, error) { + keyIDs, err := s.impl.ListKeys(ctx, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.ListKeysResponse{KeyIDs: keyIDs}, err +} + +func (s *server) RunUDF(ctx context.Context, request *keystorepb.RunUDFRequest) (*keystorepb.RunUDFResponse, error) { + data, err := s.impl.RunUDF(ctx, request.Name, request.KeyID, request.Data) + if err != nil { + return nil, err + } + return &keystorepb.RunUDFResponse{Data: data}, err +} + +func (s *server) ImportKey(ctx context.Context, request *keystorepb.ImportKeyRequest) (*keystorepb.ImportKeyResponse, error) { + keyIDs, err := s.impl.ImportKey(ctx, request.KeyType, request.Data, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.ImportKeyResponse{KeyID: keyIDs}, err +} + +func (s *server) ExportKey(ctx context.Context, request *keystorepb.ExportKeyRequest) (*keystorepb.ExportKeyResponse, error) { + data, err := s.impl.ExportKey(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.ExportKeyResponse{Data: data}, err +} + +func (s *server) CreateKey(ctx context.Context, request *keystorepb.CreateKeyRequest) (*keystorepb.CreateKeyResponse, error) { + keyIDs, err := s.impl.CreateKey(ctx, request.KeyType, request.Tags) + if err != nil { + return nil, err + } + return &keystorepb.CreateKeyResponse{KeyID: keyIDs}, err +} + +func (s *server) DeleteKey(ctx context.Context, request *keystorepb.DeleteKeyRequest) (*keystorepb.DeleteKeyResponse, error) { + err := 
s.impl.DeleteKey(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.DeleteKeyResponse{}, err +} + +func (s *server) AddTag(ctx context.Context, request *keystorepb.AddTagRequest) (*keystorepb.AddTagResponse, error) { + err := s.impl.AddTag(ctx, request.KeyID, request.Tag) + if err != nil { + return nil, err + } + return &keystorepb.AddTagResponse{}, err +} + +func (s *server) RemoveTag(ctx context.Context, request *keystorepb.RemoveTagRequest) (*keystorepb.RemoveTagResponse, error) { + err := s.impl.RemoveTag(ctx, request.KeyID, request.Tag) + if err != nil { + return nil, err + } + return &keystorepb.RemoveTagResponse{}, err +} + +func (s *server) ListTags(ctx context.Context, request *keystorepb.ListTagsRequest) (*keystorepb.ListTagsResponse, error) { + tags, err := s.impl.ListTags(ctx, request.KeyID) + if err != nil { + return nil, err + } + return &keystorepb.ListTagsResponse{Tags: tags}, nil +} diff --git a/pkg/loop/internal/keystore/keystore_test.go b/pkg/loop/internal/keystore/keystore_test.go new file mode 100644 index 0000000..53d4593 --- /dev/null +++ b/pkg/loop/internal/keystore/keystore_test.go @@ -0,0 +1,255 @@ +package keystore + +import ( + "bytes" + "context" + "errors" + "fmt" + "reflect" + "testing" + + "github.com/hashicorp/go-plugin" + "github.com/stretchr/testify/require" + "google.golang.org/grpc" + + "github.com/goplugin/plugin-common/pkg/logger" + "github.com/goplugin/plugin-common/pkg/loop/internal/net" + "github.com/goplugin/plugin-common/pkg/services" + "github.com/goplugin/plugin-common/pkg/utils/tests" +) + +func TestKeystore(t *testing.T) { + ctx := tests.Context(t) + stopCh := make(chan struct{}) + log := logger.Test(t) + + pluginName := "keystore-test" + client, server := plugin.TestPluginGRPCConn( + t, + true, + map[string]plugin.Plugin{ + pluginName: &testKeystorePlugin{ + log: log, + impl: &testKeystore{}, + brokerExt: &net.BrokerExt{ + BrokerConfig: net.BrokerConfig{ + StopCh: stopCh, + Logger: log, + }, + }, + }, + }, + ) + + defer client.Close() + defer server.Stop() + + keystoreClient, err := client.Dispense(pluginName) + require.NoError(t, err) + + ks, ok := keystoreClient.(*Client) + require.True(t, ok) + + r, err := ks.Sign(ctx, keyID, data) + require.NoError(t, err) + require.Equal(t, r, sign) + + r2, err := ks.SignBatch(ctx, keyID, dataList) + require.NoError(t, err) + require.Equal(t, r2, signBatch) + + r3, err := ks.Verify(ctx, keyID, data) + require.NoError(t, err) + require.Equal(t, r3, verify) + + r4, err := ks.VerifyBatch(ctx, keyID, dataList) + require.NoError(t, err) + require.Equal(t, r4, verifyBatch) + + r5, err := ks.ListKeys(ctx, tags) + require.NoError(t, err) + require.Equal(t, r5, list) + + r6, err := ks.RunUDF(ctx, udfName, keyID, data) + require.NoError(t, err) + require.Equal(t, r6, runUDF) + + r7, err := ks.ImportKey(ctx, keyType, data, tags) + require.NoError(t, err) + require.Equal(t, r7, importResponse) + + r8, err := ks.ExportKey(ctx, keyID) + require.NoError(t, err) + require.Equal(t, r8, export) + + r9, err := ks.CreateKey(ctx, keyType, tags) + require.NoError(t, err) + require.Equal(t, r9, create) + + err = ks.DeleteKey(ctx, keyID) + require.ErrorContains(t, err, errDelete.Error()) + + err = ks.AddTag(ctx, keyID, tag) + require.ErrorContains(t, err, errAddTag.Error()) + + err = ks.RemoveTag(ctx, keyID, tag) + require.ErrorContains(t, err, errRemoveTag.Error()) + + r10, err := ks.ListTags(ctx, keyID) + require.NoError(t, err) + require.Equal(t, r10, listTag) +} + +var ( + //Inputs + keyID = 
[]byte("this-is-a-keyID") + data = []byte("some-data") + dataList = [][]byte{[]byte("some-data-in-a-list"), []byte("some-more-data-in-a-list")} + tags = []string{"tag1", "tag2"} + tag = "just-one-tag" + udfName = "i-am-a-udf-method-name" + keyType = "some-keyType" + + //Outputs + sign = []byte("signed") + signBatch = [][]byte{[]byte("signed1"), []byte("signed2")} + verify = true + verifyBatch = []bool{true, false} + list = [][]byte{[]byte("item1"), []byte("item2")} + runUDF = []byte("udf-response") + importResponse = []byte("imported") + export = []byte("exported") + create = []byte("created") + listTag = []string{"tag1", "tag2"} + errDelete = errors.New("delete-err") + errAddTag = errors.New("add-tag-err") + errRemoveTag = errors.New("remove-tag-err") +) + +type testKeystorePlugin struct { + log logger.Logger + plugin.NetRPCUnsupportedPlugin + brokerExt *net.BrokerExt + impl GRPCService +} + +func (r *testKeystorePlugin) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, client *grpc.ClientConn) (any, error) { + r.brokerExt.Broker = broker + + return NewKeystoreClient(r.brokerExt.Broker, r.brokerExt.BrokerConfig, client), nil +} + +func (r *testKeystorePlugin) GRPCServer(broker *plugin.GRPCBroker, server *grpc.Server) error { + r.brokerExt.Broker = broker + + err := RegisterKeystoreServer(server, r.brokerExt.Broker, r.brokerExt.BrokerConfig, r.impl) + if err != nil { + return err + } + return nil +} + +type testKeystore struct { + services.Service +} + +func checkKeyID(target []byte) error { + if !bytes.Equal(target, keyID) { + return fmt.Errorf("checkKeyID: expected %v but got %v", keyID, target) + } + return nil +} + +func checkData(target []byte) error { + if !bytes.Equal(target, data) { + return fmt.Errorf("checkData: expected %v but got %v", data, target) + } + return nil +} + +func checkDataList(target [][]byte) error { + if !reflect.DeepEqual(target, dataList) { + return fmt.Errorf("checkDataList: nexpected %v but got %v", data, target) + } + return nil +} + +func checkTags(target []string) error { + if !reflect.DeepEqual(target, tags) { + return fmt.Errorf("checkTags: expected %v but got %v", tags, target) + } + return nil +} + +func checkUdfName(target string) error { + if target != udfName { + return fmt.Errorf("checkUdfName: expected %v but got %v", udfName, target) + } + return nil +} + +func checkKeyType(target string) error { + if target != keyType { + return fmt.Errorf("checkKeyType: expected %q but got %q", keyType, target) + } + return nil +} + +func checkTag(target string) error { + if target != tag { + return fmt.Errorf("checkTag: expected %q but got %q", tag, target) + } + return nil +} + +func (t testKeystore) Sign(ctx context.Context, _keyID []byte, _data []byte) ([]byte, error) { + return sign, errors.Join(checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) SignBatch(ctx context.Context, _keyID []byte, _dataList [][]byte) ([][]byte, error) { + return signBatch, errors.Join(checkKeyID(_keyID), checkDataList(_dataList)) +} + +func (t testKeystore) Verify(ctx context.Context, _keyID []byte, _data []byte) (bool, error) { + return verify, errors.Join(checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) VerifyBatch(ctx context.Context, _keyID []byte, _dataList [][]byte) ([]bool, error) { + return verifyBatch, errors.Join(checkKeyID(_keyID), checkDataList(_dataList)) +} + +func (t testKeystore) ListKeys(ctx context.Context, _tags []string) ([][]byte, error) { + return list, checkTags(_tags) +} + +func (t testKeystore) RunUDF(ctx 
context.Context, _udfName string, _keyID []byte, _data []byte) ([]byte, error) { + return runUDF, errors.Join(checkUdfName(_udfName), checkKeyID(_keyID), checkData(_data)) +} + +func (t testKeystore) ImportKey(ctx context.Context, _keyType string, _data []byte, _tags []string) ([]byte, error) { + return importResponse, errors.Join(checkKeyType(_keyType), checkData(_data), checkTags(_tags)) +} + +func (t testKeystore) ExportKey(ctx context.Context, _keyID []byte) ([]byte, error) { + return export, checkKeyID(_keyID) +} + +func (t testKeystore) CreateKey(ctx context.Context, _keyType string, _tags []string) ([]byte, error) { + return create, errors.Join(checkKeyType(_keyType), checkTags(_tags)) +} + +func (t testKeystore) DeleteKey(ctx context.Context, _keyID []byte) error { + return errors.Join(errDelete, checkKeyID(_keyID)) +} + +func (t testKeystore) AddTag(ctx context.Context, _keyID []byte, _tag string) error { + return errors.Join(errAddTag, checkKeyID(_keyID), checkTag(_tag)) +} + +func (t testKeystore) RemoveTag(ctx context.Context, _keyID []byte, _tag string) error { + return errors.Join(errRemoveTag, checkKeyID(_keyID), checkTag(_tag)) +} + +func (t testKeystore) ListTags(ctx context.Context, _keyID []byte) ([]string, error) { + return listTag, checkKeyID(_keyID) +} diff --git a/pkg/loop/internal/keystore/types.go b/pkg/loop/internal/keystore/types.go new file mode 100644 index 0000000..b10fd45 --- /dev/null +++ b/pkg/loop/internal/keystore/types.go @@ -0,0 +1,30 @@ +package keystore + +import ( + "context" + + "github.com/goplugin/plugin-common/pkg/services" +) + +// GRPCService This interface contains all the functionalities of the GRPC layer of the LOOPP keystore +type GRPCService interface { + services.Service + Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) + SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) + Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) + VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) + + ListKeys(ctx context.Context, tags []string) ([][]byte, error) + + RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) + + ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) + ExportKey(ctx context.Context, keyID []byte) ([]byte, error) + + CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) + DeleteKey(ctx context.Context, keyID []byte) error + + AddTag(ctx context.Context, keyID []byte, tag string) error + RemoveTag(ctx context.Context, keyID []byte, tag string) error + ListTags(ctx context.Context, keyID []byte) ([]string, error) +} diff --git a/pkg/loop/internal/pb/keystore/generate.go b/pkg/loop/internal/pb/keystore/generate.go new file mode 100644 index 0000000..e6cc69d --- /dev/null +++ b/pkg/loop/internal/pb/keystore/generate.go @@ -0,0 +1,2 @@ +//go:generate protoc --proto_path=.:..:. --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative keystore.proto +package keystorepb diff --git a/pkg/loop/internal/pb/keystore/keystore.pb.go b/pkg/loop/internal/pb/keystore/keystore.pb.go new file mode 100644 index 0000000..f385c3d --- /dev/null +++ b/pkg/loop/internal/pb/keystore/keystore.pb.go @@ -0,0 +1,1891 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
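The new LOOPP keystore surface above (the Client in keystore.go and the GRPCService interface in types.go) is exercised end to end by TestKeystore. As a minimal caller sketch, assuming a *Client has already been dispensed over go-plugin exactly as in that test, the hypothetical signAndVerify helper below uses only methods defined in keystore.go; the "ed25519" key type and "example" tag are illustrative values, not anything the patch defines.

package keystore

import "context"

// signAndVerify is a hypothetical sketch of driving the Client from keystore.go:
// create a key, sign a payload with it, then ask the backing LOOPP
// implementation to verify it. Error handling is kept minimal on purpose.
func signAndVerify(ctx context.Context, ks *Client, payload []byte) (bool, error) {
	keyID, err := ks.CreateKey(ctx, "ed25519", []string{"example"}) // illustrative key type and tag
	if err != nil {
		return false, err
	}
	if _, err = ks.Sign(ctx, keyID, payload); err != nil {
		return false, err
	}
	// Verify simply forwards keyID and payload over gRPC; what "valid" means
	// is decided by the keystore implementation behind the server.
	return ks.Verify(ctx, keyID, payload)
}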
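Similarly, the Labeler additions in pkg/custmsg/custom_message.go (WithMapLabels, Emit, Labels) compose by value: each call returns a fresh copy, so a shared base labeler is never mutated. A brief sketch, assuming only the methods added in that file; the package name of the example and the label keys are illustrative.

package custmsgexample

import "github.com/goplugin/plugin-common/pkg/custmsg"

// emitWithLabels sketches the value-style Labeler API: WithMapLabels and With
// each return a copy, and Emit sends msg with the accumulated labels through
// the same path as SendLogAsCustomMessage.
func emitWithLabels(workflowID, msg string) error {
	l := custmsg.NewLabeler().
		WithMapLabels(map[string]string{"workflowID": workflowID}). // illustrative label key
		With("severity", "error")
	return l.Emit(msg)
}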
+// versions: +// protoc-gen-go v1.31.0 +// protoc v4.25.1 +// source: keystore.proto + +package keystorepb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SignRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignRequest) Reset() { + *x = SignRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignRequest) ProtoMessage() {} + +func (x *SignRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignRequest.ProtoReflect.Descriptor instead. +func (*SignRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{0} +} + +func (x *SignRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *SignRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type SignResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignResponse) Reset() { + *x = SignResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignResponse) ProtoMessage() {} + +func (x *SignResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignResponse.ProtoReflect.Descriptor instead. 
+func (*SignResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{1} +} + +func (x *SignResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type SignBatchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data [][]byte `protobuf:"bytes,2,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignBatchRequest) Reset() { + *x = SignBatchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignBatchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignBatchRequest) ProtoMessage() {} + +func (x *SignBatchRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignBatchRequest.ProtoReflect.Descriptor instead. +func (*SignBatchRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{2} +} + +func (x *SignBatchRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *SignBatchRequest) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type SignBatchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data [][]byte `protobuf:"bytes,1,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *SignBatchResponse) Reset() { + *x = SignBatchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SignBatchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignBatchResponse) ProtoMessage() {} + +func (x *SignBatchResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignBatchResponse.ProtoReflect.Descriptor instead. 
+func (*SignBatchResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{3} +} + +func (x *SignBatchResponse) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *VerifyRequest) Reset() { + *x = VerifyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyRequest) ProtoMessage() {} + +func (x *VerifyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyRequest.ProtoReflect.Descriptor instead. +func (*VerifyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{4} +} + +func (x *VerifyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *VerifyRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Valid bool `protobuf:"varint,1,opt,name=valid,proto3" json:"valid,omitempty"` +} + +func (x *VerifyResponse) Reset() { + *x = VerifyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyResponse) ProtoMessage() {} + +func (x *VerifyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyResponse.ProtoReflect.Descriptor instead. 
+func (*VerifyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{5} +} + +func (x *VerifyResponse) GetValid() bool { + if x != nil { + return x.Valid + } + return false +} + +type VerifyBatchRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data [][]byte `protobuf:"bytes,2,rep,name=data,proto3" json:"data,omitempty"` +} + +func (x *VerifyBatchRequest) Reset() { + *x = VerifyBatchRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyBatchRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyBatchRequest) ProtoMessage() {} + +func (x *VerifyBatchRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyBatchRequest.ProtoReflect.Descriptor instead. +func (*VerifyBatchRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{6} +} + +func (x *VerifyBatchRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *VerifyBatchRequest) GetData() [][]byte { + if x != nil { + return x.Data + } + return nil +} + +type VerifyBatchResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Valid []bool `protobuf:"varint,1,rep,packed,name=valid,proto3" json:"valid,omitempty"` +} + +func (x *VerifyBatchResponse) Reset() { + *x = VerifyBatchResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *VerifyBatchResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*VerifyBatchResponse) ProtoMessage() {} + +func (x *VerifyBatchResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use VerifyBatchResponse.ProtoReflect.Descriptor instead. 
+func (*VerifyBatchResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{7} +} + +func (x *VerifyBatchResponse) GetValid() []bool { + if x != nil { + return x.Valid + } + return nil +} + +type ListKeysRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tags []string `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ListKeysRequest) Reset() { + *x = ListKeysRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListKeysRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListKeysRequest) ProtoMessage() {} + +func (x *ListKeysRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListKeysRequest.ProtoReflect.Descriptor instead. +func (*ListKeysRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{8} +} + +func (x *ListKeysRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type ListKeysResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyIDs [][]byte `protobuf:"bytes,1,rep,name=keyIDs,proto3" json:"keyIDs,omitempty"` +} + +func (x *ListKeysResponse) Reset() { + *x = ListKeysResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListKeysResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListKeysResponse) ProtoMessage() {} + +func (x *ListKeysResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListKeysResponse.ProtoReflect.Descriptor instead. 
+func (*ListKeysResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{9} +} + +func (x *ListKeysResponse) GetKeyIDs() [][]byte { + if x != nil { + return x.KeyIDs + } + return nil +} + +type RunUDFRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + KeyID []byte `protobuf:"bytes,2,opt,name=keyID,proto3" json:"keyID,omitempty"` + Data []byte `protobuf:"bytes,3,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *RunUDFRequest) Reset() { + *x = RunUDFRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RunUDFRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RunUDFRequest) ProtoMessage() {} + +func (x *RunUDFRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RunUDFRequest.ProtoReflect.Descriptor instead. +func (*RunUDFRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{10} +} + +func (x *RunUDFRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *RunUDFRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *RunUDFRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type RunUDFResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *RunUDFResponse) Reset() { + *x = RunUDFResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RunUDFResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RunUDFResponse) ProtoMessage() {} + +func (x *RunUDFResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RunUDFResponse.ProtoReflect.Descriptor instead. 
+func (*RunUDFResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{11} +} + +func (x *RunUDFResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type ImportKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyType string `protobuf:"bytes,1,opt,name=keyType,proto3" json:"keyType,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Tags []string `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ImportKeyRequest) Reset() { + *x = ImportKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ImportKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ImportKeyRequest) ProtoMessage() {} + +func (x *ImportKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ImportKeyRequest.ProtoReflect.Descriptor instead. +func (*ImportKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{12} +} + +func (x *ImportKeyRequest) GetKeyType() string { + if x != nil { + return x.KeyType + } + return "" +} + +func (x *ImportKeyRequest) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +func (x *ImportKeyRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type ImportKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ImportKeyResponse) Reset() { + *x = ImportKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ImportKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ImportKeyResponse) ProtoMessage() {} + +func (x *ImportKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ImportKeyResponse.ProtoReflect.Descriptor instead. 
+func (*ImportKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{13} +} + +func (x *ImportKeyResponse) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ExportKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ExportKeyRequest) Reset() { + *x = ExportKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExportKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExportKeyRequest) ProtoMessage() {} + +func (x *ExportKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExportKeyRequest.ProtoReflect.Descriptor instead. +func (*ExportKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{14} +} + +func (x *ExportKeyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ExportKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` +} + +func (x *ExportKeyResponse) Reset() { + *x = ExportKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExportKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExportKeyResponse) ProtoMessage() {} + +func (x *ExportKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExportKeyResponse.ProtoReflect.Descriptor instead. 
+func (*ExportKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{15} +} + +func (x *ExportKeyResponse) GetData() []byte { + if x != nil { + return x.Data + } + return nil +} + +type CreateKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyType string `protobuf:"bytes,1,opt,name=keyType,proto3" json:"keyType,omitempty"` + Tags []string `protobuf:"bytes,2,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *CreateKeyRequest) Reset() { + *x = CreateKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateKeyRequest) ProtoMessage() {} + +func (x *CreateKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateKeyRequest.ProtoReflect.Descriptor instead. +func (*CreateKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{16} +} + +func (x *CreateKeyRequest) GetKeyType() string { + if x != nil { + return x.KeyType + } + return "" +} + +func (x *CreateKeyRequest) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +type CreateKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *CreateKeyResponse) Reset() { + *x = CreateKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CreateKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateKeyResponse) ProtoMessage() {} + +func (x *CreateKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateKeyResponse.ProtoReflect.Descriptor instead. 
+func (*CreateKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{17} +} + +func (x *CreateKeyResponse) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type DeleteKeyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *DeleteKeyRequest) Reset() { + *x = DeleteKeyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteKeyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteKeyRequest) ProtoMessage() {} + +func (x *DeleteKeyRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteKeyRequest.ProtoReflect.Descriptor instead. +func (*DeleteKeyRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{18} +} + +func (x *DeleteKeyRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type DeleteKeyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteKeyResponse) Reset() { + *x = DeleteKeyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteKeyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteKeyResponse) ProtoMessage() {} + +func (x *DeleteKeyResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteKeyResponse.ProtoReflect.Descriptor instead. +func (*DeleteKeyResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{19} +} + +type AddTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Tag string `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *AddTagRequest) Reset() { + *x = AddTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTagRequest) ProtoMessage() {} + +func (x *AddTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTagRequest.ProtoReflect.Descriptor instead. 
+func (*AddTagRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{20} +} + +func (x *AddTagRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *AddTagRequest) GetTag() string { + if x != nil { + return x.Tag + } + return "" +} + +type AddTagResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *AddTagResponse) Reset() { + *x = AddTagResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AddTagResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AddTagResponse) ProtoMessage() {} + +func (x *AddTagResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AddTagResponse.ProtoReflect.Descriptor instead. +func (*AddTagResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{21} +} + +type RemoveTagRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` + Tag string `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` +} + +func (x *RemoveTagRequest) Reset() { + *x = RemoveTagRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoveTagRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoveTagRequest) ProtoMessage() {} + +func (x *RemoveTagRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoveTagRequest.ProtoReflect.Descriptor instead. 
+func (*RemoveTagRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{22} +} + +func (x *RemoveTagRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +func (x *RemoveTagRequest) GetTag() string { + if x != nil { + return x.Tag + } + return "" +} + +type RemoveTagResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *RemoveTagResponse) Reset() { + *x = RemoveTagResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *RemoveTagResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RemoveTagResponse) ProtoMessage() {} + +func (x *RemoveTagResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RemoveTagResponse.ProtoReflect.Descriptor instead. +func (*RemoveTagResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{23} +} + +type ListTagsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + KeyID []byte `protobuf:"bytes,1,opt,name=keyID,proto3" json:"keyID,omitempty"` +} + +func (x *ListTagsRequest) Reset() { + *x = ListTagsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTagsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTagsRequest) ProtoMessage() {} + +func (x *ListTagsRequest) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTagsRequest.ProtoReflect.Descriptor instead. +func (*ListTagsRequest) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{24} +} + +func (x *ListTagsRequest) GetKeyID() []byte { + if x != nil { + return x.KeyID + } + return nil +} + +type ListTagsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Tags []string `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *ListTagsResponse) Reset() { + *x = ListTagsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_keystore_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListTagsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTagsResponse) ProtoMessage() {} + +func (x *ListTagsResponse) ProtoReflect() protoreflect.Message { + mi := &file_keystore_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTagsResponse.ProtoReflect.Descriptor instead. 
+func (*ListTagsResponse) Descriptor() ([]byte, []int) { + return file_keystore_proto_rawDescGZIP(), []int{25} +} + +func (x *ListTagsResponse) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +var File_keystore_proto protoreflect.FileDescriptor + +var file_keystore_proto_rawDesc = []byte{ + 0x0a, 0x0e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x19, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, + 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0x37, 0x0a, 0x0b, 0x53, + 0x69, 0x67, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, + 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, + 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x22, 0x0a, 0x0c, 0x53, 0x69, 0x67, 0x6e, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x3c, 0x0a, 0x10, 0x53, 0x69, 0x67, 0x6e, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, + 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x27, 0x0a, 0x11, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, + 0x39, 0x0a, 0x0d, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x26, 0x0a, 0x0e, 0x56, 0x65, + 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x22, 0x3e, 0x0a, 0x12, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, + 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, + 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, + 0x74, 0x61, 0x22, 0x2b, 0x0a, 0x13, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x03, 0x28, 0x08, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x22, + 0x25, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x2a, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, + 0x79, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6b, 0x65, + 0x79, 0x49, 0x44, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x06, 0x6b, 0x65, 0x79, 0x49, + 
0x44, 0x73, 0x22, 0x4d, 0x0a, 0x0d, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x12, 0x0a, + 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, + 0x61, 0x22, 0x24, 0x0a, 0x0e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x54, 0x0a, 0x10, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6b, + 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6b, 0x65, + 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x29, 0x0a, + 0x11, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x28, 0x0a, 0x10, 0x45, 0x78, 0x70, 0x6f, + 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x22, 0x27, 0x0a, 0x11, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x40, 0x0a, 0x10, 0x43, + 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x18, 0x0a, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, 0x61, 0x67, 0x73, 0x22, 0x29, 0x0a, + 0x11, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x28, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, + 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, + 0x49, 0x44, 0x22, 0x13, 0x0a, 0x11, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x37, 0x0a, 0x0d, 0x41, 0x64, 0x64, 0x54, 0x61, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, + 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x10, + 0x0a, 0x03, 0x74, 0x61, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x74, 0x61, 0x67, + 0x22, 0x10, 0x0a, 0x0e, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x3a, 
0x0a, 0x10, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x12, 0x10, 0x0a, 0x03, + 0x74, 0x61, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x74, 0x61, 0x67, 0x22, 0x13, + 0x0a, 0x11, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x27, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x44, 0x22, 0x26, 0x0a, 0x10, + 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, + 0x74, 0x61, 0x67, 0x73, 0x32, 0xa8, 0x0a, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x12, 0x57, 0x0a, 0x04, 0x53, 0x69, 0x67, 0x6e, 0x12, 0x26, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x27, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, + 0x67, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x53, 0x69, + 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x06, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x12, 0x28, 0x2e, 0x6c, + 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, + 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x6c, 0x0a, 0x0b, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x12, 0x2d, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, + 0x69, 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x2e, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, + 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x69, + 0x66, 0x79, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x63, 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x4b, 
0x65, 0x79, 0x73, 0x12, 0x2a, 0x2e, 0x6c, 0x6f, + 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, + 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4b, 0x65, 0x79, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, + 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, + 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, + 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, + 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, + 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, + 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, + 0x72, 0x65, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x06, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x12, 0x28, + 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, + 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x64, 0x64, 0x54, 0x61, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 
0x6b, 0x65, 0x79, 0x73, + 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x64, 0x64, 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x09, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, + 0x12, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x6d, + 0x6f, 0x76, 0x65, 0x54, 0x61, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, + 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, + 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, + 0x54, 0x61, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x08, 0x4c, + 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x12, 0x2a, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x54, 0x61, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x5d, 0x0a, 0x06, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x12, 0x28, 0x2e, 0x6c, 0x6f, 0x6f, + 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, + 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x6c, 0x6f, 0x6f, 0x70, 0x2e, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x2e, 0x70, 0x62, 0x2e, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, + 0x2e, 0x52, 0x75, 0x6e, 0x55, 0x44, 0x46, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, + 0x57, 0x5a, 0x55, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x6d, + 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, 0x2f, 0x63, + 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, + 0x70, 0x6b, 0x67, 0x2f, 0x6c, 0x6f, 0x6f, 0x70, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x2f, 0x70, 0x62, 0x2f, 0x6b, 0x65, 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3b, 0x6b, 0x65, + 0x79, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_keystore_proto_rawDescOnce sync.Once + file_keystore_proto_rawDescData = file_keystore_proto_rawDesc +) + +func file_keystore_proto_rawDescGZIP() []byte { + file_keystore_proto_rawDescOnce.Do(func() { + file_keystore_proto_rawDescData = protoimpl.X.CompressGZIP(file_keystore_proto_rawDescData) + }) + return file_keystore_proto_rawDescData +} + +var file_keystore_proto_msgTypes = make([]protoimpl.MessageInfo, 26) +var file_keystore_proto_goTypes = []interface{}{ + (*SignRequest)(nil), // 0: loop.internal.pb.keystore.SignRequest + (*SignResponse)(nil), // 1: loop.internal.pb.keystore.SignResponse + (*SignBatchRequest)(nil), // 2: loop.internal.pb.keystore.SignBatchRequest + (*SignBatchResponse)(nil), // 3: loop.internal.pb.keystore.SignBatchResponse + (*VerifyRequest)(nil), // 4: loop.internal.pb.keystore.VerifyRequest + (*VerifyResponse)(nil), // 5: loop.internal.pb.keystore.VerifyResponse + (*VerifyBatchRequest)(nil), // 6: 
loop.internal.pb.keystore.VerifyBatchRequest + (*VerifyBatchResponse)(nil), // 7: loop.internal.pb.keystore.VerifyBatchResponse + (*ListKeysRequest)(nil), // 8: loop.internal.pb.keystore.ListKeysRequest + (*ListKeysResponse)(nil), // 9: loop.internal.pb.keystore.ListKeysResponse + (*RunUDFRequest)(nil), // 10: loop.internal.pb.keystore.RunUDFRequest + (*RunUDFResponse)(nil), // 11: loop.internal.pb.keystore.RunUDFResponse + (*ImportKeyRequest)(nil), // 12: loop.internal.pb.keystore.ImportKeyRequest + (*ImportKeyResponse)(nil), // 13: loop.internal.pb.keystore.ImportKeyResponse + (*ExportKeyRequest)(nil), // 14: loop.internal.pb.keystore.ExportKeyRequest + (*ExportKeyResponse)(nil), // 15: loop.internal.pb.keystore.ExportKeyResponse + (*CreateKeyRequest)(nil), // 16: loop.internal.pb.keystore.CreateKeyRequest + (*CreateKeyResponse)(nil), // 17: loop.internal.pb.keystore.CreateKeyResponse + (*DeleteKeyRequest)(nil), // 18: loop.internal.pb.keystore.DeleteKeyRequest + (*DeleteKeyResponse)(nil), // 19: loop.internal.pb.keystore.DeleteKeyResponse + (*AddTagRequest)(nil), // 20: loop.internal.pb.keystore.AddTagRequest + (*AddTagResponse)(nil), // 21: loop.internal.pb.keystore.AddTagResponse + (*RemoveTagRequest)(nil), // 22: loop.internal.pb.keystore.RemoveTagRequest + (*RemoveTagResponse)(nil), // 23: loop.internal.pb.keystore.RemoveTagResponse + (*ListTagsRequest)(nil), // 24: loop.internal.pb.keystore.ListTagsRequest + (*ListTagsResponse)(nil), // 25: loop.internal.pb.keystore.ListTagsResponse +} +var file_keystore_proto_depIdxs = []int32{ + 0, // 0: loop.internal.pb.keystore.Keystore.Sign:input_type -> loop.internal.pb.keystore.SignRequest + 2, // 1: loop.internal.pb.keystore.Keystore.SignBatch:input_type -> loop.internal.pb.keystore.SignBatchRequest + 4, // 2: loop.internal.pb.keystore.Keystore.Verify:input_type -> loop.internal.pb.keystore.VerifyRequest + 6, // 3: loop.internal.pb.keystore.Keystore.VerifyBatch:input_type -> loop.internal.pb.keystore.VerifyBatchRequest + 8, // 4: loop.internal.pb.keystore.Keystore.ListKeys:input_type -> loop.internal.pb.keystore.ListKeysRequest + 12, // 5: loop.internal.pb.keystore.Keystore.ImportKey:input_type -> loop.internal.pb.keystore.ImportKeyRequest + 14, // 6: loop.internal.pb.keystore.Keystore.ExportKey:input_type -> loop.internal.pb.keystore.ExportKeyRequest + 16, // 7: loop.internal.pb.keystore.Keystore.CreateKey:input_type -> loop.internal.pb.keystore.CreateKeyRequest + 18, // 8: loop.internal.pb.keystore.Keystore.DeleteKey:input_type -> loop.internal.pb.keystore.DeleteKeyRequest + 20, // 9: loop.internal.pb.keystore.Keystore.AddTag:input_type -> loop.internal.pb.keystore.AddTagRequest + 22, // 10: loop.internal.pb.keystore.Keystore.RemoveTag:input_type -> loop.internal.pb.keystore.RemoveTagRequest + 24, // 11: loop.internal.pb.keystore.Keystore.ListTags:input_type -> loop.internal.pb.keystore.ListTagsRequest + 10, // 12: loop.internal.pb.keystore.Keystore.RunUDF:input_type -> loop.internal.pb.keystore.RunUDFRequest + 1, // 13: loop.internal.pb.keystore.Keystore.Sign:output_type -> loop.internal.pb.keystore.SignResponse + 3, // 14: loop.internal.pb.keystore.Keystore.SignBatch:output_type -> loop.internal.pb.keystore.SignBatchResponse + 5, // 15: loop.internal.pb.keystore.Keystore.Verify:output_type -> loop.internal.pb.keystore.VerifyResponse + 7, // 16: loop.internal.pb.keystore.Keystore.VerifyBatch:output_type -> loop.internal.pb.keystore.VerifyBatchResponse + 9, // 17: loop.internal.pb.keystore.Keystore.ListKeys:output_type -> 
loop.internal.pb.keystore.ListKeysResponse + 13, // 18: loop.internal.pb.keystore.Keystore.ImportKey:output_type -> loop.internal.pb.keystore.ImportKeyResponse + 15, // 19: loop.internal.pb.keystore.Keystore.ExportKey:output_type -> loop.internal.pb.keystore.ExportKeyResponse + 17, // 20: loop.internal.pb.keystore.Keystore.CreateKey:output_type -> loop.internal.pb.keystore.CreateKeyResponse + 19, // 21: loop.internal.pb.keystore.Keystore.DeleteKey:output_type -> loop.internal.pb.keystore.DeleteKeyResponse + 21, // 22: loop.internal.pb.keystore.Keystore.AddTag:output_type -> loop.internal.pb.keystore.AddTagResponse + 23, // 23: loop.internal.pb.keystore.Keystore.RemoveTag:output_type -> loop.internal.pb.keystore.RemoveTagResponse + 25, // 24: loop.internal.pb.keystore.Keystore.ListTags:output_type -> loop.internal.pb.keystore.ListTagsResponse + 11, // 25: loop.internal.pb.keystore.Keystore.RunUDF:output_type -> loop.internal.pb.keystore.RunUDFResponse + 13, // [13:26] is the sub-list for method output_type + 0, // [0:13] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_keystore_proto_init() } +func file_keystore_proto_init() { + if File_keystore_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_keystore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignBatchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SignBatchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyBatchRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*VerifyBatchResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListKeysRequest); i { + 
case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListKeysResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RunUDFRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RunUDFResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ImportKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ImportKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExportKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExportKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CreateKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteKeyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DeleteKeyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddTagRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AddTagResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*RemoveTagRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*RemoveTagResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListTagsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_keystore_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListTagsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_keystore_proto_rawDesc, + NumEnums: 0, + NumMessages: 26, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_keystore_proto_goTypes, + DependencyIndexes: file_keystore_proto_depIdxs, + MessageInfos: file_keystore_proto_msgTypes, + }.Build() + File_keystore_proto = out.File + file_keystore_proto_rawDesc = nil + file_keystore_proto_goTypes = nil + file_keystore_proto_depIdxs = nil +} diff --git a/pkg/loop/internal/pb/keystore/keystore.proto b/pkg/loop/internal/pb/keystore/keystore.proto new file mode 100644 index 0000000..d4ed242 --- /dev/null +++ b/pkg/loop/internal/pb/keystore/keystore.proto @@ -0,0 +1,134 @@ +syntax = "proto3"; + +option go_package = "github.com/goplugin/plugin-common/pkg/loop/internal/pb/keystore;keystorepb"; + +package loop.internal.pb.keystore; + +service Keystore { + rpc Sign(SignRequest) returns (SignResponse); + rpc SignBatch(SignBatchRequest)returns (SignBatchResponse); + rpc Verify(VerifyRequest)returns (VerifyResponse); + rpc VerifyBatch(VerifyBatchRequest)returns (VerifyBatchResponse); + + rpc ListKeys(ListKeysRequest)returns (ListKeysResponse); + rpc ImportKey(ImportKeyRequest)returns(ImportKeyResponse); + rpc ExportKey(ExportKeyRequest)returns(ExportKeyResponse); + + rpc CreateKey(CreateKeyRequest)returns(CreateKeyResponse); + rpc DeleteKey(DeleteKeyRequest)returns(DeleteKeyResponse); + + rpc AddTag(AddTagRequest)returns(AddTagResponse); + rpc RemoveTag(RemoveTagRequest)returns(RemoveTagResponse); + rpc ListTags(ListTagsRequest)returns(ListTagsResponse); + + rpc RunUDF(RunUDFRequest)returns (RunUDFResponse); +} + +message SignRequest { + bytes keyID = 1; + bytes data = 2; +} + +message SignResponse { + bytes data = 1; +} + +message SignBatchRequest { + bytes keyID = 1; + repeated bytes data = 2; +} + +message SignBatchResponse { + repeated bytes data = 1; +} + +message VerifyRequest { + bytes keyID = 1; + bytes data = 2; +} + +message VerifyResponse { + bool valid = 1; +} + +message VerifyBatchRequest { + bytes keyID = 1; + repeated bytes data = 2; +} + +message VerifyBatchResponse { + repeated bool valid = 1; +} + +message ListKeysRequest { + repeated string tags = 1; +} + +message ListKeysResponse { + repeated bytes keyIDs = 1; +} + +message RunUDFRequest { + string name = 1; + bytes keyID = 2; + bytes data = 3; +} + +message RunUDFResponse { + bytes data = 1; +} + +message ImportKeyRequest { + string keyType = 1; + bytes data = 2; + repeated string tags = 3; +} + +message ImportKeyResponse { 
+ bytes keyID = 1; +} + +message ExportKeyRequest { + bytes keyID = 1; +} + +message ExportKeyResponse { + bytes data = 1; +} + +message CreateKeyRequest { + string keyType = 1; + repeated string tags = 2; +} + +message CreateKeyResponse{ + bytes keyID =1; +} + +message DeleteKeyRequest{ + bytes keyID =1; +} + +message DeleteKeyResponse{} + +message AddTagRequest{ + bytes keyID=1; + string tag =2; +} + +message AddTagResponse{} + +message RemoveTagRequest{ + bytes keyID =1; + string tag =2; +} + +message RemoveTagResponse{} + +message ListTagsRequest{ + bytes keyID=1; +} + +message ListTagsResponse{ + repeated string tags=1; +} diff --git a/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go b/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go new file mode 100644 index 0000000..6752c21 --- /dev/null +++ b/pkg/loop/internal/pb/keystore/keystore_grpc.pb.go @@ -0,0 +1,553 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.3.0 +// - protoc v4.25.1 +// source: keystore.proto + +package keystorepb + +import ( + context "context" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +const ( + Keystore_Sign_FullMethodName = "/loop.internal.pb.keystore.Keystore/Sign" + Keystore_SignBatch_FullMethodName = "/loop.internal.pb.keystore.Keystore/SignBatch" + Keystore_Verify_FullMethodName = "/loop.internal.pb.keystore.Keystore/Verify" + Keystore_VerifyBatch_FullMethodName = "/loop.internal.pb.keystore.Keystore/VerifyBatch" + Keystore_ListKeys_FullMethodName = "/loop.internal.pb.keystore.Keystore/ListKeys" + Keystore_ImportKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/ImportKey" + Keystore_ExportKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/ExportKey" + Keystore_CreateKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/CreateKey" + Keystore_DeleteKey_FullMethodName = "/loop.internal.pb.keystore.Keystore/DeleteKey" + Keystore_AddTag_FullMethodName = "/loop.internal.pb.keystore.Keystore/AddTag" + Keystore_RemoveTag_FullMethodName = "/loop.internal.pb.keystore.Keystore/RemoveTag" + Keystore_ListTags_FullMethodName = "/loop.internal.pb.keystore.Keystore/ListTags" + Keystore_RunUDF_FullMethodName = "/loop.internal.pb.keystore.Keystore/RunUDF" +) + +// KeystoreClient is the client API for Keystore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type KeystoreClient interface { + Sign(ctx context.Context, in *SignRequest, opts ...grpc.CallOption) (*SignResponse, error) + SignBatch(ctx context.Context, in *SignBatchRequest, opts ...grpc.CallOption) (*SignBatchResponse, error) + Verify(ctx context.Context, in *VerifyRequest, opts ...grpc.CallOption) (*VerifyResponse, error) + VerifyBatch(ctx context.Context, in *VerifyBatchRequest, opts ...grpc.CallOption) (*VerifyBatchResponse, error) + ListKeys(ctx context.Context, in *ListKeysRequest, opts ...grpc.CallOption) (*ListKeysResponse, error) + ImportKey(ctx context.Context, in *ImportKeyRequest, opts ...grpc.CallOption) (*ImportKeyResponse, error) + ExportKey(ctx context.Context, in *ExportKeyRequest, opts ...grpc.CallOption) (*ExportKeyResponse, error) + CreateKey(ctx context.Context, in *CreateKeyRequest, opts ...grpc.CallOption) (*CreateKeyResponse, error) + DeleteKey(ctx context.Context, in *DeleteKeyRequest, opts ...grpc.CallOption) (*DeleteKeyResponse, error) + AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) + RemoveTag(ctx context.Context, in *RemoveTagRequest, opts ...grpc.CallOption) (*RemoveTagResponse, error) + ListTags(ctx context.Context, in *ListTagsRequest, opts ...grpc.CallOption) (*ListTagsResponse, error) + RunUDF(ctx context.Context, in *RunUDFRequest, opts ...grpc.CallOption) (*RunUDFResponse, error) +} + +type keystoreClient struct { + cc grpc.ClientConnInterface +} + +func NewKeystoreClient(cc grpc.ClientConnInterface) KeystoreClient { + return &keystoreClient{cc} +} + +func (c *keystoreClient) Sign(ctx context.Context, in *SignRequest, opts ...grpc.CallOption) (*SignResponse, error) { + out := new(SignResponse) + err := c.cc.Invoke(ctx, Keystore_Sign_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) SignBatch(ctx context.Context, in *SignBatchRequest, opts ...grpc.CallOption) (*SignBatchResponse, error) { + out := new(SignBatchResponse) + err := c.cc.Invoke(ctx, Keystore_SignBatch_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) Verify(ctx context.Context, in *VerifyRequest, opts ...grpc.CallOption) (*VerifyResponse, error) { + out := new(VerifyResponse) + err := c.cc.Invoke(ctx, Keystore_Verify_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) VerifyBatch(ctx context.Context, in *VerifyBatchRequest, opts ...grpc.CallOption) (*VerifyBatchResponse, error) { + out := new(VerifyBatchResponse) + err := c.cc.Invoke(ctx, Keystore_VerifyBatch_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ListKeys(ctx context.Context, in *ListKeysRequest, opts ...grpc.CallOption) (*ListKeysResponse, error) { + out := new(ListKeysResponse) + err := c.cc.Invoke(ctx, Keystore_ListKeys_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ImportKey(ctx context.Context, in *ImportKeyRequest, opts ...grpc.CallOption) (*ImportKeyResponse, error) { + out := new(ImportKeyResponse) + err := c.cc.Invoke(ctx, Keystore_ImportKey_FullMethodName, in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ExportKey(ctx context.Context, in *ExportKeyRequest, opts ...grpc.CallOption) (*ExportKeyResponse, error) { + out := new(ExportKeyResponse) + err := c.cc.Invoke(ctx, Keystore_ExportKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) CreateKey(ctx context.Context, in *CreateKeyRequest, opts ...grpc.CallOption) (*CreateKeyResponse, error) { + out := new(CreateKeyResponse) + err := c.cc.Invoke(ctx, Keystore_CreateKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) DeleteKey(ctx context.Context, in *DeleteKeyRequest, opts ...grpc.CallOption) (*DeleteKeyResponse, error) { + out := new(DeleteKeyResponse) + err := c.cc.Invoke(ctx, Keystore_DeleteKey_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) { + out := new(AddTagResponse) + err := c.cc.Invoke(ctx, Keystore_AddTag_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) RemoveTag(ctx context.Context, in *RemoveTagRequest, opts ...grpc.CallOption) (*RemoveTagResponse, error) { + out := new(RemoveTagResponse) + err := c.cc.Invoke(ctx, Keystore_RemoveTag_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) ListTags(ctx context.Context, in *ListTagsRequest, opts ...grpc.CallOption) (*ListTagsResponse, error) { + out := new(ListTagsResponse) + err := c.cc.Invoke(ctx, Keystore_ListTags_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keystoreClient) RunUDF(ctx context.Context, in *RunUDFRequest, opts ...grpc.CallOption) (*RunUDFResponse, error) { + out := new(RunUDFResponse) + err := c.cc.Invoke(ctx, Keystore_RunUDF_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeystoreServer is the server API for Keystore service. +// All implementations must embed UnimplementedKeystoreServer +// for forward compatibility +type KeystoreServer interface { + Sign(context.Context, *SignRequest) (*SignResponse, error) + SignBatch(context.Context, *SignBatchRequest) (*SignBatchResponse, error) + Verify(context.Context, *VerifyRequest) (*VerifyResponse, error) + VerifyBatch(context.Context, *VerifyBatchRequest) (*VerifyBatchResponse, error) + ListKeys(context.Context, *ListKeysRequest) (*ListKeysResponse, error) + ImportKey(context.Context, *ImportKeyRequest) (*ImportKeyResponse, error) + ExportKey(context.Context, *ExportKeyRequest) (*ExportKeyResponse, error) + CreateKey(context.Context, *CreateKeyRequest) (*CreateKeyResponse, error) + DeleteKey(context.Context, *DeleteKeyRequest) (*DeleteKeyResponse, error) + AddTag(context.Context, *AddTagRequest) (*AddTagResponse, error) + RemoveTag(context.Context, *RemoveTagRequest) (*RemoveTagResponse, error) + ListTags(context.Context, *ListTagsRequest) (*ListTagsResponse, error) + RunUDF(context.Context, *RunUDFRequest) (*RunUDFResponse, error) + mustEmbedUnimplementedKeystoreServer() +} + +// UnimplementedKeystoreServer must be embedded to have forward compatible implementations. 
+type UnimplementedKeystoreServer struct { +} + +func (UnimplementedKeystoreServer) Sign(context.Context, *SignRequest) (*SignResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Sign not implemented") +} +func (UnimplementedKeystoreServer) SignBatch(context.Context, *SignBatchRequest) (*SignBatchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SignBatch not implemented") +} +func (UnimplementedKeystoreServer) Verify(context.Context, *VerifyRequest) (*VerifyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Verify not implemented") +} +func (UnimplementedKeystoreServer) VerifyBatch(context.Context, *VerifyBatchRequest) (*VerifyBatchResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method VerifyBatch not implemented") +} +func (UnimplementedKeystoreServer) ListKeys(context.Context, *ListKeysRequest) (*ListKeysResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListKeys not implemented") +} +func (UnimplementedKeystoreServer) ImportKey(context.Context, *ImportKeyRequest) (*ImportKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ImportKey not implemented") +} +func (UnimplementedKeystoreServer) ExportKey(context.Context, *ExportKeyRequest) (*ExportKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ExportKey not implemented") +} +func (UnimplementedKeystoreServer) CreateKey(context.Context, *CreateKeyRequest) (*CreateKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateKey not implemented") +} +func (UnimplementedKeystoreServer) DeleteKey(context.Context, *DeleteKeyRequest) (*DeleteKeyResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method DeleteKey not implemented") +} +func (UnimplementedKeystoreServer) AddTag(context.Context, *AddTagRequest) (*AddTagResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method AddTag not implemented") +} +func (UnimplementedKeystoreServer) RemoveTag(context.Context, *RemoveTagRequest) (*RemoveTagResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RemoveTag not implemented") +} +func (UnimplementedKeystoreServer) ListTags(context.Context, *ListTagsRequest) (*ListTagsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListTags not implemented") +} +func (UnimplementedKeystoreServer) RunUDF(context.Context, *RunUDFRequest) (*RunUDFResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method RunUDF not implemented") +} +func (UnimplementedKeystoreServer) mustEmbedUnimplementedKeystoreServer() {} + +// UnsafeKeystoreServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to KeystoreServer will +// result in compilation errors. 
+type UnsafeKeystoreServer interface { + mustEmbedUnimplementedKeystoreServer() +} + +func RegisterKeystoreServer(s grpc.ServiceRegistrar, srv KeystoreServer) { + s.RegisterService(&Keystore_ServiceDesc, srv) +} + +func _Keystore_Sign_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).Sign(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_Sign_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).Sign(ctx, req.(*SignRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_SignBatch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignBatchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).SignBatch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_SignBatch_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).SignBatch(ctx, req.(*SignBatchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_Verify_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(VerifyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).Verify(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_Verify_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).Verify(ctx, req.(*VerifyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_VerifyBatch_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(VerifyBatchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).VerifyBatch(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_VerifyBatch_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).VerifyBatch(ctx, req.(*VerifyBatchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ListKeys_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListKeysRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ListKeys(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ListKeys_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ListKeys(ctx, req.(*ListKeysRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ImportKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportKeyRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ImportKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ImportKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ImportKey(ctx, req.(*ImportKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_ExportKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ExportKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ExportKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ExportKey(ctx, req.(*ExportKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_CreateKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).CreateKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_CreateKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).CreateKey(ctx, req.(*CreateKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_DeleteKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).DeleteKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_DeleteKey_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).DeleteKey(ctx, req.(*DeleteKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_AddTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).AddTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_AddTag_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).AddTag(ctx, req.(*AddTagRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_RemoveTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveTagRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).RemoveTag(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_RemoveTag_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).RemoveTag(ctx, req.(*RemoveTagRequest)) + } + return interceptor(ctx, 
in, info, handler) +} + +func _Keystore_ListTags_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTagsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).ListTags(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_ListTags_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).ListTags(ctx, req.(*ListTagsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Keystore_RunUDF_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunUDFRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeystoreServer).RunUDF(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: Keystore_RunUDF_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeystoreServer).RunUDF(ctx, req.(*RunUDFRequest)) + } + return interceptor(ctx, in, info, handler) +} + +// Keystore_ServiceDesc is the grpc.ServiceDesc for Keystore service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Keystore_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "loop.internal.pb.keystore.Keystore", + HandlerType: (*KeystoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Sign", + Handler: _Keystore_Sign_Handler, + }, + { + MethodName: "SignBatch", + Handler: _Keystore_SignBatch_Handler, + }, + { + MethodName: "Verify", + Handler: _Keystore_Verify_Handler, + }, + { + MethodName: "VerifyBatch", + Handler: _Keystore_VerifyBatch_Handler, + }, + { + MethodName: "ListKeys", + Handler: _Keystore_ListKeys_Handler, + }, + { + MethodName: "ImportKey", + Handler: _Keystore_ImportKey_Handler, + }, + { + MethodName: "ExportKey", + Handler: _Keystore_ExportKey_Handler, + }, + { + MethodName: "CreateKey", + Handler: _Keystore_CreateKey_Handler, + }, + { + MethodName: "DeleteKey", + Handler: _Keystore_DeleteKey_Handler, + }, + { + MethodName: "AddTag", + Handler: _Keystore_AddTag_Handler, + }, + { + MethodName: "RemoveTag", + Handler: _Keystore_RemoveTag_Handler, + }, + { + MethodName: "ListTags", + Handler: _Keystore_ListTags_Handler, + }, + { + MethodName: "RunUDF", + Handler: _Keystore_RunUDF_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "keystore.proto", +} diff --git a/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go index 19b85f5..64eb3e3 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/codec_test.go @@ -144,7 +144,10 @@ func (f *fakeCodec) Encode(_ context.Context, item any, itemType string) ([]byte return []byte{}, nil case interfacetests.TestItemWithConfigExtra: ts := item.(*interfacetests.TestStruct) - ts.Account = anyAccountBytes + ts.AccountStruct = interfacetests.AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } ts.BigField = big.NewInt(2) return encoder.Marshal(ts) case interfacetests.TestItemType, interfacetests.TestItemSliceType, interfacetests.TestItemArray2Type, interfacetests.TestItemArray1Type: diff 
--git a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go index 67f4be6..a6ebc9b 100644 --- a/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go +++ b/pkg/loop/internal/relayer/pluginprovider/contractreader/contract_reader_test.go @@ -484,7 +484,10 @@ func (f *fakeContractReader) GetLatestValue(_ context.Context, readIdentifier st rv := returnVal.(*TestStructWithExtraField) rv.TestStruct = *pv rv.ExtraField = AnyExtraValue - rv.Account = anyAccountBytes + rv.AccountStruct = AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } rv.BigField = big.NewInt(2) return nil } else if strings.HasSuffix(readIdentifier, EventName) { @@ -569,7 +572,10 @@ func (f *fakeContractReader) BatchGetLatestValues(_ context.Context, request typ *returnVal.(*[]uint64) = AnySliceToReadWithoutAnArgument } else if req.ReadName == MethodReturningSeenStruct { ts := *req.Params.(*TestStruct) - ts.Account = anyAccountBytes + ts.AccountStruct = AccountStruct{ + Account: anyAccountBytes, + AccountStr: anyAccountString, + } ts.BigField = big.NewInt(2) returnVal = &TestStructWithExtraField{ TestStruct: ts, diff --git a/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go b/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go index f9ca69b..3f6f657 100644 --- a/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go +++ b/pkg/loop/internal/relayer/pluginprovider/ext/median/median.go @@ -74,8 +74,8 @@ func (p *ProviderClient) OnchainConfigCodec() median.OnchainConfigCodec { return p.onchainConfigCodec } -func (m *ProviderClient) ContractReader() types.ContractReader { - return m.contractReader +func (p *ProviderClient) ContractReader() types.ContractReader { + return p.contractReader } func (p *ProviderClient) Codec() types.Codec { diff --git a/pkg/loop/internal/types/types.go b/pkg/loop/internal/types/types.go index 254ad98..1df56f7 100644 --- a/pkg/loop/internal/types/types.go +++ b/pkg/loop/internal/types/types.go @@ -3,6 +3,8 @@ package internal import ( "context" + "github.com/goplugin/plugin-common/pkg/loop/internal/keystore" + "github.com/goplugin/plugin-common/pkg/services" "github.com/goplugin/plugin-common/pkg/types" "github.com/goplugin/plugin-common/pkg/types/core" ) @@ -54,3 +56,9 @@ type Relayer interface { NewPluginProvider(context.Context, types.RelayArgs, types.PluginArgs) (types.PluginProvider, error) NewLLOProvider(context.Context, types.RelayArgs, types.PluginArgs) (types.LLOProvider, error) } + +// Keystore This interface contains all the keystore GRPC functionality, keystore.Keystore is meant to be exposed to consumers and the keystore.Management interface in exposed only to the core node +type Keystore interface { + services.Service + keystore.GRPCService +} diff --git a/pkg/loop/keystore_service.go b/pkg/loop/keystore_service.go new file mode 100644 index 0000000..c711304 --- /dev/null +++ b/pkg/loop/keystore_service.go @@ -0,0 +1,32 @@ +package loop + +import ( + "context" + "fmt" + "os/exec" + + "github.com/goplugin/plugin-common/pkg/logger" + "github.com/goplugin/plugin-common/pkg/loop/internal/goplugin" + "github.com/goplugin/plugin-common/pkg/loop/internal/keystore" +) + +// KeystoreService is a [types.Service] that maintains an internal [keystore.Keystore]. 
+type KeystoreService struct { + goplugin.PluginService[*GRPCPluginKeystore, keystore.GRPCService] +} + +func NewKeystoreService(lggr logger.Logger, grpcOpts GRPCOpts, cmd func() *exec.Cmd, config []byte) *KeystoreService { + newService := func(ctx context.Context, instance any) (keystore.GRPCService, error) { + plug, ok := instance.(*keystore.Client) + if !ok { + return nil, fmt.Errorf("expected PluginKeystore but got %T", instance) + } + return plug, nil + } + stopCh := make(chan struct{}) + lggr = logger.Named(lggr, "KeystoreService") + var rs KeystoreService + broker := BrokerConfig{StopCh: stopCh, Logger: lggr, GRPCOpts: grpcOpts} + rs.Init(PluginKeystoreName, &GRPCPluginKeystore{BrokerConfig: broker}, newService, lggr, cmd, stopCh) + return &rs +} diff --git a/pkg/loop/plugin_keystore.go b/pkg/loop/plugin_keystore.go new file mode 100644 index 0000000..20e9081 --- /dev/null +++ b/pkg/loop/plugin_keystore.go @@ -0,0 +1,53 @@ +package loop + +import ( + "context" + + "github.com/hashicorp/go-plugin" + "google.golang.org/grpc" + + keystorepb "github.com/goplugin/plugin-common/pkg/loop/internal/keystore" + "github.com/goplugin/plugin-common/pkg/types/keystore" +) + +// PluginKeystoreName is the name for keystore.Keystore +const PluginKeystoreName = "keystore" + +func PluginKeystoreHandshakeConfig() plugin.HandshakeConfig { + return plugin.HandshakeConfig{ + MagicCookieKey: "CL_PLUGIN_KEYSTORE_MAGIC_COOKIE", + MagicCookieValue: "fe81b132-0d3d-4c16-9f13-c2f7bfd3c361", + } +} + +type GRPCPluginKeystore struct { + plugin.NetRPCUnsupportedPlugin + + BrokerConfig + + PluginServer keystorepb.GRPCService + + pluginClient *keystorepb.Client +} + +func (p *GRPCPluginKeystore) GRPCServer(broker *plugin.GRPCBroker, server *grpc.Server) error { + return keystorepb.RegisterKeystoreServer(server, broker, p.BrokerConfig, p.PluginServer) +} + +func (p *GRPCPluginKeystore) GRPCClient(_ context.Context, broker *plugin.GRPCBroker, conn *grpc.ClientConn) (interface{}, error) { + if p.pluginClient == nil { + p.pluginClient = keystorepb.NewKeystoreClient(broker, p.BrokerConfig, conn) + } else { + p.pluginClient.Refresh(broker, conn) + } + + return keystore.Keystore(p.pluginClient), nil +} + +func (p *GRPCPluginKeystore) ClientConfig() *plugin.ClientConfig { + c := &plugin.ClientConfig{ + HandshakeConfig: PluginKeystoreHandshakeConfig(), + Plugins: map[string]plugin.Plugin{PluginKeystoreName: p}, + } + return ManagedGRPCClientConfig(c, p.BrokerConfig) +} diff --git a/pkg/loop/server.go b/pkg/loop/server.go index 3eeca37..bae5185 100644 --- a/pkg/loop/server.go +++ b/pkg/loop/server.go @@ -58,7 +58,7 @@ func newServer(loggerName string) (*Server, error) { lggr, err := NewLogger() if err != nil { - return nil, fmt.Errorf("error creating logger: %s", err) + return nil, fmt.Errorf("error creating logger: %w", err) } lggr = logger.Named(lggr, loggerName) s.Logger = logger.Sugared(lggr) diff --git a/pkg/metrics/metrics_labeler.go b/pkg/metrics/metrics_labeler.go new file mode 100644 index 0000000..ade67d7 --- /dev/null +++ b/pkg/metrics/metrics_labeler.go @@ -0,0 +1,33 @@ +package metrics + +type Labeler struct { + Labels map[string]string +} + +func NewLabeler() Labeler { + return Labeler{Labels: make(map[string]string)} +} + +// With adds multiple key-value pairs to the Labeler to eventually be consumed by a Beholder metrics resource +func (c Labeler) With(keyValues ...string) Labeler { + newCustomMetricsLabeler := NewLabeler() + + if len(keyValues)%2 != 0 { + // If an odd number of key-value arguments is passed, 
return the original CustomMessageLabeler unchanged + return c + } + + // Copy existing labels from the current agent + for k, v := range c.Labels { + newCustomMetricsLabeler.Labels[k] = v + } + + // Add new key-value pairs + for i := 0; i < len(keyValues); i += 2 { + key := keyValues[i] + value := keyValues[i+1] + newCustomMetricsLabeler.Labels[key] = value + } + + return newCustomMetricsLabeler +} diff --git a/pkg/metrics/metrics_labeler_test.go b/pkg/metrics/metrics_labeler_test.go new file mode 100644 index 0000000..25d5e51 --- /dev/null +++ b/pkg/metrics/metrics_labeler_test.go @@ -0,0 +1,16 @@ +package metrics + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// tests CustomMessageAgent does not share state across new instances created by `With` +func Test_CustomMessageAgent(t *testing.T) { + cma := NewLabeler() + cma1 := cma.With("key1", "value1") + cma2 := cma1.With("key2", "value2") + + assert.NotEqual(t, cma1.Labels, cma2.Labels) +} diff --git a/pkg/monitoring/metrics.go b/pkg/monitoring/metrics.go index 0d232f9..e2cfa47 100644 --- a/pkg/monitoring/metrics.go +++ b/pkg/monitoring/metrics.go @@ -60,7 +60,7 @@ var ( linkAvailableForPayment = promauto.NewGaugeVec( prometheus.GaugeOpts{ Name: "link_available_for_payments", - Help: "Reports the amount of link the contract can use to make payments to node operators. This may be different from the PLI balance of the contract since that can contain debt", + Help: "Reports the amount of pli the contract can use to make payments to node operators. This may be different from the PLI balance of the contract since that can contain debt", }, []string{"feed_id", "chain_id", "contract_status", "contract_type", "feed_name", "feed_path", "network_id", "network_name"}, ) diff --git a/pkg/services/service.go b/pkg/services/service.go index 2f6cd43..559f2c8 100644 --- a/pkg/services/service.go +++ b/pkg/services/service.go @@ -87,7 +87,7 @@ func (e *Engine) EmitHealthErr(err error) { e.emitHealthErr(err) } func (e *Engine) SetHealthCond(condition string, err error) { e.condsMu.Lock() defer e.condsMu.Unlock() - e.conds[condition] = fmt.Errorf("%s: %e", condition, err) + e.conds[condition] = fmt.Errorf("%s: %w", condition, err) } // ClearHealthCond removes a condition and error recorded by SetHealthCond. 
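The new pkg/metrics Labeler above is copy-on-write: With builds a fresh Labeler, copies the receiver's labels, and then appends the new key-value pairs, while an odd number of arguments leaves the receiver unchanged. A minimal sketch of that behavior (the import path assumes the module path used elsewhere in this repo; the example is not part of the change):

package main

import (
	"fmt"

	"github.com/goplugin/plugin-common/pkg/metrics"
)

func main() {
	base := metrics.NewLabeler().With("chain", "evm")
	child := base.With("network", "mainnet") // copies base's labels, then adds the new pair
	same := child.With("dangling-key")       // odd-length args: receiver returned unchanged
	fmt.Println(len(base.Labels), len(child.Labels), len(same.Labels)) // 1 2 2
}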
diff --git a/pkg/types/interfacetests/chain_components_interface_tests.go b/pkg/types/interfacetests/chain_components_interface_tests.go index 954a794..1f745a6 100644 --- a/pkg/types/interfacetests/chain_components_interface_tests.go +++ b/pkg/types/interfacetests/chain_components_interface_tests.go @@ -270,7 +270,7 @@ func runContractReaderGetLatestValueInterfaceTests[T TestingT[T]](t T, tester Ch ctx := tests.Context(t) testStruct := CreateTestStruct(0, tester) testStruct.BigField = nil - testStruct.Account = nil + testStruct.AccountStruct.Account = nil bindings := tester.GetBindings(t) bound := BindingsByName(bindings, AnyContractName)[0] @@ -529,7 +529,7 @@ func runContractReaderBatchGetLatestValuesInterfaceTests[T TestingT[T]](t T, tes // setup call data testStruct := CreateTestStruct(0, tester) testStruct.BigField = nil - testStruct.Account = nil + testStruct.AccountStruct.Account = nil actual := &TestStructWithExtraField{} batchGetLatestValueRequest := make(types.BatchGetLatestValuesRequest) bindings := tester.GetBindings(t) diff --git a/pkg/types/interfacetests/codec_interface_fuzz_tests.go b/pkg/types/interfacetests/codec_interface_fuzz_tests.go index 47d756b..84e5751 100644 --- a/pkg/types/interfacetests/codec_interface_fuzz_tests.go +++ b/pkg/types/interfacetests/codec_interface_fuzz_tests.go @@ -37,10 +37,12 @@ func RunCodecInterfaceFuzzTests(f *testing.F, tester CodecInterfaceTester) { DifferentField: differentField, OracleID: commontypes.OracleID(oracleId), OracleIDs: oids, - Account: tester.GetAccountBytes(accountSeed), - AccountStr: tester.GetAccountString(accountSeed), - Accounts: [][]byte{tester.GetAccountBytes(accountsSeed + 1), tester.GetAccountBytes(accountsSeed + 2)}, - BigField: big.NewInt(bigField), + AccountStruct: AccountStruct{ + Account: tester.GetAccountBytes(accountSeed), + AccountStr: tester.GetAccountString(accountSeed), + }, + Accounts: [][]byte{tester.GetAccountBytes(accountsSeed + 1), tester.GetAccountBytes(accountsSeed + 2)}, + BigField: big.NewInt(bigField), NestedDynamicStruct: MidLevelDynamicTestStruct{ FixedBytes: fb, Inner: InnerDynamicTestStruct{ diff --git a/pkg/types/interfacetests/codec_interface_tests.go b/pkg/types/interfacetests/codec_interface_tests.go index 808cd23..423d9c3 100644 --- a/pkg/types/interfacetests/codec_interface_tests.go +++ b/pkg/types/interfacetests/codec_interface_tests.go @@ -65,8 +65,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { req := &EncodeRequest{TestStructs: []TestStruct{item}, TestOn: TestItemType} resp := tester.EncodeFields(t, req) compatibleItem := compatibleTestStruct{ - Account: item.Account, - AccountStr: item.AccountStr, + AccountStruct: item.AccountStruct, Accounts: item.Accounts, BigField: item.BigField, DifferentField: item.DifferentField, @@ -95,8 +94,10 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { req := &EncodeRequest{TestStructs: []TestStruct{item}, TestOn: TestItemType} resp := tester.EncodeFields(t, req) compatibleMap := map[string]any{ - "Account": item.Account, - "AccountStr": item.AccountStr, + "AccountStruct": map[string]any{ + "Account": item.AccountStruct.Account, + "AccountStr": item.AccountStruct.AccountStr, + }, "Accounts": item.Accounts, "BigField": item.BigField, "DifferentField": item.DifferentField, @@ -140,8 +141,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { DifferentField: ts.DifferentField, OracleID: ts.OracleID, OracleIDs: ts.OracleIDs, - Account: ts.Account, - AccountStr: ts.AccountStr, + 
AccountStruct: ts.AccountStruct, Accounts: ts.Accounts, BigField: ts.BigField, NestedDynamicStruct: ts.NestedDynamicStruct, @@ -325,7 +325,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { cr := tester.GetCodec(t) modified := CreateTestStruct[*testing.T](0, tester) modified.BigField = nil - modified.Account = nil + modified.AccountStruct.Account = nil actual, err := cr.Encode(ctx, modified, TestItemWithConfigExtra) require.NoError(t, err) @@ -355,8 +355,7 @@ func RunCodecInterfaceTests(t *testing.T, tester CodecInterfaceTester) { DifferentField: "", OracleID: 0, OracleIDs: [32]commontypes.OracleID{}, - Account: nil, - AccountStr: "", + AccountStruct: AccountStruct{}, Accounts: nil, BigField: nil, NestedDynamicStruct: MidLevelDynamicTestStruct{}, diff --git a/pkg/types/interfacetests/utils.go b/pkg/types/interfacetests/utils.go index 809e45a..9717cf8 100644 --- a/pkg/types/interfacetests/utils.go +++ b/pkg/types/interfacetests/utils.go @@ -153,12 +153,16 @@ type MidLevelStaticTestStruct struct { Inner InnerStaticTestStruct } +type AccountStruct struct { + Account []byte + AccountStr string +} + type TestStruct struct { Field *int32 OracleID commontypes.OracleID OracleIDs [32]commontypes.OracleID - Account []byte - AccountStr string + AccountStruct AccountStruct Accounts [][]byte DifferentField string BigField *big.Int @@ -175,8 +179,7 @@ type TestStructMissingField struct { DifferentField string OracleID commontypes.OracleID OracleIDs [32]commontypes.OracleID - Account []byte - AccountStr string + AccountStruct AccountStruct Accounts [][]byte BigField *big.Int NestedDynamicStruct MidLevelDynamicTestStruct @@ -185,8 +188,7 @@ type TestStructMissingField struct { // compatibleTestStruct has fields in a different order type compatibleTestStruct struct { - Account []byte - AccountStr string + AccountStruct AccountStruct Accounts [][]byte BigField *big.Int DifferentField string @@ -217,11 +219,13 @@ func CreateTestStruct[T any](i int, tester BasicTester[T]) TestStruct { s := fmt.Sprintf("field%v", i) fv := int32(i) return TestStruct{ - Field: &fv, - OracleID: commontypes.OracleID(i + 1), - OracleIDs: [32]commontypes.OracleID{commontypes.OracleID(i + 2), commontypes.OracleID(i + 3)}, - Account: tester.GetAccountBytes(i + 3), - AccountStr: tester.GetAccountString(i + 3), + Field: &fv, + OracleID: commontypes.OracleID(i + 1), + OracleIDs: [32]commontypes.OracleID{commontypes.OracleID(i + 2), commontypes.OracleID(i + 3)}, + AccountStruct: AccountStruct{ + Account: tester.GetAccountBytes(i), + AccountStr: tester.GetAccountString(i), + }, Accounts: [][]byte{tester.GetAccountBytes(i + 4), tester.GetAccountBytes(i + 5)}, DifferentField: s, BigField: big.NewInt(int64((i + 1) * (i + 2))), diff --git a/pkg/types/keystore/types.go b/pkg/types/keystore/types.go new file mode 100644 index 0000000..b1ab498 --- /dev/null +++ b/pkg/types/keystore/types.go @@ -0,0 +1,37 @@ +package keystore + +import "context" + +// Keystore This interface is exposed to keystore consumers +type Keystore interface { + Sign(ctx context.Context, keyID []byte, data []byte) ([]byte, error) + SignBatch(ctx context.Context, keyID []byte, data [][]byte) ([][]byte, error) + Verify(ctx context.Context, keyID []byte, data []byte) (bool, error) + VerifyBatch(ctx context.Context, keyID []byte, data [][]byte) ([]bool, error) + + ListKeys(ctx context.Context, tags []string) ([][]byte, error) + + // RunUDF executes a user-defined function (UDF) on the keystore. 
+ // This method is designed to provide flexibility by allowing users to define custom + // logic that can be executed without breaking the existing interface. While it enables + // future extensibility, developers should ensure that UDF implementations are safe + // and do not compromise the security of the keystore or the integrity of the data. + RunUDF(ctx context.Context, name string, keyID []byte, data []byte) ([]byte, error) +} + +// Management Core node exclusive +type Management interface { + AddPolicy(ctx context.Context, policy []byte) (string, error) + RemovePolicy(ctx context.Context, policyID string) error + ListPolicy(ctx context.Context) []byte + + ImportKey(ctx context.Context, keyType string, data []byte, tags []string) ([]byte, error) + ExportKey(ctx context.Context, keyID []byte) ([]byte, error) + + CreateKey(ctx context.Context, keyType string, tags []string) ([]byte, error) + DeleteKey(ctx context.Context, keyID []byte) error + + AddTag(ctx context.Context, keyID []byte, tag string) error + RemoveTag(ctx context.Context, keyID []byte, tag string) error + ListTags(ctx context.Context, keyID []byte) ([]string, error) +} diff --git a/pkg/utils/sleeper_task.go b/pkg/utils/sleeper_task.go index ee1d541..7d5db00 100644 --- a/pkg/utils/sleeper_task.go +++ b/pkg/utils/sleeper_task.go @@ -1,6 +1,7 @@ package utils import ( + "context" "fmt" "time" @@ -13,12 +14,18 @@ type Worker interface { Name() string } +// WorkerCtx is like Worker but includes [context.Context]. +type WorkerCtx interface { + Work(ctx context.Context) + Name() string +} + // SleeperTask represents a task that waits in the background to process some work. type SleeperTask struct { services.StateMachine - worker Worker + worker WorkerCtx chQueue chan struct{} - chStop chan struct{} + chStop services.StopChan chDone chan struct{} chWorkDone chan struct{} } @@ -31,16 +38,27 @@ type SleeperTask struct { // immediately after it is finished. For this reason you should take care to // make sure that Worker is idempotent. // WakeUp does not block. -func NewSleeperTask(worker Worker) *SleeperTask { +func NewSleeperTask(w Worker) *SleeperTask { + return NewSleeperTaskCtx(&worker{w}) +} + +type worker struct { + Worker +} + +func (w *worker) Work(ctx context.Context) { w.Worker.Work() } + +// NewSleeperTaskCtx is like NewSleeperTask but accepts a WorkerCtx with a [context.Context]. 
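+//
+// Illustrative sketch only (cleanupWorker is a hypothetical type, not part of this package):
+//
+//	type cleanupWorker struct{}
+//
+//	func (cleanupWorker) Name() string             { return "Cleanup" }
+//	func (cleanupWorker) Work(ctx context.Context) { /* honor ctx cancellation */ }
+//
+//	st := NewSleeperTaskCtx(cleanupWorker{})
+//	st.WakeUp() // Work receives a ctx that is cancelled when the task shuts down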
+func NewSleeperTaskCtx(w WorkerCtx) *SleeperTask { s := &SleeperTask{ - worker: worker, + worker: w, chQueue: make(chan struct{}, 1), chStop: make(chan struct{}), chDone: make(chan struct{}), chWorkDone: make(chan struct{}, 10), } - _ = s.StartOnce("SleeperTask-"+worker.Name(), func() error { + _ = s.StartOnce("SleeperTask-"+w.Name(), func() error { go s.workerLoop() return nil }) @@ -98,10 +116,13 @@ func (s *SleeperTask) WorkDone() <-chan struct{} { func (s *SleeperTask) workerLoop() { defer close(s.chDone) + ctx, cancel := s.chStop.NewCtx() + defer cancel() + for { select { case <-s.chQueue: - s.worker.Work() + s.worker.Work(ctx) s.workDone() case <-s.chStop: return diff --git a/pkg/values/map.go b/pkg/values/map.go index ed037dc..a9edca7 100644 --- a/pkg/values/map.go +++ b/pkg/values/map.go @@ -20,7 +20,7 @@ func EmptyMap() *Map { } } -func NewMap(m map[string]any) (*Map, error) { +func NewMap[T any](m map[string]T) (*Map, error) { mv := map[string]Value{} for k, v := range m { val, err := Wrap(v) diff --git a/pkg/values/value.go b/pkg/values/value.go index c2fed62..c8e2c9b 100644 --- a/pkg/values/value.go +++ b/pkg/values/value.go @@ -6,6 +6,7 @@ import ( "math" "math/big" "reflect" + "time" "github.com/go-viper/mapstructure/v2" "github.com/shopspring/decimal" @@ -76,6 +77,8 @@ func Wrap(v any) (Value, error) { return NewFloat64(float64(tv)), nil case *big.Int: return NewBigInt(tv), nil + case time.Time: + return NewTime(tv), nil case nil: return nil, nil @@ -95,6 +98,12 @@ func Wrap(v any) (Value, error) { return tv, nil case *Float64: return tv, nil + case *Bool: + return tv, nil + case *BigInt: + return tv, nil + case *Time: + return tv, nil } // Handle slices, structs, and pointers to structs diff --git a/pkg/values/value_test.go b/pkg/values/value_test.go index 23d02b7..9dbc9fb 100644 --- a/pkg/values/value_test.go +++ b/pkg/values/value_test.go @@ -1,6 +1,7 @@ package values import ( + "bytes" "math" "math/big" "reflect" @@ -334,6 +335,58 @@ func Test_StructWrapUnwrap(t *testing.T) { assert.Equal(t, expected, unwrapped) } +func Test_NestedValueWrapUnwrap(t *testing.T) { + now := time.Now() + + wrapInt, err := Wrap(int64(100)) + require.NoError(t, err) + wrapDeci, err := Wrap(decimal.NewFromInt(32)) + require.NoError(t, err) + wrapFloat, err := Wrap(float64(1.2)) + require.NoError(t, err) + wrapBuffer, err := Wrap(bytes.NewBufferString("immabuffer").Bytes()) + require.NoError(t, err) + wrapString, err := Wrap("wrapme") + require.NoError(t, err) + wrapBool, err := Wrap(false) + require.NoError(t, err) + wrapBI, err := Wrap(big.NewInt(1)) + require.NoError(t, err) + wrapT, err := Wrap(now) + require.NoError(t, err) + + valuesMap, err := NewMap(map[string]any{ + "Int64": wrapInt, + "Decimal": wrapDeci, + "Float": wrapFloat, + "Buffer": wrapBuffer, + "String": wrapString, + "Bool": wrapBool, + "BI": wrapBI, + "T": wrapT, + }) + require.NoError(t, err) + + unwrappedMap, err := valuesMap.Unwrap() + require.NoError(t, err) + + expectedMap := map[string]any{ + "Int64": int64(100), + "Decimal": decimal.NewFromInt(32), + "Float": float64(1.2), + "Buffer": bytes.NewBufferString("immabuffer").Bytes(), + "String": "wrapme", + "Bool": false, + "BI": big.NewInt(1), + "T": now, + } + require.Equal( + t, + expectedMap, + unwrappedMap, + ) +} + func Test_SameUnderlyingTypes(t *testing.T) { type str string type i int diff --git a/pkg/workflows/sdk/runtime.go b/pkg/workflows/sdk/runtime.go index 947bb22..17c01bd 100644 --- a/pkg/workflows/sdk/runtime.go +++ b/pkg/workflows/sdk/runtime.go @@ -7,9 
+7,22 @@ import ( var BreakErr = capabilities.ErrStopExecution +type MessageEmitter interface { + // Emit sends a message to the labeler's destination. + Emit(string) error + + // With sets the labels for the message to be emitted. Labels are passed as key-value pairs + // and are cumulative. + With(kvs ...string) MessageEmitter +} + +// Guest interface type Runtime interface { Logger() logger.Logger Fetch(req FetchRequest) (FetchResponse, error) + + // Emitter sends the given message and labels to the configured collector. + Emitter() MessageEmitter } type FetchRequest struct { diff --git a/pkg/workflows/sdk/testutils/runtime.go b/pkg/workflows/sdk/testutils/runtime.go index 0251959..068e9d6 100644 --- a/pkg/workflows/sdk/testutils/runtime.go +++ b/pkg/workflows/sdk/testutils/runtime.go @@ -17,3 +17,7 @@ func (nr *NoopRuntime) Logger() logger.Logger { l, _ := logger.New() return l } + +func (nr *NoopRuntime) Emitter() sdk.MessageEmitter { + return nil +} diff --git a/pkg/workflows/wasm/host/module.go b/pkg/workflows/wasm/host/module.go index 3761ab5..88b85c0 100644 --- a/pkg/workflows/wasm/host/module.go +++ b/pkg/workflows/wasm/host/module.go @@ -19,36 +19,11 @@ import ( "google.golang.org/protobuf/proto" "github.com/goplugin/plugin-common/pkg/logger" + "github.com/goplugin/plugin-common/pkg/values" "github.com/goplugin/plugin-common/pkg/workflows/wasm" wasmpb "github.com/goplugin/plugin-common/pkg/workflows/wasm/pb" ) -// safeMem returns a copy of the wasm module memory at the given pointer and size. -func safeMem(caller *wasmtime.Caller, ptr int32, size int32) ([]byte, error) { - mem := caller.GetExport("memory").Memory() - data := mem.UnsafeData(caller) - if ptr+size > int32(len(data)) { - return nil, errors.New("out of bounds memory access") - } - - cd := make([]byte, size) - copy(cd, data[ptr:ptr+size]) - return cd, nil -} - -// copyBuffer copies the given src byte slice into the wasm module memory at the given pointer and size. -func copyBuffer(caller *wasmtime.Caller, src []byte, ptr int32, size int32) int64 { - mem := caller.GetExport("memory").Memory() - rawData := mem.UnsafeData(caller) - if int32(len(rawData)) < ptr+size { - return -1 - } - buffer := rawData[ptr : ptr+size] - dataLen := int64(len(src)) - copy(buffer, src) - return dataLen -} - type respStore struct { m map[string]*wasmpb.Response mu sync.RWMutex @@ -81,8 +56,8 @@ func (r *respStore) get(id string) (*wasmpb.Response, error) { var ( defaultTickInterval = 100 * time.Millisecond - defaultTimeout = 300 * time.Millisecond - defaultMaxMemoryMBs = 64 + defaultTimeout = 2 * time.Second + defaultMaxMemoryMBs = 256 DefaultInitialFuel = uint64(100_000_000) ) @@ -91,6 +66,14 @@ type DeterminismConfig struct { Seed int64 } +type MessageEmitter interface { + // Emit sends a message to the labeler's destination. + Emit(string) error + + // WithMapLabels sets the labels for the message to be emitted. Labels are cumulative. + WithMapLabels(map[string]string) MessageEmitter +} + type ModuleConfig struct { TickInterval time.Duration Timeout *time.Duration @@ -100,6 +83,9 @@ type ModuleConfig struct { IsUncompressed bool Fetch func(*wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) + // Labeler is used to emit messages from the module. + Labeler MessageEmitter + // If Determinism is set, the module will override the random_get function in the WASI API with // the provided seed to ensure deterministic behavior. 
Determinism *DeterminismConfig @@ -110,6 +96,7 @@ type Module struct { module *wasmtime.Module linker *wasmtime.Linker + // respStore collects responses from sendResponse mapped by request ID r *respStore cfg *ModuleConfig @@ -148,6 +135,10 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) } } + if modCfg.Labeler == nil { + modCfg.Labeler = &unimplementedMessageEmitter{} + } + logger := modCfg.Logger if modCfg.TickInterval == 0 { @@ -200,7 +191,7 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) "env", "sendResponse", func(caller *wasmtime.Caller, ptr int32, ptrlen int32) int32 { - b, innerErr := safeMem(caller, ptr, ptrlen) + b, innerErr := wasmRead(caller, ptr, ptrlen) if innerErr != nil { logger.Errorf("error calling sendResponse: %s", err) return ErrnoFault @@ -230,7 +221,7 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) "env", "log", func(caller *wasmtime.Caller, ptr int32, ptrlen int32) { - b, innerErr := safeMem(caller, ptr, ptrlen) + b, innerErr := wasmRead(caller, ptr, ptrlen) if innerErr != nil { logger.Errorf("error calling log: %s", err) return @@ -285,6 +276,15 @@ func NewModule(modCfg *ModuleConfig, binary []byte, opts ...func(*ModuleConfig)) return nil, fmt.Errorf("error wrapping fetch func: %w", err) } + err = linker.FuncWrap( + "env", + "emit", + createEmitFn(logger, modCfg.Labeler, wasmRead, wasmWrite, wasmWriteUInt32), + ) + if err != nil { + return nil, fmt.Errorf("error wrapping emit func: %w", err) + } + m := &Module{ engine: engine, module: mod, @@ -404,7 +404,7 @@ func containsCode(err error, code int) bool { func fetchFn(logger logger.Logger, modCfg *ModuleConfig) func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 { const fetchErrSfx = "error calling fetch" return func(caller *wasmtime.Caller, respptr int32, resplenptr int32, reqptr int32, reqptrlen int32) int32 { - b, innerErr := safeMem(caller, reqptr, reqptrlen) + b, innerErr := wasmRead(caller, reqptr, reqptrlen) if innerErr != nil { logger.Errorf("%s: %s", fetchErrSfx, innerErr) return ErrnoFault @@ -429,19 +429,189 @@ func fetchFn(logger logger.Logger, modCfg *ModuleConfig) func(caller *wasmtime.C return ErrnoFault } - size := copyBuffer(caller, respBytes, respptr, int32(len(respBytes))) - if size == -1 { + if size := wasmWrite(caller, respBytes, respptr, int32(len(respBytes))); size == -1 { return ErrnoFault } - uint32Size := int32(4) - resplenBytes := make([]byte, uint32Size) - binary.LittleEndian.PutUint32(resplenBytes, uint32(len(respBytes))) - size = copyBuffer(caller, resplenBytes, resplenptr, uint32Size) - if size == -1 { + if size := wasmWriteUInt32(caller, resplenptr, uint32(len(respBytes))); size == -1 { return ErrnoFault } return ErrnoSuccess } } + +// createEmitFn injects dependencies and builds the emit function exposed by the WASM. Errors in +// Emit, if any, are returned in the Error Message of the response. 
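+//
+// Calling convention (same pointer/length layout as fetchFn): the guest passes the marshalled
+// EmitMessageRequest at (msgptr, msglen) and a response buffer at (respptr, resplenptr). On
+// success nothing is written back; on failure an EmitMessageResponse carrying the error is
+// marshalled, written at respptr, and its byte length is written at resplenptr as a
+// little-endian uint32.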
+func createEmitFn( + l logger.Logger, + e MessageEmitter, + reader unsafeReaderFunc, + writer unsafeWriterFunc, + sizeWriter unsafeFixedLengthWriterFunc, +) func(caller *wasmtime.Caller, respptr, resplenptr, msgptr, msglen int32) int32 { + logErr := func(err error) { + l.Errorf("error emitting message: %s", err) + } + + return func(caller *wasmtime.Caller, respptr, resplenptr, msgptr, msglen int32) int32 { + // writeErr marshals and writes an error response to wasm + writeErr := func(err error) int32 { + logErr(err) + + resp := &wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: err.Error(), + }, + } + + respBytes, perr := proto.Marshal(resp) + if perr != nil { + logErr(perr) + return ErrnoFault + } + + if size := writer(caller, respBytes, respptr, int32(len(respBytes))); size == -1 { + logErr(errors.New("failed to write response")) + return ErrnoFault + } + + if size := sizeWriter(caller, resplenptr, uint32(len(respBytes))); size == -1 { + logErr(errors.New("failed to write response length")) + return ErrnoFault + } + + return ErrnoSuccess + } + + b, err := reader(caller, msgptr, msglen) + if err != nil { + return writeErr(err) + } + + msg, labels, err := toEmissible(b) + if err != nil { + return writeErr(err) + } + + if err := e.WithMapLabels(labels).Emit(msg); err != nil { + return writeErr(err) + } + + return ErrnoSuccess + } +} + +type unimplementedMessageEmitter struct{} + +func (u *unimplementedMessageEmitter) Emit(string) error { + return errors.New("unimplemented") +} + +func (u *unimplementedMessageEmitter) WithMapLabels(map[string]string) MessageEmitter { + return u +} + +func toEmissible(b []byte) (string, map[string]string, error) { + msg := &wasmpb.EmitMessageRequest{} + if err := proto.Unmarshal(b, msg); err != nil { + return "", nil, err + } + + validated, err := toValidatedLabels(msg) + if err != nil { + return "", nil, err + } + + return msg.Message, validated, nil +} + +func toValidatedLabels(msg *wasmpb.EmitMessageRequest) (map[string]string, error) { + vl, err := values.FromMapValueProto(msg.Labels) + if err != nil { + return nil, err + } + + // Handle the case of no labels before unwrapping. + if vl == nil { + vl = values.EmptyMap() + } + + var labels map[string]string + if err := vl.UnwrapTo(&labels); err != nil { + return nil, err + } + + return labels, nil +} + +// unsafeWriterFunc defines behavior for writing directly to wasm memory. A source slice of bytes +// is written to the location defined by the ptr. +type unsafeWriterFunc func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 + +// unsafeFixedLengthWriterFunc defines behavior for writing a uint32 value to wasm memory at the location defined +// by the ptr. +type unsafeFixedLengthWriterFunc func(c *wasmtime.Caller, ptr int32, val uint32) int64 + +// unsafeReaderFunc abstractly defines the behavior of reading from WASM memory. Returns a copy of +// the memory at the given pointer and size. +type unsafeReaderFunc func(c *wasmtime.Caller, ptr, len int32) ([]byte, error) + +// wasmMemoryAccessor is the default implementation for unsafely accessing the memory of the WASM module. +func wasmMemoryAccessor(caller *wasmtime.Caller) []byte { + return caller.GetExport("memory").Memory().UnsafeData(caller) +} + +// wasmRead returns a copy of the wasm module memory at the given pointer and size. 
+func wasmRead(caller *wasmtime.Caller, ptr int32, size int32) ([]byte, error) { + return read(wasmMemoryAccessor(caller), ptr, size) +} + +// Read acts on a byte slice that should represent an unsafely accessed slice of memory. It returns +// a copy of the memory at the given pointer and size. +func read(memory []byte, ptr int32, size int32) ([]byte, error) { + if size < 0 || ptr < 0 { + return nil, fmt.Errorf("invalid memory access: ptr: %d, size: %d", ptr, size) + } + + if ptr+size > int32(len(memory)) { + return nil, errors.New("out of bounds memory access") + } + + cd := make([]byte, size) + copy(cd, memory[ptr:ptr+size]) + return cd, nil +} + +// wasmWrite copies the given src byte slice into the wasm module memory at the given pointer and size. +func wasmWrite(caller *wasmtime.Caller, src []byte, ptr int32, size int32) int64 { + return write(wasmMemoryAccessor(caller), src, ptr, size) +} + +// wasmWriteUInt32 binary encodes and writes a uint32 to the wasm module memory at the given pointer. +func wasmWriteUInt32(caller *wasmtime.Caller, ptr int32, val uint32) int64 { + return writeUInt32(wasmMemoryAccessor(caller), ptr, val) +} + +// writeUInt32 binary encodes and writes a uint32 to the memory at the given pointer. +func writeUInt32(memory []byte, ptr int32, val uint32) int64 { + uint32Size := int32(4) + buffer := make([]byte, uint32Size) + binary.LittleEndian.PutUint32(buffer, val) + return write(memory, buffer, ptr, uint32Size) +} + +// write copies the given src byte slice into the memory at the given pointer and size. +func write(memory, src []byte, ptr, size int32) int64 { + if size < 0 || ptr < 0 { + return -1 + } + + if int32(len(memory)) < ptr+size { + return -1 + } + buffer := memory[ptr : ptr+size] + dataLen := int64(len(src)) + copy(buffer, src) + return dataLen +} diff --git a/pkg/workflows/wasm/host/module_test.go b/pkg/workflows/wasm/host/module_test.go new file mode 100644 index 0000000..cd9efee --- /dev/null +++ b/pkg/workflows/wasm/host/module_test.go @@ -0,0 +1,333 @@ +package host + +import ( + "encoding/binary" + "testing" + + "github.com/bytecodealliance/wasmtime-go/v23" + "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/proto" + + "github.com/goplugin/plugin-common/pkg/logger" + "github.com/goplugin/plugin-common/pkg/values/pb" + wasmpb "github.com/goplugin/plugin-common/pkg/workflows/wasm/pb" +) + +type mockMessageEmitter struct { + e func(string, map[string]string) error + labels map[string]string +} + +func (m *mockMessageEmitter) Emit(msg string) error { + return m.e(msg, m.labels) +} + +func (m *mockMessageEmitter) WithMapLabels(labels map[string]string) MessageEmitter { + m.labels = labels + return m +} + +func newMockMessageEmitter(e func(string, map[string]string) error) MessageEmitter { + return &mockMessageEmitter{e: e} +} + +// Test_createEmitFn tests that the emit function used by the module is created correctly. Memory +// access functions are injected as mocks. 
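+// The injected unsafeReaderFunc/unsafeWriterFunc/unsafeFixedLengthWriterFunc stubs stand in
+// for real module memory, so no wasmtime instance is required; new(wasmtime.Caller) is passed
+// only to satisfy the signature.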
+func Test_createEmitFn(t *testing.T) { + t.Run("success", func(t *testing.T) { + emitFn := createEmitFn( + logger.Test(t), + newMockMessageEmitter(func(_ string, _ map[string]string) error { + return nil + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + Message: "hello, world", + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "foo": { + Value: &pb.Value_StringValue{ + StringValue: "bar", + }, + }, + }, + }, + }) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("success without labels", func(t *testing.T) { + emitFn := createEmitFn( + logger.Test(t), + newMockMessageEmitter(func(_ string, _ map[string]string) error { + return nil + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{}) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("successfully write error to memory on failure to read", func(t *testing.T) { + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + nil, + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return nil, assert.AnError + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, int32(len(respBytes)), 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode, "code mismatch") + }) + + t.Run("failure to emit writes error to memory", func(t *testing.T) { + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + newMockMessageEmitter(func(_ string, _ map[string]string) error { + return assert.AnError + }), + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{}) + assert.NoError(t, err) + return b, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) + + t.Run("bad read failure to unmarshal protos", func(t *testing.T) { + badData := 
[]byte("not proto bufs") + msg := &wasmpb.EmitMessageRequest{} + marshallErr := proto.Unmarshal(badData, msg) + assert.Error(t, marshallErr) + + respBytes, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: marshallErr.Error(), + }, + }) + assert.NoError(t, err) + + emitFn := createEmitFn( + logger.Test(t), + nil, + unsafeReaderFunc(func(_ *wasmtime.Caller, _, _ int32) ([]byte, error) { + return badData, nil + }), + unsafeWriterFunc(func(c *wasmtime.Caller, src []byte, ptr, len int32) int64 { + assert.Equal(t, respBytes, src, "marshalled response not equal to bytes to write") + return 0 + }), + unsafeFixedLengthWriterFunc(func(c *wasmtime.Caller, ptr int32, val uint32) int64 { + assert.Equal(t, uint32(len(respBytes)), val, "did not write length of response") + return 0 + }), + ) + gotCode := emitFn(new(wasmtime.Caller), 0, 0, 0, 0) + assert.Equal(t, ErrnoSuccess, gotCode) + }) +} + +func Test_read(t *testing.T) { + t.Run("successfully read from slice", func(t *testing.T) { + memory := []byte("hello, world") + got, err := read(memory, 0, int32(len(memory))) + assert.NoError(t, err) + assert.Equal(t, []byte("hello, world"), got) + }) + + t.Run("fail to read because out of bounds request", func(t *testing.T) { + memory := []byte("hello, world") + _, err := read(memory, 0, int32(len(memory)+1)) + assert.Error(t, err) + }) + + t.Run("fails to read because of invalid pointer or length", func(t *testing.T) { + memory := []byte("hello, world") + _, err := read(memory, 0, -1) + assert.Error(t, err) + + _, err = read(memory, -1, 1) + assert.Error(t, err) + }) + + t.Run("validate that memory is read only once copied", func(t *testing.T) { + memory := []byte("hello, world") + copied, err := read(memory, 0, int32(len(memory))) + assert.NoError(t, err) + + // mutate copy + copied[0] = 'H' + assert.Equal(t, []byte("Hello, world"), copied) + + // original memory is unchanged + assert.Equal(t, []byte("hello, world"), memory) + }) +} + +func Test_write(t *testing.T) { + t.Run("successfully write to slice", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, 12) + n := write(memory, giveSrc, 0, int32(len(giveSrc))) + assert.Equal(t, n, int64(len(giveSrc))) + assert.Equal(t, []byte("hello, world"), memory[:len(giveSrc)]) + }) + + t.Run("cannot write to slice because memory too small", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, len(giveSrc)-1) + n := write(memory, giveSrc, 0, int32(len(giveSrc))) + assert.Equal(t, n, int64(-1)) + }) + + t.Run("fails to write to invalid access", func(t *testing.T) { + giveSrc := []byte("hello, world") + memory := make([]byte, len(giveSrc)) + n := write(memory, giveSrc, 0, -1) + assert.Equal(t, n, int64(-1)) + + n = write(memory, giveSrc, -1, 1) + assert.Equal(t, n, int64(-1)) + }) +} + +// Test_writeUInt32 tests that a uint32 is written to memory correctly. 
+func Test_writeUInt32(t *testing.T) { + t.Run("success", func(t *testing.T) { + memory := make([]byte, 4) + n := writeUInt32(memory, 0, 42) + wantBuf := make([]byte, 4) + binary.LittleEndian.PutUint32(wantBuf, 42) + assert.Equal(t, n, int64(4)) + assert.Equal(t, wantBuf, memory) + }) +} + +func Test_toValidatedLabels(t *testing.T) { + t.Run("success", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{ + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_StringValue{ + StringValue: "value", + }, + }, + }, + }, + } + wantLabels := map[string]string{ + "test": "value", + } + gotLabels, err := toValidatedLabels(msg) + assert.NoError(t, err) + assert.Equal(t, wantLabels, gotLabels) + }) + + t.Run("success with empty labels", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{} + wantLabels := map[string]string{} + gotLabels, err := toValidatedLabels(msg) + assert.NoError(t, err) + assert.Equal(t, wantLabels, gotLabels) + }) + + t.Run("fails with non string", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{ + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_Int64Value{ + Int64Value: *proto.Int64(42), + }, + }, + }, + }, + } + _, err := toValidatedLabels(msg) + assert.Error(t, err) + }) +} + +func Test_toEmissible(t *testing.T) { + t.Run("success", func(t *testing.T) { + msg := &wasmpb.EmitMessageRequest{ + Message: "hello, world", + Labels: &pb.Map{ + Fields: map[string]*pb.Value{ + "test": { + Value: &pb.Value_StringValue{ + StringValue: "value", + }, + }, + }, + }, + } + + b, err := proto.Marshal(msg) + assert.NoError(t, err) + + gotMsg, gotLabels, err := toEmissible(b) + assert.NoError(t, err) + assert.Equal(t, "hello, world", gotMsg) + assert.Equal(t, map[string]string{"test": "value"}, gotLabels) + }) + + t.Run("fails with bad message", func(t *testing.T) { + _, _, err := toEmissible([]byte("not proto bufs")) + assert.Error(t, err) + }) +} diff --git a/pkg/workflows/wasm/host/test/emit/cmd/main.go b/pkg/workflows/wasm/host/test/emit/cmd/main.go new file mode 100644 index 0000000..4e1be22 --- /dev/null +++ b/pkg/workflows/wasm/host/test/emit/cmd/main.go @@ -0,0 +1,40 @@ +//go:build wasip1 + +package main + +import ( + "github.com/goplugin/plugin-common/pkg/workflows/wasm" + + "github.com/goplugin/plugin-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" + "github.com/goplugin/plugin-common/pkg/workflows/sdk" +) + +func BuildWorkflow(config []byte) *sdk.WorkflowSpecFactory { + workflow := sdk.NewWorkflowSpecFactory( + sdk.NewWorkflowParams{}, + ) + + triggerCfg := basictrigger.TriggerConfig{Name: "trigger", Number: 100} + trigger := triggerCfg.New(workflow) + + sdk.Compute1[basictrigger.TriggerOutputs, bool]( + workflow, + "transform", + sdk.Compute1Inputs[basictrigger.TriggerOutputs]{Arg0: trigger}, + func(rsdk sdk.Runtime, outputs basictrigger.TriggerOutputs) (bool, error) { + if err := rsdk.Emitter(). + With("test-string-field-key", "this is a test field content"). 
+ Emit("testing emit"); err != nil { + return false, err + } + return true, nil + }) + + return workflow +} + +func main() { + runner := wasm.NewRunner() + workflow := BuildWorkflow(runner.Config()) + runner.Run(workflow) +} diff --git a/pkg/workflows/wasm/host/test/oom/cmd/main.go b/pkg/workflows/wasm/host/test/oom/cmd/main.go index 21ecb69..a14775c 100644 --- a/pkg/workflows/wasm/host/test/oom/cmd/main.go +++ b/pkg/workflows/wasm/host/test/oom/cmd/main.go @@ -6,5 +6,5 @@ import "math" func main() { // allocate more bytes than the binary should be able to access, 64 megs - _ = make([]byte, int64(128*math.Pow(10, 6))) + _ = make([]byte, int64(512*math.Pow(10, 6))) } diff --git a/pkg/workflows/wasm/host/wasip1.go b/pkg/workflows/wasm/host/wasip1.go index 28950a1..08235e2 100644 --- a/pkg/workflows/wasm/host/wasip1.go +++ b/pkg/workflows/wasm/host/wasip1.go @@ -81,7 +81,7 @@ func clockTimeGet(caller *wasmtime.Caller, id int32, precision int64, resultTime uint64Size := int32(8) trg := make([]byte, uint64Size) binary.LittleEndian.PutUint64(trg, uint64(val)) - copyBuffer(caller, trg, resultTimestamp, uint64Size) + wasmWrite(caller, trg, resultTimestamp, uint64Size) return ErrnoSuccess } @@ -105,7 +105,7 @@ func pollOneoff(caller *wasmtime.Caller, subscriptionptr int32, eventsptr int32, return ErrnoInval } - subs, err := safeMem(caller, subscriptionptr, nsubscriptions*subscriptionLen) + subs, err := wasmRead(caller, subscriptionptr, nsubscriptions*subscriptionLen) if err != nil { return ErrnoFault } @@ -176,13 +176,13 @@ func pollOneoff(caller *wasmtime.Caller, subscriptionptr int32, eventsptr int32, binary.LittleEndian.PutUint32(rne, uint32(nsubscriptions)) // Write the number of events to `resultNevents` - size := copyBuffer(caller, rne, resultNevents, uint32Size) + size := wasmWrite(caller, rne, resultNevents, uint32Size) if size == -1 { return ErrnoFault } // Write the events to `events` - size = copyBuffer(caller, events, eventsptr, nsubscriptions*eventsLen) + size = wasmWrite(caller, events, eventsptr, nsubscriptions*eventsLen) if size == -1 { return ErrnoFault } @@ -221,7 +221,7 @@ func createRandomGet(cfg *ModuleConfig) func(caller *wasmtime.Caller, buf, bufLe } // Copy the random bytes into the wasm module memory - if n := copyBuffer(caller, randOutput, buf, bufLen); n != int64(len(randOutput)) { + if n := wasmWrite(caller, randOutput, buf, bufLen); n != int64(len(randOutput)) { return ErrnoFault } diff --git a/pkg/workflows/wasm/host/wasm_test.go b/pkg/workflows/wasm/host/wasm_test.go index d80ffe1..0b4dd2d 100644 --- a/pkg/workflows/wasm/host/wasm_test.go +++ b/pkg/workflows/wasm/host/wasm_test.go @@ -49,6 +49,8 @@ const ( fetchBinaryCmd = "test/fetch/cmd" randBinaryLocation = "test/rand/cmd/testmodule.wasm" randBinaryCmd = "test/rand/cmd" + emitBinaryLocation = "test/emit/cmd/testmodule.wasm" + emitBinaryCmd = "test/emit/cmd" ) func createTestBinary(outputPath, path string, compress bool, t *testing.T) []byte { @@ -187,6 +189,133 @@ func Test_Compute_Logs(t *testing.T) { } } +func Test_Compute_Emit(t *testing.T) { + binary := createTestBinary(emitBinaryCmd, emitBinaryLocation, true, t) + + lggr := logger.Test(t) + + req := &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + WorkflowId: "workflow-id", + WorkflowName: "workflow-name", + 
WorkflowOwner: "workflow-owner", + WorkflowExecutionId: "workflow-execution-id", + }, + }, + }, + }, + } + + fetchFunc := func(req *wasmpb.FetchRequest) (*wasmpb.FetchResponse, error) { + return nil, nil + } + + t.Run("successfully call emit with metadata in labels", func(t *testing.T) { + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + Labeler: newMockMessageEmitter(func(msg string, kvs map[string]string) error { + t.Helper() + + assert.Equal(t, "testing emit", msg) + assert.Equal(t, "this is a test field content", kvs["test-string-field-key"]) + assert.Equal(t, "workflow-id", kvs["workflow_id"]) + assert.Equal(t, "workflow-name", kvs["workflow_name"]) + assert.Equal(t, "workflow-owner", kvs["workflow_owner_address"]) + assert.Equal(t, "workflow-execution-id", kvs["workflow_execution_id"]) + return nil + }), + }, binary) + require.NoError(t, err) + + m.Start() + + _, err = m.Run(req) + assert.Nil(t, err) + }) + + t.Run("failure on emit writes to error chain and logs", func(t *testing.T) { + lggr, logs := logger.TestObserved(t, zapcore.InfoLevel) + + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + Labeler: newMockMessageEmitter(func(msg string, kvs map[string]string) error { + t.Helper() + + assert.Equal(t, "testing emit", msg) + assert.Equal(t, "this is a test field content", kvs["test-string-field-key"]) + assert.Equal(t, "workflow-id", kvs["workflow_id"]) + assert.Equal(t, "workflow-name", kvs["workflow_name"]) + assert.Equal(t, "workflow-owner", kvs["workflow_owner_address"]) + assert.Equal(t, "workflow-execution-id", kvs["workflow_execution_id"]) + + return assert.AnError + }), + }, binary) + require.NoError(t, err) + + m.Start() + + _, err = m.Run(req) + assert.Error(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) + + require.Len(t, logs.AllUntimed(), 1) + + expectedEntries := []Entry{ + { + Log: zapcore.Entry{Level: zapcore.ErrorLevel, Message: fmt.Sprintf("error emitting message: %s", assert.AnError)}, + }, + } + for i := range expectedEntries { + assert.Equal(t, expectedEntries[i].Log.Level, logs.AllUntimed()[i].Entry.Level) + assert.Equal(t, expectedEntries[i].Log.Message, logs.AllUntimed()[i].Entry.Message) + } + }) + + t.Run("failure on emit due to missing workflow identifying metadata", func(t *testing.T) { + lggr := logger.Test(t) + + m, err := NewModule(&ModuleConfig{ + Logger: lggr, + Fetch: fetchFunc, + Labeler: newMockMessageEmitter(func(msg string, labels map[string]string) error { + return nil + }), // never called + }, binary) + require.NoError(t, err) + + m.Start() + + req = &wasmpb.Request{ + Id: uuid.New().String(), + Message: &wasmpb.Request_ComputeRequest{ + ComputeRequest: &wasmpb.ComputeRequest{ + Request: &capabilitiespb.CapabilityRequest{ + Inputs: &valuespb.Map{}, + Config: &valuespb.Map{}, + Metadata: &capabilitiespb.RequestMetadata{ + ReferenceId: "transform", + }, + }, + }, + }, + } + + _, err = m.Run(req) + assert.Error(t, err) + assert.ErrorContains(t, err, "failed to create emission") + }) +} + func Test_Compute_Fetch(t *testing.T) { binary := createTestBinary(fetchBinaryCmd, fetchBinaryLocation, true, t) diff --git a/pkg/workflows/wasm/pb/wasm.pb.go b/pkg/workflows/wasm/pb/wasm.pb.go index 1566e01..11a6d77 100644 --- a/pkg/workflows/wasm/pb/wasm.pb.go +++ b/pkg/workflows/wasm/pb/wasm.pb.go @@ -671,11 +671,12 @@ type FetchResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ExecutionError bool `protobuf:"varint,1,opt,name=executionError,proto3" 
json:"executionError,omitempty"` - ErrorMessage string `protobuf:"bytes,2,opt,name=errorMessage,proto3" json:"errorMessage,omitempty"` - StatusCode uint32 `protobuf:"varint,3,opt,name=statusCode,proto3" json:"statusCode,omitempty"` // NOTE: this is actually a uint8, but proto doesn't support this. - Headers *pb1.Map `protobuf:"bytes,4,opt,name=headers,proto3" json:"headers,omitempty"` - Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + ExecutionError bool `protobuf:"varint,1,opt,name=executionError,proto3" json:"executionError,omitempty"` + ErrorMessage string `protobuf:"bytes,2,opt,name=errorMessage,proto3" json:"errorMessage,omitempty"` + // NOTE: this is actually a uint8, but proto doesn't support this. + StatusCode uint32 `protobuf:"varint,3,opt,name=statusCode,proto3" json:"statusCode,omitempty"` + Headers *pb1.Map `protobuf:"bytes,4,opt,name=headers,proto3" json:"headers,omitempty"` + Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` } func (x *FetchResponse) Reset() { @@ -745,6 +746,155 @@ func (x *FetchResponse) GetBody() []byte { return nil } +type EmitMessageRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + Labels *pb1.Map `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"` +} + +func (x *EmitMessageRequest) Reset() { + *x = EmitMessageRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EmitMessageRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EmitMessageRequest) ProtoMessage() {} + +func (x *EmitMessageRequest) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EmitMessageRequest.ProtoReflect.Descriptor instead. +func (*EmitMessageRequest) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{10} +} + +func (x *EmitMessageRequest) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *EmitMessageRequest) GetLabels() *pb1.Map { + if x != nil { + return x.Labels + } + return nil +} + +type Error struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *Error) Reset() { + *x = Error{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Error) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Error) ProtoMessage() {} + +func (x *Error) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Error.ProtoReflect.Descriptor instead. 
+func (*Error) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{11} +} + +func (x *Error) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +type EmitMessageResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Error *Error `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *EmitMessageResponse) Reset() { + *x = EmitMessageResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EmitMessageResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EmitMessageResponse) ProtoMessage() {} + +func (x *EmitMessageResponse) ProtoReflect() protoreflect.Message { + mi := &file_workflows_wasm_pb_wasm_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EmitMessageResponse.ProtoReflect.Descriptor instead. +func (*EmitMessageResponse) Descriptor() ([]byte, []int) { + return file_workflows_wasm_pb_wasm_proto_rawDescGZIP(), []int{12} +} + +func (x *EmitMessageResponse) GetError() *Error { + if x != nil { + return x.Error + } + return nil +} + var File_workflows_wasm_pb_wasm_proto protoreflect.FileDescriptor var file_workflows_wasm_pb_wasm_proto_rawDesc = []byte{ @@ -850,11 +1000,22 @@ var file_workflows_wasm_pb_wasm_proto_rawDesc = []byte{ 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x62, 0x6f, 0x64, - 0x79, 0x42, 0x43, 0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, 0x74, - 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, - 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, 0x2f, - 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x79, 0x22, 0x53, 0x0a, 0x12, 0x45, 0x6d, 0x69, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x12, 0x23, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0b, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x4d, 0x61, 0x70, 0x52, 0x06, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x22, 0x21, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, + 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x37, 0x0a, 0x13, 0x45, 0x6d, 0x69, + 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x20, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x0a, 0x2e, 0x73, 0x64, 0x6b, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x42, 0x43, 
0x5a, 0x41, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x73, 0x6d, 0x61, 0x72, 0x74, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x6b, 0x69, + 0x74, 0x2f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x6b, 0x2d, 0x63, 0x6f, 0x6d, 0x6d, + 0x6f, 0x6e, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x73, + 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -869,7 +1030,7 @@ func file_workflows_wasm_pb_wasm_proto_rawDescGZIP() []byte { return file_workflows_wasm_pb_wasm_proto_rawDescData } -var file_workflows_wasm_pb_wasm_proto_msgTypes = make([]protoimpl.MessageInfo, 10) +var file_workflows_wasm_pb_wasm_proto_msgTypes = make([]protoimpl.MessageInfo, 13) var file_workflows_wasm_pb_wasm_proto_goTypes = []interface{}{ (*RuntimeConfig)(nil), // 0: sdk.RuntimeConfig (*ComputeRequest)(nil), // 1: sdk.ComputeRequest @@ -881,33 +1042,38 @@ var file_workflows_wasm_pb_wasm_proto_goTypes = []interface{}{ (*Response)(nil), // 7: sdk.Response (*FetchRequest)(nil), // 8: sdk.FetchRequest (*FetchResponse)(nil), // 9: sdk.FetchResponse - (*pb.CapabilityRequest)(nil), // 10: capabilities.CapabilityRequest - (*emptypb.Empty)(nil), // 11: google.protobuf.Empty - (*pb.CapabilityResponse)(nil), // 12: capabilities.CapabilityResponse - (*pb1.Map)(nil), // 13: values.Map + (*EmitMessageRequest)(nil), // 10: sdk.EmitMessageRequest + (*Error)(nil), // 11: sdk.Error + (*EmitMessageResponse)(nil), // 12: sdk.EmitMessageResponse + (*pb.CapabilityRequest)(nil), // 13: capabilities.CapabilityRequest + (*emptypb.Empty)(nil), // 14: google.protobuf.Empty + (*pb.CapabilityResponse)(nil), // 15: capabilities.CapabilityResponse + (*pb1.Map)(nil), // 16: values.Map } var file_workflows_wasm_pb_wasm_proto_depIdxs = []int32{ - 10, // 0: sdk.ComputeRequest.request:type_name -> capabilities.CapabilityRequest + 13, // 0: sdk.ComputeRequest.request:type_name -> capabilities.CapabilityRequest 0, // 1: sdk.ComputeRequest.runtimeConfig:type_name -> sdk.RuntimeConfig 1, // 2: sdk.Request.computeRequest:type_name -> sdk.ComputeRequest - 11, // 3: sdk.Request.specRequest:type_name -> google.protobuf.Empty - 12, // 4: sdk.ComputeResponse.response:type_name -> capabilities.CapabilityResponse - 13, // 5: sdk.StepInputs.mapping:type_name -> values.Map + 14, // 3: sdk.Request.specRequest:type_name -> google.protobuf.Empty + 15, // 4: sdk.ComputeResponse.response:type_name -> capabilities.CapabilityResponse + 16, // 5: sdk.StepInputs.mapping:type_name -> values.Map 4, // 6: sdk.StepDefinition.inputs:type_name -> sdk.StepInputs - 13, // 7: sdk.StepDefinition.config:type_name -> values.Map + 16, // 7: sdk.StepDefinition.config:type_name -> values.Map 5, // 8: sdk.WorkflowSpec.triggers:type_name -> sdk.StepDefinition 5, // 9: sdk.WorkflowSpec.actions:type_name -> sdk.StepDefinition 5, // 10: sdk.WorkflowSpec.consensus:type_name -> sdk.StepDefinition 5, // 11: sdk.WorkflowSpec.targets:type_name -> sdk.StepDefinition 3, // 12: sdk.Response.computeResponse:type_name -> sdk.ComputeResponse 6, // 13: sdk.Response.specResponse:type_name -> sdk.WorkflowSpec - 13, // 14: sdk.FetchRequest.headers:type_name -> values.Map - 13, // 15: sdk.FetchResponse.headers:type_name -> values.Map - 16, // [16:16] is the sub-list for method output_type - 16, // [16:16] is the sub-list for method input_type - 16, // [16:16] is the sub-list for extension type_name - 16, // [16:16] is the sub-list for extension extendee - 0, // [0:16] is the sub-list for field 
type_name + 16, // 14: sdk.FetchRequest.headers:type_name -> values.Map + 16, // 15: sdk.FetchResponse.headers:type_name -> values.Map + 16, // 16: sdk.EmitMessageRequest.labels:type_name -> values.Map + 11, // 17: sdk.EmitMessageResponse.error:type_name -> sdk.Error + 18, // [18:18] is the sub-list for method output_type + 18, // [18:18] is the sub-list for method input_type + 18, // [18:18] is the sub-list for extension type_name + 18, // [18:18] is the sub-list for extension extendee + 0, // [0:18] is the sub-list for field type_name } func init() { file_workflows_wasm_pb_wasm_proto_init() } @@ -1036,6 +1202,42 @@ func file_workflows_wasm_pb_wasm_proto_init() { return nil } } + file_workflows_wasm_pb_wasm_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EmitMessageRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Error); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_workflows_wasm_pb_wasm_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EmitMessageResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } } file_workflows_wasm_pb_wasm_proto_msgTypes[2].OneofWrappers = []interface{}{ (*Request_ComputeRequest)(nil), @@ -1051,7 +1253,7 @@ func file_workflows_wasm_pb_wasm_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_workflows_wasm_pb_wasm_proto_rawDesc, NumEnums: 0, - NumMessages: 10, + NumMessages: 13, NumExtensions: 0, NumServices: 0, }, diff --git a/pkg/workflows/wasm/pb/wasm.proto b/pkg/workflows/wasm/pb/wasm.proto index 720e5f4..22abbf0 100644 --- a/pkg/workflows/wasm/pb/wasm.proto +++ b/pkg/workflows/wasm/pb/wasm.proto @@ -8,9 +8,7 @@ import "capabilities/pb/capabilities.proto"; import "values/pb/values.proto"; import "google/protobuf/empty.proto"; -message RuntimeConfig { - int64 maxFetchResponseSizeBytes = 1; -} +message RuntimeConfig { int64 maxFetchResponseSizeBytes = 1; } message ComputeRequest { capabilities.CapabilityRequest request = 1; @@ -27,9 +25,7 @@ message Request { } } -message ComputeResponse { - capabilities.CapabilityResponse response = 1; -} +message ComputeResponse { capabilities.CapabilityResponse response = 1; } message StepInputs { string outputRef = 1; @@ -74,7 +70,18 @@ message FetchRequest { message FetchResponse { bool executionError = 1; string errorMessage = 2; - uint32 statusCode = 3; // NOTE: this is actually a uint8, but proto doesn't support this. + + // NOTE: this is actually a uint8, but proto doesn't support this. 
+ uint32 statusCode = 3; values.Map headers = 4; bytes body = 5; } + +message EmitMessageRequest { + string message = 1; + values.Map labels = 2; +} + +message Error { string message = 1; } + +message EmitMessageResponse { Error error = 1; } diff --git a/pkg/workflows/wasm/runner.go b/pkg/workflows/wasm/runner.go index 193da1f..f4be71a 100644 --- a/pkg/workflows/wasm/runner.go +++ b/pkg/workflows/wasm/runner.go @@ -26,7 +26,7 @@ var _ sdk.Runner = (*Runner)(nil) type Runner struct { sendResponse func(payload *wasmpb.Response) - sdkFactory func(cfg *RuntimeConfig) *Runtime + sdkFactory func(cfg *RuntimeConfig, opts ...func(*RuntimeConfig)) *Runtime args []string req *wasmpb.Request } @@ -156,7 +156,7 @@ func (r *Runner) handleComputeRequest(factory *sdk.WorkflowSpecFactory, id strin } // Extract the config from the request - drc := defaultRuntimeConfig() + drc := defaultRuntimeConfig(id, &creq.Metadata) if rc := computeReq.GetRuntimeConfig(); rc != nil { if rc.MaxFetchResponseSizeBytes != 0 { drc.MaxFetchResponseSizeBytes = rc.MaxFetchResponseSizeBytes diff --git a/pkg/workflows/wasm/runner_test.go b/pkg/workflows/wasm/runner_test.go index b333525..a86cc37 100644 --- a/pkg/workflows/wasm/runner_test.go +++ b/pkg/workflows/wasm/runner_test.go @@ -2,7 +2,9 @@ package wasm import ( "encoding/base64" + "encoding/binary" "testing" + "unsafe" "github.com/google/uuid" "github.com/stretchr/testify/assert" @@ -14,6 +16,7 @@ import ( "github.com/goplugin/plugin-common/pkg/capabilities" "github.com/goplugin/plugin-common/pkg/capabilities/cli/cmd/testdata/fixtures/capabilities/basictrigger" capabilitiespb "github.com/goplugin/plugin-common/pkg/capabilities/pb" + "github.com/goplugin/plugin-common/pkg/logger" "github.com/goplugin/plugin-common/pkg/values" "github.com/goplugin/plugin-common/pkg/workflows/sdk" wasmpb "github.com/goplugin/plugin-common/pkg/workflows/wasm/pb" @@ -132,7 +135,9 @@ func TestRunner_Run_ExecuteCompute(t *testing.T) { runner := &Runner{ args: []string{"wasm", str}, sendResponse: responseFn, - sdkFactory: func(cfg *RuntimeConfig) *Runtime { return nil }, + sdkFactory: func(cfg *RuntimeConfig, _ ...func(*RuntimeConfig)) *Runtime { + return nil + }, } runner.Run(workflow) @@ -202,3 +207,91 @@ func TestRunner_Run_GetWorkflowSpec(t *testing.T) { gotSpec.Triggers[0].Config["number"] = int64(gotSpec.Triggers[0].Config["number"].(uint64)) assert.Equal(t, &gotSpec, spc) } + +// Test_createEmitFn validates the runtime's emit function implementation. Uses mocks of the +// imported wasip1 emit function. 
+func Test_createEmitFn(t *testing.T) { + var ( + l = logger.Test(t) + sdkConfig = &RuntimeConfig{ + MaxFetchResponseSizeBytes: 1_000, + Metadata: &capabilities.RequestMetadata{ + WorkflowID: "workflow_id", + WorkflowExecutionID: "workflow_execution_id", + WorkflowName: "workflow_name", + WorkflowOwner: "workflow_owner_address", + }, + } + giveMsg = "testing guest" + giveLabels = map[string]string{ + "some-key": "some-value", + } + ) + + t.Run("success", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 0 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.NoError(t, err) + }) + + t.Run("successfully read error message when emit fails", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + // marshall the protobufs + b, err := proto.Marshal(&wasmpb.EmitMessageResponse{ + Error: &wasmpb.Error{ + Message: assert.AnError.Error(), + }, + }) + assert.NoError(t, err) + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(b)) + copy(resp, b) + + // write the length of the response to memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(b))) + + return 0 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, assert.AnError.Error()) + }) + + t.Run("fail to deserialize response from memory", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + // b is a non-protobuf byte slice + b := []byte(assert.AnError.Error()) + + // write the marshalled response message to memory + resp := unsafe.Slice((*byte)(respptr), len(b)) + copy(resp, b) + + // write the length of the response to memory in little endian + respLen := unsafe.Slice((*byte)(resplenptr), uint32Size) + binary.LittleEndian.PutUint32(respLen, uint32(len(b))) + + return 0 + } + + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, "invalid wire-format data") + }) + + t.Run("fail with nonzero code from emit", func(t *testing.T) { + hostEmit := func(respptr, resplenptr, reqptr unsafe.Pointer, reqptrlen int32) int32 { + return 42 + } + runtimeEmit := createEmitFn(sdkConfig, l, hostEmit) + err := runtimeEmit(giveMsg, giveLabels) + assert.Error(t, err) + assert.ErrorContains(t, err, "emit failed with errno 42") + }) +} diff --git a/pkg/workflows/wasm/runner_wasip1.go b/pkg/workflows/wasm/runner_wasip1.go index b1ed13d..df44578 100644 --- a/pkg/workflows/wasm/runner_wasip1.go +++ b/pkg/workflows/wasm/runner_wasip1.go @@ -24,103 +24,118 @@ func log(respptr unsafe.Pointer, respptrlen int32) //go:wasmimport env fetch func fetch(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32 -const uint32Size = int32(4) - -func bufferToPointerLen(buf []byte) (unsafe.Pointer, int32) { - return unsafe.Pointer(&buf[0]), int32(len(buf)) -} +//go:wasmimport env emit +func emit(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32 func NewRunner() *Runner { l := logger.NewWithSync(&wasmWriteSyncer{}) return &Runner{ - sendResponse: func(response *wasmpb.Response) { - pb, err := proto.Marshal(response) - if err != nil { - // We somehow 
couldn't marshal the response, so let's - // exit with a special error code letting the host know - // what happened. - os.Exit(CodeInvalidResponse) - } - - // unknownID will only be set when we've failed to parse - // the request. Like before, let's bubble this up. - if response.Id == unknownID { - os.Exit(CodeInvalidRequest) - } - - ptr, ptrlen := bufferToPointerLen(pb) - errno := sendResponse(ptr, ptrlen) - if errno != 0 { - os.Exit(CodeHostErr) + sendResponse: sendResponseFn, + sdkFactory: func(sdkConfig *RuntimeConfig, opts ...func(*RuntimeConfig)) *Runtime { + for _, opt := range opts { + opt(sdkConfig) } - code := CodeSuccess - if response.ErrMsg != "" { - code = CodeRunnerErr - } - - os.Exit(code) - }, - sdkFactory: func(sdkConfig *RuntimeConfig) *Runtime { return &Runtime{ - logger: l, - fetchFn: func(req sdk.FetchRequest) (sdk.FetchResponse, error) { - headerspb, err := values.NewMap(req.Headers) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to create headers map: %w", err) - } - - b, err := proto.Marshal(&wasmpb.FetchRequest{ - Url: req.URL, - Method: req.Method, - Headers: values.ProtoMap(headerspb), - Body: req.Body, - TimeoutMs: req.TimeoutMs, - }) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to marshal fetch request: %w", err) - } - reqptr, reqptrlen := bufferToPointerLen(b) - - respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) - respptr, _ := bufferToPointerLen(respBuffer) - - resplenBuffer := make([]byte, uint32Size) - resplenptr, _ := bufferToPointerLen(resplenBuffer) - - errno := fetch(respptr, resplenptr, reqptr, reqptrlen) - if errno != 0 { - return sdk.FetchResponse{}, errors.New("failed to execute fetch") - } - - responseSize := binary.LittleEndian.Uint32(resplenBuffer) - response := &wasmpb.FetchResponse{} - err = proto.Unmarshal(respBuffer[:responseSize], response) - if err != nil { - return sdk.FetchResponse{}, fmt.Errorf("failed to unmarshal fetch response: %w", err) - } - - fields := response.Headers.GetFields() - headersResp := make(map[string]any, len(fields)) - for k, v := range fields { - headersResp[k] = v - } - - return sdk.FetchResponse{ - ExecutionError: response.ExecutionError, - ErrorMessage: response.ErrorMessage, - StatusCode: uint8(response.StatusCode), - Headers: headersResp, - Body: response.Body, - }, nil - }, + logger: l, + fetchFn: createFetchFn(sdkConfig, l), + emitFn: createEmitFn(sdkConfig, l, emit), } }, args: os.Args, } } +// sendResponseFn implements sendResponse for import into WASM. +func sendResponseFn(response *wasmpb.Response) { + pb, err := proto.Marshal(response) + if err != nil { + // We somehow couldn't marshal the response, so let's + // exit with a special error code letting the host know + // what happened. + os.Exit(CodeInvalidResponse) + } + + // unknownID will only be set when we've failed to parse + // the request. Like before, let's bubble this up. + if response.Id == unknownID { + os.Exit(CodeInvalidRequest) + } + + ptr, ptrlen := bufferToPointerLen(pb) + errno := sendResponse(ptr, ptrlen) + if errno != 0 { + os.Exit(CodeHostErr) + } + + code := CodeSuccess + if response.ErrMsg != "" { + code = CodeRunnerErr + } + + os.Exit(code) +} + +// createFetchFn injects dependencies and creates a fetch function that can be used by the WASM +// binary. 
+func createFetchFn( + sdkConfig *RuntimeConfig, + l logger.Logger, +) func(sdk.FetchRequest) (sdk.FetchResponse, error) { + fetchFn := func(req sdk.FetchRequest) (sdk.FetchResponse, error) { + headerspb, err := values.NewMap(req.Headers) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to create headers map: %w", err) + } + + b, err := proto.Marshal(&wasmpb.FetchRequest{ + Url: req.URL, + Method: req.Method, + Headers: values.ProtoMap(headerspb), + Body: req.Body, + TimeoutMs: req.TimeoutMs, + }) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to marshal fetch request: %w", err) + } + reqptr, reqptrlen := bufferToPointerLen(b) + + respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) + respptr, _ := bufferToPointerLen(respBuffer) + + resplenBuffer := make([]byte, uint32Size) + resplenptr, _ := bufferToPointerLen(resplenBuffer) + + errno := fetch(respptr, resplenptr, reqptr, reqptrlen) + if errno != 0 { + return sdk.FetchResponse{}, errors.New("failed to execute fetch") + } + + responseSize := binary.LittleEndian.Uint32(resplenBuffer) + response := &wasmpb.FetchResponse{} + err = proto.Unmarshal(respBuffer[:responseSize], response) + if err != nil { + return sdk.FetchResponse{}, fmt.Errorf("failed to unmarshal fetch response: %w", err) + } + + fields := response.Headers.GetFields() + headersResp := make(map[string]any, len(fields)) + for k, v := range fields { + headersResp[k] = v + } + + return sdk.FetchResponse{ + ExecutionError: response.ExecutionError, + ErrorMessage: response.ErrorMessage, + StatusCode: uint8(response.StatusCode), + Headers: headersResp, + Body: response.Body, + }, nil + } + return fetchFn +} + type wasmWriteSyncer struct{} // Write is used to proxy log requests from the WASM binary back to the host diff --git a/pkg/workflows/wasm/sdk.go b/pkg/workflows/wasm/sdk.go index 8624ba5..af66557 100644 --- a/pkg/workflows/wasm/sdk.go +++ b/pkg/workflows/wasm/sdk.go @@ -1,26 +1,47 @@ package wasm import ( + "encoding/binary" + "errors" + "fmt" + "unsafe" + + "google.golang.org/protobuf/proto" + + "github.com/goplugin/plugin-common/pkg/capabilities" + "github.com/goplugin/plugin-common/pkg/capabilities/events" + "github.com/goplugin/plugin-common/pkg/custmsg" "github.com/goplugin/plugin-common/pkg/logger" + "github.com/goplugin/plugin-common/pkg/values" "github.com/goplugin/plugin-common/pkg/workflows/sdk" + wasmpb "github.com/goplugin/plugin-common/pkg/workflows/wasm/pb" ) +// Length of responses are encoded into 4 byte buffers in little endian. uint32Size is the size +// of that buffer. 
+const uint32Size = int32(4) + type Runtime struct { fetchFn func(req sdk.FetchRequest) (sdk.FetchResponse, error) + emitFn func(msg string, labels map[string]string) error logger logger.Logger } type RuntimeConfig struct { MaxFetchResponseSizeBytes int64 + RequestID *string + Metadata *capabilities.RequestMetadata } const ( defaultMaxFetchResponseSizeBytes = 5 * 1024 ) -func defaultRuntimeConfig() *RuntimeConfig { +func defaultRuntimeConfig(id string, md *capabilities.RequestMetadata) *RuntimeConfig { return &RuntimeConfig{ MaxFetchResponseSizeBytes: defaultMaxFetchResponseSizeBytes, + RequestID: &id, + Metadata: md, } } @@ -33,3 +54,139 @@ func (r *Runtime) Fetch(req sdk.FetchRequest) (sdk.FetchResponse, error) { func (r *Runtime) Logger() logger.Logger { return r.logger } + +func (r *Runtime) Emitter() sdk.MessageEmitter { + return newWasmGuestEmitter(r.emitFn) +} + +type wasmGuestEmitter struct { + base custmsg.Labeler + emitFn func(string, map[string]string) error + labels map[string]string +} + +func newWasmGuestEmitter(emitFn func(string, map[string]string) error) wasmGuestEmitter { + return wasmGuestEmitter{ + emitFn: emitFn, + labels: make(map[string]string), + base: custmsg.NewLabeler(), + } +} + +func (w wasmGuestEmitter) Emit(msg string) error { + return w.emitFn(msg, w.labels) +} + +func (w wasmGuestEmitter) With(keyValues ...string) sdk.MessageEmitter { + newEmitter := newWasmGuestEmitter(w.emitFn) + newEmitter.base = w.base.With(keyValues...) + newEmitter.labels = newEmitter.base.Labels() + return newEmitter +} + +// createEmitFn builds the runtime's emit function implementation, which is a function +// that handles marshalling and unmarshalling messages for the WASM to act on. +func createEmitFn( + sdkConfig *RuntimeConfig, + l logger.Logger, + emit func(respptr unsafe.Pointer, resplenptr unsafe.Pointer, reqptr unsafe.Pointer, reqptrlen int32) int32, +) func(string, map[string]string) error { + emitFn := func(msg string, labels map[string]string) error { + // Prepare the labels to be emitted + if sdkConfig.Metadata == nil { + return NewEmissionError(fmt.Errorf("metadata is required to emit")) + } + + labels, err := toEmitLabels(sdkConfig.Metadata, labels) + if err != nil { + return NewEmissionError(err) + } + + vm, err := values.NewMap(labels) + if err != nil { + return NewEmissionError(fmt.Errorf("could not wrap labels to map: %w", err)) + } + + // Marshal the message and labels into a protobuf message + b, err := proto.Marshal(&wasmpb.EmitMessageRequest{ + Message: msg, + Labels: values.ProtoMap(vm), + }) + if err != nil { + return err + } + + // Prepare the request to be sent to the host memory by allocating space for the + // response and response length buffers. + respBuffer := make([]byte, sdkConfig.MaxFetchResponseSizeBytes) + respptr, _ := bufferToPointerLen(respBuffer) + + resplenBuffer := make([]byte, uint32Size) + resplenptr, _ := bufferToPointerLen(resplenBuffer) + + // The request buffer is the wasm memory, get a pointer to the first element and the length + // of the protobuf message. 
+ reqptr, reqptrlen := bufferToPointerLen(b) + + // Emit the message via the method imported from the host + errno := emit(respptr, resplenptr, reqptr, reqptrlen) + if errno != 0 { + return NewEmissionError(fmt.Errorf("emit failed with errno %d", errno)) + } + + // Attempt to read and handle the response from the host memory + responseSize := binary.LittleEndian.Uint32(resplenBuffer) + response := &wasmpb.EmitMessageResponse{} + if err := proto.Unmarshal(respBuffer[:responseSize], response); err != nil { + l.Errorw("failed to unmarshal emit response", "error", err.Error()) + return NewEmissionError(err) + } + + if response.Error != nil && response.Error.Message != "" { + return NewEmissionError(errors.New(response.Error.Message)) + } + + return nil + } + + return emitFn +} + +// bufferToPointerLen returns a pointer to the first element of the buffer and the length of the buffer. +func bufferToPointerLen(buf []byte) (unsafe.Pointer, int32) { + return unsafe.Pointer(&buf[0]), int32(len(buf)) +} + +// toEmitLabels ensures that the required metadata is present in the labels map +func toEmitLabels(md *capabilities.RequestMetadata, labels map[string]string) (map[string]string, error) { + if md.WorkflowID == "" { + return nil, fmt.Errorf("must provide workflow id to emit event") + } + + if md.WorkflowName == "" { + return nil, fmt.Errorf("must provide workflow name to emit event") + } + + if md.WorkflowOwner == "" { + return nil, fmt.Errorf("must provide workflow owner to emit event") + } + + labels[events.LabelWorkflowExecutionID] = md.WorkflowExecutionID + labels[events.LabelWorkflowOwner] = md.WorkflowOwner + labels[events.LabelWorkflowID] = md.WorkflowID + labels[events.LabelWorkflowName] = md.WorkflowName + return labels, nil +} + +// EmissionError wraps all errors that occur during the emission process for the runtime to handle. 
+type EmissionError struct { + Wrapped error +} + +func NewEmissionError(err error) *EmissionError { + return &EmissionError{Wrapped: err} +} + +func (e *EmissionError) Error() string { + return fmt.Errorf("failed to create emission: %w", e.Wrapped).Error() +} diff --git a/pkg/workflows/wasm/sdk_test.go b/pkg/workflows/wasm/sdk_test.go new file mode 100644 index 0000000..a824af4 --- /dev/null +++ b/pkg/workflows/wasm/sdk_test.go @@ -0,0 +1,66 @@ +package wasm + +import ( + "testing" + + "github.com/goplugin/plugin-common/pkg/capabilities" + + "github.com/stretchr/testify/assert" +) + +func Test_toEmitLabels(t *testing.T) { + t.Run("successfully transforms metadata", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + gotLabels, err := toEmitLabels(md, empty) + assert.NoError(t, err) + + assert.Equal(t, map[string]string{ + "workflow_id": "workflow-id", + "workflow_name": "workflow-name", + "workflow_owner_address": "workflow-owner", + "workflow_execution_id": "", + }, gotLabels) + }) + + t.Run("fails on missing workflow id", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowName: "workflow-name", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow id") + }) + + t.Run("fails on missing workflow name", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowOwner: "workflow-owner", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow name") + }) + + t.Run("fails on missing workflow owner", func(t *testing.T) { + md := &capabilities.RequestMetadata{ + WorkflowID: "workflow-id", + WorkflowName: "workflow-name", + } + empty := make(map[string]string, 0) + + _, err := toEmitLabels(md, empty) + assert.Error(t, err) + assert.ErrorContains(t, err, "workflow owner") + }) +} diff --git a/sonar-project.properties b/sonar-project.properties index 71ae468..cdb27e1 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -2,6 +2,7 @@ sonar.projectKey=goplugin_plugin-common sonar.sources=. sonar.sourceEncoding=UTF-8 +sonar.python.version=3.8 # Full exclusions from the static analysis sonar.exclusions=\ @@ -16,18 +17,23 @@ sonar.exclusions=\ **/*report.xml,\ **/*.txt,\ **/*.abi,\ -**/*.bin +**/*.bin,\ +**/generated_*,\ +**/*_generated.go,\ +**/mock_*.go # Coverage exclusions sonar.coverage.exclusions=\ **/test/**/*,\ **/*_test.go,\ observability-lib/**,\ -**/fuzz/**/* +**/fuzz/**/*,\ +**/capabilities/**/*test/**/* + # Tests' root folder, inclusions (tests to check and count) and exclusions sonar.tests=. sonar.test.inclusions=**/*_test.go # Duplication exclusions -sonar.cpd.exclusions=observability-lib/** +sonar.cpd.exclusions=**/observability-lib/**/*
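Note on intended usage (editorial addition, not part of the patch): like fetch, the new emit path crosses the guest/host boundary through the `//go:wasmimport env emit` import. The guest marshals an EmitMessageRequest into its own linear memory, hands the host pointers and lengths via bufferToPointerLen, and then reads back an EmitMessageResponse whose Error field carries any host-side failure. Guest workflow code is expected to reach this through Runtime.Emitter(). The sketch below is illustrative only and is written against the *Runtime type added in this patch; the function name emitExample, the label key/value, and the message string are hypothetical, and createEmitFn will merge the supplied labels with the required workflow metadata labels (see toEmitLabels), returning an *EmissionError if that metadata is missing.

package wasm

// emitExample is an illustrative sketch, assuming it lives in the same wasm package as
// the Runtime type introduced above. It is not part of the patch.
func emitExample(r *Runtime) error {
	// With returns a new sdk.MessageEmitter carrying the extra label;
	// the key/value pair here is made up for illustration.
	em := r.Emitter().With("step", "price-fetch")

	// Emit serializes the message and labels, calls the imported host emit
	// function, and surfaces any host-reported error as an *EmissionError.
	return em.Emit("fetched price feed successfully")
}

A design consequence of this layout, visible in Test_createEmitFn, is that the host emit function can be swapped for a plain Go closure in tests: the mock writes a protobuf EmitMessageResponse (or deliberately malformed bytes) into the response buffer and a little-endian length into the length buffer, which is exactly the contract the real wasip1 import is expected to honor.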