diff --git a/.github/wip_integration_tests.yml b/.github/wip_integration_tests.yml deleted file mode 100644 index 36312e3442..0000000000 --- a/.github/wip_integration_tests.yml +++ /dev/null @@ -1,78 +0,0 @@ ---- -name: CI - -on: - push: - pull_request: - types: [opened] - -env: - toolchain: nightly-2022-05-01 - -jobs: - integration: - name: integration - runs-on: ubuntu-20.04 - steps: - - name: checkout - uses: actions/checkout@v2 - - name: cache - uses: actions/cache@v2 - with: - path: | - ~/.cargo/registry - ~/.cargo/git - target - key: ${{ runner.os }}-cargo-build-release-${{ hashFiles('**/Cargo.lock') }} - - name: toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: ${{ env.toolchain }} - components: clippy, rustfmt - override: true - - name: dependencies - run: | - sudo apt-get update && \ - sudo apt-get -y install \ - libssl-dev \ - clang-10 \ - pkg-config \ - git \ - cmake \ - zip \ - libc++-dev \ - libc++abi-dev \ - libprotobuf-dev \ - protobuf-compiler - - name: npm ci and lint - run: | - node -v - cd integration_tests - npm ci - npm run check-fmt - npm run lint - - name: build - run: | - cargo build --release --bin tari_base_node - cargo build --release --bin tari_console_wallet - cargo build --release --bin tari_merge_mining_proxy - cargo build --release --bin tari_miner - cargo build --release --package tari_wallet_ffi - - name: run cucumber scenarios - run: | - cd integration_tests - mkdir -p cucumber_output - node_modules/.bin/cucumber-js --tags "not @long-running and not @broken and not @wallet-ffi" --format json:cucumber_output/tests.cucumber --exit --retry 2 --retryTagFilter "@flaky and not @broken" - - name: generate report - run: | - cd integration_tests - node ./generate_report.js - - name: run ffi wallet cucumber scenarios - run: | - cd integration_tests - mkdir -p cucumber_output - node_modules/.bin/cucumber-js --tags "not @long-running and not @broken and not @flaky and @wallet-ffi" --format json:cucumber_output/tests_ffi.cucumber 
--exit - - name: generate ffi report - run: | - cd integration_tests - node ./generate_report.js "cucumber_output/tests_ffi.cucumber" "temp/reports/cucumber_ffi_report.html" diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml index 2fa01bb612..567844d0f3 100644 --- a/.github/workflows/audit.yml +++ b/.github/workflows/audit.yml @@ -1,11 +1,9 @@ --- -# Runs daily ---- -name: Security audit +name: Security audit - daily -on: +'on': schedule: - - cron: "43 05 * * *" + - cron: '43 05 * * *' jobs: security_audit: diff --git a/.github/workflows/base_node_binaries.json b/.github/workflows/base_node_binaries.json index 68641b7952..f3a72d7ae5 100644 --- a/.github/workflows/base_node_binaries.json +++ b/.github/workflows/base_node_binaries.json @@ -43,7 +43,8 @@ "target": "x86_64-pc-windows-msvc", "cross": false, "target_cpu": "x86-64", - "features": "safe" + "features": "safe", + "flags": "--workspace --exclude tari_libtor" }, { "name": "windows-arm64", diff --git a/.github/workflows/base_node_binaries.yml b/.github/workflows/base_node_binaries.yml index 7904f73ae4..01ce9f7101 100644 --- a/.github/workflows/base_node_binaries.yml +++ b/.github/workflows/base_node_binaries.yml @@ -187,7 +187,7 @@ jobs: with: use-cross: ${{ matrix.builds.cross }} command: build - args: --release --target ${{ matrix.builds.target }} --features ${{ matrix.builds.features }} ${{ matrix.builds.target_bins }} --locked + args: --release --target ${{ matrix.builds.target }} --features ${{ matrix.builds.features }} ${{ matrix.builds.target_bins }} ${{ matrix.builds.flags }} --locked - name: Copy binaries to folder for zipping shell: bash diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ea49e65d4d..9e4e18509c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,7 +1,7 @@ --- name: CI -on: +'on': workflow_dispatch: push: branches: @@ -24,7 +24,8 @@ env: jobs: clippy: name: clippy - runs-on: [ self-hosted, ubuntu18.04-high-cpu ] + #runs-on: 
[ self-hosted, ubuntu18.04-high-cpu ] + runs-on: [ ubuntu-20.04 ] steps: - name: checkout uses: actions/checkout@v2 @@ -36,19 +37,8 @@ jobs: override: true - name: ubuntu dependencies run: | - sudo apt-get update && \ - sudo apt-get -y install \ - build-essential \ - libgtk-3-dev \ - libwebkit2gtk-4.0-dev \ - libsoup2.4-dev \ - curl \ - wget \ - libappindicator3-dev \ - patchelf \ - librsvg2-dev \ - libprotobuf-dev \ - protobuf-compiler + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh - name: cargo fmt uses: actions-rs/cargo@v1 with: @@ -78,19 +68,8 @@ jobs: override: true - name: ubuntu dependencies run: | - sudo apt-get update && \ - sudo apt-get -y install \ - build-essential \ - libgtk-3-dev \ - libwebkit2gtk-4.0-dev \ - libsoup2.4-dev \ - curl \ - wget \ - libappindicator3-dev \ - patchelf \ - librsvg2-dev \ - libprotobuf-dev \ - protobuf-compiler + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh - name: cargo check uses: actions-rs/cargo@v1 with: @@ -116,19 +95,8 @@ jobs: - uses: Swatinem/rust-cache@v1 - name: ubuntu dependencies run: | - sudo apt-get update && \ - sudo apt-get -y install \ - build-essential \ - libgtk-3-dev \ - libwebkit2gtk-4.0-dev \ - libsoup2.4-dev \ - curl \ - wget \ - libappindicator3-dev \ - patchelf \ - librsvg2-dev \ - libprotobuf-dev \ - protobuf-compiler + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh - name: rustup show run: | rustup show @@ -167,19 +135,8 @@ jobs: toolchain: ${{ env.toolchain }} - name: ubuntu dependencies run: | - sudo apt-get update && \ - sudo apt-get -y install \ - build-essential \ - libgtk-3-dev \ - libwebkit2gtk-4.0-dev \ - libsoup2.4-dev \ - curl \ - wget \ - libappindicator3-dev \ - patchelf \ - librsvg2-dev \ - libprotobuf-dev \ - protobuf-compiler + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh - name: test key manager wasm run: | npm install -g wasm-pack @@ -196,9 +153,10 @@ jobs: with: command: test args: 
-v --all-features --release + # Allows other workflows to know the PR number artifacts: - name: test + name: pr_2_artifact runs-on: [ ubuntu-20.04 ] steps: - name: Save the PR number in an artifact diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml new file mode 100644 index 0000000000..c6b88e47eb --- /dev/null +++ b/.github/workflows/integration_tests.yml @@ -0,0 +1,186 @@ +--- +name: Integration tests + +'on': + push: + paths-ignore: + - '**/*.md' + schedule: + - cron: '0 2 * * *' # daily @ 02h00 (non-critical) + - cron: '0 12 * * 6' # weekly - Saturday @ noon (long-running) + workflow_dispatch: + inputs: + ci_bins: + type: boolean + default: true + description: 'run ci on binaries' + ci_ffi: + type: boolean + default: true + description: 'run ci on ffi' + ci_profile: + default: ci + description: 'ci profile to run' + type: string + +env: + toolchain: nightly-2022-05-01 + # space seperated string list + build_binaries: "tari_base_node tari_console_wallet tari_merge_mining_proxy tari_miner" + +jobs: + cucumber_tests: + name: Cucumber tests + runs-on: ubuntu-latest + steps: + - name: Checkout source code + uses: actions/checkout@v3 + + - name: Envs setup + id: envs_setup + shell: bash + run: | + VAPPS_STRING="${{ env.build_binaries }}" + VAPPS_ARRAY=(${VAPPS_STRING}) + for i in "${!VAPPS_ARRAY[@]}"; do + if [ "${VAPPS_ARRAY[$i]:0:5}" = "tari_" ] ; then + VAPPS_TARGET_BINS="${VAPPS_TARGET_BINS} --bin ${VAPPS_ARRAY[$i]}" + fi + done + echo "TARGET_BINS=${VAPPS_TARGET_BINS}" >> $GITHUB_ENV + if [ "${{ github.event_name }}" == "schedule" ] ; then + echo "CI_FFI=false" >> $GITHUB_ENV + if [ "${{ github.event.schedule }}" == "0 2 * * *" ] ; then + echo "CI_PROFILE=non-critical" >> $GITHUB_ENV + elif [ "${{ github.event.schedule }}" == "0 12 * * 6" ] ; then + echo "CI_PROFILE=long-running" >> $GITHUB_ENV + fi + else + echo "CI ..." 
+ echo "CI_PROFILE=ci" >> $GITHUB_ENV + CI_BINS=${{ inputs.ci_bins }} + echo "Run binary - ${CI_BINS}" + echo "CI_BINS=${CI_BINS:-true}" >> $GITHUB_ENV + CI_FFI=${{ inputs.ci_ffi }} + echo "Run FFI - ${CI_FFI}" + echo "CI_FFI=${CI_FFI:-true}" >> $GITHUB_ENV + fi + + - name: Install ubuntu dependencies + shell: bash + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - name: Setup rust toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + components: rustfmt, clippy + toolchain: ${{ env.toolchain }} + override: true + + - name: Cache cargo files and outputs + uses: Swatinem/rust-cache@v2 + + - name: Build binaries + uses: actions-rs/cargo@v1 + with: + use-cross: false + command: build + args: > + --release + --locked + ${{ env.TARGET_BINS }} + + - name: Build ffi + uses: actions-rs/cargo@v1 + with: + use-cross: false + command: build + args: > + --release + --locked + --package tari_wallet_ffi + + - name: CI folder prep + shell: bash + working-directory: integration_tests + run: | + mkdir -p cucumber_output + mkdir -p temp/reports + mkdir -p temp/out + cd ../target/release/ + cp -v ${{ env.build_binaries }} "$GITHUB_WORKSPACE/integration_tests/temp/out" + cd $GITHUB_WORKSPACE/integration_tests/temp/out + shasum -a 256 ${{ env.build_binaries }} > integration_tests.sha256sums + cat integration_tests.sha256sums + ls -alht + + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 18 + cache: 'npm' + cache-dependency-path: integration_tests/package-lock.json + + - name: Run npm ci and lint + shell: bash + working-directory: integration_tests + run: | + node -v + npm install + npm run check-fmt + npm run lint + npm ci + cd ../clients/base_node_grpc_client + npm install + cd ../wallet_grpc_client + npm install + npm ci + + - name: Run ${{ env.CI_PROFILE }} integration tests for binaries + if: ${{ env.CI_BINS == 'true' }} + continue-on-error: true + timeout-minutes: 90 + shell: bash + 
working-directory: integration_tests + run: | + node_modules/.bin/cucumber-js --publish-quiet \ + --profile "${{ env.CI_PROFILE }}" \ + --tags "not @wallet-ffi" --format json:cucumber_output/tests.cucumber \ + --exit --retry 2 --retry-tag-filter "@flaky and not @broken" + + - name: Run ${{ env.CI_PROFILE }} integration tests for ffi + if: ${{ env.CI_FFI == 'true' }} + continue-on-error: true + timeout-minutes: 90 + shell: bash + working-directory: integration_tests + run: | + node_modules/.bin/cucumber-js --publish-quiet \ + --profile "${{ env.CI_PROFILE }}" \ + --tags "@wallet-ffi" --format json:cucumber_output/tests_ffi.cucumber \ + --exit --retry 2 --retry-tag-filter "@flaky and not @broken" + + - name: Generate report + continue-on-error: true + if: always() + shell: bash + working-directory: integration_tests + run: | + node ./generate_report.js + # Empty file check + if [ -s cucumber_output/tests_ffi.cucumber ] ; then + node ./generate_report.js "cucumber_output/tests_ffi.cucumber" "temp/reports/cucumber_ffi_report.html" + fi + + - name: Store ${{ env.CI_PROFILE }} test results + uses: actions/upload-artifact@v3 + if: always() + with: + name: ${{ env.CI_PROFILE }} test results + path: | + integration_tests/cucumber_output + integration_tests/temp/reports + integration_tests/temp/out diff --git a/.github/workflows/long_running.yml b/.github/workflows/long_running.yml deleted file mode 100644 index eafd55797e..0000000000 --- a/.github/workflows/long_running.yml +++ /dev/null @@ -1,95 +0,0 @@ ---- -# Runs weekly (saturday noon) ---- -name: Long running integration tests - -on: - schedule: - - cron: "0 12 * * 6" - -env: - toolchain: nightly-2022-05-01 - -jobs: - long-running: - name: Run long-running critical cucumber tests - runs-on: ubuntu-18.04 - steps: - - name: checkout - uses: actions/checkout@v2 - - name: toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: ${{ env.toolchain }} - profile: minimal - override: true - - uses: Swatinem/rust-cache@v1 
- - name: ubuntu dependencies - run: | - sudo apt-get update && \ - sudo apt-get -y install \ - openssl \ - libssl-dev \ - pkg-config \ - libsqlite3-dev \ - clang-10 \ - git \ - cmake \ - libc++-dev \ - libc++abi-dev \ - libprotobuf-dev \ - protobuf-compiler \ - libncurses5-dev \ - libncursesw5-dev \ - zip \ - build-essential \ - libgtk-3-dev \ - libwebkit2gtk-4.0-dev \ - libsoup2.4-dev \ - curl \ - wget \ - libappindicator3-dev \ - patchelf \ - librsvg2-dev - - name: node -v - run: node -v - - name: build base node - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_base_node - - name: build console wallet - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_console_wallet - - name: build merge mining proxy - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_merge_mining_proxy - - name: build miner - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_miner - - name: build validator node - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_validator_node - - name: npm ci - run: cd integration_tests && npm ci && cd node_modules/wallet-grpc-client && npm ci - - name: Run integration tests - run: cd integration_tests && mkdir -p cucumber_output && node_modules/.bin/cucumber-js --profile "long-running" --tags "not @wallet-ffi" --format json:cucumber_output/tests.cucumber --exit --retry 2 --retry-tag-filter "@flaky and not @broken" - - name: Generate report - if: always() - run: cd integration_tests && node ./generate_report.js - - name: Store test results - uses: actions/upload-artifact@v3 - if: always() - with: - name: test results - path: | - integration_tests/cucumber_output - integration_tests/temp/reports diff --git a/.github/workflows/non_critical_integration_tests.yml b/.github/workflows/non_critical_integration_tests.yml deleted file mode 100644 index 1f57cf2c44..0000000000 --- 
a/.github/workflows/non_critical_integration_tests.yml +++ /dev/null @@ -1,95 +0,0 @@ ---- -# Runs daily (2am) ---- -name: Non critical integration tests - -on: - schedule: - - cron: "0 2 * * *" - -env: - toolchain: nightly-2022-05-01 - -jobs: - non-critical: - name: Run long-running critical cucumber tests - runs-on: ubuntu-18.04 - steps: - - name: checkout - uses: actions/checkout@v2 - - name: toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: ${{ env.toolchain }} - profile: minimal - override: true - - uses: Swatinem/rust-cache@v1 - - name: ubuntu dependencies - run: | - sudo apt-get update && \ - sudo apt-get -y install \ - openssl \ - libssl-dev \ - pkg-config \ - libsqlite3-dev \ - clang-10 \ - git \ - cmake \ - libc++-dev \ - libc++abi-dev \ - libprotobuf-dev \ - protobuf-compiler \ - libncurses5-dev \ - libncursesw5-dev \ - zip \ - build-essential \ - libgtk-3-dev \ - libwebkit2gtk-4.0-dev \ - libsoup2.4-dev \ - curl \ - wget \ - libappindicator3-dev \ - patchelf \ - librsvg2-dev - - name: node -v - run: node -v - - name: build base node - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_base_node - - name: build console wallet - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_console_wallet - - name: build merge mining proxy - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_merge_mining_proxy - - name: build miner - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_miner - - name: build validator node - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bin tari_validator_node - - name: npm ci - run: cd integration_tests && npm ci && cd node_modules/wallet-grpc-client && npm ci - - name: Run integration tests - run: cd integration_tests && mkdir -p cucumber_output && node_modules/.bin/cucumber-js --profile "non-critical" --tags "not @wallet-ffi" --format json:cucumber_output/tests.cucumber --exit 
--retry 2 --retry-tag-filter "@flaky and not @broken" - - name: Generate report - if: always() - run: cd integration_tests && node ./generate_report.js - - name: Store test results - uses: actions/upload-artifact@v3 - if: always() - with: - name: test results - path: | - integration_tests/cucumber_output - integration_tests/temp/reports diff --git a/Cargo.lock b/Cargo.lock index 2598b0f3c5..79bc281d7e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,7 +14,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b613b8e1e3cf911a086f53f03bf286f52fd7a7258e4fa606f0ef220d39d8877" dependencies = [ - "generic-array 0.14.5", + "generic-array", ] [[package]] @@ -24,7 +24,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c192eb8f11fc081b0fe4259ba5af04217d4e0faddd02417310a927911abd7c8" dependencies = [ "crypto-common", - "generic-array 0.14.5", + "generic-array", ] [[package]] @@ -35,8 +35,8 @@ checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" dependencies = [ "cfg-if 1.0.0", "cipher 0.3.0", - "cpufeatures 0.2.2", - "opaque-debug 0.3.0", + "cpufeatures", + "opaque-debug", ] [[package]] @@ -66,13 +66,22 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "0.7.18" +version = "0.7.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e" dependencies = [ "memchr", ] +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "ansi_term" version = "0.12.1" @@ -84,15 +93,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.57" +version = "1.0.65" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc" +checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602" [[package]] name = "arc-swap" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f" +checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164" [[package]] name = "argon2" @@ -159,9 +168,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.56" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96cf8829f67d2eab0b2dfa42c5d0ef737e0724e4a82b01b3e292456202b19716" +checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f" dependencies = [ "proc-macro2", "quote", @@ -238,9 +247,9 @@ dependencies = [ [[package]] name = "base64ct" -version = "1.1.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6b4d9b1225d28d360ec6a231d65af1fd99a2a095154c8040689617290569c5c" +checksum = "ea2b2456fd614d856680dcd9fcc660a51a820fa09daef2e49772b56a193c8474" [[package]] name = "bigdecimal" @@ -313,7 +322,7 @@ checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" dependencies = [ "crypto-mac", "digest 0.9.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] @@ -325,35 +334,23 @@ dependencies = [ "digest 0.10.3", ] -[[package]] -name = "block-buffer" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" -dependencies = [ - "block-padding 0.1.5", - "byte-tools", - "byteorder", - "generic-array 0.12.4", -] - [[package]] name = "block-buffer" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "block-padding 0.2.1", - "generic-array 0.14.5", + "block-padding", + "generic-array", ] [[package]] name = "block-buffer" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" dependencies = [ - "generic-array 0.14.5", + "generic-array", ] [[package]] @@ -362,19 +359,10 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2cb03d1bed155d89dce0f845b7899b18a9a163e148fd004e1c28421a783e2d8e" dependencies = [ - "block-padding 0.2.1", + "block-padding", "cipher 0.3.0", ] -[[package]] -name = "block-padding" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" -dependencies = [ - "byte-tools", -] - [[package]] name = "block-padding" version = "0.2.1" @@ -389,7 +377,7 @@ checksum = "fe3ff3fc1de48c1ac2e3341c4df38b0d1bfb8fdf04632a187c8b75aaa319a7ab" dependencies = [ "byteorder", "cipher 0.3.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] @@ -428,21 +416,15 @@ checksum = "40e38929add23cdf8a366df9b0e088953150724bcbe5fc330b0d8eb3b328eec8" [[package]] name = "bumpalo" -version = "3.10.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" - -[[package]] -name = "byte-tools" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" +checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d" [[package]] name = "bytemuck" -version = "1.9.1" +version = "1.12.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdead85bdec19c194affaeeb670c0e41fe23de31459efd1c174d049269cf02cc" +checksum = "2f5715e491b5a1598fc2bef5a606847b5dc1d48ea625bd3c02c00de8285591da" [[package]] name = "byteorder" @@ -458,9 +440,9 @@ checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = "bytes" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" dependencies = [ "serde", ] @@ -477,9 +459,15 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a" dependencies = [ - "rustc_version 0.4.0", + "rustc_version", ] +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cast5" version = "0.10.0" @@ -488,7 +476,7 @@ checksum = "f69790da27038b52ffcf09e7874e1aae353c674d65242549a733ad9372e7281f" dependencies = [ "byteorder", "cipher 0.3.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] @@ -516,7 +504,7 @@ version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6358dedf60f4d9b8db43ad187391afe959746101346fe51bb978126bec61dfb" dependencies = [ - "clap 3.2.15", + "clap 3.2.21", "heck 0.4.0", "indexmap", "log", @@ -567,14 +555,13 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chacha20" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fee7ad89dc1128635074c268ee661f90c3f7e83d9fd12910608c36b47d6c3412" +checksum = 
"f08493fa7707effc63254c66c6ea908675912493cd67952eda23c09fae2610b1" dependencies = [ "cfg-if 1.0.0", "cipher 0.3.0", - "cpufeatures 0.1.5", - "zeroize", + "cpufeatures", ] [[package]] @@ -585,7 +572,7 @@ checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6" dependencies = [ "cfg-if 1.0.0", "cipher 0.3.0", - "cpufeatures 0.2.2", + "cpufeatures", "zeroize", ] @@ -597,20 +584,7 @@ checksum = "c7fc89c7c5b9e7a02dfe45cd2367bae382f9ed31c61ca8debe5f827c420a2f08" dependencies = [ "cfg-if 1.0.0", "cipher 0.4.3", - "cpufeatures 0.2.2", -] - -[[package]] -name = "chacha20poly1305" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1580317203210c517b6d44794abfbe600698276db18127e37ad3e69bf5e848e5" -dependencies = [ - "aead 0.4.3", - "chacha20 0.7.1", - "cipher 0.3.0", - "poly1305 0.7.2", - "zeroize", + "cpufeatures", ] [[package]] @@ -647,15 +621,17 @@ checksum = "17cc5e6b5ab06331c33589842070416baa137e8b0eb912b008cfd4a78ada7919" [[package]] name = "chrono" -version = "0.4.19" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" dependencies = [ - "libc", + "iana-time-zone", + "js-sys", "num-integer", "num-traits", "serde", "time 0.1.44", + "wasm-bindgen", "winapi", ] @@ -675,7 +651,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" dependencies = [ - "generic-array 0.14.5", + "generic-array", ] [[package]] @@ -723,9 +699,9 @@ dependencies = [ [[package]] name = "clap" -version = "3.2.15" +version = "3.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bbe24bbd31a185bc2c4f7c2abe80bea13a20d57ee4e55be70ac512bdc76417" +checksum = 
"1ed5341b2301a26ab80be5cbdced622e80ed808483c52e45e3310a877d3b37d7" dependencies = [ "atty", "bitflags 1.3.2", @@ -740,9 +716,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "3.2.15" +version = "3.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ba52acd3b0a5c33aeada5cdaa3267cdc7c594a98731d4268cdc1532f4264cb4" +checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65" dependencies = [ "heck 0.4.0", "proc-macro-error", @@ -753,9 +729,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.2.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5538cd660450ebeb4234cfecf8f2284b844ffc4c50531e66d584ad5b91293613" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" dependencies = [ "os_str_bytes", ] @@ -771,9 +747,9 @@ dependencies = [ [[package]] name = "clipboard-win" -version = "4.4.1" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f3e1238132dc01f081e1cbb9dace14e5ef4c3a51ee244bd982275fb514605db" +checksum = "c4ab1b92798304eedc095b53942963240037c0516452cb11aeba709d420b2219" dependencies = [ "error-code", "str-buf", @@ -797,9 +773,9 @@ checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" [[package]] name = "config" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea917b74b6edfb5024e3b55d3c8f710b5f4ed92646429601a42e96f0812b31b" +checksum = "11f1667b8320afa80d69d8bbe40830df2c8a06003d86f73d8e003b2c48df416d" dependencies = [ "async-trait", "json5 0.4.1", @@ -863,18 +839,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66c99696f6c9dd7f35d486b9d04d7e6e202aa3e8c40d553f2fdf5e7e0c6a71ef" -dependencies = [ - "libc", -] - -[[package]] -name = "cpufeatures" -version = "0.2.2" +version 
= "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" dependencies = [ "libc", ] @@ -901,7 +868,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0363053954f3e679645fc443321ca128b7b950a6fe288cf5f9335cc22ee58394" dependencies = [ "atty", - "cast", + "cast 0.2.7", "clap 2.34.0", "criterion-plot 0.3.1", "csv", @@ -923,16 +890,16 @@ dependencies = [ [[package]] name = "criterion" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1604dafd25fba2fe2d5895a9da139f8dc9b319a5fe5354ca137cbbce4e178d10" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" dependencies = [ "atty", - "cast", + "cast 0.3.0", "clap 2.34.0", - "criterion-plot 0.4.4", + "criterion-plot 0.4.5", "csv", - "itertools 0.10.3", + "itertools 0.10.4", "lazy_static", "num-traits", "oorandom", @@ -954,18 +921,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76f9212ddf2f4a9eb2d401635190600656a1f88a932ef53d06e7fa4c7e02fb8e" dependencies = [ "byteorder", - "cast", + "cast 0.2.7", "itertools 0.8.2", ] [[package]] name = "criterion-plot" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57" +checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" dependencies = [ - "cast", - "itertools 0.10.3", + "cast 0.3.0", + "itertools 0.10.4", ] [[package]] @@ -992,9 +959,9 @@ dependencies = [ [[package]] name = "crossbeam" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845" +checksum = 
"2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" dependencies = [ "cfg-if 1.0.0", "crossbeam-channel", @@ -1006,9 +973,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -1016,9 +983,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", @@ -1027,23 +994,23 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.8" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c" +checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1" dependencies = [ "autocfg", "cfg-if 1.0.0", "crossbeam-utils", - "lazy_static", "memoffset", + "once_cell", "scopeguard", ] [[package]] name = "crossbeam-queue" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2" +checksum = "1cd42583b04998a5363558e5f9291ee5a5ff6b49944332103f251e7479a82aa7" dependencies = [ "cfg-if 1.0.0", "crossbeam-utils", @@ -1051,12 +1018,12 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +checksum = 
"51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc" dependencies = [ "cfg-if 1.0.0", - "lazy_static", + "once_cell", ] [[package]] @@ -1101,7 +1068,7 @@ dependencies = [ "crossterm_winapi 0.9.0", "futures-core", "libc", - "mio 0.8.3", + "mio 0.8.4", "parking_lot 0.12.1", "signal-hook 0.3.14", "signal-hook-mio", @@ -1147,7 +1114,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03c6a1d5fa1de37e071642dfa44ec552ca5b299adb128fab16138e24b548fd21" dependencies = [ - "generic-array 0.14.5", + "generic-array", "subtle", ] @@ -1157,7 +1124,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.5", + "generic-array", "rand_core 0.6.3", "typenum", ] @@ -1168,7 +1135,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" dependencies = [ - "generic-array 0.14.5", + "generic-array", "subtle", ] @@ -1205,9 +1172,9 @@ dependencies = [ [[package]] name = "curl-sys" -version = "0.4.55+curl-7.83.1" +version = "0.4.56+curl-7.83.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23734ec77368ec583c2e61dd3f0b0e5c98b93abe6d2a004ca06b91dd7e3e2762" +checksum = "6093e169dd4de29e468fa649fbae11cdcd5551c81fe5bf1b0677adad7ef3d26f" dependencies = [ "cc", "libc", @@ -1232,6 +1199,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "curve25519-dalek" +version = "4.0.0-pre.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4033478fbf70d6acf2655ac70da91ee65852d69daf7a67bf7a2f518fb47aafcf" +dependencies = [ + "byteorder", + "digest 0.9.0", + "rand_core 0.6.3", + "subtle", + "zeroize", +] + [[package]] name = "curve25519-dalek-ng" version = "4.1.1" @@ -1290,9 +1270,9 @@ checksum = 
"3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" [[package]] name = "decimal-rs" -version = "0.1.38" +version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa3ab4f7b3df4f77b57f228261f2761db6d9bb0b803d5b9d5dee3d84f9a67439" +checksum = "b2492291a982ad198a2c3b84b091b48348372ffe8a9f7194cc90a2d8b901762c" dependencies = [ "ethnum", "fast-float", @@ -1363,7 +1343,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "rustc_version 0.4.0", + "rustc_version", "syn", ] @@ -1375,7 +1355,7 @@ checksum = "ac41dd49fb554432020d52c875fc290e110113f864c6b1b525cd62c7e7747a5d" dependencies = [ "byteorder", "cipher 0.3.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] @@ -1417,22 +1397,13 @@ dependencies = [ "migrations_macros", ] -[[package]] -name = "digest" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" -dependencies = [ - "generic-array 0.12.4", -] - [[package]] name = "digest" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.14.5", + "generic-array", ] [[package]] @@ -1441,7 +1412,7 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" dependencies = [ - "block-buffer 0.10.2", + "block-buffer 0.10.3", "crypto-common", "subtle", ] @@ -1498,19 +1469,19 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek", + "curve25519-dalek 3.2.0", "ed25519", "rand 0.7.3", "serde", - "sha2", + "sha2 0.9.9", "zeroize", ] [[package]] name = "either" -version = "1.6.1" +version = "1.8.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "encoding_rs" @@ -1585,15 +1556,9 @@ dependencies = [ [[package]] name = "ethnum" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f4ea34740bd5042b688060cbff8b010f5a324719d5e111284d648035bccc47" - -[[package]] -name = "fake-simd" -version = "0.1.2" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" +checksum = "87e4a7b7dde9ed6aed8eb4dd7474d22fb1713a4b05ac5071cdb60d9903248ad3" [[package]] name = "fast-float" @@ -1603,22 +1568,22 @@ checksum = "95765f67b4b18863968b4a1bd5bb576f732b29a4a28c7cd84c09fa3e2875f33c" [[package]] name = "fastrand" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" dependencies = [ "instant", ] [[package]] name = "fd-lock" -version = "3.0.5" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46e245f4c8ec30c6415c56cb132c07e69e74f1942f6b4a4061da748b49f486ca" +checksum = "e11dcc7e4d79a8c89b9ab4c6f5c30b1fc4a83c420792da3542fd31179ed5f517" dependencies = [ "cfg-if 1.0.0", "rustix", - "windows-sys 0.30.0", + "windows-sys", ] [[package]] @@ -1641,9 +1606,9 @@ checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" [[package]] name = "fixedbitset" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" +checksum = 
"0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" @@ -1678,12 +1643,11 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" dependencies = [ - "matches", - "percent-encoding 2.1.0", + "percent-encoding 2.2.0", ] [[package]] @@ -1716,9 +1680,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" +checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c" dependencies = [ "futures-channel", "futures-core", @@ -1731,9 +1695,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" +checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050" dependencies = [ "futures-core", "futures-sink", @@ -1741,15 +1705,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" +checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf" [[package]] name = "futures-executor" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" +checksum = 
"9ff63c23854bee61b6e9cd331d523909f238fc7636290b96826e9cfa5faa00ab" dependencies = [ "futures-core", "futures-task", @@ -1758,15 +1722,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" +checksum = "bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68" [[package]] name = "futures-macro" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" +checksum = "42cd15d1c7456c04dbdf7e88bcd69760d74f3a798d6444e16974b505b0e62f17" dependencies = [ "proc-macro2", "quote", @@ -1775,21 +1739,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" +checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56" [[package]] name = "futures-task" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" +checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1" [[package]] name = "futures-test" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3e9379dbbfb35dd6df79e895d73c0f75558827fe68eb853b858ff417a8ee98" +checksum = "1ee87d68bf5bca8a0270f477fa1ceab0fbdf735fa21ea17e617ed5381b634fa4" dependencies = [ "futures-core", "futures-executor", @@ -1798,15 +1762,15 @@ dependencies = [ "futures-sink", "futures-task", "futures-util", - "pin-project 1.0.10", + "pin-project 1.0.12", "pin-utils", ] [[package]] name = "futures-util" -version = "0.3.21" +version = 
"0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" +checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90" dependencies = [ "futures 0.1.31", "futures-channel", @@ -1823,18 +1787,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.12.4" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" -dependencies = [ - "typenum", -] - -[[package]] -name = "generic-array" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check 0.9.4", @@ -1870,7 +1825,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1583cc1656d7839fd3732b80cf4f38850336cdb9b8ded1cd399ca62958de3c99" dependencies = [ - "opaque-debug 0.3.0", + "opaque-debug", "polyval", ] @@ -1897,11 +1852,11 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "h2" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" +checksum = "5ca32592cf21ac7ccab1825cd87f6c9b3d9022c44d086172ed0966bec8af30be" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "fnv", "futures-core", "futures-sink", @@ -1910,7 +1865,7 @@ dependencies = [ "indexmap", "slab", "tokio", - "tokio-util 0.7.3", + "tokio-util 0.7.4", "tracing", ] @@ -1922,47 +1877,37 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" - -[[package]] -name = "hashbrown" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ "ahash", ] [[package]] name = "hdrhistogram" -version = "7.5.0" +version = "7.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31672b7011be2c4f7456c4ddbcb40e7e9a4a9fad8efe49a6ebaf5f307d0109c0" +checksum = "6ea9fe3952d32674a14e0975009a3547af9ea364995b5ec1add2e23c2ae523ab" dependencies = [ - "base64 0.13.0", "byteorder", - "crossbeam-channel", - "flate2", - "nom 7.1.1", "num-traits", ] [[package]] name = "headers" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cff78e5788be1e0ab65b04d306b2ed5092c815ec97ec70f4ebd5aee158aa55d" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" dependencies = [ "base64 0.13.0", "bitflags 1.3.2", - "bytes 1.1.0", + "bytes 1.2.1", "headers-core", "http", "httpdate", "mime", - "sha-1 0.10.0", + "sha1 0.10.4", ] [[package]] @@ -2016,9 +1961,9 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "fnv", - "itoa 1.0.2", + "itoa 1.0.3", ] [[package]] @@ -2027,16 +1972,16 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "http", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" @@ -2061,11 +2006,11 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.19" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42dc3c131584288d375f2d07f822b0cb012d8c6fb899a5b9fdb3cb7eb9b6004f" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-channel", "futures-core", "futures-util", @@ -2074,7 +2019,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 1.0.2", + "itoa 1.0.3", "pin-project-lite", "socket2", "tokio", @@ -2101,13 +2046,27 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "hyper", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "iana-time-zone" +version = "0.1.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "237a0714f28b1ee39ccec0770ccb544eb02c9ef2c82bb096230eefcffa6468b0" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "js-sys", + "once_cell", + "wasm-bindgen", + "winapi", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -2136,6 +2095,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "image" version = "0.23.14" @@ -2152,12 +2121,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.8.2" +version = "1.9.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", - "hashbrown 0.11.2", + "hashbrown", ] [[package]] @@ -2166,7 +2135,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ - "generic-array 0.14.5", + "generic-array", ] [[package]] @@ -2186,15 +2155,15 @@ checksum = "48dc51180a9b377fd75814d0cc02199c20f8e99433d6762f650d39cdbbd3b56f" [[package]] name = "integer-encoding" -version = "3.0.3" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e85a1509a128c855368e135cffcde7eac17d8e1083f41e2b98c58bc1a5074be" +checksum = "8bb03732005da905c88227371639bf1ad885cc712789c011c31c5fb3ab3ccf02" [[package]] name = "io-lifetimes" -version = "0.6.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9448015e586b611e5d322f6703812bbca2f1e709d5773ecd38ddb4e3bb649504" +checksum = "1ea37f355c05dde75b84bba2d767906ad522e97cd9e2eef2be7a4ab7fb442c06" [[package]] name = "ipnet" @@ -2213,9 +2182,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "d8bf247779e67a9082a4790b45e71ac7cfd1321331a5c856a74a9faebdab78d0" dependencies = [ "either", ] @@ -2228,9 +2197,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "itoa" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" +checksum = 
"6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" [[package]] name = "js-sys" @@ -2298,9 +2267,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.126" +version = "0.2.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" +checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5" [[package]] name = "libgit2-sys" @@ -2344,9 +2313,9 @@ checksum = "7fc7aa29613bd6a620df431842069224d8bc9011086b1db4c0e0cd47fa03ec9a" [[package]] name = "libm" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" +checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565" [[package]] name = "libsqlite3-sys" @@ -2383,7 +2352,7 @@ dependencies = [ "libtor-sys", "log", "rand 0.8.5", - "sha1", + "sha1 0.6.0", ] [[package]] @@ -2424,9 +2393,9 @@ dependencies = [ [[package]] name = "linked-hash-map" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" @@ -2457,9 +2426,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" +checksum = "9f80bf5aacaf25cbfc8210d1cfb718f2bf3b11c4c54e5afe36c236853a8ec390" dependencies = [ "autocfg", "scopeguard", @@ -2507,12 +2476,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "maplit" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" - [[package]] name = "matchers" version = "0.0.1" @@ -2536,7 +2499,7 @@ checksum = "7b5a279bb9607f9f53c22d496eade00d138d1bdcccd07d74650387cf94942a15" dependencies = [ "block-buffer 0.9.0", "digest 0.9.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] @@ -2611,9 +2574,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" +checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" dependencies = [ "adler", ] @@ -2633,14 +2596,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.36.1", + "windows-sys", ] [[package]] @@ -2658,7 +2621,7 @@ version = "0.17.2" source = "git+https://github.com/tari-project/monero-rs.git?branch=main#7aebfd0aa037025cac6cbded3f72d73bf3c18123" dependencies = [ "base58-monero 1.0.0", - "curve25519-dalek", + "curve25519-dalek 3.2.0", "fixed-hash", "hex", "hex-literal", @@ -2680,18 +2643,18 @@ dependencies = [ "byteorder", "data-encoding", "multihash", - "percent-encoding 2.1.0", + "percent-encoding 2.2.0", "serde", "static_assertions", "unsigned-varint", - "url 2.2.2", + "url 2.3.1", ] [[package]] name = "multihash" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3db354f401db558759dfc1e568d010a5d4146f4d3f637be1275ec4a3cf09689" +checksum = "1c346cf9999c631f002d8f977c4eaeaa0e6386f16007202308d0b3757522c2cc" 
dependencies = [ "core2", "multihash-derive", @@ -2834,7 +2797,7 @@ checksum = "566d173b2f9406afbc5510a90925d5a2cd80cae4605631f1212303df265de011" dependencies = [ "byteorder", "lazy_static", - "libm 0.2.2", + "libm 0.2.5", "num-integer", "num-iter", "num-traits", @@ -2904,7 +2867,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", - "libm 0.2.2", + "libm 0.2.5", ] [[package]] @@ -2928,9 +2891,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.12.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" +checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0" [[package]] name = "oorandom" @@ -2938,12 +2901,6 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" -[[package]] -name = "opaque-debug" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" - [[package]] name = "opaque-debug" version = "0.3.0" @@ -2984,9 +2941,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "111.20.0+1.1.1o" +version = "111.22.0+1.1.1q" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92892c4f87d56e376e469ace79f1128fdaded07646ddf73aa0be4706ff712dec" +checksum = "8f31f0d509d1c1ae9cada2f9539ff8f37933831fd5098879e482aa687d659853" dependencies = [ "cc", ] @@ -3013,11 +2970,11 @@ checksum = "e1cf9b1c4e9a6c4de793c632496fa490bdc0e1eea73f0c91394f7b6990935d22" dependencies = [ "async-trait", "crossbeam-channel", - "futures 0.3.21", + "futures 0.3.24", "js-sys", "lazy_static", - "percent-encoding 2.1.0", - 
"pin-project 1.0.10", + "percent-encoding 2.2.0", + "pin-project 1.0.12", "rand 0.8.5", "thiserror", "tokio", @@ -3031,7 +2988,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d50ceb0b0e8b75cb3e388a2571a807c8228dabc5d6670f317b6eb21301095373" dependencies = [ "async-trait", - "bytes 1.1.0", + "bytes 1.2.1", "futures-util", "http", "opentelemetry", @@ -3091,14 +3048,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a" dependencies = [ "dlv-list", - "hashbrown 0.12.1", + "hashbrown", ] [[package]] name = "os_str_bytes" -version = "6.1.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa" +checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" [[package]] name = "packed_simd_2" @@ -3127,7 +3084,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", - "lock_api 0.4.7", + "lock_api 0.4.8", "parking_lot_core 0.8.5", ] @@ -3137,7 +3094,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "lock_api 0.4.7", + "lock_api 0.4.8", "parking_lot_core 0.9.3", ] @@ -3179,7 +3136,7 @@ dependencies = [ "libc", "redox_syscall 0.2.16", "smallvec", - "windows-sys 0.36.1", + "windows-sys", ] [[package]] @@ -3239,24 +3196,25 @@ checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" [[package]] name = "percent-encoding" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +checksum = 
"478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "pest" -version = "2.1.3" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" +checksum = "cb779fcf4bb850fbbb0edc96ff6cf34fd90c4b1a112ce042653280d9a7364048" dependencies = [ + "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.1.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" +checksum = "502b62a6d0245378b04ffe0a7fb4f4419a4815fce813bd8a0ec89a56e07d67b1" dependencies = [ "pest", "pest_generator", @@ -3264,9 +3222,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.1.3" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" +checksum = "451e629bf49b750254da26132f1a5a9d11fd8a95a3df51d15c4abd1ba154cb6c" dependencies = [ "pest", "pest_meta", @@ -3277,13 +3235,13 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.1.3" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d" +checksum = "bcec162c71c45e269dfc3fc2916eaeb97feab22993a21bcce4721d08cd7801a6" dependencies = [ - "maplit", + "once_cell", "pest", - "sha-1 0.8.2", + "sha1 0.10.4", ] [[package]] @@ -3302,7 +3260,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" dependencies = [ - "fixedbitset 0.4.1", + "fixedbitset 0.4.2", "indexmap", ] @@ -3316,7 +3274,7 @@ dependencies = [ "base64 0.13.0", "bitfield", "block-modes", - "block-padding 0.2.1", + "block-padding", "blowfish", "buf_redux", "byteorder", @@ -3332,7 +3290,7 @@ 
dependencies = [ "digest 0.9.0", "ed25519-dalek", "flate2", - "generic-array 0.14.5", + "generic-array", "hex", "lazy_static", "log", @@ -3344,8 +3302,8 @@ dependencies = [ "rand 0.8.5", "ripemd160", "rsa", - "sha-1 0.9.8", - "sha2", + "sha-1", + "sha2 0.9.9", "sha3", "signature", "smallvec", @@ -3357,27 +3315,27 @@ dependencies = [ [[package]] name = "pin-project" -version = "0.4.29" +version = "0.4.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9615c18d31137579e9ff063499264ddc1278e7b1982757ebc111028c4d1dc909" +checksum = "3ef0f924a5ee7ea9cbcea77529dba45f8a9ba9f622419fe3386ca581a3ae9d5a" dependencies = [ - "pin-project-internal 0.4.29", + "pin-project-internal 0.4.30", ] [[package]] name = "pin-project" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" dependencies = [ - "pin-project-internal 1.0.10", + "pin-project-internal 1.0.12", ] [[package]] name = "pin-project-internal" -version = "0.4.29" +version = "0.4.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "044964427019eed9d49d9d5bbce6047ef18f37100ea400912a9fa4a3523ab12a" +checksum = "851c8d0ce9bebe43790dedfc86614c23494ac9f423dd618d3a61fc693eafe61e" dependencies = [ "proc-macro2", "quote", @@ -3386,9 +3344,9 @@ dependencies = [ [[package]] name = "pin-project-internal" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", @@ -3437,9 +3395,9 @@ checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" [[package]] name = "plotters" -version = "0.3.1" +version = "0.3.4" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a3fd9ec30b9749ce28cd91f255d569591cdf937fe280c312143e3c4bad6f2a" +checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" dependencies = [ "num-traits", "plotters-backend", @@ -3450,15 +3408,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d88417318da0eaf0fdcdb51a0ee6c3bed624333bff8f946733049380be67ac1c" +checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" [[package]] name = "plotters-svg" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9" +checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" dependencies = [ "plotters-backend", ] @@ -3469,8 +3427,8 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "048aeb476be11a4b6ca432ca569e375810de9294ae78f4774e78ea98a9246ede" dependencies = [ - "cpufeatures 0.2.2", - "opaque-debug 0.3.0", + "cpufeatures", + "opaque-debug", "universal-hash 0.4.1", ] @@ -3480,8 +3438,8 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" dependencies = [ - "cpufeatures 0.2.2", - "opaque-debug 0.3.0", + "cpufeatures", + "opaque-debug", "universal-hash 0.5.0", ] @@ -3492,8 +3450,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8419d2b623c7c0896ff2d5d96e2cb4ede590fed28fcc34934f4c33c036e620a1" dependencies = [ "cfg-if 1.0.0", - "cpufeatures 0.2.2", - "opaque-debug 0.3.0", + "cpufeatures", + "opaque-debug", "universal-hash 0.4.1", ] @@ -3505,10 +3463,11 @@ checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" [[package]] name = 
"proc-macro-crate" -version = "1.1.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" dependencies = [ + "once_cell", "thiserror", "toml", ] @@ -3539,18 +3498,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.42" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c278e965f1d8cf32d6e0e96de3d3e79712178ae67986d9cf9151f51e95aac89b" +checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab" dependencies = [ "unicode-ident", ] [[package]] name = "prometheus" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cface98dfa6d645ea4c789839f176e4b072265d085bfcc48eaa8d137f58d3c39" +checksum = "45c8babc29389186697fe5a2a4859d697825496b83db5d0b65271cdc0488e88c" dependencies = [ "cfg-if 1.0.0", "fnv", @@ -3567,7 +3526,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "prost-derive", ] @@ -3577,9 +3536,9 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "heck 0.3.3", - "itertools 0.10.3", + "itertools 0.10.4", "lazy_static", "log", "multimap", @@ -3598,7 +3557,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" dependencies = [ "anyhow", - "itertools 0.10.3", + "itertools 0.10.4", "proc-macro2", "quote", "syn", @@ -3610,7 +3569,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "prost", ] @@ -3647,12 +3606,12 @@ dependencies = [ [[package]] name = "r2d2" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545c5bc2b880973c9c10e4067418407a0ccaa3091781d1671d46eb35107cb26f" +checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" dependencies = [ "log", - "parking_lot 0.11.2", + "parking_lot 0.12.1", "scheduled-thread-pool", ] @@ -3847,9 +3806,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.6" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", @@ -3867,9 +3826,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.26" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "remove_dir_all" @@ -3887,7 +3846,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b75aa69a3f06bbcc66ede33af2af253c6f7a86b1ca0033f60c580a27074fbf92" dependencies = [ "base64 0.13.0", - "bytes 1.1.0", + "bytes 1.2.1", "encoding_rs", "futures-core", "futures-util", @@ -3902,7 +3861,7 @@ dependencies = [ "log", "mime", "native-tls", - "percent-encoding 2.1.0", + "percent-encoding 2.2.0", "pin-project-lite", "serde", "serde_json", @@ -3910,7 +3869,7 @@ dependencies = [ "tokio", "tokio-native-tls", "tower-service", - "url 2.2.2", + "url 2.3.1", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -3940,14 +3899,14 @@ checksum = 
"2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251" dependencies = [ "block-buffer 0.9.0", "digest 0.9.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] name = "ron" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b861ecaade43ac97886a512b360d01d66be9f41f3c61088b42cedf92e03d678" +checksum = "88073939a61e5b7680558e6be56b419e208420c2adb92be54921fa6b72283f1a" dependencies = [ "base64 0.13.0", "bitflags 1.3.2", @@ -4006,36 +3965,27 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver 0.11.0", -] - [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.10", + "semver", ] [[package]] name = "rustix" -version = "0.34.8" +version = "0.35.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2079c267b8394eb529872c3cf92e181c378b41fea36e68130357b52493701d2e" +checksum = "72c825b8aa8010eb9ee99b75f05e10180b9278d161583034d7574c9d617aeada" dependencies = [ "bitflags 1.3.2", "errno", "io-lifetimes", "libc", "linux-raw-sys", - "winapi", + "windows-sys", ] [[package]] @@ -4095,9 +4045,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "safemem" @@ -4121,7 +4071,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ "lazy_static", - "windows-sys 0.36.1", + "windows-sys", ] [[package]] @@ -4169,9 +4119,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" +checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -4192,33 +4142,15 @@ dependencies = [ [[package]] name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver" -version = "1.0.10" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c" - -[[package]] -name = "semver-parser" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" -dependencies = [ - "pest", -] +checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" [[package]] name = "serde" -version = "1.0.143" +version = "1.0.144" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53e8e5d5b70924f74ff5c6d64d9a5acd91422117c60f48c4e07855238a254553" +checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860" dependencies = [ "serde_derive", ] @@ -4254,9 +4186,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.143" +version = "1.0.144" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d3d8e8de557aee63c26b85b947f5e59b690d0454c753f3adeb5cd7835ab88391" +checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00" dependencies = [ "proc-macro2", "quote", @@ -4265,20 +4197,20 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.81" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" +checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44" dependencies = [ - "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", ] [[package]] name = "serde_repr" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ad84e47328a31223de7fed7a4f5087f2d6ddfe586cf3ca25b7a165bc0a5aed" +checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca" dependencies = [ "proc-macro2", "quote", @@ -4292,16 +4224,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", ] [[package]] name = "serde_yaml" -version = "0.8.24" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707d15895415db6628332b737c838b88c598522e4dc70647e59b72312924aebc" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" dependencies = [ "indexmap", "ryu", @@ -4309,18 +4241,6 @@ dependencies = [ "yaml-rust", ] -[[package]] -name = "sha-1" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" -dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", -] - [[package]] name = "sha-1" version = "0.9.8" @@ -4329,27 +4249,27 @@ checksum = 
"99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpufeatures 0.2.2", + "cpufeatures", "digest 0.9.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] -name = "sha-1" -version = "0.10.0" +name = "sha1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" -dependencies = [ - "cfg-if 1.0.0", - "cpufeatures 0.2.2", - "digest 0.10.3", -] +checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" [[package]] name = "sha1" -version = "0.6.0" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" +checksum = "006769ba83e921b3085caa8334186b00cf92b4cb1a6cf4632fbccc8eff5c7549" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.3", +] [[package]] name = "sha2" @@ -4359,9 +4279,20 @@ checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpufeatures 0.2.2", + "cpufeatures", "digest 0.9.0", - "opaque-debug 0.3.0", + "opaque-debug", +] + +[[package]] +name = "sha2" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf9db03534dff993187064c4e0c05a5708d2a9728ace9a8959b77bedf415dac5" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.3", ] [[package]] @@ -4373,7 +4304,7 @@ dependencies = [ "block-buffer 0.9.0", "digest 0.9.0", "keccak", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] @@ -4420,7 +4351,7 @@ checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" dependencies = [ "libc", "mio 0.7.14", - "mio 0.8.3", + "mio 0.8.4", "signal-hook 0.3.14", ] @@ -4435,44 +4366,46 @@ dependencies = [ [[package]] name = "signature" -version = "1.5.0" +version = "1.6.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f054c6c1a6e95179d6f23ed974060dcefb2d9388bb7256900badad682c499de4" +checksum = "e90531723b08e4d6d71b791108faf51f03e1b4a7784f96b2b87f852ebc247228" [[package]] name = "slab" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg", +] [[package]] name = "smallvec" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] name = "snow" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6142f7c25e94f6fd25a32c3348ec230df9109b463f59c8c7acc4bd34936babb7" +checksum = "774d05a3edae07ce6d68ea6984f3c05e9bba8927e3dd591e3b479e5b03213d0d" dependencies = [ "aes-gcm", - "blake2 0.9.2", - "chacha20poly1305 0.8.0", - "rand 0.8.5", + "blake2 0.10.4", + "chacha20poly1305 0.9.1", + "curve25519-dalek 4.0.0-pre.1", "rand_core 0.6.3", - "rustc_version 0.3.3", - "sha2", + "rustc_version", + "sha2 0.10.5", "subtle", - "x25519-dalek", ] [[package]] name = "socket2" -version = "0.4.4" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" dependencies = [ "libc", "winapi", @@ -4618,7 +4551,7 @@ dependencies = [ [[package]] name = "tari_app_grpc" -version = "0.37.0" +version = "0.38.3" dependencies = [ "argon2 0.4.1", "base64 0.13.0", @@ -4643,12 +4576,12 @@ dependencies = [ [[package]] name = "tari_app_utilities" -version = "0.37.0" 
+version = "0.38.3" dependencies = [ - "clap 3.2.15", + "clap 3.2.21", "config", "dirs-next 1.0.2", - "futures 0.3.21", + "futures 0.3.24", "json5 0.2.8", "log", "rand 0.8.5", @@ -4658,7 +4591,6 @@ dependencies = [ "tari_common_types", "tari_comms", "tari_crypto", - "tari_p2p", "tari_utilities", "thiserror", "tokio", @@ -4666,18 +4598,18 @@ dependencies = [ [[package]] name = "tari_base_node" -version = "0.37.0" +version = "0.38.3" dependencies = [ "anyhow", "async-trait", "bincode", "chrono", - "clap 3.2.15", + "clap 3.2.21", "config", "crossterm 0.23.2", "derive_more", "either", - "futures 0.3.21", + "futures 0.3.24", "log", "log-mdc", "nom 7.1.1", @@ -4758,7 +4690,7 @@ dependencies = [ [[package]] name = "tari_common" -version = "0.37.0" +version = "0.38.3" dependencies = [ "anyhow", "blake2 0.9.2", @@ -4774,10 +4706,9 @@ dependencies = [ "prost-build", "serde", "serde_json", - "sha2", + "sha2 0.9.9", "sha3", "structopt", - "tari_common_types", "tari_crypto", "tari_test_utils", "tempfile", @@ -4787,7 +4718,7 @@ dependencies = [ [[package]] name = "tari_common_sqlite" -version = "0.37.0" +version = "0.38.3" dependencies = [ "diesel", "log", @@ -4796,7 +4727,7 @@ dependencies = [ [[package]] name = "tari_common_types" -version = "0.37.0" +version = "0.38.3" dependencies = [ "base64 0.13.0", "digest 0.9.0", @@ -4812,13 +4743,13 @@ dependencies = [ [[package]] name = "tari_comms" -version = "0.37.0" +version = "0.38.3" dependencies = [ "anyhow", "async-trait", "bitflags 1.3.2", "blake2 0.10.4", - "bytes 1.1.0", + "bytes 1.2.1", "chrono", "cidr", "clear_on_drop", @@ -4826,7 +4757,7 @@ dependencies = [ "derivative", "digest 0.9.0", "env_logger", - "futures 0.3.21", + "futures 0.3.24", "lazy_static", "lmdb-zero", "log", @@ -4834,7 +4765,7 @@ dependencies = [ "multiaddr", "nom 5.1.2", "once_cell", - "pin-project 1.0.10", + "pin-project 1.0.12", "prost", "prost-types", "rand 0.8.5", @@ -4862,12 +4793,12 @@ dependencies = [ [[package]] name = "tari_comms_dht" -version = 
"0.37.0" +version = "0.38.3" dependencies = [ "anyhow", "bitflags 1.3.2", "bytes 0.5.6", - "chacha20 0.7.1", + "chacha20 0.7.3", "chacha20poly1305 0.9.1", "chrono", "clap 2.34.0", @@ -4875,7 +4806,7 @@ dependencies = [ "diesel_migrations", "digest 0.9.0", "env_logger", - "futures 0.3.21", + "futures 0.3.24", "futures-test", "futures-util", "lazy_static", @@ -4884,7 +4815,7 @@ dependencies = [ "log", "log-mdc", "petgraph 0.5.1", - "pin-project 0.4.29", + "pin-project 0.4.30", "prost", "prost-types", "rand 0.8.5", @@ -4909,9 +4840,9 @@ dependencies = [ [[package]] name = "tari_comms_rpc_macros" -version = "0.37.0" +version = "0.38.3" dependencies = [ - "futures 0.3.21", + "futures 0.3.24", "proc-macro2", "prost", "quote", @@ -4924,16 +4855,16 @@ dependencies = [ [[package]] name = "tari_console_wallet" -version = "0.37.0" +version = "0.38.3" dependencies = [ "base64 0.13.0", "bitflags 1.3.2", "chrono", - "clap 3.2.15", + "clap 3.2.21", "config", "crossterm 0.17.7", "digest 0.9.0", - "futures 0.3.21", + "futures 0.3.24", "log", "opentelemetry", "opentelemetry-jaeger", @@ -4944,7 +4875,7 @@ dependencies = [ "rustyline", "serde", "serde_json", - "sha2", + "sha2 0.9.9", "strum", "strum_macros", "tari_app_grpc", @@ -4974,7 +4905,7 @@ dependencies = [ [[package]] name = "tari_core" -version = "0.37.0" +version = "0.38.3" dependencies = [ "async-trait", "bincode", @@ -4984,16 +4915,16 @@ dependencies = [ "chacha20poly1305 0.9.1", "chrono", "config", - "criterion 0.3.5", + "criterion 0.3.6", "croaring", "decimal-rs", "derivative", "digest 0.9.0", "env_logger", "fs2", - "futures 0.3.21", + "futures 0.3.24", "hex", - "integer-encoding 3.0.3", + "integer-encoding 3.0.4", "lmdb-zero", "log", "log-mdc", @@ -5037,8 +4968,8 @@ dependencies = [ [[package]] name = "tari_crypto" -version = "0.15.3" -source = "git+https://github.com/tari-project/tari-crypto.git?tag=v0.15.4#e5a6db0f3784c3141a342b345226695c7a20edb5" +version = "0.15.5" +source = 
"git+https://github.com/tari-project/tari-crypto.git?tag=v0.15.5#a531063441b51415b9bafad6e8dbeea31247fc4a" dependencies = [ "base64 0.10.1", "blake2 0.9.2", @@ -5062,12 +4993,12 @@ dependencies = [ [[package]] name = "tari_key_manager" -version = "0.37.0" +version = "0.38.3" dependencies = [ "argon2 0.2.4", "arrayvec 0.7.2", "blake2 0.9.2", - "chacha20 0.7.1", + "chacha20 0.7.3", "clear_on_drop", "console_error_panic_hook", "crc32fast", @@ -5079,7 +5010,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "sha2", + "sha2 0.9.9", "strum", "strum_macros", "tari_common_types", @@ -5109,18 +5040,17 @@ dependencies = [ [[package]] name = "tari_merge_mining_proxy" -version = "0.37.0" +version = "0.38.3" dependencies = [ "anyhow", "bincode", - "bytes 1.1.0", + "bytes 1.2.1", "chrono", - "clap 3.2.15", + "clap 3.2.21", "config", "crossterm 0.17.7", "derivative", - "env_logger", - "futures 0.3.21", + "futures 0.3.24", "hex", "hyper", "jsonrpc", @@ -5133,6 +5063,7 @@ dependencies = [ "tari_app_grpc", "tari_app_utilities", "tari_common", + "tari_common_types", "tari_comms", "tari_core", "tari_crypto", @@ -5141,7 +5072,7 @@ dependencies = [ "tokio", "tonic", "tracing", - "url 2.2.2", + "url 2.3.1", ] [[package]] @@ -5149,7 +5080,7 @@ name = "tari_metrics" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.21", + "futures 0.3.24", "log", "once_cell", "prometheus", @@ -5161,17 +5092,17 @@ dependencies = [ [[package]] name = "tari_miner" -version = "0.37.0" +version = "0.38.3" dependencies = [ "base64 0.13.0", "bufstream", "chrono", - "clap 3.2.15", + "clap 3.2.21", "config", "crossbeam", "crossterm 0.17.7", "derivative", - "futures 0.3.21", + "futures 0.3.24", "hex", "log", "native-tls", @@ -5197,7 +5128,7 @@ dependencies = [ [[package]] name = "tari_mining_helper_ffi" -version = "0.37.0" +version = "0.38.3" dependencies = [ "hex", "libc", @@ -5214,7 +5145,7 @@ dependencies = [ [[package]] name = "tari_mmr" -version = "0.37.0" +version = "0.38.3" dependencies 
= [ "bincode", "blake2 0.9.2", @@ -5233,7 +5164,7 @@ dependencies = [ [[package]] name = "tari_p2p" -version = "0.37.0" +version = "0.38.3" dependencies = [ "anyhow", "bytes 0.5.6", @@ -5241,7 +5172,7 @@ dependencies = [ "clap 2.34.0", "config", "fs2", - "futures 0.3.21", + "futures 0.3.24", "lazy_static", "lmdb-zero", "log", @@ -5250,7 +5181,7 @@ dependencies = [ "rand 0.8.5", "reqwest", "rustls", - "semver 1.0.10", + "semver", "serde", "serde_derive", "tari_common", @@ -5278,13 +5209,11 @@ version = "0.12.0" dependencies = [ "blake2 0.9.2", "digest 0.9.0", - "integer-encoding 3.0.3", + "integer-encoding 3.0.4", "rand 0.8.5", "serde", - "sha2", + "sha2 0.9.9", "sha3", - "tari_common", - "tari_common_types", "tari_crypto", "tari_utilities", "thiserror", @@ -5292,11 +5221,11 @@ dependencies = [ [[package]] name = "tari_service_framework" -version = "0.37.0" +version = "0.38.3" dependencies = [ "anyhow", "async-trait", - "futures 0.3.21", + "futures 0.3.24", "futures-test", "log", "tari_shutdown", @@ -5309,15 +5238,15 @@ dependencies = [ [[package]] name = "tari_shutdown" -version = "0.37.0" +version = "0.38.3" dependencies = [ - "futures 0.3.21", + "futures 0.3.24", "tokio", ] [[package]] name = "tari_storage" -version = "0.37.0" +version = "0.38.3" dependencies = [ "bincode", "lmdb-zero", @@ -5331,9 +5260,9 @@ dependencies = [ [[package]] name = "tari_test_utils" -version = "0.37.0" +version = "0.38.3" dependencies = [ - "futures 0.3.21", + "futures 0.3.24", "futures-test", "rand 0.8.5", "tari_shutdown", @@ -5358,7 +5287,7 @@ dependencies = [ [[package]] name = "tari_wallet" -version = "0.37.0" +version = "0.38.3" dependencies = [ "argon2 0.2.4", "async-trait", @@ -5367,15 +5296,14 @@ dependencies = [ "chacha20poly1305 0.10.1", "chrono", "clear_on_drop", - "crossbeam-channel", "derivative", "diesel", "diesel_migrations", "digest 0.9.0", "env_logger", "fs2", - "futures 0.3.21", - "itertools 0.10.3", + "futures 0.3.24", + "itertools 0.10.4", "libsqlite3-sys", 
"lmdb-zero", "log", @@ -5384,7 +5312,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "sha2", + "sha2 0.9.9", "strum", "strum_macros", "tari_common", @@ -5410,13 +5338,12 @@ dependencies = [ [[package]] name = "tari_wallet_ffi" -version = "0.37.0" +version = "0.38.3" dependencies = [ "cbindgen 0.24.3", "chrono", - "env_logger", - "futures 0.3.21", - "itertools 0.10.3", + "futures 0.3.24", + "itertools 0.10.4", "lazy_static", "libc", "log", @@ -5484,18 +5411,18 @@ checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" [[package]] name = "thiserror" -version = "1.0.31" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" +checksum = "c53f98874615aea268107765aa1ed8f6116782501d18e53d08b471733bea6c85" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.31" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" +checksum = "f8b463991b4eab2d801e724172285ec4195c650e8ec79b149e6c2a8e6dd3f783" dependencies = [ "proc-macro2", "quote", @@ -5557,9 +5484,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.9" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" +checksum = "3c3f9a28b618c3a6b9251b6908e9c99e04b9e5c02e6581ccbb67d59c34ef7f9b" dependencies = [ "libc", "num_threads", @@ -5601,14 +5528,15 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.19.2" +version = "1.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c51a52ed6686dd62c320f9b89299e9dfb46f730c7a48e635c19f21d116cb1439" +checksum = "0020c875007ad96677dcc890298f4b942882c5d4eb7cc8f439fc3bf813dc9c95" 
dependencies = [ - "bytes 1.1.0", + "autocfg", + "bytes 1.2.1", "libc", "memchr", - "mio 0.8.3", + "mio 0.8.4", "num_cpus", "once_cell", "pin-project-lite", @@ -5669,7 +5597,7 @@ dependencies = [ "futures-core", "pin-project-lite", "tokio", - "tokio-util 0.7.3", + "tokio-util 0.7.4", ] [[package]] @@ -5678,7 +5606,7 @@ version = "0.6.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "futures-io", "futures-sink", @@ -5689,11 +5617,11 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" +checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "futures-sink", "pin-project-lite", @@ -5719,7 +5647,7 @@ dependencies = [ "async-stream", "async-trait", "base64 0.13.0", - "bytes 1.1.0", + "bytes 1.2.1", "futures-core", "futures-util", "h2", @@ -5727,8 +5655,8 @@ dependencies = [ "http-body", "hyper", "hyper-timeout", - "percent-encoding 2.1.0", - "pin-project 1.0.10", + "percent-encoding 2.2.0", + "pin-project 1.0.12", "prost", "prost-derive", "tokio", @@ -5762,26 +5690,26 @@ dependencies = [ "hex", "hex-literal", "rand 0.8.5", - "sha1", + "sha1 0.6.0", "thiserror", ] [[package]] name = "tower" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a89fd63ad6adf737582df5db40d286574513c69a11dac5214dc3b5603d6713e" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", "hdrhistogram", "indexmap", - "pin-project 1.0.10", + "pin-project 1.0.12", "pin-project-lite", "rand 0.8.5", "slab", "tokio", - "tokio-util 0.7.3", + 
"tokio-util 0.7.4", "tower-layer", "tower-service", "tracing", @@ -5795,15 +5723,15 @@ checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.35" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" +checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307" dependencies = [ "cfg-if 1.0.0", "log", @@ -5814,9 +5742,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" +checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2" dependencies = [ "proc-macro2", "quote", @@ -5825,9 +5753,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.27" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709595b8878a4965ce5e87ebf880a7d39c9afc6837721b21a5a816a8117d921" +checksum = "5aeea4303076558a00714b823f9ad67d58a3bbda1df83d8827d21193156e22f7" dependencies = [ "once_cell", "valuable", @@ -5839,7 +5767,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" dependencies = [ - "pin-project 1.0.10", + "pin-project 1.0.12", "tracing", ] @@ -5922,7 +5850,7 @@ dependencies = [ "ring", "rustls", "thiserror", - "time 0.3.9", + "time 0.3.14", "tokio", "trust-dns-proto", "webpki 0.22.0", @@ -5954,7 +5882,7 @@ dependencies = 
[ "tinyvec", "tokio", "tokio-rustls", - "url 2.2.2", + "url 2.3.1", "webpki 0.22.0", ] @@ -5985,7 +5913,7 @@ checksum = "728f6b7e784825d272fe9d2a77e44063f4197a570cbedc6fdcc90a6ddac91296" dependencies = [ "byteorder", "cipher 0.3.0", - "opaque-debug 0.3.0", + "opaque-debug", ] [[package]] @@ -6005,9 +5933,9 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "ucd-trie" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" [[package]] name = "uint" @@ -6038,30 +5966,30 @@ checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" [[package]] name = "unicode-ident" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" +checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" [[package]] name = "unicode-width" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = 
"c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" @@ -6075,7 +6003,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" dependencies = [ - "generic-array 0.14.5", + "generic-array", "subtle", ] @@ -6123,14 +6051,13 @@ dependencies = [ [[package]] name = "url" -version = "2.2.2" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" dependencies = [ "form_urlencoded", - "idna 0.2.3", - "matches", - "percent-encoding 2.1.0", + "idna 0.3.0", + "percent-encoding 2.2.0", ] [[package]] @@ -6196,7 +6123,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cef4e1e9114a4b7f1ac799f16ce71c14de5778500c5450ec6b7b920c55b587e" dependencies = [ - "bytes 1.1.0", + "bytes 1.2.1", "futures-channel", "futures-util", "headers", @@ -6205,8 +6132,8 @@ dependencies = [ "log", "mime", "mime_guess", - "percent-encoding 2.1.0", - "pin-project 1.0.10", + "percent-encoding 2.2.0", + "pin-project 1.0.12", "scoped-tls", "serde", "serde_json", @@ -6360,13 +6287,13 @@ dependencies = [ [[package]] name = "which" -version = "4.2.5" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae" +checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b" dependencies = [ "either", - "lazy_static", "libc", + "once_cell", ] [[package]] @@ -6400,86 +6327,43 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-sys" -version = "0.30.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "030b7ff91626e57a05ca64a07c481973cbb2db774e4852c9c7ca342408c6a99a" -dependencies = [ - "windows_aarch64_msvc 0.30.0", - "windows_i686_gnu 0.30.0", - "windows_i686_msvc 0.30.0", - "windows_x86_64_gnu 0.30.0", - "windows_x86_64_msvc 0.30.0", -] - [[package]] name = "windows-sys" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ - "windows_aarch64_msvc 0.36.1", - "windows_i686_gnu 0.36.1", - "windows_i686_msvc 0.36.1", - "windows_x86_64_gnu 0.36.1", - "windows_x86_64_msvc 0.36.1", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_msvc", ] -[[package]] -name = "windows_aarch64_msvc" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29277a4435d642f775f63c7d1faeb927adba532886ce0287bd985bffb16b6bca" - [[package]] name = "windows_aarch64_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" -[[package]] -name = "windows_i686_gnu" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145e1989da93956c68d1864f32fb97c8f561a8f89a5125f6a2b7ea75524e4b8" - [[package]] name = "windows_i686_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" -[[package]] -name = "windows_i686_msvc" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a09e3a0d4753b73019db171c1339cd4362c8c44baf1bcea336235e955954a6" - [[package]] name = "windows_i686_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" -[[package]] -name = "windows_x86_64_gnu" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ca64fcb0220d58db4c119e050e7af03c69e6f4f415ef69ec1773d9aab422d5a" - [[package]] name = "windows_x86_64_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" -[[package]] -name = "windows_x86_64_msvc" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08cabc9f0066848fef4bc6a1c1668e6efce38b661d2aeec75d18d8617eebb5f1" - [[package]] name = "windows_x86_64_msvc" version = "0.36.1" @@ -6501,7 +6385,7 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a0c105152107e3b96f6a00a65e86ce82d9b125230e1c4302940eca58ff71f4f" dependencies = [ - "curve25519-dalek", + "curve25519-dalek 3.2.0", "rand_core 0.5.1", "zeroize", ] @@ -6517,14 +6401,14 @@ dependencies = [ [[package]] name = "yamux" -version = "0.9.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d9028f208dd5e63c614be69f115c1b53cacc1111437d4c765185856666c107" +checksum = "e5d9ba232399af1783a58d8eb26f6b5006fbefe2dc9ef36bd283324792d03ea5" dependencies = [ - "futures 0.3.21", + "futures 0.3.24", "log", "nohash-hasher", - "parking_lot 0.11.2", + "parking_lot 0.12.1", "rand 0.8.5", "static_assertions", ] diff --git a/applications/tari_app_grpc/Cargo.toml b/applications/tari_app_grpc/Cargo.toml index 80a5a40484..a1311193c4 100644 --- a/applications/tari_app_grpc/Cargo.toml +++ b/applications/tari_app_grpc/Cargo.toml @@ -4,14 +4,14 @@ authors = ["The Tari Development Community"] description = "This crate is to provide a single source for all cross application grpc files and conversions to and from tari::core" repository = "https://github.com/tari-project/tari" license = 
"BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] -tari_common_types = { version = "^0.37", path = "../../base_layer/common_types" } +tari_common_types = { version = "^0.38", path = "../../base_layer/common_types" } tari_comms = { path = "../../comms/core" } tari_core = { path = "../../base_layer/core" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_script = { path = "../../infrastructure/tari_script" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } diff --git a/applications/tari_app_grpc/proto/wallet.proto b/applications/tari_app_grpc/proto/wallet.proto index 7e46740fb0..164fda5ef1 100644 --- a/applications/tari_app_grpc/proto/wallet.proto +++ b/applications/tari_app_grpc/proto/wallet.proto @@ -76,8 +76,6 @@ service Wallet { rpc SetBaseNode(SetBaseNodeRequest) returns (SetBaseNodeResponse); rpc StreamTransactionEvents(TransactionEventRequest) returns (stream TransactionEventResponse); - rpc SeedWords(Empty) returns (SeedWordsResponse); - rpc DeleteSeedWordsFile(Empty) returns (FileDeletedResponse); rpc RegisterValidatorNode(RegisterValidatorNodeRequest) returns (RegisterValidatorNodeResponse); } @@ -322,14 +320,6 @@ message TransactionEventResponse { TransactionEvent transaction = 1; } -message SeedWordsResponse { - repeated string words = 1; -} - -message FileDeletedResponse { - -} - message RegisterValidatorNodeRequest { string validator_node_public_key = 1; Signature validator_node_signature = 2; diff --git a/applications/tari_app_utilities/Cargo.toml b/applications/tari_app_utilities/Cargo.toml index d73ebf22b8..42e6f247df 100644 --- a/applications/tari_app_utilities/Cargo.toml +++ b/applications/tari_app_utilities/Cargo.toml @@ -1,16 +1,15 @@ [package] name = "tari_app_utilities" -version = "0.37.0" +version = "0.38.3" authors = ["The 
Tari Development Community"] edition = "2018" license = "BSD-3-Clause" [dependencies] tari_comms = { path = "../../comms/core" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_common = { path = "../../common" } tari_common_types = { path = "../../base_layer/common_types" } -tari_p2p = { path = "../../base_layer/p2p", features = ["auto-update"] } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } clap = { version = "3.2.0", features = ["derive", "env"] } @@ -20,7 +19,7 @@ dirs-next = "1.0.2" json5 = "0.2.2" log = { version = "0.4.8", features = ["std"] } rand = "0.8" -tokio = { version = "1.11", features = ["signal"] } +tokio = { version = "1.20", features = ["signal"] } serde = "1.0.126" structopt = { version = "0.3.13", default_features = false } thiserror = "^1.0.26" diff --git a/applications/tari_app_utilities/src/utilities.rs b/applications/tari_app_utilities/src/utilities.rs index c3edfd9560..f098ce2044 100644 --- a/applications/tari_app_utilities/src/utilities.rs +++ b/applications/tari_app_utilities/src/utilities.rs @@ -46,7 +46,8 @@ pub fn setup_runtime() -> Result { /// Returns a CommsPublicKey from either a emoji id or a public key pub fn parse_emoji_id_or_public_key(key: &str) -> Option { - EmojiId::str_to_pubkey(&key.trim().replace('|', "")) + EmojiId::from_emoji_string(&key.trim().replace('|', "")) + .map(|emoji_id| emoji_id.to_public_key()) .or_else(|_| CommsPublicKey::from_hex(key)) .ok() } @@ -79,8 +80,8 @@ impl FromStr for UniPublicKey { type Err = UniIdError; fn from_str(key: &str) -> Result { - if let Ok(public_key) = EmojiId::str_to_pubkey(&key.trim().replace('|', "")) { - Ok(Self(public_key)) + if let Ok(emoji_id) = EmojiId::from_emoji_string(&key.trim().replace('|', "")) { + Ok(Self(emoji_id.to_public_key())) } else if let Ok(public_key) = 
PublicKey::from_hex(key) { Ok(Self(public_key)) } else { @@ -113,8 +114,8 @@ impl FromStr for UniNodeId { type Err = UniIdError; fn from_str(key: &str) -> Result { - if let Ok(public_key) = EmojiId::str_to_pubkey(&key.trim().replace('|', "")) { - Ok(Self::PublicKey(public_key)) + if let Ok(emoji_id) = EmojiId::from_emoji_string(&key.trim().replace('|', "")) { + Ok(Self::PublicKey(emoji_id.to_public_key())) } else if let Ok(public_key) = PublicKey::from_hex(key) { Ok(Self::PublicKey(public_key)) } else if let Ok(node_id) = NodeId::from_hex(key) { diff --git a/applications/tari_base_node/Cargo.toml b/applications/tari_base_node/Cargo.toml index 4a35a52372..160be8cd04 100644 --- a/applications/tari_base_node/Cargo.toml +++ b/applications/tari_base_node/Cargo.toml @@ -4,7 +4,7 @@ authors = ["The Tari Development Community"] description = "The tari full base node implementation" repository = "https://github.com/tari-project/tari" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] @@ -15,8 +15,8 @@ tari_comms = { path = "../../comms/core", features = ["rpc"] } tari_common_types = { path = "../../base_layer/common_types" } tari_comms_dht = { path = "../../comms/dht" } tari_core = { path = "../../base_layer/core", default-features = false, features = ["transactions"] } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } -tari_libtor = { path = "../../infrastructure/libtor" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } +tari_libtor = { path = "../../infrastructure/libtor", optional = true } tari_mmr = { path = "../../base_layer/mmr", features = ["native_bitmap"] } tari_p2p = { path = "../../base_layer/p2p", features = ["auto-update"] } tari_storage = {path="../../infrastructure/storage"} @@ -47,7 +47,7 @@ serde_json = "1.0.79" strum = { version = "0.22", features = ["derive"] } strum_macros = "0.22" thiserror = "^1.0.26" -tokio = { version = 
"1.11", features = ["signal"] } +tokio = { version = "1.20", features = ["signal"] } tonic = "0.6.2" tracing = "0.1.26" @@ -65,6 +65,6 @@ default = ["metrics"] avx2 = ["tari_core/avx2", "tari_crypto/simd_backend", "tari_p2p/avx2", "tari_comms/avx2", "tari_comms_dht/avx2"] metrics = ["tari_metrics", "tari_comms/metrics"] safe = [] -libtor = ["tari_libtor/libtor"] +libtor = ["tari_libtor"] diff --git a/applications/tari_base_node/log4rs_sample.yml b/applications/tari_base_node/log4rs_sample.yml index 54ea988c48..ee6d51e03f 100644 --- a/applications/tari_base_node/log4rs_sample.yml +++ b/applications/tari_base_node/log4rs_sample.yml @@ -36,7 +36,24 @@ appenders: count: 5 pattern: "log/base-node/network.{}.log" encoder: - pattern: "{d(%Y-%m-%d %H:%M:%S.%f)} [{t}] [Thread:{I}] {l:5} {m}{n} // {f}:{L}" + pattern: "{d(%Y-%m-%d %H:%M:%S.%f)} [{t}] [Thread:{I}] {l:5} {m} // {f}:{L}{n}" + # An appender named "network" that writes to a file with a custom pattern encoder + message_logging: + kind: rolling_file + path: "log/base-node/messages.log" + policy: + kind: compound + trigger: + kind: size + limit: 10mb + roller: + kind: fixed_window + base: 1 + count: 5 + pattern: "log/base-node/messages.{}.log" + encoder: + pattern: "{d(%Y-%m-%d %H:%M:%S.%f)} [{t}] [Thread:{I}] {l:5} {m} // {f}:{L}{n}" + # An appender named "base_layer" that writes to a file with a custom pattern encoder base_layer: @@ -53,7 +70,7 @@ appenders: count: 5 pattern: "log/base-node/base_layer.{}.log" encoder: - pattern: "{d(%Y-%m-%d %H:%M:%S.%f)} [{t}] [{X(node-public-key)},{X(node-id)}] {l:5} {m}{n} // {f}:{L} " + pattern: "{d(%Y-%m-%d %H:%M:%S.%f)} [{t}] [{X(node-public-key)},{X(node-id)}] {l:5} {m} // {f}:{L}{n}" # An appender named "other" that writes to a file with a custom pattern encoder other: @@ -152,3 +169,9 @@ loggers: appenders: - other additive: false + + comms::middleware::message_logging: + # Set to `trace` to retrieve message logging + level: warn + appenders: + - message_logging diff --git 
a/applications/tari_base_node/src/bootstrap.rs b/applications/tari_base_node/src/bootstrap.rs index 5021e52784..97d1c24643 100644 --- a/applications/tari_base_node/src/bootstrap.rs +++ b/applications/tari_base_node/src/bootstrap.rs @@ -78,7 +78,7 @@ impl BaseNodeBootstrapper<'_, B> where B: BlockchainBackend + 'static { pub async fn bootstrap(self) -> Result { - let base_node_config = &self.app_config.base_node; + let mut base_node_config = self.app_config.base_node.clone(); let mut p2p_config = self.app_config.base_node.p2p.clone(); let peer_seeds = &self.app_config.peer_seeds; @@ -95,6 +95,8 @@ where B: BlockchainBackend + 'static .collect::, _>>() .map_err(|e| ExitError::new(ExitCode::ConfigError, e))?; + base_node_config.state_machine.blockchain_sync_config.forced_sync_peers = sync_peers.clone(); + debug!(target: LOG_TARGET, "{} sync peer(s) configured", sync_peers.len()); let mempool_sync = MempoolSyncInitializer::new(mempool_config, self.mempool.clone()); diff --git a/applications/tari_base_node/src/builder.rs b/applications/tari_base_node/src/builder.rs index 9ea1b0a661..a9326055e2 100644 --- a/applications/tari_base_node/src/builder.rs +++ b/applications/tari_base_node/src/builder.rs @@ -244,8 +244,8 @@ async fn build_node_context( app_config.base_node.bypass_range_proof_verification, blockchain_db.clone(), )), - Box::new(TxInputAndMaturityValidator::new(blockchain_db.clone())), Box::new(TxConsensusValidator::new(blockchain_db.clone())), + Box::new(TxInputAndMaturityValidator::new(blockchain_db.clone())), ]); let mempool = Mempool::new( app_config.base_node.mempool.clone(), diff --git a/applications/tari_base_node/src/commands/cli.rs b/applications/tari_base_node/src/commands/cli.rs index 45298a5ff3..d6f4b5499a 100644 --- a/applications/tari_base_node/src/commands/cli.rs +++ b/applications/tari_base_node/src/commands/cli.rs @@ -23,10 +23,7 @@ use std::io::stdout; use chrono::{Datelike, Utc}; -use crossterm::{ - execute, - terminal::{SetSize, SetTitle}, -}; 
+use crossterm::{execute, terminal::SetTitle}; use tari_app_utilities::consts; /// returns the top or bottom box line of the specified length @@ -106,17 +103,8 @@ fn multiline_find_display_length(lines: &str) -> usize { result } -/// Try to resize terminal to make sure the width is enough. -/// In case of error, just simply print out the error. -#[allow(clippy::cast_possible_truncation)] -fn resize_terminal_to_fit_the_box(width: usize, height: usize) { - if let Err(e) = execute!(stdout(), SetSize(width as u16, height as u16)) { - println!("Can't resize terminal to fit the box. Error: {}", e) - } -} - /// Prints a pretty banner on the console as well as the list of available commands -pub fn print_banner(commands: Vec, chunk_size: usize, resize_terminal: bool) { +pub fn print_banner(commands: Vec, chunk_size: usize) { let terminal_title = format!("Tari Base Node - Version {}", consts::APP_VERSION); if let Err(e) = execute!(stdout(), SetTitle(terminal_title.as_str())) { println!("Error setting terminal title. {}", e) @@ -191,13 +179,8 @@ pub fn print_banner(commands: Vec, chunk_size: usize, resize_terminal: b let rows = box_tabular_data_rows(command_data, row_cell_size, target_line_length, 10); // There are 24 fixed rows besides the possible changed "Commands" rows // and plus 2 more blank rows for better layout. 
- let height_to_resize = &rows.len() + 24 + 2; for row in rows { println!("{}", row); } println!("{}", box_line(target_line_length, false)); - - if resize_terminal { - resize_terminal_to_fit_the_box(target_line_length, height_to_resize); - } } diff --git a/applications/tari_base_node/src/commands/cli_loop.rs b/applications/tari_base_node/src/commands/cli_loop.rs index 6834850916..58f89e0b7b 100644 --- a/applications/tari_base_node/src/commands/cli_loop.rs +++ b/applications/tari_base_node/src/commands/cli_loop.rs @@ -79,8 +79,8 @@ impl CliLoop { /// /// ## Returns /// Doesn't return anything - pub async fn cli_loop(mut self, resize_terminal_on_startup: bool) { - cli::print_banner(self.commands.clone(), 3, resize_terminal_on_startup); + pub async fn cli_loop(mut self) { + cli::print_banner(self.commands.clone(), 3); if self.non_interactive { self.watch_loop_non_interactive().await; diff --git a/applications/tari_base_node/src/commands/command/get_peer.rs b/applications/tari_base_node/src/commands/command/get_peer.rs index 0b1e3c2274..91c78d114f 100644 --- a/applications/tari_base_node/src/commands/command/get_peer.rs +++ b/applications/tari_base_node/src/commands/command/get_peer.rs @@ -80,7 +80,7 @@ impl CommandContext { } }; - let eid = EmojiId::from_pubkey(&peer.public_key); + let eid = EmojiId::from_public_key(&peer.public_key).to_emoji_string(); println!("Emoji ID: {}", eid); println!("Public Key: {}", peer.public_key); println!("NodeId: {}", peer.node_id); diff --git a/applications/tari_base_node/src/commands/command/list_connections.rs b/applications/tari_base_node/src/commands/command/list_connections.rs index 42d7402340..dcef31f483 100644 --- a/applications/tari_base_node/src/commands/command/list_connections.rs +++ b/applications/tari_base_node/src/commands/command/list_connections.rs @@ -23,6 +23,7 @@ use anyhow::Error; use async_trait::async_trait; use clap::Parser; +use tari_comms::PeerConnection; use 
tari_core::base_node::state_machine_service::states::PeerMetadata; use super::{CommandContext, HandleCommand}; @@ -40,70 +41,94 @@ impl HandleCommand for CommandContext { } impl CommandContext { - /// Function to process the list-connections command - pub async fn list_connections(&mut self) -> Result<(), Error> { - let conns = self.connectivity.get_active_connections().await?; - if conns.is_empty() { - println!("No active peer connections."); - } else { - println!(); - let num_connections = conns.len(); - let mut table = Table::new(); - table.set_titles(vec![ - "NodeId", - "Public Key", - "Address", - "Direction", - "Age", - "Role", - "User Agent", - "Info", + async fn list_connections_print_table(&mut self, conns: &[PeerConnection]) { + let num_connections = conns.len(); + let mut table = Table::new(); + table.set_titles(vec![ + "NodeId", + "Public Key", + "Address", + "Direction", + "Age", + "User Agent", + "Info", + ]); + for conn in conns { + let peer = self + .peer_manager + .find_by_node_id(conn.peer_node_id()) + .await + .expect("Unexpected peer database error") + .expect("Peer not found"); + + let chain_height = peer + .get_metadata(1) + .and_then(|v| bincode::deserialize::(v).ok()) + .map(|metadata| format!("height: {}", metadata.metadata.height_of_longest_chain())); + + let ua = peer.user_agent; + let rpc_sessions = self + .rpc_server + .get_num_active_sessions_for(peer.node_id.clone()) + .await + .unwrap_or(0); + table.add_row(row![ + peer.node_id, + peer.public_key, + conn.address(), + conn.direction(), + format_duration_basic(conn.age()), + { + if ua.is_empty() { + "" + } else { + ua.as_ref() + } + }, + format!( + "{}hnd: {}, ss: {}, rpc: {}", + chain_height.map(|s| format!("{}, ", s)).unwrap_or_default(), + // Exclude the handle held by list-connections + conn.handle_count().saturating_sub(1), + conn.substream_count(), + rpc_sessions + ), ]); - for conn in conns { - let peer = self - .peer_manager - .find_by_node_id(conn.peer_node_id()) - .await - 
.expect("Unexpected peer database error") - .expect("Peer not found"); + } - let chain_height = peer - .get_metadata(1) - .and_then(|v| bincode::deserialize::(v).ok()) - .map(|metadata| format!("height: {}", metadata.metadata.height_of_longest_chain())); + table.print_stdout(); - let ua = peer.user_agent; - table.add_row(row![ - peer.node_id, - peer.public_key, - conn.address(), - conn.direction(), - format_duration_basic(conn.age()), - { - if peer.features.is_client() { - "Wallet" - } else { - "Base node" - } - }, - { - if ua.is_empty() { - "" - } else { - ua.as_ref() - } - }, - format!( - "substreams: {}{}", - conn.substream_count(), - chain_height.map(|s| format!(", {}", s)).unwrap_or_default() - ), - ]); - } + println!("{} active connection(s)", num_connections); + } +} - table.print_stdout(); +impl CommandContext { + /// Function to process the list-connections command + pub async fn list_connections(&mut self) -> Result<(), Error> { + let conns = self.connectivity.get_active_connections().await?; + let (mut nodes, mut clients) = conns + .into_iter() + .partition::, _>(|a| a.peer_features().is_node()); + nodes.sort_by(|a, b| a.peer_node_id().cmp(b.peer_node_id())); + clients.sort_by(|a, b| a.peer_node_id().cmp(b.peer_node_id())); - println!("{} active connection(s)", num_connections); + println!(); + println!("Base Nodes"); + println!("----------"); + if nodes.is_empty() { + println!("No active node connections."); + } else { + println!(); + self.list_connections_print_table(&nodes).await; + } + println!(); + println!("Wallets"); + println!("-------"); + if nodes.is_empty() { + println!("No active wallet connections."); + } else { + println!(); + self.list_connections_print_table(&clients).await; } Ok(()) } diff --git a/applications/tari_base_node/src/commands/command/ping_peer.rs b/applications/tari_base_node/src/commands/command/ping_peer.rs index b21f72c803..892ce7321b 100644 --- a/applications/tari_base_node/src/commands/command/ping_peer.rs +++ 
b/applications/tari_base_node/src/commands/command/ping_peer.rs @@ -26,7 +26,7 @@ use clap::Parser; use tari_app_utilities::utilities::UniNodeId; use tari_comms::peer_manager::NodeId; use tari_p2p::services::liveness::LivenessEvent; -use tokio::sync::broadcast::error::RecvError; +use tokio::{sync::broadcast::error::RecvError, task}; use super::{CommandContext, HandleCommand}; @@ -49,27 +49,32 @@ impl CommandContext { pub async fn ping_peer(&mut self, dest_node_id: NodeId) -> Result<(), Error> { println!("🏓 Pinging peer..."); let mut liveness_events = self.liveness.get_event_stream(); - - self.liveness.send_ping(dest_node_id.clone()).await?; - loop { - match liveness_events.recv().await { - Ok(event) => { - if let LivenessEvent::ReceivedPong(pong) = &*event { - if pong.node_id == dest_node_id { - println!( - "🏓️ Pong received, round-trip-time is {:.2?}!", - pong.latency.unwrap_or_default() - ); - break; + let mut liveness = self.liveness.clone(); + task::spawn(async move { + if let Err(e) = liveness.send_ping(dest_node_id.clone()).await { + println!("🏓 Ping failed to send to {}: {}", dest_node_id, e); + return; + } + loop { + match liveness_events.recv().await { + Ok(event) => { + if let LivenessEvent::ReceivedPong(pong) = &*event { + if pong.node_id == dest_node_id { + println!( + "🏓️ Pong received, round-trip-time is {:.2?}!", + pong.latency.unwrap_or_default() + ); + break; + } } - } - }, - Err(RecvError::Closed) => { - break; - }, - Err(RecvError::Lagged(_)) => {}, + }, + Err(RecvError::Closed) => { + break; + }, + Err(RecvError::Lagged(_)) => {}, + } } - } + }); Ok(()) } } diff --git a/applications/tari_base_node/src/config.rs b/applications/tari_base_node/src/config.rs index 6175f02f5e..21fd61dfe2 100644 --- a/applications/tari_base_node/src/config.rs +++ b/applications/tari_base_node/src/config.rs @@ -130,8 +130,6 @@ pub struct BaseNodeConfig { pub metadata_auto_ping_interval: Duration, /// The state_machine config settings pub state_machine: 
BaseNodeStateMachineConfig, - /// Resize the CLI terminal on startup to a pre-defined size, or keep user settings - pub resize_terminal_on_startup: bool, /// Obscure GRPC error responses pub report_grpc_error: bool, } @@ -166,7 +164,6 @@ impl Default for BaseNodeConfig { buffer_rate_limit: 1_000, metadata_auto_ping_interval: Duration::from_secs(30), state_machine: Default::default(), - resize_terminal_on_startup: true, report_grpc_error: false, } } diff --git a/applications/tari_base_node/src/main.rs b/applications/tari_base_node/src/main.rs index f700964165..9a63982e7e 100644 --- a/applications/tari_base_node/src/main.rs +++ b/applications/tari_base_node/src/main.rs @@ -254,7 +254,7 @@ async fn run_node( } info!(target: LOG_TARGET, "Tari base node has STARTED"); - main_loop.cli_loop(config.base_node.resize_terminal_on_startup).await; + main_loop.cli_loop().await; ctx.wait_for_shutdown().await; diff --git a/applications/tari_console_wallet/Cargo.toml b/applications/tari_console_wallet/Cargo.toml index c2cc7926a6..1c90ebe320 100644 --- a/applications/tari_console_wallet/Cargo.toml +++ b/applications/tari_console_wallet/Cargo.toml @@ -1,19 +1,19 @@ [package] name = "tari_console_wallet" -version = "0.37.0" +version = "0.38.3" authors = ["The Tari Development Community"] edition = "2018" license = "BSD-3-Clause" [dependencies] tari_wallet = { path = "../../base_layer/wallet", features = ["bundled_sqlite"] } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_common = { path = "../../common" } tari_app_utilities = { path = "../tari_app_utilities" } tari_comms = { path = "../../comms/core" } tari_comms_dht = { path = "../../comms/dht" } tari_common_types = { path = "../../base_layer/common_types" } -tari_libtor = { path = "../../infrastructure/libtor" } +tari_libtor = { path = "../../infrastructure/libtor", optional = true } tari_p2p = 
{ path = "../../base_layer/p2p", features = ["auto-update"] } tari_app_grpc = { path = "../tari_app_grpc" } tari_shutdown = { path = "../../infrastructure/shutdown" } @@ -22,9 +22,9 @@ tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", t # Uncomment for tokio tracing via tokio-console (needs "tracing" featurs) #console-subscriber = "0.1.3" -#tokio = { version = "1.14", features = ["signal", "tracing"] } +#tokio = { version = "1.20", features = ["signal", "tracing"] } # Uncomment for normal use (non tokio-console tracing) -tokio = { version = "1.14", default-features = false, features = ["signal", "sync"] } +tokio = { version = "1.20", default-features = false, features = ["signal", "sync"] } base64 = "0.13.0" bitflags = "1.2.1" @@ -69,5 +69,5 @@ features = ["crossterm"] [features] avx2 = ["tari_core/avx2", "tari_crypto/simd_backend", "tari_wallet/avx2", "tari_comms/avx2", "tari_comms_dht/avx2", "tari_p2p/avx2", "tari_key_manager/avx2"] -libtor = ["tari_libtor/libtor"] +libtor = ["tari_libtor"] diff --git a/applications/tari_console_wallet/src/automation/commands.rs b/applications/tari_console_wallet/src/automation/commands.rs index 484de6dcec..e4701d853e 100644 --- a/applications/tari_console_wallet/src/automation/commands.rs +++ b/applications/tari_console_wallet/src/automation/commands.rs @@ -59,7 +59,7 @@ use tari_wallet::{ connectivity_service::WalletConnectivityInterface, error::WalletError, key_manager_service::NextKeyResult, - output_manager_service::handle::OutputManagerHandle, + output_manager_service::{handle::OutputManagerHandle, UtxoSelectionCriteria}, transaction_service::handle::{TransactionEvent, TransactionServiceHandle}, TransactionStage, WalletConfig, @@ -119,6 +119,7 @@ pub async fn send_tari( .send_transaction( dest_pubkey, amount, + UtxoSelectionCriteria::default(), OutputFeatures::default(), fee_per_gram * uT, message, @@ -127,16 +128,29 @@ pub async fn send_tari( .map_err(CommandError::TransactionServiceError) } 
+pub async fn burn_tari( + mut wallet_transaction_service: TransactionServiceHandle, + fee_per_gram: u64, + amount: MicroTari, + message: String, +) -> Result { + wallet_transaction_service + .burn_tari(amount, UtxoSelectionCriteria::default(), fee_per_gram * uT, message) + .await + .map_err(CommandError::TransactionServiceError) +} + /// publishes a tari-SHA atomic swap HTLC transaction pub async fn init_sha_atomic_swap( mut wallet_transaction_service: TransactionServiceHandle, fee_per_gram: u64, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, dest_pubkey: PublicKey, message: String, ) -> Result<(TxId, PublicKey, TransactionOutput), CommandError> { let (tx_id, pre_image, output) = wallet_transaction_service - .send_sha_atomic_swap_transaction(dest_pubkey, amount, fee_per_gram * uT, message) + .send_sha_atomic_swap_transaction(dest_pubkey, amount, selection_criteria, fee_per_gram * uT, message) .await .map_err(CommandError::TransactionServiceError)?; Ok((tx_id, pre_image, output)) @@ -200,6 +214,7 @@ pub async fn send_one_sided( mut wallet_transaction_service: TransactionServiceHandle, fee_per_gram: u64, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, dest_pubkey: PublicKey, message: String, ) -> Result { @@ -207,6 +222,7 @@ pub async fn send_one_sided( .send_one_sided_transaction( dest_pubkey, amount, + selection_criteria, OutputFeatures::default(), fee_per_gram * uT, message, @@ -219,6 +235,7 @@ pub async fn send_one_sided_to_stealth_address( mut wallet_transaction_service: TransactionServiceHandle, fee_per_gram: u64, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, dest_pubkey: PublicKey, message: String, ) -> Result { @@ -226,6 +243,7 @@ pub async fn send_one_sided_to_stealth_address( .send_one_sided_to_stealth_address_transaction( dest_pubkey, amount, + selection_criteria, OutputFeatures::default(), fee_per_gram * uT, message, @@ -393,10 +411,26 @@ pub async fn make_it_rain( send_tari(tx_service, fee, amount, 
pk.clone(), msg.clone()).await }, MakeItRainTransactionType::OneSided => { - send_one_sided(tx_service, fee, amount, pk.clone(), msg.clone()).await + send_one_sided( + tx_service, + fee, + amount, + UtxoSelectionCriteria::default(), + pk.clone(), + msg.clone(), + ) + .await }, MakeItRainTransactionType::StealthOneSided => { - send_one_sided_to_stealth_address(tx_service, fee, amount, pk.clone(), msg.clone()).await + send_one_sided_to_stealth_address( + tx_service, + fee, + amount, + UtxoSelectionCriteria::default(), + pk.clone(), + msg.clone(), + ) + .await }, }; let submit_time = Instant::now(); @@ -585,56 +619,102 @@ pub async fn command_runner( match parsed { GetBalance => match output_service.clone().get_balance().await { Ok(balance) => { + debug!(target: LOG_TARGET, "get-balance concluded"); println!("{}", balance); }, Err(e) => eprintln!("GetBalance error! {}", e), }, DiscoverPeer(args) => { if !online { - wait_for_comms(&connectivity_requester).await?; - online = true; + match wait_for_comms(&connectivity_requester).await { + Ok(..) => { + online = true; + }, + Err(e) => { + eprintln!("DiscoverPeer error! {}", e); + continue; + }, + } + } + if let Err(e) = discover_peer(dht_service.clone(), args.dest_public_key.into()).await { + eprintln!("DiscoverPeer error! {}", e); + } + }, + BurnTari(args) => { + match burn_tari( + transaction_service.clone(), + config.fee_per_gram, + args.amount, + args.message, + ) + .await + { + Ok(tx_id) => { + debug!(target: LOG_TARGET, "burn tari concluded with tx_id {}", tx_id); + tx_ids.push(tx_id); + }, + Err(e) => eprintln!("BurnTari error! {}", e), } - discover_peer(dht_service.clone(), args.dest_public_key.into()).await? 
}, SendTari(args) => { - let tx_id = send_tari( + match send_tari( transaction_service.clone(), config.fee_per_gram, args.amount, args.destination.into(), args.message, ) - .await?; - debug!(target: LOG_TARGET, "send-tari tx_id {}", tx_id); - tx_ids.push(tx_id); + .await + { + Ok(tx_id) => { + debug!(target: LOG_TARGET, "send-tari concluded with tx_id {}", tx_id); + tx_ids.push(tx_id); + }, + Err(e) => eprintln!("SendTari error! {}", e), + } }, SendOneSided(args) => { - let tx_id = send_one_sided( + match send_one_sided( transaction_service.clone(), config.fee_per_gram, args.amount, + UtxoSelectionCriteria::default(), args.destination.into(), args.message, ) - .await?; - debug!(target: LOG_TARGET, "send-one-sided tx_id {}", tx_id); - tx_ids.push(tx_id); + .await + { + Ok(tx_id) => { + debug!(target: LOG_TARGET, "send-one-sided concluded with tx_id {}", tx_id); + tx_ids.push(tx_id); + }, + Err(e) => eprintln!("SendOneSided error! {}", e), + } }, SendOneSidedToStealthAddress(args) => { - let tx_id = send_one_sided_to_stealth_address( + match send_one_sided_to_stealth_address( transaction_service.clone(), config.fee_per_gram, args.amount, + UtxoSelectionCriteria::default(), args.destination.into(), args.message, ) - .await?; - debug!(target: LOG_TARGET, "send-one-sided-to-stealth-address tx_id {}", tx_id); - tx_ids.push(tx_id); + .await + { + Ok(tx_id) => { + debug!( + target: LOG_TARGET, + "send-one-sided-to-stealth-address concluded with tx_id {}", tx_id + ); + tx_ids.push(tx_id); + }, + Err(e) => eprintln!("SendOneSidedToStealthAddress error! {}", e), + } }, MakeItRain(args) => { let transaction_type = args.transaction_type(); - make_it_rain( + if let Err(e) = make_it_rain( transaction_service.clone(), config.fee_per_gram, args.transactions_per_second, @@ -646,10 +726,13 @@ pub async fn command_runner( transaction_type, args.message, ) - .await?; + .await + { + eprintln!("MakeItRain error! 
{}", e); + } }, CoinSplit(args) => { - let tx_id = coin_split( + match coin_split( args.amount_per_split, args.num_splits, args.fee_per_gram, @@ -657,161 +740,230 @@ pub async fn command_runner( &mut output_service, &mut transaction_service.clone(), ) - .await?; - tx_ids.push(tx_id); - println!("Coin split succeeded"); + .await + { + Ok(tx_id) => { + tx_ids.push(tx_id); + debug!(target: LOG_TARGET, "coin-split concluded with tx_id {}", tx_id); + println!("Coin split succeeded"); + }, + Err(e) => eprintln!("CoinSplit error! {}", e), + } }, Whois(args) => { let public_key = args.public_key.into(); - let emoji_id = EmojiId::from_pubkey(&public_key); + let emoji_id = EmojiId::from_public_key(&public_key).to_emoji_string(); println!("Public Key: {}", public_key.to_hex()); println!("Emoji ID : {}", emoji_id); }, - ExportUtxos(args) => { - let utxos = output_service.get_unspent_outputs().await?; - let count = utxos.len(); - let sum: MicroTari = utxos.iter().map(|utxo| utxo.value).sum(); - if let Some(file) = args.output_file { - write_utxos_to_csv_file(utxos, file)?; - } else { - for (i, utxo) in utxos.iter().enumerate() { - println!("{}. Value: {} {}", i + 1, utxo.value, utxo.features); + ExportUtxos(args) => match output_service.get_unspent_outputs().await { + Ok(utxos) => { + let count = utxos.len(); + let sum: MicroTari = utxos.iter().map(|utxo| utxo.value).sum(); + if let Some(file) = args.output_file { + if let Err(e) = write_utxos_to_csv_file(utxos, file) { + eprintln!("ExportUtxos error! {}", e); + } + } else { + for (i, utxo) in utxos.iter().enumerate() { + println!("{}. Value: {} {}", i + 1, utxo.value, utxo.features); + } } - } - println!("Total number of UTXOs: {}", count); - println!("Total value of UTXOs: {}", sum); + println!("Total number of UTXOs: {}", count); + println!("Total value of UTXOs: {}", sum); + }, + Err(e) => eprintln!("ExportUtxos error! 
{}", e), }, - ExportSpentUtxos(args) => { - let utxos = output_service.get_spent_outputs().await?; - let count = utxos.len(); - let sum: MicroTari = utxos.iter().map(|utxo| utxo.value).sum(); - if let Some(file) = args.output_file { - write_utxos_to_csv_file(utxos, file)?; - } else { - for (i, utxo) in utxos.iter().enumerate() { - println!("{}. Value: {} {}", i + 1, utxo.value, utxo.features); + ExportSpentUtxos(args) => match output_service.get_spent_outputs().await { + Ok(utxos) => { + let count = utxos.len(); + let sum: MicroTari = utxos.iter().map(|utxo| utxo.value).sum(); + if let Some(file) = args.output_file { + if let Err(e) = write_utxos_to_csv_file(utxos, file) { + eprintln!("ExportSpentUtxos error! {}", e); + } + } else { + for (i, utxo) in utxos.iter().enumerate() { + println!("{}. Value: {} {}", i + 1, utxo.value, utxo.features); + } } - } - println!("Total number of UTXOs: {}", count); - println!("Total value of UTXOs: {}", sum); + println!("Total number of UTXOs: {}", count); + println!("Total value of UTXOs: {}", sum); + }, + Err(e) => eprintln!("ExportSpentUtxos error! 
{}", e), }, - CountUtxos => { - let utxos = output_service.get_unspent_outputs().await?; - let count = utxos.len(); - let values: Vec = utxos.iter().map(|utxo| utxo.value).collect(); - let sum: MicroTari = values.iter().sum(); - println!("Total number of UTXOs: {}", count); - println!("Total value of UTXOs : {}", sum); - if let Some(min) = values.iter().min() { - println!("Minimum value UTXO : {}", min); - } - if count > 0 { - let average = f64::from(sum) / count as f64; - let average = Tari::from(MicroTari(average.round() as u64)); - println!("Average value UTXO : {}", average); - } - if let Some(max) = values.iter().max() { - println!("Maximum value UTXO : {}", max); - } + CountUtxos => match output_service.get_unspent_outputs().await { + Ok(utxos) => { + let count = utxos.len(); + let values: Vec = utxos.iter().map(|utxo| utxo.value).collect(); + let sum: MicroTari = values.iter().sum(); + println!("Total number of UTXOs: {}", count); + println!("Total value of UTXOs : {}", sum); + if let Some(min) = values.iter().min() { + println!("Minimum value UTXO : {}", min); + } + if count > 0 { + let average = f64::from(sum) / count as f64; + let average = Tari::from(MicroTari(average.round() as u64)); + println!("Average value UTXO : {}", average); + } + if let Some(max) = values.iter().max() { + println!("Maximum value UTXO : {}", max); + } + }, + Err(e) => eprintln!("CountUtxos error! {}", e), }, SetBaseNode(args) => { - set_base_node_peer(wallet.clone(), args.public_key.into(), args.address).await?; + if let Err(e) = set_base_node_peer(wallet.clone(), args.public_key.into(), args.address).await { + eprintln!("SetBaseNode error! 
{}", e); + } }, SetCustomBaseNode(args) => { - let (public_key, net_address) = - set_base_node_peer(wallet.clone(), args.public_key.into(), args.address).await?; - wallet - .db - .set_client_key_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string(), public_key.to_string()) - .await?; - wallet - .db - .set_client_key_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string(), net_address.to_string()) - .await?; - println!("Custom base node peer saved in wallet database."); + match set_base_node_peer(wallet.clone(), args.public_key.into(), args.address).await { + Ok((public_key, net_address)) => { + if let Err(e) = wallet + .db + .set_client_key_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string(), public_key.to_string()) + { + eprintln!("SetCustomBaseNode error! {}", e); + } else if let Err(e) = wallet + .db + .set_client_key_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string(), net_address.to_string()) + { + eprintln!("SetCustomBaseNode error! {}", e); + } else { + println!("Custom base node peer saved in wallet database."); + } + }, + Err(e) => eprintln!("SetCustomBaseNode error! {}", e), + } }, ClearCustomBaseNode => { - wallet + match wallet .db .clear_client_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string()) - .await?; - wallet - .db - .clear_client_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string()) - .await?; - println!("Custom base node peer cleared from wallet database."); + { + Ok(_) => match wallet.db.clear_client_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string()) { + Ok(true) => { + println!("Custom base node peer cleared from wallet database.") + }, + Ok(false) => { + println!("Warning - custom base node peer not cleared from wallet database.") + }, + Err(e) => eprintln!("ClearCustomBaseNode error! {}", e), + }, + Err(e) => eprintln!("ClearCustomBaseNode error! 
{}", e), + } }, InitShaAtomicSwap(args) => { - let (tx_id, pre_image, output) = init_sha_atomic_swap( + match init_sha_atomic_swap( transaction_service.clone(), config.fee_per_gram, args.amount, + UtxoSelectionCriteria::default(), args.destination.into(), args.message, ) - .await?; - debug!(target: LOG_TARGET, "tari HTLC tx_id {}", tx_id); - let hash: [u8; 32] = Sha256::digest(pre_image.as_bytes()).into(); - println!("pre_image hex: {}", pre_image.to_hex()); - println!("pre_image hash: {}", hash.to_hex()); - println!("Output hash: {}", output.hash().to_hex()); - tx_ids.push(tx_id); + .await + { + Ok((tx_id, pre_image, output)) => { + debug!(target: LOG_TARGET, "tari HTLC tx_id {}", tx_id); + let hash: [u8; 32] = Sha256::digest(pre_image.as_bytes()).into(); + println!("pre_image hex: {}", pre_image.to_hex()); + println!("pre_image hash: {}", hash.to_hex()); + println!("Output hash: {}", output.hash().to_hex()); + tx_ids.push(tx_id); + }, + Err(e) => eprintln!("InitShaAtomicSwap error! {}", e), + } }, - FinaliseShaAtomicSwap(args) => { - let hash = args.output_hash[0].clone().try_into()?; - let tx_id = finalise_sha_atomic_swap( - output_service.clone(), - transaction_service.clone(), - hash, - args.pre_image.into(), - config.fee_per_gram.into(), - args.message, - ) - .await?; - debug!(target: LOG_TARGET, "claiming tari HTLC tx_id {}", tx_id); - tx_ids.push(tx_id); + FinaliseShaAtomicSwap(args) => match args.output_hash[0].clone().try_into() { + Ok(hash) => { + match finalise_sha_atomic_swap( + output_service.clone(), + transaction_service.clone(), + hash, + args.pre_image.into(), + config.fee_per_gram.into(), + args.message, + ) + .await + { + Ok(tx_id) => { + debug!(target: LOG_TARGET, "claiming tari HTLC tx_id {}", tx_id); + tx_ids.push(tx_id); + }, + Err(e) => eprintln!("FinaliseShaAtomicSwap error! {}", e), + } + }, + Err(e) => eprintln!("FinaliseShaAtomicSwap error! 
{}", e), }, - ClaimShaAtomicSwapRefund(args) => { - let hash = args.output_hash[0].clone().try_into()?; - let tx_id = claim_htlc_refund( - output_service.clone(), - transaction_service.clone(), - hash, - config.fee_per_gram.into(), - args.message, - ) - .await?; - debug!(target: LOG_TARGET, "claiming tari HTLC tx_id {}", tx_id); - tx_ids.push(tx_id); + ClaimShaAtomicSwapRefund(args) => match args.output_hash[0].clone().try_into() { + Ok(hash) => { + match claim_htlc_refund( + output_service.clone(), + transaction_service.clone(), + hash, + config.fee_per_gram.into(), + args.message, + ) + .await + { + Ok(tx_id) => { + debug!(target: LOG_TARGET, "claiming tari HTLC tx_id {}", tx_id); + tx_ids.push(tx_id); + }, + Err(e) => eprintln!("ClaimShaAtomicSwapRefund error! {}", e), + } + }, + Err(e) => eprintln!("FinaliseShaAtomicSwap error! {}", e), }, + RevalidateWalletDb => { - output_service + if let Err(e) = output_service .revalidate_all_outputs() .await - .map_err(CommandError::OutputManagerError)?; - transaction_service + .map_err(CommandError::OutputManagerError) + { + eprintln!("RevalidateWalletDb error! {}", e); + } + if let Err(e) = transaction_service .revalidate_all_transactions() .await - .map_err(CommandError::TransactionServiceError)?; + .map_err(CommandError::TransactionServiceError) + { + eprintln!("RevalidateWalletDb error! 
{}", e); + } }, HashGrpcPassword(args) => { - let (username, password) = config + match config .grpc_authentication .username_password() - .ok_or_else(|| CommandError::General("GRPC basic auth is not configured".to_string()))?; - let hashed_password = create_salted_hashed_password(password.reveal()) - .map_err(|e| CommandError::General(e.to_string()))?; - if args.short { - println!("{}", *hashed_password); - } else { - println!("Your hashed password is:"); - println!("{}", *hashed_password); - println!(); - println!( - "Use HTTP basic auth with username '{}' and the hashed password to make GRPC requests", - username - ); + .ok_or_else(|| CommandError::General("GRPC basic auth is not configured".to_string())) + { + Ok((username, password)) => { + match create_salted_hashed_password(password.reveal()) + .map_err(|e| CommandError::General(e.to_string())) + { + Ok(hashed_password) => { + if args.short { + println!("{}", *hashed_password); + } else { + println!("Your hashed password is:"); + println!("{}", *hashed_password); + println!(); + println!( + "Use HTTP basic auth with username '{}' and the hashed password to make GRPC \ + requests", + username + ); + } + }, + Err(e) => eprintln!("HashGrpcPassword error! {}", e), + } + }, + Err(e) => eprintln!("HashGrpcPassword error! 
{}", e), } }, RegisterValidatorNode(args) => { diff --git a/applications/tari_console_wallet/src/cli.rs b/applications/tari_console_wallet/src/cli.rs index e76574267f..4575673cfa 100644 --- a/applications/tari_console_wallet/src/cli.rs +++ b/applications/tari_console_wallet/src/cli.rs @@ -115,6 +115,7 @@ impl ConfigOverrideProvider for Cli { pub enum CliCommands { GetBalance, SendTari(SendTariArgs), + BurnTari(BurnTariArgs), SendOneSided(SendTariArgs), SendOneSidedToStealthAddress(SendTariArgs), MakeItRain(MakeItRainArgs), @@ -148,6 +149,13 @@ pub struct SendTariArgs { pub message: String, } +#[derive(Debug, Args, Clone)] +pub struct BurnTariArgs { + pub amount: MicroTari, + #[clap(short, long, default_value = "Burn funds")] + pub message: String, +} + #[derive(Debug, Args, Clone)] pub struct MakeItRainArgs { pub destination: UniPublicKey, diff --git a/applications/tari_console_wallet/src/grpc/wallet_grpc_server.rs b/applications/tari_console_wallet/src/grpc/wallet_grpc_server.rs index aeece46823..e5ed5a23a0 100644 --- a/applications/tari_console_wallet/src/grpc/wallet_grpc_server.rs +++ b/applications/tari_console_wallet/src/grpc/wallet_grpc_server.rs @@ -20,13 +20,8 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-use std::{ - convert::{TryFrom, TryInto}, - fs, - path::PathBuf, -}; +use std::convert::{TryFrom, TryInto}; -use clap::Parser; use futures::{ channel::mpsc::{self, Sender}, future, @@ -50,7 +45,6 @@ use tari_app_grpc::{ CreateBurnTransactionResponse, CreateTemplateRegistrationRequest, CreateTemplateRegistrationResponse, - FileDeletedResponse, GetBalanceRequest, GetBalanceResponse, GetCoinbaseRequest, @@ -71,7 +65,6 @@ use tari_app_grpc::{ RegisterValidatorNodeResponse, RevalidateRequest, RevalidateResponse, - SeedWordsResponse, SendShaAtomicSwapRequest, SendShaAtomicSwapResponse, SetBaseNodeRequest, @@ -116,7 +109,6 @@ use tokio::{sync::broadcast, task}; use tonic::{Request, Response, Status}; use crate::{ - cli::Cli, grpc::{convert_to_transaction_event, TransactionWrapper}, notifier::{CANCELLED, CONFIRMATION, MINED, NEW_BLOCK_MINED, QUEUED, RECEIVED, SENT}, }; @@ -319,6 +311,7 @@ impl wallet_server::Wallet for WalletGrpcServer { .send_sha_atomic_swap_transaction( address.clone(), message.amount.into(), + UtxoSelectionCriteria::default(), message.fee_per_gram.into(), message.message, ) @@ -496,6 +489,7 @@ impl wallet_server::Wallet for WalletGrpcServer { .send_transaction( pk, amount.into(), + UtxoSelectionCriteria::default(), OutputFeatures::default(), fee_per_gram.into(), message, @@ -506,6 +500,7 @@ impl wallet_server::Wallet for WalletGrpcServer { .send_one_sided_transaction( pk, amount.into(), + UtxoSelectionCriteria::default(), OutputFeatures::default(), fee_per_gram.into(), message, @@ -516,6 +511,7 @@ impl wallet_server::Wallet for WalletGrpcServer { .send_one_sided_to_stealth_address_transaction( pk, amount.into(), + UtxoSelectionCriteria::default(), OutputFeatures::default(), fee_per_gram.into(), message, @@ -564,7 +560,12 @@ impl wallet_server::Wallet for WalletGrpcServer { let mut transaction_service = self.get_transaction_service(); debug!(target: LOG_TARGET, "Trying to burn {} Tari", message.amount); let response = match transaction_service - 
.burn_tari(message.amount.into(), message.fee_per_gram.into(), message.message) + .burn_tari( + message.amount.into(), + UtxoSelectionCriteria::default(), + message.fee_per_gram.into(), + message.message, + ) .await { Ok(tx_id) => { @@ -891,41 +892,6 @@ impl wallet_server::Wallet for WalletGrpcServer { } } - /// Returns the contents of a seed words file, provided via CLI - async fn seed_words(&self, _: Request) -> Result, Status> { - let cli = Cli::parse(); - - let filepath: PathBuf = match cli.seed_words_file_name { - Some(filepath) => filepath, - None => return Err(Status::not_found("file path is empty")), - }; - - let words = fs::read_to_string(filepath)? - .split(' ') - .collect::>() - .iter() - .map(|&x| x.into()) - .collect::>(); - - Ok(Response::new(SeedWordsResponse { words })) - } - - /// Deletes the seed words file, provided via CLI - async fn delete_seed_words_file( - &self, - _: Request, - ) -> Result, Status> { - let cli = Cli::parse(); - - // WARNING: the filepath used is supplied as an argument - fs::remove_file(match cli.seed_words_file_name { - Some(filepath) => filepath, - None => return Err(Status::not_found("file path is empty")), - })?; - - Ok(Response::new(FileDeletedResponse {})) - } - async fn create_template_registration( &self, request: Request, diff --git a/applications/tari_console_wallet/src/init/mod.rs b/applications/tari_console_wallet/src/init/mod.rs index 5f899416cd..97ce4105f4 100644 --- a/applications/tari_console_wallet/src/init/mod.rs +++ b/applications/tari_console_wallet/src/init/mod.rs @@ -118,8 +118,9 @@ pub async fn change_password( config: &ApplicationConfig, arg_password: Option, shutdown_signal: ShutdownSignal, + non_interactive_mode: bool, ) -> Result<(), ExitError> { - let mut wallet = init_wallet(config, arg_password, None, None, shutdown_signal).await?; + let mut wallet = init_wallet(config, arg_password, None, None, shutdown_signal, non_interactive_mode).await?; let passphrase = prompt_password("New wallet password: 
")?; let confirmed = prompt_password("Confirm new password: ")?; @@ -158,7 +159,7 @@ pub async fn get_base_node_peer_config( Some(ref custom) => SeedPeer::from_str(custom) .map(|node| Some(Peer::from(node))) .map_err(|err| ExitError::new(ExitCode::ConfigError, &format!("Malformed custom base node: {}", err)))?, - None => get_custom_base_node_peer_from_db(wallet).await, + None => get_custom_base_node_peer_from_db(wallet), }; // If the user has not explicitly set a base node in the config, we try detect one @@ -181,7 +182,7 @@ pub async fn get_base_node_peer_config( let address = detected_node.addresses.first().ok_or_else(|| { ExitError::new(ExitCode::ConfigError, "No address found for detected base node") })?; - set_custom_base_node_peer_in_db(wallet, &detected_node.public_key, address).await?; + set_custom_base_node_peer_in_db(wallet, &detected_node.public_key, address)?; selected_base_node = Some(detected_node.into()); } }, @@ -248,6 +249,7 @@ pub async fn init_wallet( seed_words_file_name: Option, recovery_seed: Option, shutdown_signal: ShutdownSignal, + non_interactive_mode: bool, ) -> Result { fs::create_dir_all( &config @@ -293,13 +295,13 @@ pub async fn init_wallet( let node_address = match config.wallet.p2p.public_address.clone() { Some(addr) => addr, - None => match wallet_db.get_node_address().await? { + None => match wallet_db.get_node_address()? { Some(addr) => addr, None => Multiaddr::empty(), }, }; - let master_seed = read_or_create_master_seed(recovery_seed.clone(), &wallet_db).await?; + let master_seed = read_or_create_master_seed(recovery_seed.clone(), &wallet_db)?; let node_identity = match config.wallet.identity_file.as_ref() { Some(identity_file) => { @@ -315,12 +317,12 @@ pub async fn init_wallet( PeerFeatures::COMMUNICATION_CLIENT, )? 
}, - None => setup_identity_from_db(&wallet_db, &master_seed, node_address.clone()).await?, + None => setup_identity_from_db(&wallet_db, &master_seed, node_address.clone())?, }; let mut wallet_config = config.wallet.clone(); if let TransportType::Tor = config.wallet.p2p.transport.transport_type { - wallet_config.p2p.transport.tor.identity = wallet_db.get_tor_id().await?; + wallet_config.p2p.transport.tor.identity = wallet_db.get_tor_id()?; } let factories = CryptoFactories::default(); @@ -352,7 +354,6 @@ pub async fn init_wallet( wallet .db .set_tor_identity(hs.tor_identity().clone()) - .await .map_err(|e| ExitError::new(ExitCode::WalletError, format!("Problem writing tor identity. {}", e)))?; } @@ -360,28 +361,30 @@ pub async fn init_wallet( debug!(target: LOG_TARGET, "Wallet is not encrypted."); // create using --password arg if supplied and skip seed words confirmation - let (passphrase, interactive) = if let Some(password) = arg_password { - debug!(target: LOG_TARGET, "Setting password from command line argument."); - - (password, false) - } else { - debug!(target: LOG_TARGET, "Prompting for password."); - let password = prompt_password("Create wallet password: ")?; - let confirmed = prompt_password("Confirm wallet password: ")?; + let passphrase = match arg_password { + Some(password) => { + debug!(target: LOG_TARGET, "Setting password from command line argument."); + password + }, + None => { + debug!(target: LOG_TARGET, "Prompting for password."); + let password = prompt_password("Create wallet password: ")?; + let confirmed = prompt_password("Confirm wallet password: ")?; - if password != confirmed { - return Err(ExitError::new(ExitCode::InputError, "Passwords don't match!")); - } + if password != confirmed { + return Err(ExitError::new(ExitCode::InputError, "Passwords don't match!")); + } - (password, true) + password + }, }; wallet.apply_encryption(passphrase).await?; debug!(target: LOG_TARGET, "Wallet encrypted."); - if interactive && 
recovery_seed.is_none() { - match confirm_seed_words(&mut wallet).await { + if !non_interactive_mode && recovery_seed.is_none() { + match confirm_seed_words(&mut wallet) { Ok(()) => { print!("\x1Bc"); // Clear the screen }, @@ -392,7 +395,7 @@ pub async fn init_wallet( } } if let Some(file_name) = seed_words_file_name { - let seed_words = wallet.get_seed_words(&MnemonicLanguage::English).await?.join(" "); + let seed_words = wallet.get_seed_words(&MnemonicLanguage::English)?.join(" "); let _result = fs::write(file_name, seed_words).map_err(|e| { ExitError::new( ExitCode::WalletError, @@ -416,17 +419,16 @@ async fn detect_local_base_node() -> Option { Some(SeedPeer::new(public_key, vec![address])) } -async fn setup_identity_from_db( +fn setup_identity_from_db( wallet_db: &WalletDatabase, master_seed: &CipherSeed, node_address: Multiaddr, ) -> Result, ExitError> { let node_features = wallet_db - .get_node_features() - .await? + .get_node_features()? .unwrap_or(PeerFeatures::COMMUNICATION_CLIENT); - let identity_sig = wallet_db.get_comms_identity_signature().await?; + let identity_sig = wallet_db.get_comms_identity_signature()?; let comms_secret_key = derive_comms_secret_key(master_seed)?; @@ -452,7 +454,7 @@ async fn setup_identity_from_db( .as_ref() .expect("unreachable panic") .clone(); - wallet_db.set_comms_identity_signature(sig).await?; + wallet_db.set_comms_identity_signature(sig)?; } Ok(node_identity) @@ -514,8 +516,8 @@ async fn validate_txos(wallet: &mut WalletSqlite) -> Result<(), ExitError> { Ok(()) } -async fn confirm_seed_words(wallet: &mut WalletSqlite) -> Result<(), ExitError> { - let seed_words = wallet.get_seed_words(&MnemonicLanguage::English).await?; +fn confirm_seed_words(wallet: &mut WalletSqlite) -> Result<(), ExitError> { + let seed_words = wallet.get_seed_words(&MnemonicLanguage::English)?; println!(); println!("========================="); diff --git a/applications/tari_console_wallet/src/main.rs b/applications/tari_console_wallet/src/main.rs 
index d11ee5593a..e8f0b4060c 100644 --- a/applications/tari_console_wallet/src/main.rs +++ b/applications/tari_console_wallet/src/main.rs @@ -47,6 +47,7 @@ use tari_key_manager::cipher_seed::CipherSeed; #[cfg(all(unix, feature = "libtor"))] use tari_libtor::tor::Tor; use tari_shutdown::Shutdown; +use tari_utilities::SafePassword; use tracing_subscriber::{layer::SubscriberExt, Registry}; use wallet_modes::{command_mode, grpc_mode, recovery_mode, script_mode, tui_mode, WalletMode}; @@ -92,8 +93,7 @@ fn main() { fn main_inner() -> Result<(), ExitError> { let cli = Cli::parse(); - let config_path = cli.common.config_path(); - let cfg = load_configuration(config_path.as_path(), true, &cli)?; + let cfg = load_configuration(cli.common.config_path().as_path(), true, &cli)?; initialize_logging( &cli.common.log_config_path("wallet"), include_str!("../log4rs_sample.yml"), @@ -118,11 +118,7 @@ fn main_inner() -> Result<(), ExitError> { consts::APP_VERSION ); - let password = cli - .password - .as_ref() - .or(config.wallet.password.as_ref()) - .map(|s| s.to_owned()); + let password = get_password(&config, &cli); if password.is_none() { tari_splash_screen("Console Wallet"); @@ -141,7 +137,12 @@ fn main_inner() -> Result<(), ExitError> { if cli.change_password { info!(target: LOG_TARGET, "Change password requested."); - return runtime.block_on(change_password(&config, password, shutdown_signal)); + return runtime.block_on(change_password( + &config, + password, + shutdown_signal, + cli.non_interactive_mode, + )); } // Run our own Tor instance, if configured @@ -164,10 +165,11 @@ fn main_inner() -> Result<(), ExitError> { seed_words_file_name, recovery_seed, shutdown_signal, + cli.non_interactive_mode, ))?; // Check if there is an in progress recovery in the wallet's database - if runtime.block_on(wallet.is_recovery_in_progress())? { + if wallet.is_recovery_in_progress()? 
{ println!("A Wallet Recovery was found to be in progress, continuing."); boot_mode = WalletBoot::Recovery; } @@ -219,6 +221,13 @@ fn main_inner() -> Result<(), ExitError> { result } +fn get_password(config: &ApplicationConfig, cli: &Cli) -> Option { + cli.password + .as_ref() + .or(config.wallet.password.as_ref()) + .map(|s| s.to_owned()) +} + fn get_recovery_seed(boot_mode: WalletBoot, cli: &Cli) -> Result, ExitError> { if matches!(boot_mode, WalletBoot::Recovery) { let seed = if cli.seed_words.is_some() { diff --git a/applications/tari_console_wallet/src/recovery.rs b/applications/tari_console_wallet/src/recovery.rs index efd0e2e974..6b9a9f3a66 100644 --- a/applications/tari_console_wallet/src/recovery.rs +++ b/applications/tari_console_wallet/src/recovery.rs @@ -29,6 +29,7 @@ use tari_key_manager::{cipher_seed::CipherSeed, mnemonic::Mnemonic}; use tari_shutdown::Shutdown; use tari_utilities::hex::Hex; use tari_wallet::{ + connectivity_service::WalletConnectivityHandle, storage::sqlite_db::wallet::WalletSqliteDatabase, utxo_scanner_service::{handle::UtxoScannerEvent, service::UtxoScannerService}, WalletSqlite, @@ -107,7 +108,7 @@ pub async fn wallet_recovery( .map_err(|err| ExitError::new(ExitCode::NetworkError, err))?; } - let mut recovery_task = UtxoScannerService::::builder() + let mut recovery_task = UtxoScannerService::::builder() .with_peers(peer_public_keys) // Do not make this a small number as wallet recovery needs to be resilient .with_retry_limit(retry_limit) diff --git a/applications/tari_console_wallet/src/ui/components/send_tab.rs b/applications/tari_console_wallet/src/ui/components/send_tab.rs index 734f6cd229..cffad8bd69 100644 --- a/applications/tari_console_wallet/src/ui/components/send_tab.rs +++ b/applications/tari_console_wallet/src/ui/components/send_tab.rs @@ -4,6 +4,7 @@ use log::*; use tari_core::transactions::tari_amount::MicroTari; use tari_utilities::hex::Hex; +use tari_wallet::output_manager_service::UtxoSelectionCriteria; use 
tokio::{runtime::Handle, sync::watch}; use tui::{ backend::Backend, @@ -268,6 +269,7 @@ impl SendTab { match Handle::current().block_on(app_state.send_one_sided_transaction( self.to_field.clone(), amount.into(), + UtxoSelectionCriteria::default(), fee_per_gram, self.message_field.clone(), tx, @@ -286,6 +288,7 @@ impl SendTab { app_state.send_one_sided_to_stealth_address_transaction( self.to_field.clone(), amount.into(), + UtxoSelectionCriteria::default(), fee_per_gram, self.message_field.clone(), tx, @@ -305,6 +308,7 @@ impl SendTab { match Handle::current().block_on(app_state.send_transaction( self.to_field.clone(), amount.into(), + UtxoSelectionCriteria::default(), fee_per_gram, self.message_field.clone(), tx, diff --git a/applications/tari_console_wallet/src/ui/components/transactions_tab.rs b/applications/tari_console_wallet/src/ui/components/transactions_tab.rs index 8776fe393d..ab15aee417 100644 --- a/applications/tari_console_wallet/src/ui/components/transactions_tab.rs +++ b/applications/tari_console_wallet/src/ui/components/transactions_tab.rs @@ -461,7 +461,7 @@ impl Component for TransactionsTab { span_vec.push(Span::styled("(C)", Style::default().add_modifier(Modifier::BOLD))); span_vec.push(Span::raw(" cancel selected pending Txs ")); span_vec.push(Span::styled("(A)", Style::default().add_modifier(Modifier::BOLD))); - span_vec.push(Span::raw(" show/hide abandoned coinbases ")); + span_vec.push(Span::raw(" show/hide mining ")); span_vec.push(Span::styled("(R)", Style::default().add_modifier(Modifier::BOLD))); span_vec.push(Span::raw(" rebroadcast Txs ")); span_vec.push(Span::styled("(Esc)", Style::default().add_modifier(Modifier::BOLD))); diff --git a/applications/tari_console_wallet/src/ui/state/app_state.rs b/applications/tari_console_wallet/src/ui/state/app_state.rs index 60ad6c115d..0301553c6a 100644 --- a/applications/tari_console_wallet/src/ui/state/app_state.rs +++ b/applications/tari_console_wallet/src/ui/state/app_state.rs @@ -55,7 +55,7 @@ use 
tari_wallet::{ base_node_service::{handle::BaseNodeEventReceiver, service::BaseNodeState}, connectivity_service::{OnlineStatus, WalletConnectivityHandle, WalletConnectivityInterface}, contacts_service::{handle::ContactsLivenessEvent, storage::database::Contact}, - output_manager_service::{handle::OutputManagerEventReceiver, service::Balance}, + output_manager_service::{handle::OutputManagerEventReceiver, service::Balance, UtxoSelectionCriteria}, transaction_service::{ handle::TransactionEventReceiver, storage::models::{CompletedTransaction, TxCancellationReason}, @@ -217,9 +217,9 @@ impl AppState { let public_key = match CommsPublicKey::from_hex(public_key_or_emoji_id.as_str()) { Ok(pk) => pk, - Err(_) => { - EmojiId::str_to_pubkey(public_key_or_emoji_id.as_str()).map_err(|_| UiError::PublicKeyParseError)? - }, + Err(_) => EmojiId::from_emoji_string(public_key_or_emoji_id.as_str()) + .map_err(|_| UiError::PublicKeyParseError)? + .to_public_key(), }; let contact = Contact::new(alias, public_key, None, None); @@ -250,7 +250,9 @@ impl AppState { let mut inner = self.inner.write().await; let public_key = match CommsPublicKey::from_hex(public_key.as_str()) { Ok(pk) => pk, - Err(_) => EmojiId::str_to_pubkey(public_key.as_str()).map_err(|_| UiError::PublicKeyParseError)?, + Err(_) => EmojiId::from_emoji_string(public_key.as_str()) + .map_err(|_| UiError::PublicKeyParseError)? 
+ .to_public_key(), }; inner.wallet.contacts_service.remove_contact(public_key).await?; @@ -265,6 +267,7 @@ impl AppState { &mut self, public_key: String, amount: u64, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: u64, message: String, result_tx: watch::Sender, @@ -272,7 +275,9 @@ impl AppState { let inner = self.inner.write().await; let public_key = match CommsPublicKey::from_hex(public_key.as_str()) { Ok(pk) => pk, - Err(_) => EmojiId::str_to_pubkey(public_key.as_str()).map_err(|_| UiError::PublicKeyParseError)?, + Err(_) => EmojiId::from_emoji_string(public_key.as_str()) + .map_err(|_| UiError::PublicKeyParseError)? + .to_public_key(), }; let output_features = OutputFeatures { ..Default::default() }; @@ -282,6 +287,7 @@ impl AppState { tokio::spawn(send_transaction_task( public_key, MicroTari::from(amount), + selection_criteria, output_features, message, fee_per_gram, @@ -296,6 +302,7 @@ impl AppState { &mut self, public_key: String, amount: u64, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: u64, message: String, result_tx: watch::Sender, @@ -303,7 +310,9 @@ impl AppState { let inner = self.inner.write().await; let public_key = match CommsPublicKey::from_hex(public_key.as_str()) { Ok(pk) => pk, - Err(_) => EmojiId::str_to_pubkey(public_key.as_str()).map_err(|_| UiError::PublicKeyParseError)?, + Err(_) => EmojiId::from_emoji_string(public_key.as_str()) + .map_err(|_| UiError::PublicKeyParseError)? 
+ .to_public_key(), }; let output_features = OutputFeatures { ..Default::default() }; @@ -313,6 +322,7 @@ impl AppState { tokio::spawn(send_one_sided_transaction_task( public_key, MicroTari::from(amount), + selection_criteria, output_features, message, fee_per_gram, @@ -327,6 +337,7 @@ impl AppState { &mut self, dest_pubkey: String, amount: u64, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: u64, message: String, result_tx: watch::Sender, @@ -334,7 +345,9 @@ impl AppState { let inner = self.inner.write().await; let dest_pubkey = match CommsPublicKey::from_hex(dest_pubkey.as_str()) { Ok(pk) => pk, - Err(_) => EmojiId::str_to_pubkey(dest_pubkey.as_str()).map_err(|_| UiError::PublicKeyParseError)?, + Err(_) => EmojiId::from_emoji_string(dest_pubkey.as_str()) + .map_err(|_| UiError::PublicKeyParseError)? + .to_public_key(), }; let output_features = OutputFeatures { ..Default::default() }; @@ -344,6 +357,7 @@ impl AppState { tokio::spawn(send_one_sided_to_stealth_address_transaction( dest_pubkey, MicroTari::from(amount), + selection_criteria, output_features, message, fee_per_gram, @@ -428,6 +442,7 @@ impl AppState { .completed_txs .iter() .filter(|tx| !matches!(tx.cancelled, Some(TxCancellationReason::AbandonedCoinbase))) + .filter(|tx| !matches!(tx.status, TransactionStatus::Coinbase)) .collect() } else { self.cached_data.completed_txs.iter().collect() @@ -892,15 +907,11 @@ impl AppStateInner { // persist the custom node in wallet db self.wallet .db - .set_client_key_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string(), peer.public_key.to_string()) - .await?; - self.wallet - .db - .set_client_key_value( - CUSTOM_BASE_NODE_ADDRESS_KEY.to_string(), - peer.addresses.first().ok_or(UiError::NoAddress)?.to_string(), - ) - .await?; + .set_client_key_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string(), peer.public_key.to_string())?; + self.wallet.db.set_client_key_value( + CUSTOM_BASE_NODE_ADDRESS_KEY.to_string(), + 
peer.addresses.first().ok_or(UiError::NoAddress)?.to_string(), + )?; info!( target: LOG_TARGET, "Setting custom base node peer for wallet: {}::{}", @@ -931,12 +942,10 @@ impl AppStateInner { // clear from wallet db self.wallet .db - .clear_client_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string()) - .await?; + .clear_client_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string())?; self.wallet .db - .clear_client_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string()) - .await?; + .clear_client_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string())?; Ok(()) } @@ -1086,7 +1095,7 @@ impl AppStateData { base_node_selected: Peer, base_node_config: PeerConfig, ) -> Self { - let eid = EmojiId::from_pubkey(node_identity.public_key()).to_string(); + let eid = EmojiId::from_public_key(node_identity.public_key()).to_emoji_string(); let qr_link = format!("tari://{}/pubkey/{}", network, &node_identity.public_key().to_hex()); let code = QrCode::new(qr_link).unwrap(); let image = code diff --git a/applications/tari_console_wallet/src/ui/state/tasks.rs b/applications/tari_console_wallet/src/ui/state/tasks.rs index e7a8d2a368..be18a312f9 100644 --- a/applications/tari_console_wallet/src/ui/state/tasks.rs +++ b/applications/tari_console_wallet/src/ui/state/tasks.rs @@ -22,7 +22,10 @@ use tari_comms::types::CommsPublicKey; use tari_core::transactions::{tari_amount::MicroTari, transaction_components::OutputFeatures}; -use tari_wallet::transaction_service::handle::{TransactionEvent, TransactionSendStatus, TransactionServiceHandle}; +use tari_wallet::{ + output_manager_service::UtxoSelectionCriteria, + transaction_service::handle::{TransactionEvent, TransactionSendStatus, TransactionServiceHandle}, +}; use tokio::sync::{broadcast, watch}; use crate::ui::{state::UiTransactionSendStatus, UiError}; @@ -32,6 +35,7 @@ const LOG_TARGET: &str = "wallet::console_wallet::tasks "; pub async fn send_transaction_task( public_key: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, 
output_features: OutputFeatures, message: String, fee_per_gram: MicroTari, @@ -42,7 +46,14 @@ pub async fn send_transaction_task( let mut event_stream = transaction_service_handle.get_event_stream(); let mut send_status = TransactionSendStatus::default(); match transaction_service_handle - .send_transaction(public_key, amount, output_features, fee_per_gram, message) + .send_transaction( + public_key, + amount, + selection_criteria, + output_features, + fee_per_gram, + message, + ) .await { Err(e) => { @@ -100,6 +111,7 @@ pub async fn send_transaction_task( pub async fn send_one_sided_transaction_task( public_key: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, message: String, fee_per_gram: MicroTari, @@ -109,7 +121,14 @@ pub async fn send_one_sided_transaction_task( let _result = result_tx.send(UiTransactionSendStatus::Initiated); let mut event_stream = transaction_service_handle.get_event_stream(); match transaction_service_handle - .send_one_sided_transaction(public_key, amount, output_features, fee_per_gram, message) + .send_one_sided_transaction( + public_key, + amount, + selection_criteria, + output_features, + fee_per_gram, + message, + ) .await { Err(e) => { @@ -146,6 +165,7 @@ pub async fn send_one_sided_transaction_task( pub async fn send_one_sided_to_stealth_address_transaction( dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, message: String, fee_per_gram: MicroTari, @@ -155,7 +175,14 @@ pub async fn send_one_sided_to_stealth_address_transaction( let _result = result_tx.send(UiTransactionSendStatus::Initiated); let mut event_stream = transaction_service_handle.get_event_stream(); match transaction_service_handle - .send_one_sided_to_stealth_address_transaction(dest_pubkey, amount, output_features, fee_per_gram, message) + .send_one_sided_to_stealth_address_transaction( + dest_pubkey, + amount, + 
selection_criteria, + output_features, + fee_per_gram, + message, + ) .await { Err(e) => { diff --git a/applications/tari_console_wallet/src/ui/ui_contact.rs b/applications/tari_console_wallet/src/ui/ui_contact.rs index fa4af482c6..d55d1eb6e4 100644 --- a/applications/tari_console_wallet/src/ui/ui_contact.rs +++ b/applications/tari_console_wallet/src/ui/ui_contact.rs @@ -26,7 +26,7 @@ impl From for UiContact { Self { alias: c.alias, public_key: c.public_key.to_string(), - emoji_id: EmojiId::from_pubkey(&c.public_key).as_str().to_string(), + emoji_id: EmojiId::from_public_key(&c.public_key).to_emoji_string(), last_seen: match c.last_seen { Some(val) => DateTime::::from_utc(val, Local::now().offset().to_owned()) .format("%m-%dT%H:%M") diff --git a/applications/tari_console_wallet/src/utils/db.rs b/applications/tari_console_wallet/src/utils/db.rs index f50d6bc89a..e06bd39d11 100644 --- a/applications/tari_console_wallet/src/utils/db.rs +++ b/applications/tari_console_wallet/src/utils/db.rs @@ -36,11 +36,10 @@ pub const CUSTOM_BASE_NODE_ADDRESS_KEY: &str = "console_wallet_custom_base_node_ /// This helper function will attempt to read a stored base node public key and address from the wallet database. /// If both are found they are used to construct and return a Peer. 
-pub async fn get_custom_base_node_peer_from_db(wallet: &mut WalletSqlite) -> Option { +pub fn get_custom_base_node_peer_from_db(wallet: &mut WalletSqlite) -> Option { let custom_base_node_peer_pubkey = match wallet .db .get_client_key_value(CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string()) - .await { Ok(val) => val, Err(e) => { @@ -48,11 +47,7 @@ pub async fn get_custom_base_node_peer_from_db(wallet: &mut WalletSqlite) -> Opt return None; }, }; - let custom_base_node_peer_address = match wallet - .db - .get_client_key_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string()) - .await - { + let custom_base_node_peer_address = match wallet.db.get_client_key_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string()) { Ok(val) => val, Err(e) => { warn!(target: LOG_TARGET, "Problem reading from wallet database: {}", e); @@ -91,23 +86,19 @@ pub async fn get_custom_base_node_peer_from_db(wallet: &mut WalletSqlite) -> Opt } /// Sets the base node peer in the database -pub async fn set_custom_base_node_peer_in_db( +pub fn set_custom_base_node_peer_in_db( wallet: &mut WalletSqlite, base_node_public_key: &CommsPublicKey, base_node_address: &Multiaddr, ) -> Result<(), WalletStorageError> { - wallet - .db - .set_client_key_value( - CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string(), - base_node_public_key.to_hex(), - ) - .await?; + wallet.db.set_client_key_value( + CUSTOM_BASE_NODE_PUBLIC_KEY_KEY.to_string(), + base_node_public_key.to_hex(), + )?; wallet .db - .set_client_key_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string(), base_node_address.to_string()) - .await?; + .set_client_key_value(CUSTOM_BASE_NODE_ADDRESS_KEY.to_string(), base_node_address.to_string())?; Ok(()) } diff --git a/applications/tari_console_wallet/src/wallet_modes.rs b/applications/tari_console_wallet/src/wallet_modes.rs index 2e307cb117..535631b429 100644 --- a/applications/tari_console_wallet/src/wallet_modes.rs +++ b/applications/tari_console_wallet/src/wallet_modes.rs @@ -176,7 +176,10 @@ pub(crate) fn parse_command_file(script: String) 
-> Result, Exi commands.push(sub_command); } }, - Err(e) => return Err(ExitError::new(ExitCode::CommandError, e.to_string())), + Err(e) => { + println!("\nError! parsing '{}' ({})\n", command, e); + return Err(ExitError::new(ExitCode::CommandError, e.to_string())); + }, } } } @@ -282,7 +285,7 @@ pub fn tui_mode( let base_node_selected; if let Some(peer) = base_node_config.base_node_custom.clone() { base_node_selected = peer; - } else if let Some(peer) = handle.block_on(get_custom_base_node_peer_from_db(&mut wallet)) { + } else if let Some(peer) = get_custom_base_node_peer_from_db(&mut wallet) { base_node_selected = peer; } else if let Some(peer) = handle.block_on(wallet.get_base_node_peer()) { base_node_selected = peer; @@ -423,6 +426,8 @@ mod test { discover-peer f6b2ca781342a3ebe30ee1643655c96f1d7c14f4d49f077695395de98ae73665 send-tari --message Our_secret! 125T 5c4f2a4b3f3f84e047333218a84fd24f581a9d7e4f23b78e3714e9d174427d61 + + burn-tari --message Ups_these_funds_will_be_burned! 100T coin-split --message Make_many_dust_UTXOs! 
--fee-per-gram 2 0.001T 499 @@ -438,6 +443,7 @@ mod test { let mut get_balance = false; let mut send_tari = false; + let mut burn_tari = false; let mut make_it_rain = false; let mut coin_split = false; let mut discover_peer = false; @@ -446,6 +452,7 @@ mod test { match command { CliCommands::GetBalance => get_balance = true, CliCommands::SendTari(_) => send_tari = true, + CliCommands::BurnTari(_) => burn_tari = true, CliCommands::SendOneSided(_) => {}, CliCommands::SendOneSidedToStealthAddress(_) => {}, CliCommands::MakeItRain(_) => make_it_rain = true, @@ -466,6 +473,6 @@ mod test { CliCommands::RegisterValidatorNode(_) => {}, } } - assert!(get_balance && send_tari && make_it_rain && coin_split && discover_peer && whois); + assert!(get_balance && send_tari && burn_tari && make_it_rain && coin_split && discover_peer && whois); } } diff --git a/applications/tari_merge_mining_proxy/Cargo.toml b/applications/tari_merge_mining_proxy/Cargo.toml index 1b6a6ab6e5..203fc32aab 100644 --- a/applications/tari_merge_mining_proxy/Cargo.toml +++ b/applications/tari_merge_mining_proxy/Cargo.toml @@ -4,20 +4,20 @@ authors = ["The Tari Development Community"] description = "The Tari merge mining proxy for xmrig" repository = "https://github.com/tari-project/tari" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [features] default = [] -envlog = ["env_logger"] [dependencies] tari_app_grpc = { path = "../tari_app_grpc" } tari_common = { path = "../../common" } +tari_common_types = { path = "../../base_layer/common_types" } tari_comms = { path = "../../comms/core" } tari_core = { path = "../../base_layer/core", default-features = false, features = ["transactions"] } tari_app_utilities = { path = "../tari_app_utilities" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_utilities = { git = 
"https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } anyhow = "1.0.53" @@ -28,7 +28,6 @@ chrono = { version = "0.4.6", default-features = false } clap = { version = "3.1.1", features = ["derive", "env"] } config = { version = "0.13.0" } derivative = "2.2.0" -env_logger = { version = "0.7.1", optional = true } futures = "0.3.5" hex = "0.4.2" hyper = "0.14.12" @@ -40,7 +39,7 @@ serde = { version = "1.0.106", features = ["derive"] } serde_json = "1.0.57" structopt = { version = "0.3.13", default_features = false } thiserror = "1.0.26" -tokio = { version = "1.11", features = ["macros"] } +tokio = { version = "1.20", features = ["macros"] } tonic = "0.6.2" tracing = "0.1" url = "2.1.1" diff --git a/applications/tari_merge_mining_proxy/src/block_template_protocol.rs b/applications/tari_merge_mining_proxy/src/block_template_protocol.rs index 29a48e6f2f..d7ccc67a6e 100644 --- a/applications/tari_merge_mining_proxy/src/block_template_protocol.rs +++ b/applications/tari_merge_mining_proxy/src/block_template_protocol.rs @@ -25,7 +25,7 @@ use std::cmp; use log::*; -use tari_app_grpc::tari_rpc as grpc; +use tari_app_grpc::{authentication::ClientAuthenticationInterceptor, tari_rpc as grpc}; use tari_core::proof_of_work::{monero_rx, monero_rx::FixedByteArray, Difficulty}; use crate::{ @@ -39,13 +39,17 @@ const LOG_TARGET: &str = "tari_mm_proxy::proxy::block_template_protocol"; /// Structure holding grpc connections. 
pub struct BlockTemplateProtocol<'a> { base_node_client: &'a mut grpc::base_node_client::BaseNodeClient, - wallet_client: &'a mut grpc::wallet_client::WalletClient, + wallet_client: &'a mut grpc::wallet_client::WalletClient< + tonic::codegen::InterceptedService, + >, } impl<'a> BlockTemplateProtocol<'a> { pub fn new( base_node_client: &'a mut grpc::base_node_client::BaseNodeClient, - wallet_client: &'a mut grpc::wallet_client::WalletClient, + wallet_client: &'a mut grpc::wallet_client::WalletClient< + tonic::codegen::InterceptedService, + >, ) -> Self { Self { base_node_client, diff --git a/applications/tari_merge_mining_proxy/src/config.rs b/applications/tari_merge_mining_proxy/src/config.rs index 23549aab1e..0bffda2fa9 100644 --- a/applications/tari_merge_mining_proxy/src/config.rs +++ b/applications/tari_merge_mining_proxy/src/config.rs @@ -22,6 +22,7 @@ use serde::{Deserialize, Serialize}; use tari_common::{configuration::StringList, SubConfigPath}; +use tari_common_types::grpc_authentication::GrpcAuthentication; use tari_comms::multiaddr::Multiaddr; #[derive(Clone, Debug, Deserialize, Serialize)] @@ -41,6 +42,8 @@ pub struct MergeMiningProxyConfig { pub base_node_grpc_address: Multiaddr, /// The Tari console wallet's GRPC address pub console_wallet_grpc_address: Multiaddr, + /// GRPC authentication for console wallet + pub console_wallet_grpc_authentication: GrpcAuthentication, /// Address of the tari_merge_mining_proxy application pub listener_address: Multiaddr, /// In sole merged mining, the block solution is usually submitted to the Monero blockchain (monerod) as well as to @@ -69,6 +72,7 @@ impl Default for MergeMiningProxyConfig { monerod_use_auth: false, base_node_grpc_address: "/ip4/127.0.0.1/tcp/18142".parse().unwrap(), console_wallet_grpc_address: "/ip4/127.0.0.1/tcp/18143".parse().unwrap(), + console_wallet_grpc_authentication: GrpcAuthentication::default(), listener_address: "/ip4/127.0.0.1/tcp/18081".parse().unwrap(), submit_to_origin: true, 
wait_for_initial_sync_at_startup: true, diff --git a/applications/tari_merge_mining_proxy/src/error.rs b/applications/tari_merge_mining_proxy/src/error.rs index 92fc25651c..0d308d3bf4 100644 --- a/applications/tari_merge_mining_proxy/src/error.rs +++ b/applications/tari_merge_mining_proxy/src/error.rs @@ -26,10 +26,11 @@ use std::io; use hex::FromHexError; use hyper::header::InvalidHeaderValue; +use tari_app_grpc::authentication::BasicAuthError; use tari_common::{ConfigError, ConfigurationError}; use tari_core::{proof_of_work::monero_rx::MergeMineError, transactions::CoinbaseBuildError}; use thiserror::Error; -use tonic::transport; +use tonic::{codegen::http::uri::InvalidUri, transport}; #[derive(Debug, Error)] pub enum MmProxyError { @@ -42,6 +43,8 @@ pub enum MmProxyError { #[from] source: MergeMineError, }, + #[error("Invalid URI: {0}")] + InvalidUriError(#[from] InvalidUri), #[error("Reqwest error: {0}")] ReqwestError(#[from] reqwest::Error), #[error("Missing data:{0}")] @@ -50,6 +53,8 @@ pub enum MmProxyError { IoError(#[from] io::Error), #[error("Tonic transport error: {0}")] TonicTransportError(#[from] transport::Error), + #[error("Grpc authentication error: {0}")] + GRPCAuthenticationError(#[from] BasicAuthError), #[error("GRPC response did not contain the expected field: `{0}`")] GrpcResponseMissingField(&'static str), #[error("Hyper error: {0}")] diff --git a/applications/tari_merge_mining_proxy/src/main.rs b/applications/tari_merge_mining_proxy/src/main.rs index bb5ea7992d..a58230850d 100644 --- a/applications/tari_merge_mining_proxy/src/main.rs +++ b/applications/tari_merge_mining_proxy/src/main.rs @@ -34,6 +34,7 @@ mod test; use std::{ convert::Infallible, io::{stdout, Write}, + str::FromStr, }; use clap::Parser; @@ -42,12 +43,16 @@ use futures::future; use hyper::{service::make_service_fn, Server}; use log::*; use proxy::MergeMiningProxyService; -use tari_app_grpc::tari_rpc as grpc; +use tari_app_grpc::{authentication::ClientAuthenticationInterceptor, 
tari_rpc as grpc}; use tari_app_utilities::consts; use tari_common::{initialize_logging, load_configuration, DefaultConfigLoader}; use tari_comms::utils::multiaddr::multiaddr_to_socketaddr; use tari_core::proof_of_work::randomx_factory::RandomXFactory; use tokio::time::Duration; +use tonic::{ + codegen::InterceptedService, + transport::{Channel, Endpoint}, +}; use crate::{ block_template_data::BlockTemplateRepository, @@ -57,6 +62,24 @@ use crate::{ }; const LOG_TARGET: &str = "tari_mm_proxy::proxy"; +pub(crate) type WalletGrpcClient = + grpc::wallet_client::WalletClient>; + +async fn connect_wallet_with_authenticator(config: &MergeMiningProxyConfig) -> Result { + let wallet_addr = format!( + "http://{}", + multiaddr_to_socketaddr(&config.console_wallet_grpc_address)? + ); + info!(target: LOG_TARGET, "👛 Connecting to wallet at {}", wallet_addr); + let channel = Endpoint::from_str(&wallet_addr)?.connect().await?; + let wallet_conn = grpc::wallet_client::WalletClient::with_interceptor( + channel, + ClientAuthenticationInterceptor::create(&config.console_wallet_grpc_authentication)?, + ); + + Ok(wallet_conn) +} + #[tokio::main] async fn main() -> Result<(), anyhow::Error> { let terminal_title = format!("Tari Merge Mining Proxy - Version {}", consts::APP_VERSION); @@ -90,7 +113,7 @@ async fn main() -> Result<(), anyhow::Error> { let wallet = multiaddr_to_socketaddr(&config.console_wallet_grpc_address)?; info!(target: LOG_TARGET, "Connecting to wallet at {}", wallet); println!("Connecting to wallet at {}", wallet); - let wallet_client = grpc::wallet_client::WalletClient::connect(format!("http://{}", wallet)).await?; + let wallet_client = connect_wallet_with_authenticator(&config).await?; let listen_addr = multiaddr_to_socketaddr(&config.listener_address)?; let randomx_factory = RandomXFactory::new(config.max_randomx_vms); let xmrig_service = MergeMiningProxyService::new( diff --git a/applications/tari_merge_mining_proxy/src/proxy.rs 
b/applications/tari_merge_mining_proxy/src/proxy.rs index e67c7fbd76..667cb21320 100644 --- a/applications/tari_merge_mining_proxy/src/proxy.rs +++ b/applications/tari_merge_mining_proxy/src/proxy.rs @@ -54,6 +54,7 @@ use crate::{ common::{json_rpc, monero_rpc::CoreRpcErrorCode, proxy, proxy::convert_json_to_hyper_json_response}, config::MergeMiningProxyConfig, error::MmProxyError, + WalletGrpcClient, }; const LOG_TARGET: &str = "tari_mm_proxy::proxy"; @@ -72,7 +73,7 @@ impl MergeMiningProxyService { config: MergeMiningProxyConfig, http_client: reqwest::Client, base_node_client: grpc::base_node_client::BaseNodeClient, - wallet_client: grpc::wallet_client::WalletClient, + wallet_client: WalletGrpcClient, block_templates: BlockTemplateRepository, randomx_factory: RandomXFactory, ) -> Self { @@ -154,7 +155,7 @@ struct InnerService { block_templates: BlockTemplateRepository, http_client: reqwest::Client, base_node_client: grpc::base_node_client::BaseNodeClient, - wallet_client: grpc::wallet_client::WalletClient, + wallet_client: WalletGrpcClient, initial_sync_achieved: Arc, current_monerod_server: Arc>>, last_assigned_monerod_server: Arc>>, diff --git a/applications/tari_miner/Cargo.toml b/applications/tari_miner/Cargo.toml index 0a04750f94..3b2ba58728 100644 --- a/applications/tari_miner/Cargo.toml +++ b/applications/tari_miner/Cargo.toml @@ -4,7 +4,7 @@ authors = ["The Tari Development Community"] description = "The tari miner implementation" repository = "https://github.com/tari-project/tari" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] @@ -14,7 +14,7 @@ tari_common_types = { path = "../../base_layer/common_types" } tari_comms = { path = "../../comms/core" } tari_app_utilities = { path = "../tari_app_utilities" } tari_app_grpc = { path = "../tari_app_grpc" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = 
"https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } crossterm = { version = "0.17" } @@ -28,7 +28,7 @@ rand = "0.8" sha3 = "0.9" serde = { version = "1.0", default_features = false, features = ["derive"] } tonic = { version = "0.6.2", features = ["transport"] } -tokio = { version = "1.11", default_features = false, features = ["rt-multi-thread"] } +tokio = { version = "1.20", default_features = false, features = ["rt-multi-thread"] } thiserror = "1.0" reqwest = { version = "0.11", features = ["json"] } serde_json = "1.0.57" diff --git a/base_layer/common_types/Cargo.toml b/base_layer/common_types/Cargo.toml index ac5ecd45d4..896e8e4300 100644 --- a/base_layer/common_types/Cargo.toml +++ b/base_layer/common_types/Cargo.toml @@ -3,11 +3,11 @@ name = "tari_common_types" authors = ["The Tari Development Community"] description = "Tari cryptocurrency common types" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } base64 = "0.13.0" @@ -16,5 +16,5 @@ lazy_static = "1.4.0" rand = "0.8" serde = { version = "1.0.106", features = ["derive"] } thiserror = "1.0.29" -tokio = { version = "1.11", features = ["time", "sync"] } +tokio = { version = "1.20", features = ["time", "sync"] } zeroize = "1" diff --git a/base_layer/common_types/src/dammsum.rs b/base_layer/common_types/src/dammsum.rs new file mode 100644 index 0000000000..0ca10a1d71 --- /dev/null +++ b/base_layer/common_types/src/dammsum.rs @@ -0,0 +1,210 @@ +// Copyright 2020. 
The Tari Project +// +// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the +// following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following +// disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the +// following disclaimer in the documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote +// products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +use thiserror::Error; + +/// Calculates a checksum using the [DammSum](https://github.com/cypherstack/dammsum) algorithm. +/// +/// This approach uses a dictionary whose size must be `2^k` for some `k > 0`. +/// The algorithm accepts an array of arbitrary size, each of whose elements are integers in the range `[0, 2^k)`. +/// The checksum is a single element also within this range. +/// DammSum detects all single transpositions and substitutions. 
+/// +/// Note that for this implementation, we add the additional restriction that `k == 8`. +/// This is only because DammSum requires us to provide the coefficients for a certain type of polynomial, and +/// because it's unlikely for the alphabet size to change for this use case. +/// See the linked repository for more information, or if you need a different dictionary size. + +#[derive(Debug, Error, PartialEq)] +pub enum ChecksumError { + #[error("Input data is too short")] + InputDataTooShort, + #[error("Invalid checksum")] + InvalidChecksum, +} + +// Fixed for a dictionary size of `2^8 == 256` +const COEFFICIENTS: [u8; 3] = [4, 3, 1]; + +/// Compute the DammSum checksum for an array, each of whose elements are in the range `[0, 2^8)` +pub fn compute_checksum(data: &Vec) -> u8 { + let mut mask = 1u8; + + // Compute the bitmask (if possible) + for bit in COEFFICIENTS { + mask += 1u8 << bit; + } + + // Perform the Damm algorithm + let mut result = 0u8; + + for digit in data { + result ^= *digit; // add + let overflow = (result & (1 << 7)) != 0; + result <<= 1; // double + if overflow { + // reduce + result ^= mask; + } + } + + result +} + +/// Determine whether the array ends with a valid checksum +pub fn validate_checksum(data: &Vec) -> Result<(), ChecksumError> { + // Empty data is not allowed, nor data only consisting of a checksum + if data.len() < 2 { + return Err(ChecksumError::InputDataTooShort); + } + + // It's sufficient to check the entire array against a zero checksum + match compute_checksum(data) { + 0u8 => Ok(()), + _ => Err(ChecksumError::InvalidChecksum), + } +} + +#[cfg(test)] +mod test { + use rand::Rng; + + use crate::dammsum::{compute_checksum, validate_checksum, ChecksumError}; + + #[test] + /// Check that valid checksums validate + fn checksum_validate() { + const SIZE: usize = 33; + + // Generate random data + let mut rng = rand::thread_rng(); + let mut data: Vec = (0..SIZE).map(|_| rng.gen::()).collect(); + + // Compute and append the 
checksum + data.push(compute_checksum(&data)); + + // Validate + assert!(validate_checksum(&data).is_ok()); + } + + #[test] + /// Sanity check against memory-specific checksums + fn identical_checksum() { + const SIZE: usize = 33; + + // Generate identical random data + let mut rng = rand::thread_rng(); + let data_0: Vec = (0..SIZE).map(|_| rng.gen::()).collect(); + let data_1 = data_0.clone(); + + // Compute the checksums + let check_0 = compute_checksum(&data_0); + let check_1 = compute_checksum(&data_1); + + // They should be equal + assert_eq!(check_0, check_1); + } + + #[test] + /// Sanity check for known distinct checksums + fn distinct_checksum() { + // Fix two inputs that must have a unique checksum + let data_0 = vec![0u8]; + let data_1 = vec![1u8]; + + // Compute the checksums + let check_0 = compute_checksum(&data_0); + let check_1 = compute_checksum(&data_1); + + // They should be distinct + assert!(check_0 != check_1); + } + + #[test] + /// Test validation failure modes + fn failure_modes_validate() { + // Empty input data + let mut data: Vec = vec![]; + assert_eq!(validate_checksum(&data), Err(ChecksumError::InputDataTooShort)); + + // Input data is only a checksum + data = vec![0u8]; + assert_eq!(validate_checksum(&data), Err(ChecksumError::InputDataTooShort)); + } + + #[test] + /// Check that all single substitutions are detected + fn substitutions() { + const SIZE: usize = 33; + + // Generate random data + let mut rng = rand::thread_rng(); + let mut data: Vec = (0..SIZE).map(|_| rng.gen::()).collect(); + + // Compute the checksum + data.push(compute_checksum(&data)); + + // Validate + assert!(validate_checksum(&data).is_ok()); + + // Check all substitutions in all positions + for j in 0..data.len() { + let mut data_ = data.clone(); + for i in 0..=u8::MAX { + if data[j] == i { + continue; + } + data_[j] = i; + + assert_eq!(validate_checksum(&data_), Err(ChecksumError::InvalidChecksum)); + } + } + } + + #[test] + /// Check that all single
transpositions are detected + fn transpositions() { + const SIZE: usize = 33; + + // Generate random data + let mut rng = rand::thread_rng(); + let mut data: Vec = (0..SIZE).map(|_| rng.gen::()).collect(); + + // Compute the checksum + data.push(compute_checksum(&data)); + + // Validate + assert!(validate_checksum(&data).is_ok()); + + // Check all transpositions + for j in 0..(data.len() - 1) { + if data[j] == data[j + 1] { + continue; + } + + let mut data_ = data.clone(); + data_.swap(j, j + 1); + + assert_eq!(validate_checksum(&data_), Err(ChecksumError::InvalidChecksum)); + } + } +} diff --git a/base_layer/common_types/src/emoji.rs b/base_layer/common_types/src/emoji.rs index 250b26fcdf..2642bf8c41 100644 --- a/base_layer/common_types/src/emoji.rs +++ b/base_layer/common_types/src/emoji.rs @@ -22,22 +22,60 @@ use std::{ collections::HashMap, - convert::TryFrom, fmt::{Display, Error, Formatter}, + iter, }; -use tari_crypto::tari_utilities::{ - hex::{Hex, HexError}, - ByteArray, -}; +use tari_crypto::tari_utilities::ByteArray; use thiserror::Error; use crate::{ - luhn::{checksum, is_valid}, + dammsum::{compute_checksum, validate_checksum}, types::PublicKey, }; -const EMOJI: [char; 256] = [ +/// An emoji ID is a 33-character emoji representation of a public key that includes a checksum for safety. +/// Each character corresponds to a byte; the first 32 bytes are an encoding of the underlying public key. +/// The last byte is a DammSum checksum of all preceding bytes. +/// +/// Because the emoji character set contains 256 elements, it is more compact (in character count, not necessarily +/// in display width!) than other common encodings would provide, and is in theory easier for humans to examine. +/// +/// An emoji ID can be instantiated either from a public key or from a string of emoji characters, and can be +/// converted to either form as well. Checksum validation is done automatically on instantiation. 
+/// +/// # Example +/// +/// ``` +/// use tari_common_types::emoji::EmojiId; +/// +/// // Construct an emoji ID from an emoji string (this can fail) +/// let emoji_string = "🌴🐩🔌📌🚑🌰🎓🌴🐊🐌💕💡🐜📉👛🍵👛🐽🎂🐻🌀🍓😿🐭🐼🏀🎪💔💸🍅🔋🎒👡"; +/// let emoji_id_from_emoji_string = EmojiId::from_emoji_string(emoji_string); +/// assert!(emoji_id_from_emoji_string.is_ok()); +/// +/// // Get the public key +/// let public_key = emoji_id_from_emoji_string.unwrap().to_public_key(); +/// +/// // Reconstruct the emoji ID from the public key (this cannot fail) +/// let emoji_id_from_public_key = EmojiId::from_public_key(&public_key); +/// +/// // An emoji ID is deterministic +/// assert_eq!(emoji_id_from_public_key.to_emoji_string(), emoji_string); +/// +/// // Oh no! We swapped the first two emoji characters by mistake, so this should fail +/// let invalid_emoji_string = "🐩🌴🔌📌🚑🌰🎓🌴🐊🐌💕💡🐜📉👛🍵👛🐽🎂🐻🌀🍓😿🐭🐼🏀🎪💔💸🍅🔋🎒👡"; +/// assert!(EmojiId::from_emoji_string(invalid_emoji_string).is_err()); +/// ``` +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] +pub struct EmojiId(PublicKey); + +const DICT_SIZE: usize = 256; // number of elements in the symbol dictionary +const INTERNAL_SIZE: usize = 32; // number of bytes used for the internal representation (without checksum) +const CHECKSUM_SIZE: usize = 1; // number of bytes in the checksum + +// The emoji table, mapping byte values to emoji characters +const EMOJI: [char; DICT_SIZE] = [ '🌀', '🌂', '🌈', '🌊', '🌋', '🌍', '🌙', '🌝', '🌞', '🌟', '🌠', '🌰', '🌴', '🌵', '🌷', '🌸', '🌹', '🌻', '🌽', '🍀', '🍁', '🍄', '🍅', '🍆', '🍇', '🍈', '🍉', '🍊', '🍋', '🍌', '🍍', '🍎', '🍐', '🍑', '🍒', '🍓', '🍔', '🍕', '🍗', '🍚', '🍞', '🍟', '🍠', '🍣', '🍦', '🍩', '🍪', '🍫', '🍬', '🍭', '🍯', '🍰', '🍳', '🍴', '🍵', '🍶', '🍷', @@ -54,175 +92,178 @@ const EMOJI: [char; 256] = [ '🚦', '🚧', '🚨', '🚪', '🚫', '🚲', '🚽', '🚿', '🛁', ]; +// The reverse table, mapping emoji to characters to byte values lazy_static! 
{ - static ref REVERSE_EMOJI: HashMap = { - let mut m = HashMap::with_capacity(256); + static ref REVERSE_EMOJI: HashMap = { + let mut m = HashMap::with_capacity(DICT_SIZE); EMOJI.iter().enumerate().for_each(|(i, c)| { - m.insert(*c, i); + m.insert(*c, i as u8); }); m }; } -/// Emoji IDs are 33-byte long representations of a public key. The first 32 bytes are a mapping of a 256 byte emoji -/// dictionary to each of the 32 bytes in the public key. The 33rd emoji is a checksum character of the 32-length -/// string. -/// -/// Emoji IDs (32 characters minus checksum) are therefore more compact than Base58 or Base64 encodings (~44 characters) -/// or hexadecimal (64 characters) and in theory, more human readable. -/// -/// The checksum is calculated using a Luhn mod 256 checksum, which guards against most transposition errors. -/// -/// # Example -/// -/// ``` -/// use tari_common_types::emoji::EmojiId; -/// -/// assert!(EmojiId::is_valid("🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹🎒")); -/// let eid = EmojiId::from_hex("70350e09c474809209824c6e6888707b7dd09959aa227343b5106382b856f73a").unwrap(); -/// assert_eq!(eid.as_str(), "🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹🎒"); -/// ``` -#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] -pub struct EmojiId(String); - -/// Returns the current emoji set as a vector of char -pub const fn emoji_set() -> [char; 256] { +/// Returns the current emoji set as a character array +pub const fn emoji_set() -> [char; DICT_SIZE] { EMOJI } -impl EmojiId { - /// Construct an Emoji ID from the given pubkey. - pub fn from_pubkey(key: &PublicKey) -> Self { - EmojiId::from_bytes(key.as_bytes()) - } - - /// Try and construct an emoji ID from the given hex string. The method will fail if the hex is not a valid - /// representation of a public key. 
- pub fn from_hex(hex_key: &str) -> Result { - let key = PublicKey::from_hex(hex_key)?; - Ok(EmojiId::from_pubkey(&key)) - } - - /// Return the public key that this emoji ID represents - pub fn to_pubkey(&self) -> PublicKey { - let bytes = self.to_bytes(); - PublicKey::from_bytes(&bytes).unwrap() - } +#[derive(Debug, Error, PartialEq)] +pub enum EmojiIdError { + #[error("Invalid size")] + InvalidSize, + #[error("Invalid emoji character")] + InvalidEmoji, + #[error("Invalid checksum")] + InvalidChecksum, + #[error("Cannot recover public key")] + CannotRecoverPublicKey, +} - /// Checks whether a given string would be a valid emoji ID using the assertion that - /// i) The string is 33 bytes long - /// ii) The last byte is a valid checksum - pub fn is_valid(s: &str) -> bool { - EmojiId::str_to_pubkey(s).is_ok() - } +impl EmojiId { + /// Construct an emoji ID from an emoji string with checksum + pub fn from_emoji_string(emoji: &str) -> Result { + // The string must be the correct size, including the checksum + if emoji.chars().count() != INTERNAL_SIZE + CHECKSUM_SIZE { + return Err(EmojiIdError::InvalidSize); + } - pub fn str_to_pubkey(s: &str) -> Result { - let mut indices = Vec::with_capacity(33); - for c in s.chars() { + // Convert the emoji string to a byte array + let mut bytes = Vec::::with_capacity(INTERNAL_SIZE + CHECKSUM_SIZE); + for c in emoji.chars() { if let Some(i) = REVERSE_EMOJI.get(&c) { - indices.push(*i); + bytes.push(*i); } else { - return Err(EmojiIdError); + return Err(EmojiIdError::InvalidEmoji); } } - if !is_valid(&indices, 256) { - return Err(EmojiIdError); + + // Assert the checksum is valid + if validate_checksum(&bytes).is_err() { + return Err(EmojiIdError::InvalidChecksum); } - let bytes = EmojiId::byte_vec(s)?; - PublicKey::from_bytes(&bytes).map_err(|_| EmojiIdError) - } - /// Return the 33 character emoji string for this emoji ID - pub fn as_str(&self) -> &str { - &self.0 + // Remove the checksum + bytes.pop(); + + // Convert to a public 
key + match PublicKey::from_bytes(&bytes) { + Ok(public_key) => Ok(Self(public_key)), + Err(_) => Err(EmojiIdError::CannotRecoverPublicKey), + } } - /// Convert the emoji ID string into its associated public key, represented as a byte array - pub fn to_bytes(&self) -> Vec { - EmojiId::byte_vec(&self.0).unwrap() + /// Construct an emoji ID from a public key + pub fn from_public_key(public_key: &PublicKey) -> Self { + Self(public_key.clone()) } - fn from_bytes(bytes: &[u8]) -> Self { - let mut vec = Vec::::with_capacity(33); - bytes.iter().for_each(|b| vec.push((*b) as usize)); - let checksum = checksum(&vec, 256); - assert!(checksum < 256); - vec.push(checksum); - let id = vec.iter().map(|b| EMOJI[*b]).collect(); - Self(id) + /// Convert the emoji ID to an emoji string with checksum + pub fn to_emoji_string(&self) -> String { + // Convert the public key to bytes and compute the checksum + let bytes = self.0.as_bytes().to_vec(); + bytes + .iter() + .chain(iter::once(&compute_checksum(&bytes))) + .map(|b| EMOJI[*b as usize]) + .collect::() } - fn byte_vec(s: &str) -> Result, EmojiIdError> { - let mut v = Vec::with_capacity(32); - for c in s.chars().take(32) { - if let Some(index) = REVERSE_EMOJI.get(&c) { - v.push(u8::try_from(*index).unwrap()); - } else { - return Err(EmojiIdError); - } - } - Ok(v) + /// Convert the emoji ID to a public key + pub fn to_public_key(&self) -> PublicKey { + self.0.clone() } } impl Display for EmojiId { fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), Error> { - fmt.write_str(self.as_str()) + fmt.write_str(&self.to_emoji_string()) } } -// TODO: We have to add more details -#[derive(Debug, Error)] -#[error("emoji id error")] -pub struct EmojiIdError; - #[cfg(test)] mod test { - use tari_crypto::tari_utilities::hex::Hex; + use std::iter; - use crate::{emoji::EmojiId, types::PublicKey}; + use tari_crypto::keys::{PublicKey as PublicKeyTrait, SecretKey}; + + use crate::{ + dammsum::compute_checksum, + emoji::{emoji_set, EmojiId, 
EmojiIdError, CHECKSUM_SIZE, INTERNAL_SIZE}, + types::{PrivateKey, PublicKey}, + }; #[test] - fn convert_key() { - let pubkey = PublicKey::from_hex("70350e09c474809209824c6e6888707b7dd09959aa227343b5106382b856f73a").unwrap(); - let eid = EmojiId::from_hex("70350e09c474809209824c6e6888707b7dd09959aa227343b5106382b856f73a").unwrap(); - assert_eq!( - eid.as_str(), - "🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹🎒" - ); - assert_eq!(EmojiId::from_pubkey(&pubkey), eid); + /// Test valid emoji ID + fn valid_emoji_id() { + // Generate random public key + let mut rng = rand::thread_rng(); + let public_key = PublicKey::from_secret_key(&PrivateKey::random(&mut rng)); + + // Generate an emoji ID from the public key and ensure we recover it + let emoji_id_from_public_key = EmojiId::from_public_key(&public_key); + assert_eq!(emoji_id_from_public_key.to_public_key(), public_key); + + // Check the size of the corresponding emoji string + let emoji_string = emoji_id_from_public_key.to_emoji_string(); + assert_eq!(emoji_string.chars().count(), INTERNAL_SIZE + CHECKSUM_SIZE); + + // Generate an emoji ID from the emoji string and ensure we recover it + let emoji_id_from_emoji_string = EmojiId::from_emoji_string(&emoji_string).unwrap(); + assert_eq!(emoji_id_from_emoji_string.to_emoji_string(), emoji_string); + + // Return to the original public key for good measure + assert_eq!(emoji_id_from_emoji_string.to_public_key(), public_key); + } + + #[test] + /// Test invalid size + fn invalid_size() { + // This emoji string is too short to be a valid emoji ID + let emoji_string = "🌴🐩🔌📌🚑🌰🎓🌴🐊🐌💕💡🐜📉👛🍵👛🐽🎂🐻🌀🍓😿🐭🐼🏀🎪💔💸🍅🔋🎒"; + assert_eq!(EmojiId::from_emoji_string(emoji_string), Err(EmojiIdError::InvalidSize)); + } + + #[test] + /// Test invalid emoji + fn invalid_emoji() { + // This emoji string contains an invalid emoji character + let emoji_string = "🌴🐩🔌📌🚑🌰🎓🌴🐊🐌💕💡🐜📉👛🍵👛🐽🎂🐻🌀🍓😿🐭🐼🏀🎪💔💸🍅🔋🎒🎅"; assert_eq!( - &eid.to_bytes().to_hex(), - "70350e09c474809209824c6e6888707b7dd09959aa227343b5106382b856f73a" + 
EmojiId::from_emoji_string(emoji_string), + Err(EmojiIdError::InvalidEmoji) ); + } + + #[test] + /// Test invalid checksum + fn invalid_checksum() { + // This emoji string contains an invalid checksum + let emoji_string = "🌴🐩🔌📌🚑🌰🎓🌴🐊🐌💕💡🐜📉👛🍵👛🐽🎂🐻🌀🍓😿🐭🐼🏀🎪💔💸🍅🔋🎒🎒"; assert_eq!( - EmojiId::str_to_pubkey("🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹🎒").unwrap(), - pubkey + EmojiId::from_emoji_string(emoji_string), + Err(EmojiIdError::InvalidChecksum) ); } #[test] - fn is_valid() { - let eid = EmojiId::from_hex("70350e09c474809209824c6e6888707b7dd09959aa227343b5106382b856f73a").unwrap(); - // Valid emojiID - assert!(EmojiId::is_valid(eid.as_str())); - assert!(!EmojiId::is_valid(""), "Emoji ID too short"); - assert!(!EmojiId::is_valid("🌂"), "Emoji ID too short"); - assert!( - !EmojiId::is_valid("🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹🎒"), - "Emoji ID too short" - ); - assert!( - !EmojiId::is_valid("70350e09c474809209824c6e6888707b7dd09959aa227343b5106382b856f73a"), - "Not emoji string" - ); - assert!( - !EmojiId::is_valid("🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹"), - "No checksum" - ); - assert!( - !EmojiId::is_valid("🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹📝"), - "Wrong checksum" + /// Test invalid public key + fn invalid_public_key() { + // This byte representation does not represent a valid public key + let mut bytes = vec![0u8; INTERNAL_SIZE]; + bytes[0] = 1; + + // Convert to an emoji string and manually add a valid checksum + let emoji_set = emoji_set(); + let emoji_string = bytes + .iter() + .chain(iter::once(&compute_checksum(&bytes))) + .map(|b| emoji_set[*b as usize]) + .collect::(); + + assert_eq!( + EmojiId::from_emoji_string(&emoji_string), + Err(EmojiIdError::CannotRecoverPublicKey) ); } } diff --git a/base_layer/common_types/src/lib.rs b/base_layer/common_types/src/lib.rs index e0a0ee4310..df1e3012cc 100644 --- a/base_layer/common_types/src/lib.rs +++ b/base_layer/common_types/src/lib.rs @@ -21,9 +21,9 @@ // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
pub mod chain_metadata; +pub mod dammsum; pub mod emoji; pub mod grpc_authentication; -pub mod luhn; pub mod transaction; mod tx_id; pub mod types; diff --git a/base_layer/common_types/src/luhn.rs b/base_layer/common_types/src/luhn.rs deleted file mode 100644 index 3225b42ebe..0000000000 --- a/base_layer/common_types/src/luhn.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2020. The Tari Project -// -// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the -// following conditions are met: -// -// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following -// disclaimer. -// -// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the -// following disclaimer in the documentation and/or other materials provided with the distribution. -// -// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote -// products derived from this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, -// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE -// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -/// Calculates a checksum using the [Luhn mod n algorithm](https://en.wikipedia.org/wiki/Luhn_mod_N_algorithm). The -/// input to the function is an array of indices, each of which is strictly less than `dict_len`, and the size of the -/// dictionary (`dict_len`). The result is the checksum character, also strictly less than `dict_len`. -pub fn checksum(arr: &[usize], dict_len: usize) -> usize { - // Starting from the right and working leftwards is easier since - let (sum, _) = arr.iter().rev().fold((0usize, 2usize), |(sum, factor), digit| { - let mut addend = factor * *digit; - let factor = factor ^ 3; // Toggles between 1 and 2 - addend = (addend / dict_len) + addend % dict_len; - (sum + addend, factor) - }); - (dict_len - (sum % dict_len)) % dict_len -} - -/// Checks whether the last digit in the array matches the checksum for the array minus the last digit. -pub fn is_valid(arr: &[usize], dict_len: usize) -> bool { - if arr.len() < 2 { - return false; - } - let cs = checksum(&arr[..arr.len() - 1], dict_len); - cs == arr[arr.len() - 1] -} - -#[cfg(test)] -mod test { - use crate::luhn::{checksum, is_valid}; - - #[test] - fn luhn_6() { - assert_eq!(checksum(&[0, 1, 2, 3, 4, 5], 6), 4); - for i in 0..6 { - let valid = is_valid(&[0, 1, 2, 3, 4, 5, i], 6); - match i { - 4 => assert!(valid), - _ => assert!(!valid), - } - } - } - - #[test] - fn luhn_10() { - assert_eq!(checksum(&[7, 9, 9, 2, 7, 3, 9, 8, 7, 1], 10), 3); - for i in 0..10 { - let valid = is_valid(&[7, 9, 9, 2, 7, 3, 9, 8, 7, 1, i], 10); - match i { - 3 => assert!(valid), - _ => assert!(!valid), - } - } - assert_eq!(checksum(&[1, 0, 4], 10), 0); - assert_eq!(checksum(&[9, 1, 2, 4, 3, 4, 3, 3, 0], 10), 3); - assert!(is_valid(&[9, 1, 2, 4, 3, 4, 3, 3, 0, 3], 10)); - // It doesn't catch some transpose errors - assert!(is_valid(&[0, 1, 2, 4, 3, 4, 3, 3, 9, 3], 10)); - } -} diff --git a/base_layer/common_types/src/transaction.rs b/base_layer/common_types/src/transaction.rs index 2e949e2de3..33c7724910 100644 
--- a/base_layer/common_types/src/transaction.rs +++ b/base_layer/common_types/src/transaction.rs @@ -3,6 +3,7 @@ use std::{ convert::TryFrom, + fmt, fmt::{Display, Error, Formatter}, }; @@ -107,6 +108,8 @@ pub enum ImportStatus { FauxUnconfirmed, /// This transaction import status is used when a one-sided transaction has been scanned and confirmed FauxConfirmed, + /// This is a coinbase that is imported + Coinbase, } impl TryFrom for TransactionStatus { @@ -117,6 +120,7 @@ impl TryFrom for TransactionStatus { ImportStatus::Imported => Ok(TransactionStatus::Imported), ImportStatus::FauxUnconfirmed => Ok(TransactionStatus::FauxUnconfirmed), ImportStatus::FauxConfirmed => Ok(TransactionStatus::FauxConfirmed), + ImportStatus::Coinbase => Ok(TransactionStatus::Coinbase), } } } @@ -129,11 +133,23 @@ impl TryFrom for ImportStatus { TransactionStatus::Imported => Ok(ImportStatus::Imported), TransactionStatus::FauxUnconfirmed => Ok(ImportStatus::FauxUnconfirmed), TransactionStatus::FauxConfirmed => Ok(ImportStatus::FauxConfirmed), + TransactionStatus::Coinbase => Ok(ImportStatus::Coinbase), _ => Err(TransactionConversionError { code: i32::MAX }), } } } +impl fmt::Display for ImportStatus { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { + match self { + ImportStatus::Imported => write!(f, "Imported"), + ImportStatus::FauxUnconfirmed => write!(f, "FauxUnconfirmed"), + ImportStatus::FauxConfirmed => write!(f, "FauxConfirmed"), + ImportStatus::Coinbase => write!(f, "Coinbase"), + } + } +} + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub enum TransactionDirection { Inbound, diff --git a/base_layer/core/Cargo.toml b/base_layer/core/Cargo.toml index e08f0194c1..bb7b0de177 100644 --- a/base_layer/core/Cargo.toml +++ b/base_layer/core/Cargo.toml @@ -6,7 +6,7 @@ repository = "https://github.com/tari-project/tari" homepage = "https://tari.com" readme = "README.md" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = 
"2018" [features] @@ -19,20 +19,20 @@ avx2 = ["tari_crypto/simd_backend"] benches = ["base_node", "criterion"] [dependencies] -tari_common = { version = "^0.37", path = "../../common" } -tari_common_types = { version = "^0.37", path = "../../base_layer/common_types" } -tari_comms = { version = "^0.37", path = "../../comms/core" } -tari_comms_dht = { version = "^0.37", path = "../../comms/dht" } -tari_comms_rpc_macros = { version = "^0.37", path = "../../comms/rpc_macros" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_common = { version = "^0.38", path = "../../common" } +tari_common_types = { version = "^0.38", path = "../../base_layer/common_types" } +tari_comms = { version = "^0.38", path = "../../comms/core" } +tari_comms_dht = { version = "^0.38", path = "../../comms/dht" } +tari_comms_rpc_macros = { version = "^0.38", path = "../../comms/rpc_macros" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_metrics = { path = "../../infrastructure/metrics" } -tari_mmr = { version = "^0.37", path = "../../base_layer/mmr", optional = true, features = ["native_bitmap"] } -tari_p2p = { version = "^0.37", path = "../../base_layer/p2p" } +tari_mmr = { version = "^0.38", path = "../../base_layer/mmr", optional = true, features = ["native_bitmap"] } +tari_p2p = { version = "^0.38", path = "../../base_layer/p2p" } tari_script = { path = "../../infrastructure/tari_script" } -tari_service_framework = { version = "^0.37", path = "../service_framework" } -tari_shutdown = { version = "^0.37", path = "../../infrastructure/shutdown" } -tari_storage = { version = "^0.37", path = "../../infrastructure/storage" } -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_service_framework = { version = "^0.38", path = "../service_framework" } +tari_shutdown = { version = "^0.38", path = "../../infrastructure/shutdown" } +tari_storage = { version = "^0.38", 
path = "../../infrastructure/storage" } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } async-trait = "0.1.50" @@ -70,21 +70,21 @@ serde_repr = "0.1.8" sha3 = "0.9" strum_macros = "0.22" thiserror = "1.0.26" -tokio = { version = "1.11", features = ["time", "sync", "macros"] } +tokio = { version = "1.20", features = ["time", "sync", "macros"] } tracing = "0.1.26" tracing-attributes = "*" uint = { version = "0.9", default-features = false } [dev-dependencies] -tari_p2p = { version = "^0.37", path = "../../base_layer/p2p", features = ["test-mocks"] } -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_p2p = { version = "^0.38", path = "../../base_layer/p2p", features = ["test-mocks"] } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } config = { version = "0.13.0" } env_logger = "0.7.0" tempfile = "3.1.0" [build-dependencies] -tari_common = { version = "^0.37", path = "../../common", features = ["build"] } +tari_common = { version = "^0.38", path = "../../common", features = ["build"] } [[bench]] name = "mempool" diff --git a/base_layer/core/src/base_node/service/service.rs b/base_layer/core/src/base_node/service/service.rs index cfb5817912..649b156c0a 100644 --- a/base_layer/core/src/base_node/service/service.rs +++ b/base_layer/core/src/base_node/service/service.rs @@ -389,6 +389,7 @@ async fn handle_incoming_request( .send_direct( origin_public_key, OutboundDomainMessage::new(&TariMessageType::BaseNodeResponse, message), + "Outbound response message from base node".to_string(), ) .await?; @@ -473,6 +474,14 @@ async fn handle_outbound_request( node_id: Option, service_request_timeout: Duration, ) -> Result<(), CommsInterfaceError> { + let debug_info = format!( + "Node request:{} to {}", + &request, + node_id + .as_ref() + .map(|n| n.short_str()) + .unwrap_or_else(|| 
"random".to_string()) + ); let request_key = generate_request_key(&mut OsRng); let service_request = proto::BaseNodeServiceRequest { request_key, @@ -480,6 +489,7 @@ async fn handle_outbound_request( }; let mut send_msg_params = SendMessageParams::new(); + send_msg_params.with_debug_info(debug_info); match node_id { Some(node_id) => send_msg_params.direct_node_id(node_id), None => send_msg_params.random(1), @@ -565,6 +575,7 @@ async fn handle_outbound_block( &TariMessageType::NewBlock, shared_protos::core::NewBlock::from(new_block), ), + "Outbound new block from base node".to_string(), ) .await; if let Err(e) = result { diff --git a/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs b/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs index b578e6ac08..8a5542b893 100644 --- a/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs +++ b/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs @@ -29,7 +29,12 @@ use std::{ use futures::StreamExt; use log::*; use num_format::{Locale, ToFormattedString}; -use tari_comms::{connectivity::ConnectivityRequester, peer_manager::NodeId, PeerConnection}; +use tari_comms::{ + connectivity::ConnectivityRequester, + peer_manager::NodeId, + protocol::rpc::{RpcClient, RpcError}, + PeerConnection, +}; use tari_utilities::hex::Hex; use tracing; @@ -119,8 +124,11 @@ impl BlockSynchronizer { let sync_peer_node_ids = self.sync_peers.iter().map(|p| p.node_id()).cloned().collect::>(); for (i, node_id) in sync_peer_node_ids.iter().enumerate() { let mut conn = self.connect_to_sync_peer(node_id.clone()).await?; + let config = RpcClient::builder() + .with_deadline(self.config.rpc_deadline) + .with_deadline_grace_period(Duration::from_secs(5)); let mut client = conn - .connect_rpc_using_builder(rpc::BaseNodeSyncRpcClient::builder().with_deadline(Duration::from_secs(60))) + .connect_rpc_using_builder::(config) .await?; let latency = client .get_last_request_latency() @@ -158,6 +166,7 @@ impl BlockSynchronizer { 
self.ban_peer(node_id, &err).await?; return Err(err.into()); }, + Err(err @ BlockSyncError::RpcError(RpcError::ReplyTimeout)) | Err(err @ BlockSyncError::MaxLatencyExceeded { .. }) => { warn!(target: LOG_TARGET, "{}", err); if i == self.sync_peers.len() - 1 { diff --git a/base_layer/core/src/base_node/sync/config.rs b/base_layer/core/src/base_node/sync/config.rs index ec26d2f918..5d3a331aae 100644 --- a/base_layer/core/src/base_node/sync/config.rs +++ b/base_layer/core/src/base_node/sync/config.rs @@ -48,17 +48,21 @@ pub struct BlockchainSyncConfig { pub forced_sync_peers: Vec, /// Number of threads to use for validation pub validation_concurrency: usize, + /// The RPC deadline to set on sync clients. If this deadline is reached, a new sync peer will be selected for + /// sync. + pub rpc_deadline: Duration, } impl Default for BlockchainSyncConfig { fn default() -> Self { Self { - initial_max_sync_latency: Duration::from_secs(10), + initial_max_sync_latency: Duration::from_secs(20), max_latency_increase: Duration::from_secs(2), ban_period: Duration::from_secs(30 * 60), short_ban_period: Duration::from_secs(60), forced_sync_peers: Default::default(), validation_concurrency: 6, + rpc_deadline: Duration::from_secs(10), } } } diff --git a/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs b/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs index 28b3be7e46..889a3568c0 100644 --- a/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs +++ b/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs @@ -31,7 +31,7 @@ use tari_common_types::{chain_metadata::ChainMetadata, types::HashOutput}; use tari_comms::{ connectivity::ConnectivityRequester, peer_manager::NodeId, - protocol::rpc::{RpcError, RpcHandshakeError}, + protocol::rpc::{RpcClient, RpcError, RpcHandshakeError}, PeerConnection, }; use tari_utilities::hex::Hex; @@ -136,7 +136,12 @@ impl<'a, B: BlockchainBackend + 'static> HeaderSynchronizer<'a, B> { "Attempting to synchronize 
headers with `{}`", node_id ); - let mut client = conn.connect_rpc::().await?; + let config = RpcClient::builder() + .with_deadline(self.config.rpc_deadline) + .with_deadline_grace_period(Duration::from_secs(5)); + let mut client = conn + .connect_rpc_using_builder::(config) + .await?; let latency = client .get_last_request_latency() @@ -208,6 +213,7 @@ impl<'a, B: BlockchainBackend + 'static> HeaderSynchronizer<'a, B> { self.ban_peer_long(node_id, BanReason::GeneralHeaderSyncFailure(err)) .await?; }, + Err(err @ BlockHeaderSyncError::RpcError(RpcError::ReplyTimeout)) | Err(err @ BlockHeaderSyncError::MaxLatencyExceeded { .. }) => { warn!(target: LOG_TARGET, "{}", err); if i == self.sync_peers.len() - 1 { diff --git a/base_layer/core/src/base_node/sync/horizon_state_sync/synchronizer.rs b/base_layer/core/src/base_node/sync/horizon_state_sync/synchronizer.rs index 0b35d8d6eb..3e01e96910 100644 --- a/base_layer/core/src/base_node/sync/horizon_state_sync/synchronizer.rs +++ b/base_layer/core/src/base_node/sync/horizon_state_sync/synchronizer.rs @@ -32,7 +32,11 @@ use croaring::Bitmap; use futures::{stream::FuturesUnordered, StreamExt}; use log::*; use tari_common_types::types::{Commitment, RangeProofService}; -use tari_comms::{connectivity::ConnectivityRequester, peer_manager::NodeId}; +use tari_comms::{ + connectivity::ConnectivityRequester, + peer_manager::NodeId, + protocol::rpc::{RpcClient, RpcError}, +}; use tari_crypto::{commitment::HomomorphicCommitment, tari_utilities::hex::Hex}; use tokio::task; @@ -178,7 +182,10 @@ impl<'a, B: BlockchainBackend + 'static> HorizonStateSynchronization<'a, B> { async fn sync(&mut self, header: &BlockHeader) -> Result<(), HorizonSyncError> { for (i, sync_peer) in self.sync_peers.iter().enumerate() { let mut connection = self.connectivity.dial_peer(sync_peer.node_id().clone()).await?; - let mut client = connection.connect_rpc::().await?; + let config = RpcClient::builder() + .with_deadline(self.config.rpc_deadline) + 
.with_deadline_grace_period(Duration::from_secs(3)); + let mut client = connection.connect_rpc_using_builder(config).await?; match self.begin_sync(sync_peer.clone(), &mut client, header).await { Ok(_) => match self.finalize_horizon_sync(sync_peer).await { @@ -188,6 +195,7 @@ impl<'a, B: BlockchainBackend + 'static> HorizonStateSynchronization<'a, B> { return Err(err); }, }, + Err(err @ HorizonSyncError::RpcError(RpcError::ReplyTimeout)) | Err(err @ HorizonSyncError::MaxLatencyExceeded { .. }) => { warn!(target: LOG_TARGET, "{}", err); if i == self.sync_peers.len() - 1 { diff --git a/base_layer/core/src/base_node/sync/rpc/service.rs b/base_layer/core/src/base_node/sync/rpc/service.rs index 436ebccd8f..8ea2c04ce1 100644 --- a/base_layer/core/src/base_node/sync/rpc/service.rs +++ b/base_layer/core/src/base_node/sync/rpc/service.rs @@ -35,7 +35,7 @@ use tari_comms::{ }; use tari_utilities::hex::Hex; use tokio::{ - sync::{mpsc, RwLock}, + sync::{mpsc, Mutex}, task, }; use tracing::{instrument, span, Instrument, Level}; @@ -65,7 +65,7 @@ const LOG_TARGET: &str = "c::base_node::sync_rpc"; pub struct BaseNodeSyncRpcService { db: AsyncBlockchainDb, - active_sessions: RwLock>>, + active_sessions: Mutex>>, base_node_service: LocalNodeCommsInterface, } @@ -73,7 +73,7 @@ impl BaseNodeSyncRpcService { pub fn new(db: AsyncBlockchainDb, base_node_service: LocalNodeCommsInterface) -> Self { Self { db, - active_sessions: RwLock::new(Vec::new()), + active_sessions: Mutex::new(Vec::new()), base_node_service, } } @@ -84,7 +84,7 @@ impl BaseNodeSyncRpcService { } pub async fn try_add_exclusive_session(&self, peer: NodeId) -> Result, RpcStatus> { - let mut lock = self.active_sessions.write().await; + let mut lock = self.active_sessions.lock().await; *lock = lock.drain(..).filter(|l| l.strong_count() > 0).collect(); debug!(target: LOG_TARGET, "Number of active sync sessions: {}", lock.len()); diff --git a/base_layer/core/src/blocks/genesis_block.rs 
b/base_layer/core/src/blocks/genesis_block.rs index dad7848d8b..9374da0dcf 100644 --- a/base_layer/core/src/blocks/genesis_block.rs +++ b/base_layer/core/src/blocks/genesis_block.rs @@ -113,13 +113,13 @@ fn get_igor_genesis_block_raw() -> Block { // Note: Use print_new_genesis_block_igor in core/tests/helpers/block_builders.rs to generate the required fields // below let sig = Signature::new( - PublicKey::from_hex("e22ad0f02bfc08e3b04c667cca050072e091a477c3a4d10345c4114ed4266818").unwrap(), - PrivateKey::from_hex("2685f18306717ed7ccfad9f96185e5cbca52b3fe109673b1075d130fad54f60e").unwrap(), + PublicKey::from_hex("00f3a682fd38a79da6666301a727953ab55b880cd49b3c1bb24b2671756bce6a").unwrap(), + PrivateKey::from_hex("7dbba37445fadf0fa0a64fc0b52afa652fb487b92429634298d83d794b38d905").unwrap(), ); let coinbase_meta_sig = CommitmentSignature::new( - Commitment::from_hex("ecab12e0bab23ab32a0014b592fcdb4d22be7e02cb5031632ad9c3c9b2560229").unwrap(), - PrivateKey::from_hex("8a87214524cb2025a3dbaaf7cb5a6287c4d37f7521e667cdc253909adb48c70e").unwrap(), - PrivateKey::from_hex("6b5f940eaba65b6a46edc112ab9186be9310aeb086cf0878a68bccf73f712600").unwrap(), + Commitment::from_hex("ac07390c975c96d3ebe6bc591571f31edd5426f89e0c5b8a8552d2bcdc335b43").unwrap(), + PrivateKey::from_hex("1a5c8bf91d85ac3a9bb616caae072f95f1a8629cbfec712a5f3cdb94981da502").unwrap(), + PrivateKey::from_hex("40976c932da8f0ee0adce6861fb3860639af6e38e280be0108bd80127ff78607").unwrap(), ); let mut body = AggregateBody::new( vec![], @@ -130,14 +130,14 @@ fn get_igor_genesis_block_raw() -> Block { .. 
Default::default() }, Commitment::from_hex( - "dc07cc8ad8106d33d38239f63bc308959f48d47c8dbe2a65e32662b93262ba09", + "acab973deac0b901fc966934f33e53e84be0902b863d6b31c07aee1077bb6d74", ) .unwrap(), - BulletRangeProof::from_hex("01002c676a37bd85610b752598fdc493b0d0023b752b5c620e052731ae1278721dcc1ac376f04e6196083a830115a07452a79c82334b2130bec19784dc60d7dd4418f1fafe27b7519ba72c12dad7e8aa98ca52b5db9e051dc8d58a39f47157d72496c13c839f89fa58fa0c9303d2bf2d51bd8fbe00105602c69a75b9d1f9673f75a6abc51ab102e2ffafe96c5b13d49e2eae5a506d768dd4647aee98fa75b9a364cc3c29b0c01ca7fcc6fbf212e592f68bf104ef2c1cc5202ec500e5b37949e95062090b3d947427a7459b128215dbe75629656651362298691a8ef895d7b0bb3090b15b807a38eba20da1349dbc9bd6bb221fee6a79183433ddac29ef2027877a0230eda904e275ab6c9d87d9d2ea0fca13c92cb678edf5782eea1bdcec0d200c944a9e8c0a20ddcbc9e107ec7a84e7a6a498ba059f9bd9aded2c427a8c021e1c28e961c2f6cc4f490fda74407d99ac3cd54737050e68d7208eea5a7cfa85000fded0cfc6422a66da834bdcb14402bf1857467619143ded6de7a454b778dc1015f848bd278fe2d415334bc29b1113a76bcac126d00a9803ed931ec56fa9f085428ac9197191295e05bbae762092f0918d489a4e39e7220d91f2fc0a7de9b45676eee23c36d05967dd00073e436992456adf5974c3acc618fc11b6a216d8647a6fbaf033cd25898ee229002fec218f531a8de40d0dc4a352296bb92ececc5f4f0e46c1f81ba195fa8a667258bcabe027a44ccee5154fa2821b90ce2694c90883028db0ccd61c59fc8123b9d60bc3a4ed6024addee150c04f0cf410701a865fae07").unwrap(), + 
BulletRangeProof::from_hex("015af6dfef9563da065ac5d55eee0acda4a321222c75ef5c4415b0968c4f923d4f62cef4b10f247b4ccfcde7c83dff19605301af7c79a08d9bdd78d6911b3c2d017e124a1787a7f2bc7fe0f4785d11ce979c7d22774f9c45c063767144cc94c53d64936606452e2fb278094fb91ff3fc4eda9791e8f27f124bca489c3f7c979e04a2a42074c4aa23551a1cb1b132c2af01b9b3541632aaeaef0b2be48f7e61465d2829f7719eaff4127f5d19217ff6a36a19ea8bfb22aa0a453882528a6488be3d4652f275dc82f675eedebe6c32a691e80da39a48a833d26e07b733a253c804382286a3d6d9563d1c42fd4894694c70f07dcbaa21d014dbf900d2a176ce147326dc73e344cbdbc7866034693680bc7b4396bb1b1e1e6995d3564ae7c3884447561019ecbd60b45804aae214115320e2768565e1a145504e9dc95aed921824cd0c6083fefe5a328f17759f47e9288e57dad841e56b1a2a34b2d1416b4c7758e425c2b5925e957a3b78bec21f05c6bd7e0b4c259bc500247f4b0068134a343a90659a48e2ac11f2b1f890352c3b260fa78cb16d0a1d5459c6411ba34993bac7da6c1a662787ee15146a3c8623dff7fb7fa4143caeb431f53c6be02ef260d36eeb209273ff1c5a50413f61d8046ae6bac9e17c198503d602c10165d17522519aa7306b39661e3200a65fe234f67e125e80bdf46019f422325af7743d3ef873bb3e0ba2b2ca472170bd25168ad83870af30245ec5e7f464d2119054f76be999f90d07510193e01135ce0f16026b00128913fae48f2f8612a1fd5f7e4e40cb33c36c09").unwrap(), // For genesis block: A default script can never be spent, intentionally script!(Nop), // Script offset never checked for coinbase, thus can use default - PublicKey::from_hex("9234814d039bf3ac6545ed40a63570a2720b9376dcbde0bc1a75d081eec50446").unwrap(), + PublicKey::from_hex("7048544e58d20ae730105da01a0bdb2cd3b968c15a030a7d280ac69d7a7a1f30").unwrap(), // For genesis block: Metadata signature will never be checked coinbase_meta_sig, Covenant::default(), @@ -150,7 +150,7 @@ fn get_igor_genesis_block_raw() -> Block { MicroTari(0), 0, Commitment::from_hex( - "18d80887a36fae6c2cbef5941d5eedd927aeae1003798bb63c3f292cb68cbe00", + "8ecaca61f68daea32874526b81aa909dd6b36e807b7825ad3d2943070bb30f1e", ) .unwrap(), sig,None @@ -159,7 +159,7 @@ fn get_igor_genesis_block_raw() -> Block { ); body.sort(); // 
set genesis timestamp - let genesis = DateTime::parse_from_rfc2822("08 Aug 2022 10:00:00 +0200").unwrap(); + let genesis = DateTime::parse_from_rfc2822("30 Aug 2022 11:48:00 +0100").unwrap(); #[allow(clippy::cast_sign_loss)] let timestamp = genesis.timestamp() as u64; let vn_mmr = ValidatorNodeMmr::new(Vec::new()); @@ -169,11 +169,11 @@ fn get_igor_genesis_block_raw() -> Block { height: 0, prev_hash: FixedHash::zero(), timestamp: timestamp.into(), - output_mr: FixedHash::from_hex("55cd15eb1966b15e3dc8f8066371702a86b573915cd409cf8c20c7529a73c027").unwrap(), - witness_mr: FixedHash::from_hex("188b79e4cd780914fc0dfe7d57b9f32bfae04293052b867fce25c4af8b5191dc") + output_mr: FixedHash::from_hex("30e3813df61c3fb129c92f78e32422b362a12f6610e13adc6c8d7f6b952443e5").unwrap(), + witness_mr: FixedHash::from_hex("6adf5e059d7fc60e1c5a0954556adf8246d5c60554fce7d2fddcb338a870acd8") .unwrap(), output_mmr_size: 1, - kernel_mr: FixedHash::from_hex("2e3fde9cd20b48f699523d1b107b4742c6aa03ed1cb210f580d0c7426463b966").unwrap(), + kernel_mr: FixedHash::from_hex("22e57d85985535c197554232f9d223dc3575d57378ef7a4198905ebb515daf71").unwrap(), kernel_mmr_size: 1, input_mr: FixedHash::zero(), total_kernel_offset: PrivateKey::from_hex( @@ -217,26 +217,26 @@ pub fn get_esmeralda_genesis_block() -> ChainBlock { block.body.sort(); // Use this code if you need to generate new Merkle roots - // NB: `esmerlada_genesis_sanity_check` must pass + // NB: `esmeralda_genesis_sanity_check` must pass // // use croaring::Bitmap; // use std::convert::TryFrom; // use crate::{KernelMmr, MutableOutputMmr, WitnessMmr}; - // + // let mut kernel_mmr = KernelMmr::new(Vec::new()); // for k in block.body.kernels() { // println!("k: {}", k); // kernel_mmr.push(k.hash().to_vec()).unwrap(); // } - // + // let mut witness_mmr = WitnessMmr::new(Vec::new()); // let mut output_mmr = MutableOutputMmr::new(Vec::new(), Bitmap::create()).unwrap(); - // + // for o in block.body.outputs() { // 
witness_mmr.push(o.witness_hash().to_vec()).unwrap(); // output_mmr.push(o.hash().to_vec()).unwrap(); // } - // + // block.header.kernel_mr = FixedHash::try_from(kernel_mmr.get_merkle_root().unwrap()).unwrap(); // block.header.witness_mr = FixedHash::try_from(witness_mmr.get_merkle_root().unwrap()).unwrap(); // block.header.output_mr = FixedHash::try_from(output_mmr.get_merkle_root().unwrap()).unwrap(); @@ -246,11 +246,11 @@ pub fn get_esmeralda_genesis_block() -> ChainBlock { // Hardcode the Merkle roots once they've been computed above block.header.kernel_mr = - FixedHash::from_hex("1bfbc0e257b4876a88025bc07d5a9bc09dde36441fcb597dcee326ab76b90eed").unwrap(); + FixedHash::from_hex("49bec44ce879f529523c593d2f533fffdc2823512d673e78e1bb6b2c28d9fcf5").unwrap(); block.header.witness_mr = - FixedHash::from_hex("0abe819c208dc98149699dd009715b8e302c666b3322a67d31f1d74a0593999f").unwrap(); + FixedHash::from_hex("8e6bb075239bf307e311f497d35c12c77c4563f218c156895e6630a7d9633de3").unwrap(); block.header.output_mr = - FixedHash::from_hex("69b9646e0e57a64a4ab44d05e6c45146bfc02c7449cf5cc6e404190064ee4309").unwrap(); + FixedHash::from_hex("163304b3fe0f9072170db341945854bf88c8e23e23ecaac3ed86b9231b20e16f").unwrap(); let accumulated_data = BlockHeaderAccumulatedData { hash: block.hash(), @@ -268,13 +268,13 @@ fn get_esmeralda_genesis_block_raw() -> Block { // Note: Use print_new_genesis_block_esmeralda in core/tests/helpers/block_builders.rs to generate the required // fields below let excess_sig = Signature::new( - PublicKey::from_hex("9e6be7c87533b2e01763de34c309b1c283e5e5e91500a43856a78dcb26b4233f").unwrap(), - PrivateKey::from_hex("6dc0083f9f4b0deb85b34e6d32598e77a729d6f695568322f461d006929dbb04").unwrap(), + PublicKey::from_hex("70e343b603ec1e0422b02e68d1051675e48b1d7aa26a46d79c0fa104762e1161").unwrap(), + PrivateKey::from_hex("8d9ea6626b108ff5479c6378f605a9ffa9e3908ca2e374797d45957a26e9810b").unwrap(), ); let coinbase_meta_sig = CommitmentSignature::new( - 
Commitment::from_hex("841ae8f109a4fcefb2522de04ebff133288156f9ebd3380c42727b41df9e3b15").unwrap(), - PrivateKey::from_hex("7270bc342a378be7b16aa5ff6d5e231fd0acfa4cd46f64a4872e42465f2a7b0c").unwrap(), - PrivateKey::from_hex("51a18d5eb531cee771765a8b4ae5444e17c508549cdba62d942d172a247b0709").unwrap(), + Commitment::from_hex("2432288b75a39e102de18a4556533bd340e0e6fd682d7d405fd5c9d834eb0f65").unwrap(), + PrivateKey::from_hex("3dbc5debc8cd2d983bc09322488bc0cd60531e198f2925b2d0175ff0ef0efa0f").unwrap(), + PrivateKey::from_hex("12cb8b669a8d16d78f2760529b651adc8213c41364c861cc0a2e218a0ce3db0a").unwrap(), ); let coinbase = TransactionOutput::new( TransactionOutputVersion::get_current_version(), @@ -287,12 +287,12 @@ fn get_esmeralda_genesis_block_raw() -> Block { validator_node_public_key: None, validator_node_signature: None, }, - Commitment::from_hex("2afed894ae877b5e9c7450cc0e29de46aeb6b118cd3d6b0a77da8c8156a1e234").unwrap(), - BulletRangeProof::from_hex("0136b44930772f85b17139dd8e83789f84ccc2134cf6b2416d908fb8403efa4d3bc0247ec4afbbb1f7f7498d129226f26199eec988bd3e5ccce2572fd7aee16f2c4a2d710fac0e3bc1d612d700af2265e230ae1c45e3b0e4d3aab43cb87534217b56dcdb6598ed859d0cd6d70fae5acaaa38db5bbae6df8339e5e3dd594388bd53cef6f2acda4ac002d8ac6e01d430bdcf8565b8b8823ff3fb7dc8b359e687dd6feab0edf86c7444c713f34d2513145049b9664aae2e3dbc8a3365baae9d26842852ec9f401112a9742560ec220e61b05f65448d75b714839a6bafc723e9a04f25c69c036775fc55b7ec2bb28ef1de25a32cac51c288ed6d43f3819b1c3356d7699ea5f10217d553e90e6c93641649bd289dedb9e5725579539df07301f15093496c8fca3ec66a43332d1be3a3f94b530e1b8ca7feaa24c4ca73e60397a786ab742ac8933ba6bd504ef3c1a53fa1ff4397aba7c42a526507f930fdf9ff00a2a07b521841574d4e2b5beece946a15fa2545c8e556e704eed0ed10c0e3cbb9f5d6147e6e2d260666c79fa04d89c8901eeb3d3793239a68218a2c105f1bcb4211631eea037102bd5c840de751d84f473bb5cf6c41b3b97ec1c978700ec3c132e09a28d0a92c7e141e9968d0d2852c339a85c052356049f6752cb57c3d2b8c03db24525aa1f7db4a4f4d7d48639e27faa8c8bc695ad6c4f7688d43feedabef4d05c20b349ebc1697
b3b899038b22fa308546efff290902cdacbe9992450cc31b61fc00652cffe4335c080d8398b061add986626068e17d5982ee9f6f28b4f4579d0406").unwrap(), + Commitment::from_hex("46eec110cf173557e149d453734f6707fea9ed27c9a0dd0276bb43eb1f6e3322").unwrap(), + BulletRangeProof::from_hex("01b05c72ea976764b8f9a56bb302990829dacae5f9b2d26e028e97c66a7ac3a14c7809ea5da55fb1e88a16195619d67381f28181b1ad7e0c9661c726e1c56ad7770eb75e314b51a89d716a2dd7737b26a40d8e956911ff45d4c47a1164edae5505aaca58ec6f95762daaa02545dc2ce502e9892d98422849352b6dbcc3322b6b1adae4d33461dd8b5b75b4a9bf52b3e3b00ef7579b16e59f17f43c45ea5e82db063c23ce2d214f93a211cd8f7a3cb220071c68ba3a348b082c3eebb8b6d6339d18decd0372b82e762a9f16e5e7ed23b21c1025ba093b676c55cfa603d888bcc315bc95e8e4bebad9ec51124aab0fe4a8abfc9053db1fb1560c5214b9485826e0127448a2aa84c25f17c5833b15bf434903db7a676bfb11ace2ece255b018428457122da112d481c8a742f916cca069b874e6762248fbb00fa6895f7d4b8a9a8829164baf6ad1d3ad5775c679766ead9da782977fdeb5af7e4b2eb6828e87551179f888ed1c598dd1b81c46b335fb4a827fadf7669e007ff4ed6f260d0bde3eb42282983f58bb0f11a44e064a80503154f4cdb76537192411b2755c2b453b90b3754e9253e64837f15c933b7a479fbb9b1ea8d45364fff67b4aa71ecf67f16c497b5846ff50aaae882e71ac5e6f3ba29189d03da3ed91511074747db413a3e8f90fd9b8fa0751e8ecde29324f4fe8d9023405e33e0d07741056941f9593e8931d0c22553af6447d5c38c762e45afaa89cc11c6843e77430cea44b41fcef0ad11d08d3be1f279ee791fd3b4a8b39d2889a51a4cb2a81885ef6cab119e8de29908a0e").unwrap(), // A default script can never be spent, intentionally script!(Nop), // The Sender offset public key is not checked for coinbase outputs - PublicKey::from_hex("1e036eb452b9098b48edeaa3b91716502fc4786e1ac4363046546f28d26bb337").unwrap(), + PublicKey::from_hex("f649c442e31f6633099549d08870f6c16e1265af797c941ab93a264aba53ff69").unwrap(), // For genesis block: Metadata signature will never be checked coinbase_meta_sig, // Covenant @@ -306,14 +306,14 @@ fn get_esmeralda_genesis_block_raw() -> Block { KernelFeatures::COINBASE_KERNEL, MicroTari(0), 0, - 
Commitment::from_hex("2480268904dbe6cb4b4af290fd51b43383588a575c926af674311691e5a6cc59").unwrap(), + Commitment::from_hex("c88376c6b1cd801821e18f199012f07eae50078177c0406fee3bff7f851e5e66").unwrap(), excess_sig, None, ); let mut body = AggregateBody::new(vec![], vec![coinbase], vec![kernel]); body.sort(); // set genesis timestamp - let genesis = DateTime::parse_from_rfc2822("24 Aug 2022 22:00:00 +0200").unwrap(); + let genesis = DateTime::parse_from_rfc2822("30 Aug 2022 11:45:00 +0100").unwrap(); #[allow(clippy::cast_sign_loss)] let timestamp = genesis.timestamp() as u64; let vn_mmr = ValidatorNodeMmr::new(Vec::new()); @@ -323,11 +323,11 @@ fn get_esmeralda_genesis_block_raw() -> Block { height: 0, prev_hash: FixedHash::zero(), timestamp: timestamp.into(), - output_mr: FixedHash::from_hex("49a0bfcf8dd896d59ab2eb1c5a8c96b49cce5ef9c4bed1a172cc2cd713b2a04d").unwrap(), - witness_mr: FixedHash::from_hex("bd043cfe6304c0cb2b6cdbc6ad52c03ff893f3b53631be41846fbac75c422c7e") + output_mr: FixedHash::from_hex("e6fbb394caca64f9fd2e68cbd0406a279c7c7d0e942281934530711e3030f4e8").unwrap(), + witness_mr: FixedHash::from_hex("68c0fa430ec073acea47116ebd23ba896df86217dd016f3a131d455cadc3c246") .unwrap(), output_mmr_size: 1, - kernel_mr: FixedHash::from_hex("df4d2a7d15da3485b8fcaf0524f1ee5d409883bbe8901c4df69543b056763d42").unwrap(), + kernel_mr: FixedHash::from_hex("157ab76af27428ff7bca4d910908bae698df34a6dcaf43a1d11a97fa3e67539a").unwrap(), kernel_mmr_size: 1, input_mr: FixedHash::zero(), total_kernel_offset: PrivateKey::from_hex( diff --git a/base_layer/core/src/chain_storage/blockchain_database.rs b/base_layer/core/src/chain_storage/blockchain_database.rs index 4dc5195e36..fd3b4cf587 100644 --- a/base_layer/core/src/chain_storage/blockchain_database.rs +++ b/base_layer/core/src/chain_storage/blockchain_database.rs @@ -809,8 +809,8 @@ where B: BlockchainBackend if median_timestamp > header.timestamp { header.timestamp = median_timestamp.increase(1); } - let block = Block { header, 
body }; - let (mut block, roots) = self.calculate_mmr_roots(block)?; + let mut block = Block { header, body }; + let roots = calculate_mmr_roots(&*db, &block)?; block.header.kernel_mr = roots.kernel_mr; block.header.kernel_mmr_size = roots.kernel_mmr_size; block.header.input_mr = roots.input_mr; @@ -890,17 +890,11 @@ where B: BlockchainBackend } let new_height = block.header.height; - // Perform orphan block validation. - if let Err(e) = self.validators.orphan.validate(&block) { - warn!( - target: LOG_TARGET, - "Block #{} ({}) failed validation - {}", - &new_height, - block.hash().to_hex(), - e.to_string() - ); - return Err(e.into()); - } + // This is important, we ask for a write lock to disable all read access to the db. The sync process sets the + // add_block disable flag, but we can have a race condition between the two especially since the orphan + // validation can take some time during big blocks as it does Rangeproof and metadata signature validation. + // Because the sync process first acquires a read_lock then a write_lock, and the RWLock will be prioritised, + // the add_block write lock will be given out before the sync write_lock. trace!( target: LOG_TARGET, "[add_block] waiting for write access to add block block #{}", @@ -915,6 +909,21 @@ where B: BlockchainBackend new_height, timer.elapsed() ); + let block_hash = block.hash(); + if db.contains(&DbKey::BlockHash(block_hash))? { + return Ok(BlockAddResult::BlockExists); + } + // Perform orphan block validation. + if let Err(e) = self.validators.orphan.validate(&block) { + warn!( + target: LOG_TARGET, + "Block #{} ({}) failed validation - {}", + &new_height, + block.hash().to_hex(), + e.to_string() + ); + return Err(e.into()); + } let block_add_result = add_block( &mut *db, &self.config, @@ -1415,10 +1424,6 @@ fn add_block( difficulty_calculator: &DifficultyCalculator, block: Arc, ) -> Result { - let block_hash = block.hash(); - if db.contains(&DbKey::BlockHash(block_hash))? 
{ - return Ok(BlockAddResult::BlockExists); - } handle_possible_reorg( db, config, diff --git a/base_layer/core/src/mempool/mempool.rs b/base_layer/core/src/mempool/mempool.rs index 46f7f43b22..95495634a4 100644 --- a/base_layer/core/src/mempool/mempool.rs +++ b/base_layer/core/src/mempool/mempool.rs @@ -63,14 +63,14 @@ impl Mempool { /// Insert an unconfirmed transaction into the Mempool. pub async fn insert(&self, tx: Arc) -> Result { - self.with_write_access(|storage| storage.insert(tx)).await + self.with_write_access(|storage| Ok(storage.insert(tx))).await } /// Inserts all transactions into the mempool. pub async fn insert_all(&self, transactions: Vec>) -> Result<(), MempoolError> { self.with_write_access(|storage| { for tx in transactions { - storage.insert(tx)?; + storage.insert(tx); } Ok(()) diff --git a/base_layer/core/src/mempool/mempool_storage.rs b/base_layer/core/src/mempool/mempool_storage.rs index 168d2fe3e7..31afcd356b 100644 --- a/base_layer/core/src/mempool/mempool_storage.rs +++ b/base_layer/core/src/mempool/mempool_storage.rs @@ -72,7 +72,7 @@ impl MempoolStorage { /// Insert an unconfirmed transaction into the Mempool. 
The transaction *MUST* have passed through the validation /// pipeline already and will thus always be internally consistent by this stage - pub fn insert(&mut self, tx: Arc) -> Result { + pub fn insert(&mut self, tx: Arc) -> TxStorageResponse { let tx_id = tx .body .kernels() @@ -87,34 +87,41 @@ impl MempoolStorage { "Transaction {} is VALID, inserting in unconfirmed pool", tx_id ); let weight = self.get_transaction_weighting(0); - self.unconfirmed_pool.insert(tx, None, &weight)?; - Ok(TxStorageResponse::UnconfirmedPool) + self.unconfirmed_pool.insert(tx, None, &weight); + TxStorageResponse::UnconfirmedPool }, Err(ValidationError::UnknownInputs(dependent_outputs)) => { if self.unconfirmed_pool.contains_all_outputs(&dependent_outputs) { let weight = self.get_transaction_weighting(0); - self.unconfirmed_pool.insert(tx, Some(dependent_outputs), &weight)?; - Ok(TxStorageResponse::UnconfirmedPool) + self.unconfirmed_pool.insert(tx, Some(dependent_outputs), &weight); + TxStorageResponse::UnconfirmedPool } else { warn!(target: LOG_TARGET, "Validation failed due to unknown inputs"); - Ok(TxStorageResponse::NotStoredOrphan) + TxStorageResponse::NotStoredOrphan } }, Err(ValidationError::ContainsSTxO) => { - warn!(target: LOG_TARGET, "Validation failed due to already spent output"); - Ok(TxStorageResponse::NotStoredAlreadySpent) + warn!(target: LOG_TARGET, "Validation failed due to already spent input"); + TxStorageResponse::NotStoredAlreadySpent }, Err(ValidationError::MaturityError) => { warn!(target: LOG_TARGET, "Validation failed due to maturity error"); - Ok(TxStorageResponse::NotStoredTimeLocked) + TxStorageResponse::NotStoredTimeLocked }, Err(ValidationError::ConsensusError(msg)) => { warn!(target: LOG_TARGET, "Validation failed due to consensus rule: {}", msg); - Ok(TxStorageResponse::NotStoredConsensus) + TxStorageResponse::NotStoredConsensus + }, + Err(ValidationError::DuplicateKernelError(msg)) => { + debug!( + target: LOG_TARGET, + "Validation failed due to 
already mined kernel: {}", msg + ); + TxStorageResponse::NotStoredConsensus }, Err(e) => { warn!(target: LOG_TARGET, "Validation failed due to error: {}", e); - Ok(TxStorageResponse::NotStored) + TxStorageResponse::NotStored }, } } @@ -124,11 +131,10 @@ impl MempoolStorage { } // Insert a set of new transactions into the UTxPool. - fn insert_txs(&mut self, txs: Vec>) -> Result<(), MempoolError> { + fn insert_txs(&mut self, txs: Vec>) { for tx in txs { - self.insert(tx)?; + self.insert(tx); } - Ok(()) } /// Update the Mempool based on the received published block. @@ -161,10 +167,14 @@ impl MempoolStorage { failed_block.header.height, failed_block.hash().to_hex() ); - self.unconfirmed_pool + let txs = self + .unconfirmed_pool .remove_published_and_discard_deprecated_transactions(failed_block); + + // Reinsert them to validate if they are still valid + self.insert_txs(txs); self.unconfirmed_pool.compact(); - debug!(target: LOG_TARGET, "{}", self.stats()); + Ok(()) } @@ -183,12 +193,12 @@ impl MempoolStorage { // validation. This is important as invalid transactions that have not been mined yet may remain in the mempool // after a reorg. let removed_txs = self.unconfirmed_pool.drain_all_mempool_transactions(); - self.insert_txs(removed_txs)?; + self.insert_txs(removed_txs); // Remove re-orged transactions from reorg pool and re-submit them to the unconfirmed mempool let removed_txs = self .reorg_pool .remove_reorged_txs_and_discard_double_spends(removed_blocks, new_blocks); - self.insert_txs(removed_txs)?; + self.insert_txs(removed_txs); // Update the Mempool based on the received set of new blocks. 
for block in new_blocks { self.process_published_block(block)?; @@ -228,7 +238,7 @@ impl MempoolStorage { /// Will only return transactions that will fit into the given weight pub fn retrieve_and_revalidate(&mut self, total_weight: u64) -> Result>, MempoolError> { let results = self.unconfirmed_pool.fetch_highest_priority_txs(total_weight)?; - self.insert_txs(results.transactions_to_insert)?; + self.insert_txs(results.transactions_to_insert); Ok(results.retrieved_transactions) } diff --git a/base_layer/core/src/mempool/service/initializer.rs b/base_layer/core/src/mempool/service/initializer.rs index 83431f8f19..7ba3a67d9e 100644 --- a/base_layer/core/src/mempool/service/initializer.rs +++ b/base_layer/core/src/mempool/service/initializer.rs @@ -40,7 +40,7 @@ use tari_service_framework::{ use tokio::sync::mpsc; use crate::{ - base_node::{comms_interface::LocalNodeCommsInterface, StateMachineHandle}, + base_node::comms_interface::LocalNodeCommsInterface, mempool::{ mempool::Mempool, service::{ @@ -135,7 +135,6 @@ impl ServiceInitializer for MempoolServiceInitializer { context.spawn_until_shutdown(move |handles| { let outbound_message_service = handles.expect_handle::().outbound_requester(); - let state_machine = handles.expect_handle::(); let base_node = handles.expect_handle::(); let streams = MempoolStreams { @@ -146,7 +145,7 @@ impl ServiceInitializer for MempoolServiceInitializer { request_receiver, }; debug!(target: LOG_TARGET, "Mempool service started"); - MempoolService::new(outbound_message_service, inbound_handlers, state_machine).start(streams) + MempoolService::new(outbound_message_service, inbound_handlers).start(streams) }); Ok(()) diff --git a/base_layer/core/src/mempool/service/service.rs b/base_layer/core/src/mempool/service/service.rs index 2eddbd3975..f747484615 100644 --- a/base_layer/core/src/mempool/service/service.rs +++ b/base_layer/core/src/mempool/service/service.rs @@ -36,10 +36,7 @@ use tari_utilities::hex::Hex; use tokio::{sync::mpsc, 
task}; use crate::{ - base_node::{ - comms_interface::{BlockEvent, BlockEventReceiver}, - StateMachineHandle, - }, + base_node::comms_interface::{BlockEvent, BlockEventReceiver}, mempool::service::{ error::MempoolServiceError, inbound_handlers::MempoolInboundHandlers, @@ -66,19 +63,13 @@ pub struct MempoolStreams { pub struct MempoolService { outbound_message_service: OutboundMessageRequester, inbound_handlers: MempoolInboundHandlers, - state_machine: StateMachineHandle, } impl MempoolService { - pub fn new( - outbound_message_service: OutboundMessageRequester, - inbound_handlers: MempoolInboundHandlers, - state_machine: StateMachineHandle, - ) -> Self { + pub fn new(outbound_message_service: OutboundMessageRequester, inbound_handlers: MempoolInboundHandlers) -> Self { Self { outbound_message_service, inbound_handlers, - state_machine, } } @@ -108,12 +99,20 @@ impl MempoolService { // Outbound tx messages from the OutboundMempoolServiceInterface Some((txn, excluded_peers)) = outbound_tx_stream.recv() => { - self.spawn_handle_outbound_tx(txn, excluded_peers); + let _res = handle_outbound_tx(&mut self.outbound_message_service, txn, excluded_peers).await.map_err(|e| + error!(target: LOG_TARGET, "Error sending outbound tx message: {}", e) + ); }, // Incoming transaction messages from the Comms layer Some(transaction_msg) = inbound_transaction_stream.next() => { - self.spawn_handle_incoming_tx(transaction_msg); + let result = handle_incoming_tx(&mut self.inbound_handlers, transaction_msg).await; + if let Err(e) = result { + error!( + target: LOG_TARGET, + "Failed to handle incoming transaction message: {:?}", e + ); + } } // Incoming local request messages from the LocalMempoolServiceInterface and other local services @@ -144,41 +143,6 @@ impl MempoolService { self.inbound_handlers.handle_request(request).await } - fn spawn_handle_outbound_tx(&self, tx: Arc, excluded_peers: Vec) { - let outbound_message_service = self.outbound_message_service.clone(); - 
task::spawn(async move { - let result = handle_outbound_tx(outbound_message_service, tx, excluded_peers).await; - if let Err(e) = result { - error!(target: LOG_TARGET, "Failed to handle outbound tx message {:?}", e); - } - }); - } - - fn spawn_handle_incoming_tx(&self, tx_msg: DomainMessage) { - // Determine if we are bootstrapped - let status_watch = self.state_machine.get_status_info_watch(); - - if !(*status_watch.borrow()).bootstrapped { - debug!( - target: LOG_TARGET, - "Transaction with Message {} from peer `{}` not processed while busy with initial sync.", - tx_msg.dht_header.message_tag, - tx_msg.source_peer.node_id.short_str(), - ); - return; - } - let inbound_handlers = self.inbound_handlers.clone(); - task::spawn(async move { - let result = handle_incoming_tx(inbound_handlers, tx_msg).await; - if let Err(e) = result { - error!( - target: LOG_TARGET, - "Failed to handle incoming transaction message: {:?}", e - ); - } - }); - } - fn spawn_handle_local_request( &self, request_context: RequestContext>, @@ -209,7 +173,7 @@ impl MempoolService { } async fn handle_incoming_tx( - mut inbound_handlers: MempoolInboundHandlers, + inbound_handlers: &mut MempoolInboundHandlers, domain_transaction_msg: DomainMessage, ) -> Result<(), MempoolServiceError> { let DomainMessage::<_> { source_peer, inner, .. 
} = domain_transaction_msg; @@ -236,7 +200,7 @@ async fn handle_incoming_tx( } async fn handle_outbound_tx( - mut outbound_message_service: OutboundMessageRequester, + outbound_message_service: &mut OutboundMessageRequester, tx: Arc, exclude_peers: Vec, ) -> Result<(), MempoolServiceError> { @@ -247,7 +211,13 @@ async fn handle_outbound_tx( exclude_peers, OutboundDomainMessage::new( &TariMessageType::NewTransaction, - proto::types::Transaction::try_from(tx).map_err(MempoolServiceError::ConversionError)?, + proto::types::Transaction::try_from(tx.clone()).map_err(MempoolServiceError::ConversionError)?, + ), + format!( + "Outbound mempool tx: {}", + tx.first_kernel_excess_sig() + .map(|s| s.get_signature().to_hex()) + .unwrap_or_else(|| "No kernels!".to_string()) ), ) .await; diff --git a/base_layer/core/src/mempool/unconfirmed_pool/unconfirmed_pool.rs b/base_layer/core/src/mempool/unconfirmed_pool/unconfirmed_pool.rs index 3656ac51ad..bf59316dc8 100644 --- a/base_layer/core/src/mempool/unconfirmed_pool/unconfirmed_pool.rs +++ b/base_layer/core/src/mempool/unconfirmed_pool/unconfirmed_pool.rs @@ -110,21 +110,21 @@ impl UnconfirmedPool { tx: Arc, dependent_outputs: Option>, transaction_weighting: &TransactionWeight, - ) -> Result<(), UnconfirmedPoolError> { + ) { if tx .body .kernels() .iter() .all(|k| self.txs_by_signature.contains_key(k.excess_sig.get_signature())) { - return Ok(()); + return; } let new_key = self.get_next_key(); let prioritized_tx = PrioritizedTransaction::new(new_key, transaction_weighting, tx, dependent_outputs); if self.tx_by_key.len() >= self.config.storage_capacity { if prioritized_tx.priority < *self.lowest_priority() { - return Ok(()); + return; } self.remove_lowest_priority_tx(); } @@ -143,8 +143,6 @@ impl UnconfirmedPool { "Inserted transaction {} into unconfirmed pool:", prioritized_tx ); self.tx_by_key.insert(new_key, prioritized_tx); - - Ok(()) } /// TThis will search the unconfirmed pool for the set of outputs and return true if all of 
them are found @@ -158,11 +156,10 @@ impl UnconfirmedPool { &mut self, txs: I, transaction_weighting: &TransactionWeight, - ) -> Result<(), UnconfirmedPoolError> { + ) { for tx in txs { - self.insert(tx, None, transaction_weighting)?; + self.insert(tx, None, transaction_weighting); } - Ok(()) } /// Check if a transaction is available in the UnconfirmedPool @@ -668,12 +665,10 @@ mod test { }); let tx_weight = TransactionWeight::latest(); - unconfirmed_pool - .insert_many( - [tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone(), tx5.clone()], - &tx_weight, - ) - .unwrap(); + unconfirmed_pool.insert_many( + [tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone(), tx5.clone()], + &tx_weight, + ); // Check that lowest priority tx was removed to make room for new incoming transactions assert!(unconfirmed_pool.has_tx_with_excess_sig(&tx1.body.kernels()[0].excess_sig)); assert!(!unconfirmed_pool.has_tx_with_excess_sig(&tx2.body.kernels()[0].excess_sig)); @@ -747,9 +742,7 @@ mod test { }); let tx_weight = TransactionWeight::latest(); - unconfirmed_pool - .insert_many(vec![tx1.clone(), tx2.clone(), tx3.clone()], &tx_weight) - .unwrap(); + unconfirmed_pool.insert_many(vec![tx1.clone(), tx2.clone(), tx3.clone()], &tx_weight); assert_eq!(unconfirmed_pool.len(), 3); let desired_weight = tx1.calculate_weight(&tx_weight) + @@ -779,12 +772,10 @@ mod test { storage_capacity: 10, weight_tx_skip_count: 3, }); - unconfirmed_pool - .insert_many( - vec![tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone(), tx5.clone()], - &tx_weight, - ) - .unwrap(); + unconfirmed_pool.insert_many( + vec![tx1.clone(), tx2.clone(), tx3.clone(), tx4.clone(), tx5.clone()], + &tx_weight, + ); // utx6 should not be added to unconfirmed_pool as it is an unknown transactions that was included in the block // by another node @@ -829,19 +820,17 @@ mod test { storage_capacity: 10, weight_tx_skip_count: 3, }); - unconfirmed_pool - .insert_many( - vec![ - tx1.clone(), - tx2.clone(), - tx3.clone(), - tx4.clone(), - 
tx5.clone(), - tx6.clone(), - ], - &tx_weight, - ) - .unwrap(); + unconfirmed_pool.insert_many( + vec![ + tx1.clone(), + tx2.clone(), + tx3.clone(), + tx4.clone(), + tx5.clone(), + tx6.clone(), + ], + &tx_weight, + ); // The publishing of tx1 and tx3 will be double-spends and orphan tx5 and tx6 let published_block = create_orphan_block(0, vec![(*tx1).clone(), (*tx2).clone(), (*tx3).clone()], &consensus); @@ -885,7 +874,7 @@ mod test { Arc::new(tx3.clone()), Arc::new(tx4.clone()), ]; - unconfirmed_pool.insert_many(txns.clone(), &tx_weight).unwrap(); + unconfirmed_pool.insert_many(txns.clone(), &tx_weight); for txn in txns { for output in txn.as_ref().body.outputs() { @@ -967,9 +956,7 @@ mod test { let tx2 = Arc::new(tx2); let tx3 = Arc::new(tx3); let tx4 = Arc::new(tx4); - unconfirmed_pool - .insert_many(vec![tx1, tx2, tx3, tx4], &tx_weight) - .unwrap(); + unconfirmed_pool.insert_many(vec![tx1, tx2, tx3, tx4], &tx_weight); let stats = unconfirmed_pool.get_fee_per_gram_stats(1, 19500).unwrap(); assert_eq!(stats[0].order, 0); @@ -1007,7 +994,7 @@ mod test { let tx_weight = TransactionWeight::latest(); let mut unconfirmed_pool = UnconfirmedPool::new(UnconfirmedPoolConfig::default()); - unconfirmed_pool.insert_many(transactions, &tx_weight).unwrap(); + unconfirmed_pool.insert_many(transactions, &tx_weight); let stats = unconfirmed_pool.get_fee_per_gram_stats(2, 2000).unwrap(); assert_eq!(stats, expected_stats); diff --git a/base_layer/core/src/transactions/transaction_components/mod.rs b/base_layer/core/src/transactions/transaction_components/mod.rs index 6261eea1bf..23efa56f09 100644 --- a/base_layer/core/src/transactions/transaction_components/mod.rs +++ b/base_layer/core/src/transactions/transaction_components/mod.rs @@ -32,7 +32,7 @@ pub use output_features::OutputFeatures; pub use output_features_version::OutputFeaturesVersion; pub use output_type::OutputType; pub use side_chain::*; -use tari_common_types::types::{Commitment, FixedHash}; +use 
tari_common_types::types::{Commitment, FixedHash, PublicKey}; use tari_script::TariScript; pub use transaction::Transaction; pub use transaction_builder::TransactionBuilder; @@ -92,6 +92,7 @@ pub(super) fn hash_output( script: &TariScript, covenant: &Covenant, encrypted_value: &EncryptedValue, + sender_offset_public_key: &PublicKey, minimum_value_promise: MicroTari, ) -> FixedHash { let common_hash = DomainSeparatedConsensusHasher::::new("transaction_output") @@ -101,6 +102,7 @@ pub(super) fn hash_output( .chain(script) .chain(covenant) .chain(encrypted_value) + .chain(sender_offset_public_key) .chain(&minimum_value_promise); match version { diff --git a/base_layer/core/src/transactions/transaction_components/transaction_input.rs b/base_layer/core/src/transactions/transaction_components/transaction_input.rs index ccbf137ae0..a83950b922 100644 --- a/base_layer/core/src/transactions/transaction_components/transaction_input.rs +++ b/base_layer/core/src/transactions/transaction_components/transaction_input.rs @@ -344,6 +344,7 @@ impl TransactionInput { features, covenant, encrypted_value, + sender_offset_public_key, minimum_value_promise, .. 
} => transaction_components::hash_output( @@ -353,6 +354,7 @@ impl TransactionInput { script, covenant, encrypted_value, + sender_offset_public_key, *minimum_value_promise, ), } @@ -364,34 +366,13 @@ impl TransactionInput { /// Implement the canonical hashing function for TransactionInput for use in ordering pub fn canonical_hash(&self) -> Result { - match self.spent_output { - SpentOutput::OutputHash(_) => Err(TransactionError::MissingTransactionInputData), - SpentOutput::OutputData { - ref version, - ref features, - ref commitment, - ref script, - ref sender_offset_public_key, - ref covenant, - ref encrypted_value, - ref minimum_value_promise, - } => { - // TODO: Change this hash to what is in RFC-0121/Consensus Encoding #testnet-reset - let writer = DomainSeparatedConsensusHasher::::new("transaction_input") - .chain(version) - .chain(features) - .chain(commitment) - .chain(script) - .chain(sender_offset_public_key) - .chain(&self.script_signature) - .chain(&self.input_data) - .chain(covenant) - .chain(encrypted_value) - .chain(minimum_value_promise); - - Ok(writer.finalize().into()) - }, - } + let writer = DomainSeparatedConsensusHasher::::new("transaction_input") + .chain(&self.version) + .chain(&self.script_signature) + .chain(&self.input_data) + .chain(&self.output_hash()); + + Ok(writer.finalize().into()) } pub fn set_maturity(&mut self, maturity: u64) -> Result<(), TransactionError> { diff --git a/base_layer/core/src/transactions/transaction_components/transaction_output.rs b/base_layer/core/src/transactions/transaction_components/transaction_output.rs index bff71791b5..3f9f0ca721 100644 --- a/base_layer/core/src/transactions/transaction_components/transaction_output.rs +++ b/base_layer/core/src/transactions/transaction_components/transaction_output.rs @@ -169,6 +169,7 @@ impl TransactionOutput { &self.script, &self.covenant, &self.encrypted_value, + &self.sender_offset_public_key, self.minimum_value_promise, ) } diff --git 
a/base_layer/core/src/transactions/transaction_components/unblinded_output.rs b/base_layer/core/src/transactions/transaction_components/unblinded_output.rs index ccbfa52f5c..132d143cb2 100644 --- a/base_layer/core/src/transactions/transaction_components/unblinded_output.rs +++ b/base_layer/core/src/transactions/transaction_components/unblinded_output.rs @@ -346,6 +346,7 @@ impl UnblindedOutput { &self.script, &self.covenant, &self.encrypted_value, + &self.sender_offset_public_key, self.minimum_value_promise, ) } diff --git a/base_layer/core/src/validation/error.rs b/base_layer/core/src/validation/error.rs index 7fa41e1a9d..52a1b09bc1 100644 --- a/base_layer/core/src/validation/error.rs +++ b/base_layer/core/src/validation/error.rs @@ -113,6 +113,8 @@ pub enum ValidationError { }, #[error("Consensus Error: {0}")] ConsensusError(String), + #[error("Duplicate kernel Error: {0}")] + DuplicateKernelError(String), #[error("Covenant failed to validate: {0}")] CovenantError(#[from] CovenantError), #[error("Invalid or unsupported blockchain version {version}")] diff --git a/base_layer/core/src/validation/helpers.rs b/base_layer/core/src/validation/helpers.rs index c79f47978f..f771647284 100644 --- a/base_layer/core/src/validation/helpers.rs +++ b/base_layer/core/src/validation/helpers.rs @@ -380,46 +380,8 @@ pub fn check_input_is_utxo(db: &B, input: &TransactionInpu // We know that the commitment exists in the UTXO set. Check that the output hash matches (i.e. all fields // like output features match) if utxo_hash == output_hash { - // Check that the input found by commitment, matches the input given here - match db - .fetch_output(&utxo_hash)? 
- .and_then(|output| output.output.into_unpruned_output()) - { - Some(output) => { - let mut compact = input.to_compact(); - compact.add_output_data( - output.version, - output.features, - output.commitment, - output.script, - output.sender_offset_public_key, - output.covenant, - output.encrypted_value, - output.minimum_value_promise, - ); - let input_hash = input.canonical_hash()?; - if compact.canonical_hash()? != input_hash { - warn!( - target: LOG_TARGET, - "Input '{}' spends commitment '{}' found in the UTXO set but does not contain the \ - matching metadata fields.", - input_hash.to_hex(), - input.commitment()?.to_hex(), - ); - return Err(ValidationError::UnknownInput); - } - }, - None => { - error!( - target: LOG_TARGET, - "🚨 Output '{}' was in unspent but was pruned - this indicates a blockchain database \ - inconsistency!", - output_hash.to_hex() - ); - return Err(ValidationError::UnknownInput); - }, - } - + // Because the retrieved hash matches the new input.output_hash() we know all the fields match and are all + // still the same return Ok(()); } @@ -434,7 +396,8 @@ pub fn check_input_is_utxo(db: &B, input: &TransactionInpu input, output ); - return Err(ValidationError::BlockError(BlockValidationError::InvalidInput)); + + return Err(ValidationError::UnknownInput); } // Wallet needs to know if a transaction has already been mined and uses this error variant to do so. diff --git a/base_layer/core/src/validation/transaction_validators.rs b/base_layer/core/src/validation/transaction_validators.rs index ae2e819ded..25e638b8ab 100644 --- a/base_layer/core/src/validation/transaction_validators.rs +++ b/base_layer/core/src/validation/transaction_validators.rs @@ -163,16 +163,16 @@ impl TxConsensusValidator { for kernel in tx.body.kernels() { if let Some((db_kernel, header_hash)) = self.db.fetch_kernel_by_excess_sig(kernel.excess_sig.to_owned())? 
{ let msg = format!( - "Block contains kernel excess: {} which matches already existing excess signature in chain \ - database block hash: {}. Existing kernel excess: {}, excess sig nonce: {}, excess signature: {}", + "Aggregate body contains kernel excess: {} which matches already existing excess signature in \ + chain database block hash: {}. Existing kernel excess: {}, excess sig nonce: {}, excess \ + signature: {}", kernel.excess.to_hex(), header_hash.to_hex(), db_kernel.excess.to_hex(), db_kernel.excess_sig.get_public_nonce().to_hex(), db_kernel.excess_sig.get_signature().to_hex(), ); - warn!(target: LOG_TARGET, "{}", msg); - return Err(ValidationError::ConsensusError(msg)); + return Err(ValidationError::DuplicateKernelError(msg)); }; } Ok(()) diff --git a/base_layer/core/tests/block_validation.rs b/base_layer/core/tests/block_validation.rs index 91a476386f..1c1e51eedc 100644 --- a/base_layer/core/tests/block_validation.rs +++ b/base_layer/core/tests/block_validation.rs @@ -79,35 +79,6 @@ use crate::helpers::{ mod helpers; -#[test] -fn test_genesis_block() { - let factories = CryptoFactories::default(); - let network = Network::Esmeralda; - let rules = ConsensusManager::builder(network).build(); - let backend = create_test_db(); - let validators = Validators::new( - BodyOnlyValidator::new(rules.clone()), - HeaderValidator::new(rules.clone()), - OrphanBlockValidator::new(rules.clone(), false, factories), - ); - let db = BlockchainDatabase::new( - backend, - rules.clone(), - validators, - BlockchainDatabaseConfig::default(), - DifficultyCalculator::new(rules.clone(), Default::default()), - ) - .unwrap(); - let block = rules.get_genesis_block(); - match db.add_block(block.to_arc_block()).unwrap_err() { - ChainStorageError::ValidationError { source } => match source { - ValidationError::ValidatingGenesis => (), - _ => panic!("Failed because incorrect validation error was received"), - }, - _ => panic!("Failed because incorrect ChainStorageError was received"), - } 
-} - #[test] fn test_monero_blocks() { // Create temporary test folder diff --git a/base_layer/core/tests/mempool.rs b/base_layer/core/tests/mempool.rs index 3caa202711..ffeb0cc4a1 100644 --- a/base_layer/core/tests/mempool.rs +++ b/base_layer/core/tests/mempool.rs @@ -846,6 +846,7 @@ async fn receive_and_propagate_transaction() { &TariMessageType::NewTransaction, proto::types::Transaction::try_from(tx).unwrap(), ), + "mempool tests".to_string(), ) .await .unwrap(); @@ -857,6 +858,7 @@ async fn receive_and_propagate_transaction() { &TariMessageType::NewTransaction, proto::types::Transaction::try_from(orphan).unwrap(), ), + "mempool tests".to_string(), ) .await .unwrap(); @@ -1190,6 +1192,7 @@ async fn consensus_validation_unique_excess_sig() { // trying to submit a transaction with an existing excess signature already in the chain is an error let tx = Arc::new(tx1); let response = mempool.insert(tx).await.unwrap(); + dbg!(&response); assert!(matches!(response, TxStorageResponse::NotStoredConsensus)); } diff --git a/base_layer/key_manager/Cargo.toml b/base_layer/key_manager/Cargo.toml index 7052a430b9..74cc15abeb 100644 --- a/base_layer/key_manager/Cargo.toml +++ b/base_layer/key_manager/Cargo.toml @@ -4,7 +4,7 @@ authors = ["The Tari Development Community"] description = "Tari cryptocurrency wallet key management" repository = "https://github.com/tari-project/tari" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2021" [lib] @@ -12,8 +12,8 @@ crate-type = ["lib", "cdylib"] # NB: All dependencies must support or be gated for the WASM target. 
[dependencies] -tari_common_types = { version = "^0.37", path = "../../base_layer/common_types" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_common_types = { version = "^0.38", path = "../../base_layer/common_types", optional = true } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } arrayvec = "0.7.1" @@ -21,7 +21,7 @@ argon2 = { version = "0.2", features = ["std"] } blake2 = "0.9.1" chacha20 = "0.7.1" clear_on_drop = "=0.2.4" -console_error_panic_hook = "0.1.7" +console_error_panic_hook = { version = "0.1.7", optional = true } crc32fast = "1.2.1" derivative = "2.2.0" digest = "0.9.0" @@ -35,12 +35,12 @@ thiserror = "1.0.26" strum_macros = "0.22" strum = { version = "0.22", features = ["derive"] } wasm-bindgen = { version = "0.2", features = ["serde-serialize", "nightly"], optional = true } -wasm-bindgen-test = "0.3.28" [dev-dependencies] sha2 = "0.9.8" +wasm-bindgen-test = "0.3.28" [features] avx2 = ["tari_crypto/simd_backend"] js = ["getrandom/js", "js-sys"] -wasm = ["wasm-bindgen", "js"] +wasm = ["wasm-bindgen", "js", "tari_common_types", "console_error_panic_hook"] diff --git a/base_layer/key_manager/src/wasm.rs b/base_layer/key_manager/src/wasm.rs index c9b2a53c03..834896319a 100644 --- a/base_layer/key_manager/src/wasm.rs +++ b/base_layer/key_manager/src/wasm.rs @@ -161,6 +161,7 @@ where T: for<'a> Deserialize<'a> { } } +#[cfg(test)] mod test { use tari_utilities::hex::Hex; use wasm_bindgen_test::*; diff --git a/base_layer/mmr/Cargo.toml b/base_layer/mmr/Cargo.toml index c3a2fb469a..eb071f6809 100644 --- a/base_layer/mmr/Cargo.toml +++ b/base_layer/mmr/Cargo.toml @@ -4,7 +4,7 @@ authors = ["The Tari Development Community"] description = "A Merkle Mountain Range implementation" repository = "https://github.com/tari-project/tari" license = "BSD-3-Clause" -version = 
"0.37.0" +version = "0.38.3" edition = "2018" [features] @@ -14,7 +14,7 @@ benches = ["criterion"] [dependencies] tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_common = {path = "../../common"} thiserror = "1.0.26" digest = "0.9.0" @@ -26,7 +26,7 @@ criterion = { version="0.2", optional = true } [dev-dependencies] rand="0.8.0" blake2 = "0.9.0" -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } serde_json = "1.0" bincode = "1.1" [lib] diff --git a/base_layer/p2p/Cargo.toml b/base_layer/p2p/Cargo.toml index 4300128153..2363a265f6 100644 --- a/base_layer/p2p/Cargo.toml +++ b/base_layer/p2p/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tari_p2p" -version = "0.37.0" +version = "0.38.3" authors = ["The Tari Development community"] description = "Tari base layer-specific peer-to-peer communication features" repository = "https://github.com/tari-project/tari" @@ -10,13 +10,13 @@ license = "BSD-3-Clause" edition = "2018" [dependencies] -tari_comms = { version = "^0.37", path = "../../comms/core" } -tari_comms_dht = { version = "^0.37", path = "../../comms/dht" } -tari_common = { version = "^0.37", path = "../../common" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } -tari_service_framework = { version = "^0.37", path = "../service_framework" } -tari_shutdown = { version = "^0.37", path = "../../infrastructure/shutdown" } -tari_storage = { version = "^0.37", path = "../../infrastructure/storage" } +tari_comms = { version = "^0.38", path = "../../comms/core" } +tari_comms_dht = { version = "^0.38", path = "../../comms/dht" } +tari_common = { version = "^0.38", path = 
"../../common" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } +tari_service_framework = { version = "^0.38", path = "../service_framework" } +tari_shutdown = { version = "^0.38", path = "../../infrastructure/shutdown" } +tari_storage = { version = "^0.38", path = "../../infrastructure/storage" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } anyhow = "1.0.53" @@ -35,15 +35,15 @@ semver = "1.0.1" serde = "1.0.90" serde_derive = "1.0.90" thiserror = "1.0.26" -tokio = { version = "1.11", features = ["macros"] } -tokio-stream = { version = "0.1.7", default-features = false, features = ["time"] } +tokio = { version = "1.20", features = ["macros"] } +tokio-stream = { version = "0.1.9", default-features = false, features = ["time"] } tower = "0.4.11" tower-service = { version = "0.3.1" } trust-dns-client = { version = "=0.21.0-alpha.5", features = ["dns-over-rustls"] } webpki = "0.21" [dev-dependencies] -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } config = "0.13.0" clap = "2.33.0" @@ -51,7 +51,7 @@ lazy_static = "1.3.0" tempfile = "3.1.0" [build-dependencies] -tari_common = { version = "^0.37", path = "../../common", features = ["build"] } +tari_common = { version = "^0.38", path = "../../common", features = ["build"] } [features] test-mocks = [] diff --git a/base_layer/p2p/src/config.rs b/base_layer/p2p/src/config.rs index 9d880bafa1..41cd121d99 100644 --- a/base_layer/p2p/src/config.rs +++ b/base_layer/p2p/src/config.rs @@ -95,8 +95,6 @@ pub struct P2pConfig { /// The maximum number of concurrent outbound tasks allowed before back-pressure is applied to outbound messaging /// queue pub max_concurrent_outbound_tasks: usize, - /// The size of the buffer (channel) which holds pending outbound message requests - pub outbound_buffer_size: usize, /// 
Configuration for DHT pub dht: DhtConfig, /// Set to true to allow peers to provide test addresses (loopback, memory etc.). If set to false, memory @@ -131,9 +129,8 @@ impl Default for P2pConfig { transport: Default::default(), datastore_path: PathBuf::from("peer_db"), peer_database_name: "peers".to_string(), - max_concurrent_inbound_tasks: 50, - max_concurrent_outbound_tasks: 100, - outbound_buffer_size: 100, + max_concurrent_inbound_tasks: 4, + max_concurrent_outbound_tasks: 4, dht: DhtConfig { database_url: DbConnectionUrl::file("dht.sqlite"), ..Default::default() @@ -141,7 +138,7 @@ impl Default for P2pConfig { allow_test_addresses: false, listener_liveness_max_sessions: 0, listener_liveness_allowlist_cidrs: StringList::default(), - user_agent: "".to_string(), + user_agent: String::new(), auxiliary_tcp_listener_address: None, rpc_max_simultaneous_sessions: 100, rpc_max_sessions_per_peer: 10, diff --git a/base_layer/p2p/src/initialization.rs b/base_layer/p2p/src/initialization.rs index 9edcbddc56..8f6d0c2147 100644 --- a/base_layer/p2p/src/initialization.rs +++ b/base_layer/p2p/src/initialization.rs @@ -186,7 +186,6 @@ pub async fn initialize_local_test_comms>( let dht_outbound_layer = dht.outbound_middleware_layer(); let (event_sender, _) = broadcast::channel(100); let pipeline = pipeline::Builder::new() - .outbound_buffer_size(10) .with_outbound_pipeline(outbound_rx, |sink| { ServiceBuilder::new().layer(dht_outbound_layer).service(sink) }) @@ -276,7 +275,6 @@ async fn initialize_hidden_service( mut config: TorTransportConfig, ) -> Result { let mut builder = tor::HiddenServiceBuilder::new() - .with_hs_flags(tor::HsFlags::DETACH) .with_port_mapping(config.to_port_mapping()?) .with_socks_authentication(config.to_socks_auth()) .with_control_server_auth(config.to_control_auth()?) 
@@ -333,7 +331,7 @@ async fn configure_comms_and_dht( let node_identity = comms.node_identity(); let shutdown_signal = comms.shutdown_signal(); // Create outbound channel - let (outbound_tx, outbound_rx) = mpsc::channel(config.outbound_buffer_size); + let (outbound_tx, outbound_rx) = mpsc::channel(config.dht.outbound_buffer_size); let mut dht = Dht::builder(); dht.with_config(config.dht.clone()).with_outbound_sender(outbound_tx); @@ -350,7 +348,6 @@ async fn configure_comms_and_dht( // Hook up DHT messaging middlewares let messaging_pipeline = pipeline::Builder::new() - .outbound_buffer_size(config.outbound_buffer_size) .with_outbound_pipeline(outbound_rx, |sink| { ServiceBuilder::new().layer(dht_outbound_layer).service(sink) }) diff --git a/base_layer/p2p/src/services/liveness/service.rs b/base_layer/p2p/src/services/liveness/service.rs index 09f556dfe5..def15f5116 100644 --- a/base_layer/p2p/src/services/liveness/service.rs +++ b/base_layer/p2p/src/services/liveness/service.rs @@ -212,7 +212,11 @@ where debug!(target: LOG_TARGET, "Sending ping to peer '{}'", node_id.short_str(),); self.outbound_messaging - .send_direct_node_id(node_id, OutboundDomainMessage::new(&TariMessageType::PingPong, msg)) + .send_direct_node_id( + node_id, + OutboundDomainMessage::new(&TariMessageType::PingPong, msg), + "Send ping".to_string(), + ) .await .map_err(Into::::into)?; @@ -222,7 +226,11 @@ where async fn send_pong(&mut self, nonce: u64, dest: CommsPublicKey) -> Result<(), LivenessError> { let msg = PingPongMessage::pong_with_metadata(nonce, self.state.metadata().clone()); self.outbound_messaging - .send_direct(dest, OutboundDomainMessage::new(&TariMessageType::PingPong, msg)) + .send_direct( + dest, + OutboundDomainMessage::new(&TariMessageType::PingPong, msg), + "Sending pong".to_string(), + ) .await .map(|_| ()) .map_err(Into::into) @@ -302,7 +310,11 @@ where let msg = PingPongMessage::ping_with_metadata(self.state.metadata().clone()); self.state.add_inflight_ping(msg.nonce, 
peer.clone()); self.outbound_messaging - .send_direct_node_id(peer, OutboundDomainMessage::new(&TariMessageType::PingPong, msg)) + .send_direct_node_id( + peer, + OutboundDomainMessage::new(&TariMessageType::PingPong, msg), + "Start ping round".to_string(), + ) .await?; } diff --git a/base_layer/p2p/src/services/liveness/state.rs b/base_layer/p2p/src/services/liveness/state.rs index 6cadbdcd10..0c2282811b 100644 --- a/base_layer/p2p/src/services/liveness/state.rs +++ b/base_layer/p2p/src/services/liveness/state.rs @@ -173,9 +173,11 @@ impl LivenessState { let (node_id, _) = self.inflight_pings.get(&nonce)?; if node_id == sent_by { - self.inflight_pings - .remove(&nonce) - .map(|(node_id, sent_time)| self.add_latency_sample(node_id, sent_time.elapsed()).calc_average()) + self.inflight_pings.remove(&nonce).map(|(node_id, sent_time)| { + let latency = sent_time.elapsed(); + self.add_latency_sample(node_id, latency); + latency + }) } else { warn!( target: LOG_TARGET, diff --git a/base_layer/service_framework/Cargo.toml b/base_layer/service_framework/Cargo.toml index 55b299680c..85c245b144 100644 --- a/base_layer/service_framework/Cargo.toml +++ b/base_layer/service_framework/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tari_service_framework" -version = "0.37.0" +version = "0.38.3" authors = ["The Tari Development Community"] description = "The Tari communication stack service framework" repository = "https://github.com/tari-project/tari" @@ -10,19 +10,19 @@ license = "BSD-3-Clause" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -tari_shutdown = { version = "^0.37", path = "../../infrastructure/shutdown" } +tari_shutdown = { version = "^0.38", path = "../../infrastructure/shutdown" } anyhow = "1.0.53" async-trait = "0.1.50" futures = { version = "^0.3.16", features = ["async-await"] } log = "0.4.8" thiserror = "1.0.26" -tokio = { version = "1.14", features = ["rt"] } +tokio = { version = "1.20", features 
= ["rt"] } tower-service = { version = "0.3" } [dev-dependencies] -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } -tokio = { version = "1.14", features = ["rt-multi-thread", "macros", "time"] } +tokio = { version = "1.20", features = ["rt-multi-thread", "macros", "time"] } futures-test = { version = "0.3.3" } tower = "0.4" diff --git a/base_layer/tari_mining_helper_ffi/Cargo.toml b/base_layer/tari_mining_helper_ffi/Cargo.toml index 7295769149..68f97cdc47 100644 --- a/base_layer/tari_mining_helper_ffi/Cargo.toml +++ b/base_layer/tari_mining_helper_ffi/Cargo.toml @@ -3,12 +3,12 @@ name = "tari_mining_helper_ffi" authors = ["The Tari Development Community"] description = "Tari cryptocurrency miningcore C FFI bindings" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] -tari_comms = { version = "^0.37", path = "../../comms/core" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_comms = { version = "^0.38", path = "../../comms/core" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_common = { path = "../../common" } tari_core = { path = "../core", default-features = false, features = ["transactions"]} tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } diff --git a/base_layer/tari_mining_helper_ffi/src/lib.rs b/base_layer/tari_mining_helper_ffi/src/lib.rs index 3375012bff..09a8948cae 100644 --- a/base_layer/tari_mining_helper_ffi/src/lib.rs +++ b/base_layer/tari_mining_helper_ffi/src/lib.rs @@ -370,8 +370,8 @@ mod tests { #[test] fn detect_change_in_consensus_encoding() { - const NONCE: u64 = 11718790012985321443; - const DIFFICULTY: Difficulty = Difficulty::from_u64(3755); + const NONCE: u64 = 15151693527177504675; + const DIFFICULTY: Difficulty = 
Difficulty::from_u64(8707); unsafe { let mut error = -1; let error_ptr = &mut error as *mut c_int; diff --git a/base_layer/wallet/Cargo.toml b/base_layer/wallet/Cargo.toml index 7f347d8979..eedaa2b82b 100644 --- a/base_layer/wallet/Cargo.toml +++ b/base_layer/wallet/Cargo.toml @@ -3,29 +3,29 @@ name = "tari_wallet" authors = ["The Tari Development Community"] description = "Tari cryptocurrency wallet library" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] tari_common = { path = "../../common" } -tari_common_types = { version = "^0.37", path = "../../base_layer/common_types" } -tari_comms = { version = "^0.37", path = "../../comms/core" } -tari_comms_dht = { version = "^0.37", path = "../../comms/dht" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } -tari_key_manager = { version = "^0.37", path = "../key_manager" } -tari_p2p = { version = "^0.37", path = "../p2p", features = ["auto-update"] } +tari_common_types = { version = "^0.38", path = "../../base_layer/common_types" } +tari_comms = { version = "^0.38", path = "../../comms/core" } +tari_comms_dht = { version = "^0.38", path = "../../comms/dht" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } +tari_key_manager = { version = "^0.38", path = "../key_manager" } +tari_p2p = { version = "^0.38", path = "../p2p", features = ["auto-update"] } tari_script = { path = "../../infrastructure/tari_script" } -tari_service_framework = { version = "^0.37", path = "../service_framework" } -tari_shutdown = { version = "^0.37", path = "../../infrastructure/shutdown" } -tari_storage = { version = "^0.37", path = "../../infrastructure/storage" } +tari_service_framework = { version = "^0.38", path = "../service_framework" } +tari_shutdown = { version = "^0.38", path = "../../infrastructure/shutdown" } +tari_storage = { version = "^0.38", path = "../../infrastructure/storage" } tari_common_sqlite 
= { path = "../../common_sqlite" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } # Uncomment for tokio tracing via tokio-console (needs "tracing" featurs) #console-subscriber = "0.1.3" -#tokio = { version = "1.14", features = ["sync", "macros", "tracing"] } +#tokio = { version = "1.20", features = ["sync", "macros", "tracing"] } # Uncomment for normal use (non tokio-console tracing) -tokio = { version = "1.14", features = ["sync", "macros"] } +tokio = { version = "1.20", features = ["sync", "macros"] } async-trait = "0.1.50" argon2 = "0.2" @@ -34,7 +34,6 @@ blake2 = "0.9.0" sha2 = "0.9.5" chrono = { version = "0.4.19", default-features = false, features = ["serde"] } clear_on_drop = "=0.2.4" -crossbeam-channel = "0.5.4" derivative = "2.2.0" diesel = { version = "1.4.8", features = ["sqlite", "serde_json", "chrono", "64-column-tables"] } diesel_migrations = "1.4.0" @@ -59,14 +58,14 @@ chacha20poly1305 = "0.10.1" [dependencies.tari_core] path = "../../base_layer/core" -version = "^0.37" +version = "^0.38" default-features = false features = ["transactions", "mempool_proto", "base_node_proto", ] [dev-dependencies] -tari_p2p = { version = "^0.37", path = "../p2p", features = ["test-mocks"] } -tari_comms_dht = { version = "^0.37", path = "../../comms/dht", features = ["test-mocks"] } -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_p2p = { version = "^0.38", path = "../p2p", features = ["test-mocks"] } +tari_comms_dht = { version = "^0.38", path = "../../comms/dht", features = ["test-mocks"] } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } env_logger = "0.7.1" prost = "0.9.0" diff --git a/base_layer/wallet/migrations/2022-08-08-134037_initial/up.sql b/base_layer/wallet/migrations/2022-08-08-134037_initial/up.sql index a8967cb66d..e9897c669d 100644 --- a/base_layer/wallet/migrations/2022-08-08-134037_initial/up.sql +++ 
b/base_layer/wallet/migrations/2022-08-08-134037_initial/up.sql @@ -110,12 +110,16 @@ CREATE TABLE outputs ( spent_in_tx_id BIGINT NULL, coinbase_block_height UNSIGNED BIGINT NULL, metadata BLOB NULL, + features_parent_public_key BLOB NULL, + features_unique_id BLOB NULL, features_json TEXT NOT NULL DEFAULT '{}', spending_priority UNSIGNED INTEGER NOT NULL DEFAULT 500, covenant BLOB NOT NULL, mined_timestamp DATETIME NULL, encrypted_value BLOB NOT NULL, + contract_id BLOB NULL, minimum_value_promise BIGINT NOT NULL, + source INTEGER NOT NULL DEFAULT 0, CONSTRAINT unique_commitment UNIQUE (commitment) ); diff --git a/base_layer/wallet/src/base_node_service/monitor.rs b/base_layer/wallet/src/base_node_service/monitor.rs index ccb5f401cb..1504797205 100644 --- a/base_layer/wallet/src/base_node_service/monitor.rs +++ b/base_layer/wallet/src/base_node_service/monitor.rs @@ -163,7 +163,7 @@ where timer.elapsed().as_millis() ); - self.db.set_chain_metadata(chain_metadata.clone()).await?; + self.db.set_chain_metadata(chain_metadata.clone())?; let is_synced = tip_info.is_synced; let height_of_longest_chain = chain_metadata.height_of_longest_chain(); diff --git a/base_layer/wallet/src/base_node_service/service.rs b/base_layer/wallet/src/base_node_service/service.rs index bcb94dfa77..cdd8ba0d71 100644 --- a/base_layer/wallet/src/base_node_service/service.rs +++ b/base_layer/wallet/src/base_node_service/service.rs @@ -150,11 +150,11 @@ where T: WalletBackend + 'static "Handling Wallet Base Node Service Request: {:?}", request ); match request { - BaseNodeServiceRequest::GetChainMetadata => match self.get_state().await.chain_metadata.clone() { + BaseNodeServiceRequest::GetChainMetadata => match self.get_state().await.chain_metadata { Some(metadata) => Ok(BaseNodeServiceResponse::ChainMetadata(Some(metadata))), None => { // if we don't have live state, check if we've previously stored state in the wallet db - let metadata = self.db.get_chain_metadata().await?; + let metadata = 
self.db.get_chain_metadata()?; Ok(BaseNodeServiceResponse::ChainMetadata(metadata)) }, }, diff --git a/base_layer/wallet/src/config.rs b/base_layer/wallet/src/config.rs index 8d13f1d328..6d0997fa6e 100644 --- a/base_layer/wallet/src/config.rs +++ b/base_layer/wallet/src/config.rs @@ -45,6 +45,12 @@ use crate::{ pub const KEY_MANAGER_COMMS_SECRET_KEY_BRANCH_KEY: &str = "comms"; +fn deserialize_safe_password_option<'de, D>(deserializer: D) -> Result, D::Error> +where D: serde::Deserializer<'de> { + let password: Option = Deserialize::deserialize(deserializer)?; + Ok(password.map(SafePassword::from)) +} + #[derive(Clone, Serialize, Deserialize, Debug)] #[serde(deny_unknown_fields)] pub struct WalletConfig { @@ -74,6 +80,7 @@ pub struct WalletConfig { /// The main wallet db sqlite database backend connection pool size for concurrent reads pub db_connection_pool_size: usize, /// The main wallet password + #[serde(deserialize_with = "deserialize_safe_password_option")] pub password: Option, /// The auto ping interval to use for contacts liveness data #[serde(with = "serializers::seconds")] @@ -129,7 +136,7 @@ impl Default for WalletConfig { base_node_service_config: Default::default(), data_dir: PathBuf::from_str("data/wallet").unwrap(), db_file: PathBuf::from_str("db/console_wallet.db").unwrap(), - db_connection_pool_size: 5, // TODO: get actual default + db_connection_pool_size: 16, // Note: Do not reduce this default number password: None, contacts_auto_ping_interval: Duration::from_secs(30), contacts_online_ping_window: 30, diff --git a/base_layer/wallet/src/connectivity_service/mock.rs b/base_layer/wallet/src/connectivity_service/mock.rs index 54f0295421..11228b6661 100644 --- a/base_layer/wallet/src/connectivity_service/mock.rs +++ b/base_layer/wallet/src/connectivity_service/mock.rs @@ -69,6 +69,11 @@ impl WalletConnectivityMock { self.base_node_watch.send(Some(base_node_peer)); } + pub async fn base_node_changed(&mut self) -> Option { + 
self.base_node_watch.changed().await; + self.base_node_watch.borrow().as_ref().cloned() + } + pub fn send_shutdown(&self) { self.base_node_wallet_rpc_client.send(None); self.base_node_sync_rpc_client.send(None); diff --git a/base_layer/wallet/src/connectivity_service/service.rs b/base_layer/wallet/src/connectivity_service/service.rs index 12ad2e18bc..e486b28d60 100644 --- a/base_layer/wallet/src/connectivity_service/service.rs +++ b/base_layer/wallet/src/connectivity_service/service.rs @@ -304,8 +304,7 @@ impl WalletConnectivityService { conn.peer_node_id() ); self.pools = Some(ClientPoolContainer { - base_node_sync_rpc_client: conn - .create_rpc_client_pool(self.config.base_node_rpc_pool_size, Default::default()), + base_node_sync_rpc_client: conn.create_rpc_client_pool(1, Default::default()), base_node_wallet_rpc_client: conn .create_rpc_client_pool(self.config.base_node_rpc_pool_size, Default::default()), }); diff --git a/base_layer/wallet/src/contacts_service/service.rs b/base_layer/wallet/src/contacts_service/service.rs index 9bed181739..a6ac57fec7 100644 --- a/base_layer/wallet/src/contacts_service/service.rs +++ b/base_layer/wallet/src/contacts_service/service.rs @@ -148,7 +148,7 @@ where T: ContactsBackend + 'static pin_mut!(shutdown); // Add all contacts as monitored peers to the liveness service - let result = self.db.get_contacts().await; + let result = self.db.get_contacts(); if let Ok(ref contacts) = result { self.add_contacts_to_liveness_service(contacts).await?; } @@ -195,14 +195,14 @@ where T: ContactsBackend + 'static ) -> Result { match request { ContactsServiceRequest::GetContact(pk) => { - let result = self.db.get_contact(pk.clone()).await; + let result = self.db.get_contact(pk.clone()); if let Ok(ref contact) = result { self.liveness.check_add_monitored_peer(contact.node_id.clone()).await?; }; Ok(result.map(ContactsServiceResponse::Contact)?) 
}, ContactsServiceRequest::UpsertContact(c) => { - self.db.upsert_contact(c.clone()).await?; + self.db.upsert_contact(c.clone())?; self.liveness.check_add_monitored_peer(c.node_id).await?; info!( target: LOG_TARGET, @@ -211,7 +211,7 @@ where T: ContactsBackend + 'static Ok(ContactsServiceResponse::ContactSaved) }, ContactsServiceRequest::RemoveContact(pk) => { - let result = self.db.remove_contact(pk.clone()).await?; + let result = self.db.remove_contact(pk.clone())?; self.liveness .check_remove_monitored_peer(result.node_id.clone()) .await?; @@ -222,7 +222,7 @@ where T: ContactsBackend + 'static Ok(ContactsServiceResponse::ContactRemoved(result)) }, ContactsServiceRequest::GetContacts => { - let result = self.db.get_contacts().await; + let result = self.db.get_contacts(); if let Ok(ref contacts) = result { self.add_contacts_to_liveness_service(contacts).await?; } @@ -254,11 +254,11 @@ where T: ContactsBackend + 'static match event { // Received a ping, check if it contains ContactsLiveness LivenessEvent::ReceivedPing(event) => { - self.update_with_ping_pong(event, ContactMessageType::Ping).await?; + self.update_with_ping_pong(event, ContactMessageType::Ping)?; }, // Received a pong, check if our neighbour sent it and it contains ContactsLiveness LivenessEvent::ReceivedPong(event) => { - self.update_with_ping_pong(event, ContactMessageType::Pong).await?; + self.update_with_ping_pong(event, ContactMessageType::Pong)?; }, // New ping round has begun LivenessEvent::PingRoundBroadcast(num_peers) => { @@ -277,7 +277,7 @@ where T: ContactsBackend + 'static self.resize_contacts_liveness_data_buffer(*num_peers); // Update offline status - if let Ok(contacts) = self.db.get_contacts().await { + if let Ok(contacts) = self.db.get_contacts() { for contact in contacts { let online_status = self.get_online_status(&contact).await?; if online_status == ContactOnlineStatus::Online { @@ -308,11 +308,16 @@ where T: ContactsBackend + 'static let mut online_status = 
ContactOnlineStatus::NeverSeen; match self.connectivity.get_peer_info(contact.node_id.clone()).await? { Some(peer_data) => { - if peer_data.banned_until().is_some() { - return Ok(ContactOnlineStatus::Banned(peer_data.banned_reason)); + if let Some(banned_until) = peer_data.banned_until() { + let msg = format!( + "Until {} ({})", + banned_until.format("%m-%d %H:%M"), + peer_data.banned_reason + ); + return Ok(ContactOnlineStatus::Banned(msg)); } }, - None => return Ok(online_status), + None => {}, }; if let Some(time) = contact.last_seen { if self.is_online(time) { @@ -332,7 +337,7 @@ where T: ContactsBackend + 'static Utc::now().naive_utc().sub(last_seen) <= ping_window } - async fn update_with_ping_pong( + fn update_with_ping_pong( &mut self, event: &PingPongEvent, message_type: ContactMessageType, @@ -356,15 +361,14 @@ where T: ContactsBackend + 'static } let this_public_key = self .db - .update_contact_last_seen(&event.node_id, last_seen.naive_utc(), latency) - .await?; + .update_contact_last_seen(&event.node_id, last_seen.naive_utc(), latency)?; let data = ContactsLivenessData::new( this_public_key, event.node_id.clone(), latency, Some(last_seen.naive_utc()), - message_type.clone(), + message_type, ContactOnlineStatus::Online, ); self.liveness_data.push(data.clone()); diff --git a/base_layer/wallet/src/contacts_service/storage/database.rs b/base_layer/wallet/src/contacts_service/storage/database.rs index 447276834e..425f59a464 100644 --- a/base_layer/wallet/src/contacts_service/storage/database.rs +++ b/base_layer/wallet/src/contacts_service/storage/database.rs @@ -118,18 +118,14 @@ where T: ContactsBackend + 'static Self { db: Arc::new(db) } } - pub async fn get_contact(&self, pub_key: CommsPublicKey) -> Result { + pub fn get_contact(&self, pub_key: CommsPublicKey) -> Result { let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || fetch!(db_clone, pub_key.clone(), Contact)) - .await - .map_err(|err| 
ContactsServiceStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + fetch!(db_clone, pub_key, Contact) } - pub async fn get_contacts(&self) -> Result, ContactsServiceStorageError> { + pub fn get_contacts(&self) -> Result, ContactsServiceStorageError> { let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::Contacts) { + match db_clone.fetch(&DbKey::Contacts) { Ok(None) => log_error( DbKey::Contacts, ContactsServiceStorageError::UnexpectedResult("Could not retrieve contacts".to_string()), @@ -137,46 +133,31 @@ where T: ContactsBackend + 'static Ok(Some(DbValue::Contacts(c))) => Ok(c), Ok(Some(other)) => unexpected_result(DbKey::Contacts, other), Err(e) => log_error(DbKey::Contacts, e), - }) - .await - .map_err(|err| ContactsServiceStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(c) + } } - pub async fn upsert_contact(&self, contact: Contact) -> Result<(), ContactsServiceStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Upsert(Box::new(DbKeyValuePair::Contact( - contact.public_key.clone(), - contact, - )))) - }) - .await - .map_err(|err| ContactsServiceStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn upsert_contact(&self, contact: Contact) -> Result<(), ContactsServiceStorageError> { + self.db.write(WriteOperation::Upsert(Box::new(DbKeyValuePair::Contact( + contact.public_key.clone(), + contact, + ))))?; Ok(()) } - pub async fn update_contact_last_seen( + pub fn update_contact_last_seen( &self, node_id: &NodeId, last_seen: NaiveDateTime, latency: Option, ) -> Result { - let db_clone = self.db.clone(); - let node_id_clone = node_id.clone(); - - let result = tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::UpdateLastSeen(Box::new(DbKeyValuePair::LastSeen( - node_id_clone, + let result = self + .db + 
.write(WriteOperation::UpdateLastSeen(Box::new(DbKeyValuePair::LastSeen( + node_id.clone(), last_seen, latency.map(|val| val as i32), - )))) - }) - .await - .map_err(|err| ContactsServiceStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result)? - .ok_or_else(|| ContactsServiceStorageError::ValueNotFound(DbKey::ContactId(node_id.clone())))?; + ))))? + .ok_or_else(|| ContactsServiceStorageError::ValueNotFound(DbKey::ContactId(node_id.clone())))?; match result { DbValue::PublicKey(k) => Ok(*k), _ => Err(ContactsServiceStorageError::UnexpectedResult( @@ -185,16 +166,11 @@ where T: ContactsBackend + 'static } } - pub async fn remove_contact(&self, pub_key: CommsPublicKey) -> Result { - let db_clone = self.db.clone(); - let pub_key_clone = pub_key.clone(); - let result = - tokio::task::spawn_blocking(move || db_clone.write(WriteOperation::Remove(DbKey::Contact(pub_key_clone)))) - .await - .map_err(|err| ContactsServiceStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result)? - .ok_or_else(|| ContactsServiceStorageError::ValueNotFound(DbKey::Contact(pub_key.clone())))?; - + pub fn remove_contact(&self, pub_key: CommsPublicKey) -> Result { + let result = self + .db + .write(WriteOperation::Remove(DbKey::Contact(pub_key.clone())))? 
+ .ok_or_else(|| ContactsServiceStorageError::ValueNotFound(DbKey::Contact(pub_key.clone())))?; match result { DbValue::Contact(c) => Ok(*c), DbValue::Contacts(_) | DbValue::PublicKey(_) => Err(ContactsServiceStorageError::UnexpectedResult( diff --git a/base_layer/wallet/src/key_manager_service/error.rs b/base_layer/wallet/src/key_manager_service/error.rs index 1492c22868..c23ab2a04b 100644 --- a/base_layer/wallet/src/key_manager_service/error.rs +++ b/base_layer/wallet/src/key_manager_service/error.rs @@ -67,8 +67,6 @@ pub enum KeyManagerStorageError { DieselConnectionError(#[from] diesel::ConnectionError), #[error("Database migration error: `{0}`")] DatabaseMigrationError(String), - #[error("Blocking task spawn error: `{0}`")] - BlockingTaskSpawnError(String), #[error("Wallet db is already encrypted and cannot be encrypted until the previous encryption is removed")] AlreadyEncrypted, #[error("Wallet db is currently encrypted, decrypt before use")] diff --git a/base_layer/wallet/src/key_manager_service/handle.rs b/base_layer/wallet/src/key_manager_service/handle.rs index b3c8213238..78b1454e06 100644 --- a/base_layer/wallet/src/key_manager_service/handle.rs +++ b/base_layer/wallet/src/key_manager_service/handle.rs @@ -67,15 +67,14 @@ where TBackend: KeyManagerBackend + 'static .write() .await .add_key_manager_branch(branch.into()) - .await } async fn apply_encryption(&self, cipher: XChaCha20Poly1305) -> Result<(), KeyManagerServiceError> { - (*self.key_manager_inner).write().await.apply_encryption(cipher).await + (*self.key_manager_inner).write().await.apply_encryption(cipher) } async fn remove_encryption(&self) -> Result<(), KeyManagerServiceError> { - (*self.key_manager_inner).write().await.remove_encryption().await + (*self.key_manager_inner).write().await.remove_encryption() } async fn get_next_key + Send>(&self, branch: T) -> Result { diff --git a/base_layer/wallet/src/key_manager_service/service.rs b/base_layer/wallet/src/key_manager_service/service.rs index 
75cea72c21..9378f6c68b 100644 --- a/base_layer/wallet/src/key_manager_service/service.rs +++ b/base_layer/wallet/src/key_manager_service/service.rs @@ -56,19 +56,19 @@ where TBackend: KeyManagerBackend + 'static } } - pub async fn add_key_manager_branch(&mut self, branch: String) -> Result { + pub fn add_key_manager_branch(&mut self, branch: String) -> Result { let result = if self.key_managers.contains_key(&branch) { AddResult::AlreadyExists } else { AddResult::NewEntry }; - let state = match self.db.get_key_manager_state(branch.clone()).await? { + let state = match self.db.get_key_manager_state(branch.clone())? { None => { let starting_state = KeyManagerState { branch_seed: branch.to_string(), primary_key_index: 0, }; - self.db.set_key_manager_state(starting_state.clone()).await?; + self.db.set_key_manager_state(starting_state.clone())?; starting_state }, Some(km) => km, @@ -92,7 +92,7 @@ where TBackend: KeyManagerBackend + 'static .lock() .await; let key = km.next_key()?; - self.db.increment_key_index(branch).await?; + self.db.increment_key_index(branch)?; Ok(NextKeyResult { key: key.k, index: km.key_index(), @@ -110,13 +110,13 @@ where TBackend: KeyManagerBackend + 'static Ok(key.k) } - pub async fn apply_encryption(&self, cipher: XChaCha20Poly1305) -> Result<(), KeyManagerServiceError> { - self.db.apply_encryption(cipher).await?; + pub fn apply_encryption(&self, cipher: XChaCha20Poly1305) -> Result<(), KeyManagerServiceError> { + self.db.apply_encryption(cipher)?; Ok(()) } - pub async fn remove_encryption(&self) -> Result<(), KeyManagerServiceError> { - self.db.remove_encryption().await?; + pub fn remove_encryption(&self) -> Result<(), KeyManagerServiceError> { + self.db.remove_encryption()?; Ok(()) } @@ -156,7 +156,7 @@ where TBackend: KeyManagerBackend + 'static let current_index = km.key_index(); if index > current_index { km.update_key_index(index); - self.db.set_key_index(branch, index).await?; + self.db.set_key_index(branch, index)?; trace!(target: 
LOG_TARGET, "Updated UTXO Key Index to {}", index); } Ok(()) diff --git a/base_layer/wallet/src/key_manager_service/storage/database/mod.rs b/base_layer/wallet/src/key_manager_service/storage/database/mod.rs index 4c390f5010..364e6eadea 100644 --- a/base_layer/wallet/src/key_manager_service/storage/database/mod.rs +++ b/base_layer/wallet/src/key_manager_service/storage/database/mod.rs @@ -52,63 +52,35 @@ where T: KeyManagerBackend + 'static /// Retrieves the key manager state of the provided branch /// Returns None if the request branch does not exist. - pub async fn get_key_manager_state( - &self, - branch: String, - ) -> Result, KeyManagerStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.get_key_manager(branch)) - .await - .map_err(|err| KeyManagerStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn get_key_manager_state(&self, branch: String) -> Result, KeyManagerStorageError> { + self.db.get_key_manager(branch) } /// Saves the specified key manager state to the backend database. - pub async fn set_key_manager_state(&self, state: KeyManagerState) -> Result<(), KeyManagerStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.add_key_manager(state)) - .await - .map_err(|err| KeyManagerStorageError::BlockingTaskSpawnError(err.to_string()))??; - - Ok(()) + pub fn set_key_manager_state(&self, state: KeyManagerState) -> Result<(), KeyManagerStorageError> { + self.db.add_key_manager(state) } /// Increment the key index of the provided branch of the key manager. /// Will error if the branch does not exist. 
- pub async fn increment_key_index(&self, branch: String) -> Result<(), KeyManagerStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.increment_key_index(branch)) - .await - .map_err(|err| KeyManagerStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn increment_key_index(&self, branch: String) -> Result<(), KeyManagerStorageError> { + self.db.increment_key_index(branch) } /// Sets the key index of the provided branch of the key manager. /// Will error if the branch does not exist. - pub async fn set_key_index(&self, branch: String, index: u64) -> Result<(), KeyManagerStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.set_key_index(branch, index)) - .await - .map_err(|err| KeyManagerStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn set_key_index(&self, branch: String, index: u64) -> Result<(), KeyManagerStorageError> { + self.db.set_key_index(branch, index) } /// Encrypts the entire key manager with all branches. /// This will only encrypt the index used, as the master seed phrase is not directly stored with the key manager. - pub async fn apply_encryption(&self, cipher: XChaCha20Poly1305) -> Result<(), KeyManagerStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.apply_encryption(cipher)) - .await - .map_err(|err| KeyManagerStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn apply_encryption(&self, cipher: XChaCha20Poly1305) -> Result<(), KeyManagerStorageError> { + self.db.apply_encryption(cipher) } /// Decrypts the entire key manager. 
- pub async fn remove_encryption(&self) -> Result<(), KeyManagerStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.remove_encryption()) - .await - .map_err(|err| KeyManagerStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn remove_encryption(&self) -> Result<(), KeyManagerStorageError> { + self.db.remove_encryption() } } diff --git a/base_layer/wallet/src/output_manager_service/config.rs b/base_layer/wallet/src/output_manager_service/config.rs index 5c48222286..d08434de01 100644 --- a/base_layer/wallet/src/output_manager_service/config.rs +++ b/base_layer/wallet/src/output_manager_service/config.rs @@ -35,6 +35,12 @@ pub struct OutputManagerServiceConfig { pub num_confirmations_required: u64, /// The number of batches the unconfirmed outputs will be divided into before being queried from the base node pub tx_validator_batch_size: usize, + /// Wallets currently will choose the best outputs as inputs when spending, however since a lurking base node can + /// generate a transaction graph of inputs to outputs with relative ease, a wallet may reveal its transaction + /// history by including a (non-stealth address) one-sided payment. + /// If set to `true`, then outputs received via simple one-sided transactions, won't be automatically selected as + /// inputs for further transactions, but can still be selected individually as specific outputs. 
+ pub autoignore_onesided_utxos: bool, } impl Default for OutputManagerServiceConfig { @@ -44,6 +50,7 @@ impl Default for OutputManagerServiceConfig { event_channel_size: 250, num_confirmations_required: 3, tx_validator_batch_size: 100, + autoignore_onesided_utxos: false, } } } diff --git a/base_layer/wallet/src/output_manager_service/error.rs b/base_layer/wallet/src/output_manager_service/error.rs index 8adcc975af..293a9ac7c1 100644 --- a/base_layer/wallet/src/output_manager_service/error.rs +++ b/base_layer/wallet/src/output_manager_service/error.rs @@ -94,6 +94,8 @@ pub enum OutputManagerError { ServiceError(String), #[error("Base node is not synced")] BaseNodeNotSynced, + #[error("Base node changed")] + BaseNodeChanged, #[error("Invalid Sender Message Type")] InvalidSenderMessage, #[error("Coinbase build error: `{0}`")] diff --git a/base_layer/wallet/src/output_manager_service/handle.rs b/base_layer/wallet/src/output_manager_service/handle.rs index 3403678213..45923b6f93 100644 --- a/base_layer/wallet/src/output_manager_service/handle.rs +++ b/base_layer/wallet/src/output_manager_service/handle.rs @@ -78,7 +78,7 @@ pub enum OutputManagerRequest { PrepareToSendTransaction { tx_id: TxId, amount: MicroTari, - utxo_selection: UtxoSelectionCriteria, + selection_criteria: UtxoSelectionCriteria, output_features: Box, fee_per_gram: MicroTari, tx_meta: TransactionMetadata, @@ -90,7 +90,7 @@ pub enum OutputManagerRequest { CreatePayToSelfTransaction { tx_id: TxId, amount: MicroTari, - utxo_selection: UtxoSelectionCriteria, + selection_criteria: UtxoSelectionCriteria, output_features: Box, fee_per_gram: MicroTari, lock_height: Option, @@ -99,7 +99,7 @@ pub enum OutputManagerRequest { CreatePayToSelfWithOutputs { outputs: Vec, fee_per_gram: MicroTari, - input_selection: UtxoSelectionCriteria, + selection_criteria: UtxoSelectionCriteria, }, CancelTransaction(TxId), GetSpentOutputs, @@ -120,6 +120,7 @@ pub enum OutputManagerRequest { RemoveEncryption, FeeEstimate { amount: 
MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, num_kernels: usize, num_outputs: usize, @@ -197,13 +198,14 @@ impl fmt::Display for OutputManagerRequest { GetCoinbaseTransaction(_) => write!(f, "GetCoinbaseTransaction"), FeeEstimate { amount, + selection_criteria, fee_per_gram, num_kernels, num_outputs, } => write!( f, - "FeeEstimate(amount: {}, fee_per_gram: {}, num_kernels: {}, num_outputs: {})", - amount, fee_per_gram, num_kernels, num_outputs + "FeeEstimate(amount: {}, fee_per_gram: {}, num_kernels: {}, num_outputs: {}, selection_criteria: {:?})", + amount, fee_per_gram, num_kernels, num_outputs, selection_criteria ), ScanForRecoverableOutputs(_) => write!(f, "ScanForRecoverableOutputs"), ScanOutputs(_) => write!(f, "ScanOutputs"), @@ -545,7 +547,7 @@ impl OutputManagerHandle { .call(OutputManagerRequest::PrepareToSendTransaction { tx_id, amount, - utxo_selection, + selection_criteria: utxo_selection, output_features: Box::new(output_features), fee_per_gram, tx_meta, @@ -566,6 +568,7 @@ impl OutputManagerHandle { pub async fn fee_estimate( &mut self, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, num_kernels: usize, num_outputs: usize, @@ -574,6 +577,7 @@ impl OutputManagerHandle { .handle .call(OutputManagerRequest::FeeEstimate { amount, + selection_criteria, fee_per_gram, num_kernels, num_outputs, @@ -833,7 +837,7 @@ impl OutputManagerHandle { .call(OutputManagerRequest::CreatePayToSelfWithOutputs { outputs, fee_per_gram, - input_selection, + selection_criteria: input_selection, }) .await?? 
{ @@ -857,7 +861,7 @@ impl OutputManagerHandle { .call(OutputManagerRequest::CreatePayToSelfTransaction { tx_id, amount, - utxo_selection, + selection_criteria: utxo_selection, output_features: Box::new(output_features), fee_per_gram, lock_height, diff --git a/base_layer/wallet/src/output_manager_service/input_selection.rs b/base_layer/wallet/src/output_manager_service/input_selection.rs index ead49139b7..933285c312 100644 --- a/base_layer/wallet/src/output_manager_service/input_selection.rs +++ b/base_layer/wallet/src/output_manager_service/input_selection.rs @@ -32,6 +32,7 @@ pub struct UtxoSelectionCriteria { pub filter: UtxoSelectionFilter, pub ordering: UtxoSelectionOrdering, pub excluding: Vec, + pub excluding_onesided: bool, } impl UtxoSelectionCriteria { diff --git a/base_layer/wallet/src/output_manager_service/recovery/standard_outputs_recoverer.rs b/base_layer/wallet/src/output_manager_service/recovery/standard_outputs_recoverer.rs index 268cc2756a..00d81a9334 100644 --- a/base_layer/wallet/src/output_manager_service/recovery/standard_outputs_recoverer.rs +++ b/base_layer/wallet/src/output_manager_service/recovery/standard_outputs_recoverer.rs @@ -37,7 +37,7 @@ use tari_crypto::{ keys::{PublicKey as PublicKeyTrait, SecretKey}, tari_utilities::hex::Hex, }; -use tari_script::{inputs, script}; +use tari_script::{inputs, script, Opcode}; use crate::{ key_manager_service::KeyManagerInterface, @@ -48,6 +48,7 @@ use crate::{ storage::{ database::{OutputManagerBackend, OutputManagerDatabase}, models::DbUnblindedOutput, + OutputSource, }, }, }; @@ -145,12 +146,21 @@ where let mut rewound_outputs_with_tx_id: Vec = Vec::new(); for (output, proof) in &mut rewound_outputs { + // Attempting to recognize output source by i.e., standard MimbleWimble, simple or stealth one-sided + let output_source = match *output.script.as_slice() { + [Opcode::Nop] => OutputSource::Standard, + [Opcode::PushPubKey(_), Opcode::Drop, Opcode::PushPubKey(_)] => OutputSource::StealthOneSided, 
+ [Opcode::PushPubKey(_)] => OutputSource::OneSided, + _ => OutputSource::RecoveredButUnrecognized, + }; + let db_output = DbUnblindedOutput::rewindable_from_unblinded_output( output.clone(), &self.factories, &self.rewind_data, None, Some(proof), + output_source, )?; let tx_id = TxId::new_random(); let output_hex = db_output.commitment.to_hex(); diff --git a/base_layer/wallet/src/output_manager_service/service.rs b/base_layer/wallet/src/output_manager_service/service.rs index 10102e07e0..b2d344b66e 100644 --- a/base_layer/wallet/src/output_manager_service/service.rs +++ b/base_layer/wallet/src/output_manager_service/service.rs @@ -91,6 +91,7 @@ use crate::{ storage::{ database::{OutputBackendQuery, OutputManagerBackend, OutputManagerDatabase}, models::{DbUnblindedOutput, KnownOneSidedPaymentScript, SpendingPriority}, + OutputSource, OutputStatus, }, tasks::TxoValidationTask, @@ -278,7 +279,7 @@ where OutputManagerRequest::PrepareToSendTransaction { tx_id, amount, - utxo_selection, + selection_criteria, output_features, fee_per_gram, tx_meta, @@ -290,7 +291,7 @@ where .prepare_transaction_to_send( tx_id, amount, - utxo_selection, + selection_criteria, fee_per_gram, tx_meta, message, @@ -304,7 +305,7 @@ where OutputManagerRequest::CreatePayToSelfTransaction { tx_id, amount, - utxo_selection, + selection_criteria, output_features, fee_per_gram, lock_height, @@ -313,7 +314,7 @@ where .create_pay_to_self_transaction( tx_id, amount, - utxo_selection, + selection_criteria, *output_features, fee_per_gram, lock_height, @@ -323,11 +324,12 @@ where .map(OutputManagerResponse::PayToSelfTransaction), OutputManagerRequest::FeeEstimate { amount, + selection_criteria, fee_per_gram, num_kernels, num_outputs, } => self - .fee_estimate(amount, fee_per_gram, num_kernels, num_outputs) + .fee_estimate(amount, selection_criteria, fee_per_gram, num_kernels, num_outputs) .await .map(OutputManagerResponse::FeeEstimate), OutputManagerRequest::ConfirmPendingTransaction(tx_id) => self @@ 
-444,10 +446,10 @@ where OutputManagerRequest::CreatePayToSelfWithOutputs { outputs, fee_per_gram, - input_selection, + selection_criteria, } => { let (tx_id, transaction) = self - .create_pay_to_self_containing_outputs(outputs, fee_per_gram, input_selection) + .create_pay_to_self_containing_outputs(outputs, selection_criteria, fee_per_gram) .await?; Ok(OutputManagerResponse::CreatePayToSelfWithOutputs { transaction: Box::new(transaction), @@ -532,11 +534,13 @@ where } fn validate_outputs(&mut self) -> Result { - if !self.resources.connectivity.is_base_node_set() { - return Err(OutputManagerError::NoBaseNodeKeysProvided); - } + let current_base_node = self + .resources + .connectivity + .get_current_base_node_id() + .ok_or(OutputManagerError::NoBaseNodeKeysProvided)?; let id = OsRng.next_u64(); - let utxo_validation = TxoValidationTask::new( + let txo_validation = TxoValidationTask::new( id, self.resources.db.clone(), self.resources.connectivity.clone(), @@ -544,28 +548,56 @@ where self.resources.config.clone(), ); - let shutdown = self.resources.shutdown_signal.clone(); + let mut shutdown = self.resources.shutdown_signal.clone(); + let mut base_node_watch = self.resources.connectivity.get_current_base_node_watcher(); let event_publisher = self.resources.event_publisher.clone(); tokio::spawn(async move { - match utxo_validation.execute(shutdown).await { - Ok(id) => { - info!( - target: LOG_TARGET, - "UTXO Validation Protocol (Id: {}) completed successfully", id - ); - }, - Err(OutputManagerProtocolError { id, error }) => { - warn!( - target: LOG_TARGET, - "Error completing UTXO Validation Protocol (Id: {}): {:?}", id, error - ); - if let Err(e) = event_publisher.send(Arc::new(OutputManagerEvent::TxoValidationFailure(id))) { - debug!( - target: LOG_TARGET, - "Error sending event because there are no subscribers: {:?}", e - ); + let exec_fut = txo_validation.execute(); + tokio::pin!(exec_fut); + loop { + tokio::select! 
{ + result = &mut exec_fut => { + match result { + Ok(id) => { + info!( + target: LOG_TARGET, + "UTXO Validation Protocol (Id: {}) completed successfully", id + ); + return; + }, + Err(OutputManagerProtocolError { id, error }) => { + warn!( + target: LOG_TARGET, + "Error completing UTXO Validation Protocol (Id: {}): {:?}", id, error + ); + if let Err(e) = event_publisher.send(Arc::new(OutputManagerEvent::TxoValidationFailure(id))) { + debug!( + target: LOG_TARGET, + "Error sending event because there are no subscribers: {:?}", e + ); + } + + return; + }, + } + }, + _ = shutdown.wait() => { + debug!(target: LOG_TARGET, "TXO Validation Protocol (Id: {}) shutting down because the system is shutting down", id); + return; + }, + _ = base_node_watch.changed() => { + if let Some(peer) = base_node_watch.borrow().as_ref() { + if peer.node_id != current_base_node { + debug!( + target: LOG_TARGET, + "TXO Validation Protocol (Id: {}) cancelled because base node changed", id + ); + return; + } + } + } - }, + } } }); Ok(id) @@ -588,7 +620,12 @@ where "Add output of value {} to Output Manager", output.value ); - let output = DbUnblindedOutput::from_unblinded_output(output, &self.resources.factories, spend_priority)?; + let output = DbUnblindedOutput::from_unblinded_output( + output, + &self.resources.factories, + spend_priority, + OutputSource::default(), + )?; debug!( target: LOG_TARGET, "saving output of hash {} to Output Manager", @@ -625,6 +662,7 @@ where &rewind_data, spend_priority, None, + OutputSource::default(), )?; debug!( target: LOG_TARGET, @@ -660,7 +698,12 @@ where target: LOG_TARGET, "Add unvalidated output of value {} to Output Manager", output.value ); - let output = DbUnblindedOutput::from_unblinded_output(output, &self.resources.factories, spend_priority)?; + let output = DbUnblindedOutput::from_unblinded_output( + output, + &self.resources.factories, + spend_priority, + OutputSource::default(), + )?; self.resources.db.add_unvalidated_output(tx_id, output)?; 
Ok(()) } @@ -772,6 +815,7 @@ where &self.resources.rewind_data, None, None, + OutputSource::default(), )?; self.resources @@ -796,6 +840,7 @@ where async fn fee_estimate( &mut self, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, num_kernels: usize, num_outputs: usize, @@ -820,15 +865,34 @@ where Covenant::new().consensus_encode_exact_size(), ); - let utxo_selection = self + let utxo_selection = match self .select_utxos( amount, + selection_criteria, fee_per_gram, num_outputs, metadata_byte_size * num_outputs, - UtxoSelectionCriteria::default(), ) - .await?; + .await + { + Ok(v) => Ok(v), + Err(OutputManagerError::FundsPending | OutputManagerError::NotEnoughFunds) => { + debug!( + target: LOG_TARGET, + "We dont have enough funds available to make a fee estimate, so we estimate 1 input, no change" + ); + let fee_calc = self.get_fee_calc(); + let output_features_estimate = OutputFeatures::default(); + let default_metadata_size = fee_calc.weighting().round_up_metadata_size( + output_features_estimate.consensus_encode_exact_size() + + Covenant::new().consensus_encode_exact_size() + + script![Nop].consensus_encode_exact_size(), + ); + let fee = fee_calc.calculate(fee_per_gram, 1, 1, num_outputs, default_metadata_size); + return Ok(Fee::normalize(fee)); + }, + Err(e) => Err(e), + }?; debug!(target: LOG_TARGET, "{} utxos selected.", utxo_selection.utxos.len()); @@ -845,7 +909,7 @@ where &mut self, tx_id: TxId, amount: MicroTari, - utxo_selection: UtxoSelectionCriteria, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, tx_meta: TransactionMetadata, message: String, @@ -858,7 +922,7 @@ where target: LOG_TARGET, "Preparing to send transaction. Amount: {}. UTXO Selection: {}. Fee per gram: {}. 
", amount, - utxo_selection, + selection_criteria, fee_per_gram, ); let metadata_byte_size = self @@ -872,7 +936,7 @@ where ); let input_selection = self - .select_utxos(amount, fee_per_gram, 1, metadata_byte_size, utxo_selection) + .select_utxos(amount, selection_criteria, fee_per_gram, 1, metadata_byte_size) .await?; let offset = PrivateKey::random(&mut OsRng); @@ -946,6 +1010,7 @@ where &self.resources.rewind_data.clone(), None, None, + OutputSource::default(), )?); } @@ -1007,6 +1072,7 @@ where &self.resources.rewind_data, None, None, + OutputSource::Coinbase, )?; // If there is no existing output available, we store the one we produced. @@ -1036,8 +1102,8 @@ where async fn create_pay_to_self_containing_outputs( &mut self, outputs: Vec, - fee_per_gram: MicroTari, selection_criteria: UtxoSelectionCriteria, + fee_per_gram: MicroTari, ) -> Result<(TxId, Transaction), OutputManagerError> { let total_value = outputs.iter().map(|o| o.value()).sum(); let nop_script = script![Nop]; @@ -1054,10 +1120,10 @@ where let input_selection = self .select_utxos( total_value, + selection_criteria, fee_per_gram, outputs.len(), metadata_byte_size, - selection_criteria, ) .await?; let offset = PrivateKey::random(&mut OsRng); @@ -1113,59 +1179,13 @@ where &self.resources.rewind_data, None, None, + OutputSource::default(), )?) 
} - // let mut change_keys = None; - // - // let fee = Fee::calculate(fee_per_gram, 1, inputs.len(), 1); - // let change_value = total.saturating_sub(fee); - // if change_value > 0.into() { - // let (spending_key, script_private_key) = self - // .resources - // .master_key_manager - // .get_next_spend_and_script_key() - // .await?; - // change_keys = Some((spending_key.clone(), script_private_key.clone())); - // builder.with_change_secret(spending_key); - // builder.with_rewindable_outputs(&self.resources.rewind_data.clone()); - // builder.with_change_script( - // script!(Nop), - // inputs!(PublicKey::from_secret_key(&script_private_key)), - // script_private_key, - // ); - // } - let mut stp = builder .build(&self.resources.factories, None, u64::MAX) .map_err(|e| OutputManagerError::BuildError(e.message))?; - // if let Some((spending_key, script_private_key)) = change_keys { - // // let change_script_offset_public_key = stp.get_change_sender_offset_public_key()?.ok_or_else(|| { - // // OutputManagerError::BuildError( - // // "There should be a change script offset public key available".to_string(), - // // ) - // // })?; - // - // let sender_offset_private_key = PrivateKey::random(&mut OsRng); - // let sender_offset_public_key = PublicKey::from_secret_key(&sender_offset_private_key); - // - // let public_offset_commitment_private_key = PrivateKey::random(&mut OsRng); - // let public_offset_commitment_pub_key = PublicKey::from_secret_key(&public_offset_commitment_private_key); - // - // let mut output_builder = UnblindedOutputBuilder::new(stp.get_change_amount()?, spending_key) - // .with_script(script!(Nop)) - // .with_input_data(inputs!(PublicKey::from_secret_key(&script_private_key))) - // .with_script_private_key(script_private_key); - // - // output_builder.sign_as_receiver(sender_offset_public_key, public_offset_commitment_pub_key)?; - // output_builder.sign_as_sender(&sender_offset_private_key)?; - // - - // let change_output = - // 
DbUnblindedOutput::from_unblinded_output(output_builder.try_build()?, &self.resources.factories)?; - // - // db_outputs.push(change_output); - // } if let Some(unblinded_output) = stp.get_change_unblinded_output()? { db_outputs.push(DbUnblindedOutput::rewindable_from_unblinded_output( @@ -1174,6 +1194,7 @@ where &self.resources.rewind_data, None, None, + OutputSource::default(), )?); } let tx_id = stp.get_tx_id()?; @@ -1191,7 +1212,7 @@ where &mut self, tx_id: TxId, amount: MicroTari, - utxo_selection: UtxoSelectionCriteria, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, lock_height: Option, @@ -1210,7 +1231,7 @@ where ); let input_selection = self - .select_utxos(amount, fee_per_gram, 1, metadata_byte_size, utxo_selection) + .select_utxos(amount, selection_criteria, fee_per_gram, 1, metadata_byte_size) .await?; let offset = PrivateKey::random(&mut OsRng); @@ -1277,6 +1298,7 @@ where &self.resources.rewind_data, None, None, + OutputSource::default(), )?; builder .with_output(utxo.unblinded_output.clone(), sender_offset_private_key.clone()) @@ -1316,6 +1338,7 @@ where &self.resources.rewind_data, None, None, + OutputSource::default(), )?; outputs.push(change_output); } @@ -1368,10 +1391,10 @@ where async fn select_utxos( &mut self, amount: MicroTari, + mut selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, num_outputs: usize, total_output_metadata_byte_size: usize, - selection_criteria: UtxoSelectionCriteria, ) -> Result { debug!( target: LOG_TARGET, @@ -1390,6 +1413,11 @@ where // Attempt to get the chain tip height let chain_metadata = self.base_node_service.get_chain_metadata().await?; + // Respecting the setting to not choose outputs that reveal the address + if self.resources.config.autoignore_onesided_utxos { + selection_criteria.excluding_onesided = self.resources.config.autoignore_onesided_utxos; + } + warn!( target: LOG_TARGET, "select_utxos selection criteria: {}", selection_criteria 
@@ -1424,7 +1452,7 @@ where for o in uo { utxos_total_value += o.unblinded_output.value; - error!(target: LOG_TARGET, "-- utxos_total_value = {:?}", utxos_total_value); + trace!(target: LOG_TARGET, "-- utxos_total_value = {:?}", utxos_total_value); utxos.push(o); // The assumption here is that the only output will be the payment output and change if required fee_without_change = fee_calc.calculate( @@ -1445,7 +1473,7 @@ where total_output_metadata_byte_size + default_metadata_size, ); - error!(target: LOG_TARGET, "-- amt+fee = {:?} {}", amount, fee_with_change); + trace!(target: LOG_TARGET, "-- amt+fee = {:?} {}", amount, fee_with_change); if utxos_total_value > amount + fee_with_change { requires_change_output = true; break; @@ -1620,10 +1648,10 @@ where let selection = self .select_utxos( amount_per_split * MicroTari(number_of_splits as u64), + UtxoSelectionCriteria::largest_first(), fee_per_gram, number_of_splits, self.default_metadata_size() * number_of_splits, - UtxoSelectionCriteria::largest_first(), ) .await?; @@ -1761,6 +1789,7 @@ where &self.resources.rewind_data.clone(), None, None, + OutputSource::default(), )?; tx_builder @@ -1979,6 +2008,7 @@ where &self.resources.rewind_data.clone(), None, None, + OutputSource::default(), )?; tx_builder @@ -2036,6 +2066,7 @@ where &self.resources.rewind_data.clone(), None, None, + OutputSource::default(), )?); } @@ -2184,6 +2215,7 @@ where &self.resources.rewind_data.clone(), None, None, + OutputSource::default(), )?; tx_builder @@ -2348,6 +2380,7 @@ where &self.resources.rewind_data, None, None, + OutputSource::AtomicSwap, )?; outputs.push(change_output); @@ -2434,6 +2467,7 @@ where &self.resources.rewind_data, None, None, + OutputSource::Refund, )?; outputs.push(change_output); @@ -2506,9 +2540,12 @@ where ) .as_bytes(), ) { - Ok(spending_sk) => { - scanned_outputs.push((output.clone(), matched_key.private_key.clone(), spending_sk)) - }, + Ok(spending_sk) => scanned_outputs.push(( + output.clone(), + 
OutputSource::OneSided, + matched_key.private_key.clone(), + spending_sk, + )), Err(e) => { error!( target: LOG_TARGET, @@ -2544,9 +2581,12 @@ where match PrivateKey::from_bytes( CommsPublicKey::shared_secret(&wallet_sk, &output.sender_offset_public_key).as_bytes(), ) { - Ok(spending_sk) => { - scanned_outputs.push((output.clone(), wallet_sk.clone() + shared_secret, spending_sk)) - }, + Ok(spending_sk) => scanned_outputs.push(( + output.clone(), + OutputSource::StealthOneSided, + wallet_sk.clone() + shared_secret, + spending_sk, + )), Err(e) => { error!( target: LOG_TARGET, @@ -2567,11 +2607,11 @@ where // Imports scanned outputs into the wallet fn import_onesided_outputs( &self, - scanned_outputs: Vec<(TransactionOutput, PrivateKey, RistrettoSecretKey)>, + scanned_outputs: Vec<(TransactionOutput, OutputSource, PrivateKey, RistrettoSecretKey)>, ) -> Result, OutputManagerError> { let mut rewound_outputs = Vec::with_capacity(scanned_outputs.len()); - for (output, script_private_key, spending_sk) in scanned_outputs { + for (output, output_source, script_private_key, spending_sk) in scanned_outputs { let rewind_blinding_key = PrivateKey::from_bytes(&hash_secret_key(&spending_sk))?; let encryption_key = PrivateKey::from_bytes(&hash_secret_key(&rewind_blinding_key))?; let committed_value = @@ -2611,6 +2651,7 @@ where }, None, Some(&output.proof), + output_source, )?; let output_hex = output.commitment.to_hex(); diff --git a/base_layer/wallet/src/output_manager_service/storage/database/backend.rs b/base_layer/wallet/src/output_manager_service/storage/database/backend.rs index f1f72772d4..a97a943ff1 100644 --- a/base_layer/wallet/src/output_manager_service/storage/database/backend.rs +++ b/base_layer/wallet/src/output_manager_service/storage/database/backend.rs @@ -47,7 +47,7 @@ pub trait OutputManagerBackend: Send + Sync + Clone { mined_timestamp: u64, ) -> Result<(), OutputManagerStorageError>; - fn set_output_to_unmined(&self, hash: FixedHash) -> Result<(), 
OutputManagerStorageError>; + fn set_output_to_unmined_and_invalid(&self, hash: FixedHash) -> Result<(), OutputManagerStorageError>; fn set_outputs_to_be_revalidated(&self) -> Result<(), OutputManagerStorageError>; fn mark_output_as_spent( diff --git a/base_layer/wallet/src/output_manager_service/storage/database/mod.rs b/base_layer/wallet/src/output_manager_service/storage/database/mod.rs index 934c6e0161..b12066ea65 100644 --- a/base_layer/wallet/src/output_manager_service/storage/database/mod.rs +++ b/base_layer/wallet/src/output_manager_service/storage/database/mod.rs @@ -416,9 +416,9 @@ where T: OutputManagerBackend + 'static Ok(()) } - pub fn set_output_to_unmined(&self, hash: HashOutput) -> Result<(), OutputManagerStorageError> { + pub fn set_output_to_unmined_and_invalid(&self, hash: HashOutput) -> Result<(), OutputManagerStorageError> { let db = self.db.clone(); - db.set_output_to_unmined(hash)?; + db.set_output_to_unmined_and_invalid(hash)?; Ok(()) } diff --git a/base_layer/wallet/src/output_manager_service/storage/mod.rs b/base_layer/wallet/src/output_manager_service/storage/mod.rs index 8ecb04f5fe..8af28c8b4a 100644 --- a/base_layer/wallet/src/output_manager_service/storage/mod.rs +++ b/base_layer/wallet/src/output_manager_service/storage/mod.rs @@ -22,6 +22,9 @@ pub mod database; pub mod models; +pub mod output_source; pub mod output_status; pub mod sqlite_db; + +pub use output_source::OutputSource; pub use output_status::OutputStatus; diff --git a/base_layer/wallet/src/output_manager_service/storage/models.rs b/base_layer/wallet/src/output_manager_service/storage/models.rs index 9cf45b893f..45ad665a7d 100644 --- a/base_layer/wallet/src/output_manager_service/storage/models.rs +++ b/base_layer/wallet/src/output_manager_service/storage/models.rs @@ -32,7 +32,10 @@ use tari_core::transactions::{ }; use tari_script::{ExecutionStack, TariScript}; -use crate::output_manager_service::{error::OutputManagerStorageError, storage::OutputStatus}; +use 
crate::output_manager_service::{ + error::OutputManagerStorageError, + storage::{OutputSource, OutputStatus}, +}; #[derive(Debug, Clone)] pub struct DbUnblindedOutput { @@ -47,6 +50,7 @@ pub struct DbUnblindedOutput { pub marked_deleted_at_height: Option, pub marked_deleted_in_block: Option, pub spending_priority: SpendingPriority, + pub source: OutputSource, } impl DbUnblindedOutput { @@ -54,6 +58,7 @@ impl DbUnblindedOutput { output: UnblindedOutput, factory: &CryptoFactories, spend_priority: Option, + source: OutputSource, ) -> Result { let tx_out = output.as_transaction_output(factory)?; Ok(DbUnblindedOutput { @@ -68,6 +73,7 @@ impl DbUnblindedOutput { marked_deleted_at_height: None, marked_deleted_in_block: None, spending_priority: spend_priority.unwrap_or(SpendingPriority::Normal), + source, }) } @@ -77,6 +83,7 @@ impl DbUnblindedOutput { rewind_data: &RewindData, spending_priority: Option, proof: Option<&BulletRangeProof>, + source: OutputSource, ) -> Result { let tx_out = output.as_rewindable_transaction_output(factory, rewind_data, proof)?; Ok(DbUnblindedOutput { @@ -91,6 +98,7 @@ impl DbUnblindedOutput { marked_deleted_at_height: None, marked_deleted_in_block: None, spending_priority: spending_priority.unwrap_or(SpendingPriority::Normal), + source, }) } } diff --git a/base_layer/wallet/src/output_manager_service/storage/output_source.rs b/base_layer/wallet/src/output_manager_service/storage/output_source.rs new file mode 100644 index 0000000000..51a85d03aa --- /dev/null +++ b/base_layer/wallet/src/output_manager_service/storage/output_source.rs @@ -0,0 +1,68 @@ +// Copyright 2021. The Tari Project +// +// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the +// following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following +// disclaimer. +// +// 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the +// following disclaimer in the documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote +// products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+use core::{ + convert::TryFrom, + result::{ + Result, + Result::{Err, Ok}, + }, +}; + +use strum_macros::Display; + +use crate::output_manager_service::error::OutputManagerStorageError; + +// The source of where the output came from +#[derive(Copy, Clone, Debug, PartialEq, Display, Default)] +pub enum OutputSource { + Unknown, + Coinbase, + RecoveredButUnrecognized, + #[default] + Standard, + OneSided, + StealthOneSided, + Refund, + AtomicSwap, +} + +impl TryFrom for OutputSource { + type Error = OutputManagerStorageError; + + fn try_from(value: i32) -> Result { + Ok(match value { + 0 => OutputSource::Unknown, + 1 => OutputSource::Coinbase, + 2 => OutputSource::RecoveredButUnrecognized, + 3 => OutputSource::Standard, + 4 => OutputSource::OneSided, + 5 => OutputSource::StealthOneSided, + 6 => OutputSource::Refund, + 7 => OutputSource::AtomicSwap, + _ => { + return Err(OutputManagerStorageError::ConversionError { + reason: "Was expecting value between 0 and 7 for OutputSource".to_string(), + }) + }, + }) + } +} diff --git a/base_layer/wallet/src/output_manager_service/storage/sqlite_db/mod.rs b/base_layer/wallet/src/output_manager_service/storage/sqlite_db/mod.rs index b6a5c8dc4e..80d9e289a3 100644 --- a/base_layer/wallet/src/output_manager_service/storage/sqlite_db/mod.rs +++ b/base_layer/wallet/src/output_manager_service/storage/sqlite_db/mod.rs @@ -514,7 +514,7 @@ impl OutputManagerBackend for OutputManagerSqliteDatabase { Ok(()) } - fn set_output_to_unmined(&self, hash: FixedHash) -> Result<(), OutputManagerStorageError> { + fn set_output_to_unmined_and_invalid(&self, hash: FixedHash) -> Result<(), OutputManagerStorageError> { let start = Instant::now(); let conn = self.database_connection.get_pooled_connection()?; let acquire_lock = start.elapsed(); @@ -663,6 +663,7 @@ impl OutputManagerBackend for OutputManagerSqliteDatabase { let acquire_lock = start.elapsed(); let mut outputs_to_be_spent = Vec::with_capacity(outputs_to_send.len()); + for i in outputs_to_send 
{ let output = OutputSql::find_by_commitment_and_cancelled(i.commitment.as_bytes(), false, &conn)?; if output.status != (OutputStatus::Unspent as i32) { @@ -898,6 +899,8 @@ impl OutputManagerBackend for OutputManagerSqliteDatabase { UpdateOutput { status: Some(OutputStatus::Unspent), spent_in_tx_id: Some(None), + // We clear these so that the output will be revalidated the next time a validation is done. + mined_height: Some(None), mined_in_block: Some(None), ..Default::default() }, @@ -1240,6 +1243,7 @@ pub struct UpdateOutput { script_private_key: Option>, metadata_signature_nonce: Option>, metadata_signature_u_key: Option>, + mined_height: Option>, mined_in_block: Option>>, } @@ -1253,18 +1257,10 @@ pub struct UpdateOutputSql { script_private_key: Option>, metadata_signature_nonce: Option>, metadata_signature_u_key: Option>, + mined_height: Option>, mined_in_block: Option>>, } -#[derive(AsChangeset)] -#[table_name = "outputs"] -#[changeset_options(treat_none_as_null = "true")] -/// This struct is used to set the contained field to null -pub struct NullOutputSql { - received_in_tx_id: Option, - spent_in_tx_id: Option, -} - /// Map a Rust friendly UpdateOutput to the Sql data type form impl From for UpdateOutputSql { fn from(u: UpdateOutput) -> Self { @@ -1276,6 +1272,7 @@ impl From for UpdateOutputSql { metadata_signature_u_key: u.metadata_signature_u_key, received_in_tx_id: u.received_in_tx_id.map(|o| o.map(TxId::as_i64_wrapped)), spent_in_tx_id: u.spent_in_tx_id.map(|o| o.map(TxId::as_i64_wrapped)), + mined_height: u.mined_height, mined_in_block: u.mined_in_block, } } @@ -1480,6 +1477,7 @@ mod test { OutputStatus, UpdateOutput, }, + OutputSource, }, storage::sqlite_utilities::wallet_db_connection::WalletDbConnection, util::encryption::Encryptable, @@ -1517,7 +1515,7 @@ mod test { for _i in 0..2 { let (_, uo) = make_input(MicroTari::from(100 + OsRng.next_u64() % 1000)); - let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let uo 
= DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); let o = NewOutputSql::new(uo, OutputStatus::Unspent, None, None).unwrap(); outputs.push(o.clone()); outputs_unspent.push(o.clone()); @@ -1526,7 +1524,7 @@ mod test { for _i in 0..3 { let (_, uo) = make_input(MicroTari::from(100 + OsRng.next_u64() % 1000)); - let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); let o = NewOutputSql::new(uo, OutputStatus::Spent, None, None).unwrap(); outputs.push(o.clone()); outputs_spent.push(o.clone()); @@ -1627,7 +1625,7 @@ mod test { let factories = CryptoFactories::default(); let (_, uo) = make_input(MicroTari::from(100 + OsRng.next_u64() % 1000)); - let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); let output = NewOutputSql::new(uo, OutputStatus::Unspent, None, None).unwrap(); let mut key = [0u8; size_of::()]; @@ -1694,12 +1692,12 @@ mod test { let factories = CryptoFactories::default(); let (_, uo) = make_input(MicroTari::from(100 + OsRng.next_u64() % 1000)); - let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); let output = NewOutputSql::new(uo, OutputStatus::Unspent, None, None).unwrap(); output.commit(&conn).unwrap(); let (_, uo2) = make_input(MicroTari::from(100 + OsRng.next_u64() % 1000)); - let uo2 = DbUnblindedOutput::from_unblinded_output(uo2, &factories, None).unwrap(); + let uo2 = DbUnblindedOutput::from_unblinded_output(uo2, &factories, None, OutputSource::Unknown).unwrap(); let output2 = NewOutputSql::new(uo2, OutputStatus::Unspent, None, None).unwrap(); output2.commit(&conn).unwrap(); } diff --git 
a/base_layer/wallet/src/output_manager_service/storage/sqlite_db/new_output_sql.rs b/base_layer/wallet/src/output_manager_service/storage/sqlite_db/new_output_sql.rs index 502b13c202..d3d2561ee8 100644 --- a/base_layer/wallet/src/output_manager_service/storage/sqlite_db/new_output_sql.rs +++ b/base_layer/wallet/src/output_manager_service/storage/sqlite_db/new_output_sql.rs @@ -63,6 +63,7 @@ pub struct NewOutputSql { pub covenant: Vec, pub encrypted_value: Vec, pub minimum_value_promise: i64, + pub source: i32, } impl NewOutputSql { @@ -99,6 +100,7 @@ impl NewOutputSql { covenant: output.unblinded_output.covenant.to_bytes(), encrypted_value: output.unblinded_output.encrypted_value.to_vec(), minimum_value_promise: output.unblinded_output.minimum_value_promise.as_u64() as i64, + source: output.source as i32, }) } @@ -168,6 +170,7 @@ impl From for NewOutputSql { covenant: o.covenant, encrypted_value: o.encrypted_value, minimum_value_promise: o.minimum_value_promise, + source: 0, } } } diff --git a/base_layer/wallet/src/output_manager_service/storage/sqlite_db/output_sql.rs b/base_layer/wallet/src/output_manager_service/storage/sqlite_db/output_sql.rs index 140b8a11fd..1157e7b173 100644 --- a/base_layer/wallet/src/output_manager_service/storage/sqlite_db/output_sql.rs +++ b/base_layer/wallet/src/output_manager_service/storage/sqlite_db/output_sql.rs @@ -51,6 +51,7 @@ use crate::{ database::{OutputBackendQuery, SortDirection}, models::DbUnblindedOutput, sqlite_db::{UpdateOutput, UpdateOutputSql}, + OutputSource, OutputStatus, }, UtxoSelectionFilter, @@ -102,6 +103,7 @@ pub struct OutputSql { pub mined_timestamp: Option, pub encrypted_value: Vec, pub minimum_value_promise: i64, + pub source: i32, } impl OutputSql { @@ -206,7 +208,11 @@ impl OutputSql { outputs::output_type .eq(i32::from(OutputType::Standard.as_byte())) .or(outputs::output_type.eq(i32::from(OutputType::Coinbase.as_byte()))), - ) + ); + + if selection_criteria.excluding_onesided { + query = 
query.filter(outputs::source.ne(OutputSource::OneSided as i32)); + } }, UtxoSelectionFilter::SpecificOutputs { commitments } => { query = match commitments.len() { @@ -385,7 +391,7 @@ impl OutputSql { FROM outputs WHERE status = ? AND maturity > ? OR script_lock_height > ? \ UNION ALL \ SELECT coalesce(sum(value), 0) as amount, 'pending_incoming_balance' as category \ - FROM outputs WHERE status = ? OR status = ? OR status = ? \ + FROM outputs WHERE source != ? AND status = ? OR status = ? OR status = ? \ UNION ALL \ SELECT coalesce(sum(value), 0) as amount, 'pending_outgoing_balance' as category \ FROM outputs WHERE status = ? OR status = ? OR status = ?", @@ -397,6 +403,7 @@ impl OutputSql { .bind::(current_tip as i64) .bind::(current_tip as i64) // pending_incoming_balance + .bind::(OutputSource::Coinbase as i32) .bind::(OutputStatus::EncumberedToBeReceived as i32) .bind::(OutputStatus::ShortTermEncumberedToBeReceived as i32) .bind::(OutputStatus::UnspentMinedUnconfirmed as i32) @@ -411,7 +418,7 @@ impl OutputSql { FROM outputs WHERE status = ? \ UNION ALL \ SELECT coalesce(sum(value), 0) as amount, 'pending_incoming_balance' as category \ - FROM outputs WHERE status = ? OR status = ? OR status = ? \ + FROM outputs WHERE source != ? AND status = ? OR status = ? OR status = ? \ UNION ALL \ SELECT coalesce(sum(value), 0) as amount, 'pending_outgoing_balance' as category \ FROM outputs WHERE status = ? OR status = ? 
OR status = ?", @@ -419,6 +426,7 @@ impl OutputSql { // available_balance .bind::(OutputStatus::Unspent as i32) // pending_incoming_balance + .bind::(OutputSource::Coinbase as i32) .bind::(OutputStatus::EncumberedToBeReceived as i32) .bind::(OutputStatus::ShortTermEncumberedToBeReceived as i32) .bind::(OutputStatus::UnspentMinedUnconfirmed as i32) @@ -739,6 +747,7 @@ impl TryFrom for DbUnblindedOutput { marked_deleted_at_height: o.marked_deleted_at_height.map(|d| d as u64), marked_deleted_in_block, spending_priority, + source: o.source.try_into()?, }) } } diff --git a/base_layer/wallet/src/output_manager_service/tasks/txo_validation_task.rs b/base_layer/wallet/src/output_manager_service/tasks/txo_validation_task.rs index d66589fd6a..3f3456fe82 100644 --- a/base_layer/wallet/src/output_manager_service/tasks/txo_validation_task.rs +++ b/base_layer/wallet/src/output_manager_service/tasks/txo_validation_task.rs @@ -27,14 +27,14 @@ use std::{ use log::*; use tari_common_types::types::{BlockHash, FixedHash}; -use tari_comms::protocol::rpc::RpcError::RequestFailed; +use tari_comms::{peer_manager::Peer, protocol::rpc::RpcError::RequestFailed}; use tari_core::{ base_node::rpc::BaseNodeWalletRpcClient, blocks::BlockHeader, proto::base_node::{QueryDeletedRequest, UtxoQueryRequest}, }; -use tari_shutdown::ShutdownSignal; use tari_utilities::hex::Hex; +use tokio::sync::watch; use crate::{ connectivity_service::WalletConnectivityInterface, @@ -54,6 +54,7 @@ const LOG_TARGET: &str = "wallet::output_service::txo_validation_task"; pub struct TxoValidationTask { operation_id: u64, db: OutputManagerDatabase, + base_node_watch: watch::Receiver>, connectivity: TWalletConnectivity, event_publisher: OutputManagerEventSender, config: OutputManagerServiceConfig, @@ -74,13 +75,14 @@ where Self { operation_id, db, + base_node_watch: connectivity.get_current_base_node_watcher(), connectivity, event_publisher, config, } } - pub async fn execute(mut self, _shutdown: ShutdownSignal) -> Result { 
+ pub async fn execute(mut self) -> Result { let mut base_node_client = self .connectivity .obtain_base_node_wallet_rpc_client() @@ -88,9 +90,15 @@ where .ok_or(OutputManagerError::Shutdown) .for_protocol(self.operation_id)?; + let base_node_peer = self + .base_node_watch + .borrow() + .as_ref() + .map(|p| p.node_id.clone()) + .ok_or_else(|| OutputManagerProtocolError::new(self.operation_id, OutputManagerError::BaseNodeChanged))?; debug!( target: LOG_TARGET, - "Starting TXO validation protocol (Id: {})", self.operation_id, + "Starting TXO validation protocol with peer {} (Id: {})", base_node_peer, self.operation_id, ); let last_mined_header = self.check_for_reorgs(&mut base_node_client).await?; @@ -99,10 +107,11 @@ where self.update_spent_outputs(&mut base_node_client, last_mined_header) .await?; + self.publish_event(OutputManagerEvent::TxoValidationSuccess(self.operation_id)); debug!( target: LOG_TARGET, - "Finished TXO validation protocol (Id: {})", self.operation_id, + "Finished TXO validation protocol from base node {} (Id: {})", base_node_peer, self.operation_id, ); Ok(self.operation_id) } @@ -233,6 +242,7 @@ where batch.len(), self.operation_id ); + let (mined, unmined, tip_height) = self .query_base_node_for_outputs(batch, wallet_client) .await @@ -345,7 +355,7 @@ where self.operation_id ); self.db - .set_output_to_unmined(last_mined_output.hash) + .set_output_to_unmined_and_invalid(last_mined_output.hash) .for_protocol(self.operation_id)?; } else { debug!( diff --git a/base_layer/wallet/src/schema.rs b/base_layer/wallet/src/schema.rs index e2a7fa2528..06fd45f00e 100644 --- a/base_layer/wallet/src/schema.rs +++ b/base_layer/wallet/src/schema.rs @@ -155,6 +155,7 @@ table! 
{ mined_timestamp -> Nullable, encrypted_value -> Binary, minimum_value_promise -> BigInt, + source -> Integer, } } diff --git a/base_layer/wallet/src/storage/database.rs b/base_layer/wallet/src/storage/database.rs index 9c870e7f0e..1ff079243e 100644 --- a/base_layer/wallet/src/storage/database.rs +++ b/base_layer/wallet/src/storage/database.rs @@ -121,218 +121,143 @@ where T: WalletBackend + 'static Self { db: Arc::new(db) } } - pub async fn get_master_seed(&self) -> Result, WalletStorageError> { - let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::MasterSeed) { + pub fn get_master_seed(&self) -> Result, WalletStorageError> { + let c = match self.db.fetch(&DbKey::MasterSeed) { Ok(None) => Ok(None), Ok(Some(DbValue::MasterSeed(k))) => Ok(Some(k)), Ok(Some(other)) => unexpected_result(DbKey::MasterSeed, other), Err(e) => log_error(DbKey::MasterSeed, e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(c) } - pub async fn set_master_seed(&self, seed: CipherSeed) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.write(WriteOperation::Insert(DbKeyValuePair::MasterSeed(seed)))) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn set_master_seed(&self, seed: CipherSeed) -> Result<(), WalletStorageError> { + self.db + .write(WriteOperation::Insert(DbKeyValuePair::MasterSeed(seed)))?; Ok(()) } - pub async fn clear_master_seed(&self) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.write(WriteOperation::Remove(DbKey::MasterSeed))) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn clear_master_seed(&self) -> Result<(), WalletStorageError> { + self.db.write(WriteOperation::Remove(DbKey::MasterSeed))?; Ok(()) } - pub async fn 
get_tor_id(&self) -> Result, WalletStorageError> { - let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::TorId) { + pub fn get_tor_id(&self) -> Result, WalletStorageError> { + let c = match self.db.fetch(&DbKey::TorId) { Ok(None) => Ok(None), Ok(Some(DbValue::TorId(k))) => Ok(Some(k)), Ok(Some(other)) => unexpected_result(DbKey::TorId, other), Err(e) => log_error(DbKey::TorId, e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(c) } - pub async fn set_tor_identity(&self, id: TorIdentity) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.write(WriteOperation::Insert(DbKeyValuePair::TorId(id)))) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn set_tor_identity(&self, id: TorIdentity) -> Result<(), WalletStorageError> { + self.db.write(WriteOperation::Insert(DbKeyValuePair::TorId(id)))?; Ok(()) } - pub async fn get_node_address(&self) -> Result, WalletStorageError> { - let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::CommsAddress) { + pub fn get_node_address(&self) -> Result, WalletStorageError> { + let c = match self.db.fetch(&DbKey::CommsAddress) { Ok(None) => Ok(None), Ok(Some(DbValue::CommsAddress(k))) => Ok(Some(k)), Ok(Some(other)) => unexpected_result(DbKey::CommsAddress, other), Err(e) => log_error(DbKey::CommsAddress, e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(c) } - pub async fn set_node_address(&self, address: Multiaddr) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Insert(DbKeyValuePair::CommsAddress(address))) - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + 
pub fn set_node_address(&self, address: Multiaddr) -> Result<(), WalletStorageError> { + self.db + .write(WriteOperation::Insert(DbKeyValuePair::CommsAddress(address)))?; Ok(()) } - pub async fn get_node_features(&self) -> Result, WalletStorageError> { - let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::CommsFeatures) { + pub fn get_node_features(&self) -> Result, WalletStorageError> { + let c = match self.db.fetch(&DbKey::CommsFeatures) { Ok(None) => Ok(None), Ok(Some(DbValue::CommsFeatures(k))) => Ok(Some(k)), Ok(Some(other)) => unexpected_result(DbKey::CommsFeatures, other), Err(e) => log_error(DbKey::CommsFeatures, e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(c) } - pub async fn set_node_features(&self, features: PeerFeatures) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Insert(DbKeyValuePair::CommsFeatures(features))) - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn set_node_features(&self, features: PeerFeatures) -> Result<(), WalletStorageError> { + self.db + .write(WriteOperation::Insert(DbKeyValuePair::CommsFeatures(features)))?; Ok(()) } - pub async fn get_comms_identity_signature(&self) -> Result, WalletStorageError> { - let db = self.db.clone(); - - let sig = tokio::task::spawn_blocking(move || match db.fetch(&DbKey::CommsIdentitySignature) { + pub fn get_comms_identity_signature(&self) -> Result, WalletStorageError> { + let sig = match self.db.fetch(&DbKey::CommsIdentitySignature) { Ok(None) => Ok(None), Ok(Some(DbValue::CommsIdentitySignature(k))) => Ok(Some(*k)), Ok(Some(other)) => unexpected_result(DbKey::CommsIdentitySignature, other), Err(e) => log_error(DbKey::CommsIdentitySignature, e), - }) - .await - .map_err(|err| 
WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(sig) } - pub async fn set_comms_identity_signature(&self, sig: IdentitySignature) -> Result<(), WalletStorageError> { - let db = self.db.clone(); - - tokio::task::spawn_blocking(move || { - db.write(WriteOperation::Insert(DbKeyValuePair::CommsIdentitySignature( + pub fn set_comms_identity_signature(&self, sig: IdentitySignature) -> Result<(), WalletStorageError> { + self.db + .write(WriteOperation::Insert(DbKeyValuePair::CommsIdentitySignature( Box::new(sig), - ))) - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + )))?; Ok(()) } - pub async fn get_chain_metadata(&self) -> Result, WalletStorageError> { - let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::BaseNodeChainMetadata) { + pub fn get_chain_metadata(&self) -> Result, WalletStorageError> { + let c = match self.db.fetch(&DbKey::BaseNodeChainMetadata) { Ok(None) => Ok(None), Ok(Some(DbValue::BaseNodeChainMetadata(metadata))) => Ok(Some(metadata)), Ok(Some(other)) => unexpected_result(DbKey::BaseNodeChainMetadata, other), Err(e) => log_error(DbKey::BaseNodeChainMetadata, e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(c) } - pub async fn set_chain_metadata(&self, metadata: ChainMetadata) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Insert(DbKeyValuePair::BaseNodeChainMetadata(metadata))) - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn set_chain_metadata(&self, metadata: ChainMetadata) -> Result<(), WalletStorageError> { + self.db + .write(WriteOperation::Insert(DbKeyValuePair::BaseNodeChainMetadata(metadata)))?; Ok(()) } - pub async fn apply_encryption(&self, passphrase: SafePassword) -> Result { - let db_clone = 
self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.apply_encryption(passphrase)) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn apply_encryption(&self, passphrase: SafePassword) -> Result { + self.db.apply_encryption(passphrase) } - pub async fn remove_encryption(&self) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.remove_encryption()) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn remove_encryption(&self) -> Result<(), WalletStorageError> { + self.db.remove_encryption() } - pub async fn set_client_key_value(&self, key: String, value: String) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Insert(DbKeyValuePair::ClientKeyValue(key, value))) - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn set_client_key_value(&self, key: String, value: String) -> Result<(), WalletStorageError> { + self.db + .write(WriteOperation::Insert(DbKeyValuePair::ClientKeyValue(key, value)))?; Ok(()) } - pub async fn get_client_key_value(&self, key: String) -> Result, WalletStorageError> { - let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::ClientKey(key.clone())) { + pub fn get_client_key_value(&self, key: String) -> Result, WalletStorageError> { + let c = match self.db.fetch(&DbKey::ClientKey(key.clone())) { Ok(None) => Ok(None), Ok(Some(DbValue::ClientValue(k))) => Ok(Some(k)), Ok(Some(other)) => unexpected_result(DbKey::ClientKey(key), other), Err(e) => log_error(DbKey::ClientKey(key), e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(c) } - pub async fn 
get_client_key_from_str(&self, key: String) -> Result, WalletStorageError> + pub fn get_client_key_from_str(&self, key: String) -> Result, WalletStorageError> where V: std::str::FromStr, V::Err: ToString, { - let db = self.db.clone(); - - let value = tokio::task::spawn_blocking(move || match db.fetch(&DbKey::ClientKey(key.clone())) { + let value = match self.db.fetch(&DbKey::ClientKey(key.clone())) { Ok(None) => Ok(None), Ok(Some(DbValue::ClientValue(k))) => Ok(Some(k)), Ok(Some(other)) => unexpected_result(DbKey::ClientKey(key), other), Err(e) => log_error(DbKey::ClientKey(key), e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; match value { Some(c) => { @@ -343,89 +268,54 @@ where T: WalletBackend + 'static } } - pub async fn clear_client_value(&self, key: String) -> Result { - let db_clone = self.db.clone(); - - let c = tokio::task::spawn_blocking(move || { - match db_clone.write(WriteOperation::Remove(DbKey::ClientKey(key.clone()))) { - Ok(None) => Ok(false), - Ok(Some(DbValue::ValueCleared)) => Ok(true), - Ok(Some(other)) => unexpected_result(DbKey::ClientKey(key), other), - Err(e) => log_error(DbKey::ClientKey(key), e), - } - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn clear_client_value(&self, key: String) -> Result { + let c = match self.db.write(WriteOperation::Remove(DbKey::ClientKey(key.clone()))) { + Ok(None) => Ok(false), + Ok(Some(DbValue::ValueCleared)) => Ok(true), + Ok(Some(other)) => unexpected_result(DbKey::ClientKey(key), other), + Err(e) => log_error(DbKey::ClientKey(key), e), + }?; Ok(c) } - pub async fn get_wallet_birthday(&self) -> Result { - let db_clone = self.db.clone(); - - let result = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::WalletBirthday) { + pub fn get_wallet_birthday(&self) -> Result { + let result = match self.db.fetch(&DbKey::WalletBirthday) { Ok(None) => 
Err(WalletStorageError::ValueNotFound(DbKey::WalletBirthday)), Ok(Some(DbValue::WalletBirthday(b))) => Ok(b .parse::() .map_err(|_| WalletStorageError::ConversionError("Could not parse wallet birthday".to_string()))?), Ok(Some(other)) => unexpected_result(DbKey::WalletBirthday, other), Err(e) => log_error(DbKey::WalletBirthday, e), - }) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(result) } - pub async fn get_scanned_blocks(&self) -> Result, WalletStorageError> { - let db_clone = self.db.clone(); - - let result = tokio::task::spawn_blocking(move || db_clone.get_scanned_blocks()) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; - + pub fn get_scanned_blocks(&self) -> Result, WalletStorageError> { + let result = self.db.get_scanned_blocks()?; Ok(result) } - pub async fn save_scanned_block(&self, scanned_block: ScannedBlock) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.save_scanned_block(scanned_block)) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; - + pub fn save_scanned_block(&self, scanned_block: ScannedBlock) -> Result<(), WalletStorageError> { + self.db.save_scanned_block(scanned_block)?; Ok(()) } - pub async fn clear_scanned_blocks(&self) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.clear_scanned_blocks()) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; - + pub fn clear_scanned_blocks(&self) -> Result<(), WalletStorageError> { + self.db.clear_scanned_blocks()?; Ok(()) } - pub async fn clear_scanned_blocks_from_and_higher(&self, height: u64) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.clear_scanned_blocks_from_and_higher(height)) - .await - .map_err(|err| 
WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; - + pub fn clear_scanned_blocks_from_and_higher(&self, height: u64) -> Result<(), WalletStorageError> { + self.db.clear_scanned_blocks_from_and_higher(height)?; Ok(()) } - pub async fn clear_scanned_blocks_before_height( + pub fn clear_scanned_blocks_before_height( &self, height: u64, exclude_recovered: bool, ) -> Result<(), WalletStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.clear_scanned_blocks_before_height(height, exclude_recovered)) - .await - .map_err(|err| WalletStorageError::BlockingTaskSpawnError(err.to_string()))??; - + self.db.clear_scanned_blocks_before_height(height, exclude_recovered)?; Ok(()) } } @@ -486,7 +376,6 @@ mod test { use tari_key_manager::cipher_seed::CipherSeed; use tari_test_utils::random::string; use tempfile::tempdir; - use tokio::runtime::Runtime; use crate::storage::{ database::WalletDatabase, @@ -496,8 +385,6 @@ mod test { #[test] fn test_database_crud() { - let runtime = Runtime::new().unwrap(); - let db_name = format!("{}.sqlite3", string(8).as_str()); let db_folder = tempdir().unwrap().path().to_str().unwrap().to_string(); let connection = run_migration_and_create_sqlite_connection(&format!("{}{}", db_folder, db_name), 16).unwrap(); @@ -505,13 +392,13 @@ mod test { let db = WalletDatabase::new(WalletSqliteDatabase::new(connection, None).unwrap()); // Test wallet settings - assert!(runtime.block_on(db.get_master_seed()).unwrap().is_none()); + assert!(db.get_master_seed().unwrap().is_none()); let seed = CipherSeed::new(); - runtime.block_on(db.set_master_seed(seed.clone())).unwrap(); - let stored_seed = runtime.block_on(db.get_master_seed()).unwrap().unwrap(); + db.set_master_seed(seed.clone()).unwrap(); + let stored_seed = db.get_master_seed().unwrap().unwrap(); assert_eq!(seed, stored_seed); - runtime.block_on(db.clear_master_seed()).unwrap(); - assert!(runtime.block_on(db.get_master_seed()).unwrap().is_none()); + 
db.clear_master_seed().unwrap(); + assert!(db.get_master_seed().unwrap().is_none()); let client_key_values = vec![ ("key1".to_string(), "value1".to_string()), @@ -520,36 +407,25 @@ mod test { ]; for kv in &client_key_values { - runtime - .block_on(db.set_client_key_value(kv.0.clone(), kv.1.clone())) - .unwrap(); + db.set_client_key_value(kv.0.clone(), kv.1.clone()).unwrap(); } - assert!(runtime - .block_on(db.get_client_key_value("wrong".to_string())) - .unwrap() - .is_none()); + assert!(db.get_client_key_value("wrong".to_string()).unwrap().is_none()); - runtime - .block_on(db.set_client_key_value(client_key_values[0].0.clone(), "updated".to_string())) + db.set_client_key_value(client_key_values[0].0.clone(), "updated".to_string()) .unwrap(); assert_eq!( - runtime - .block_on(db.get_client_key_value(client_key_values[0].0.clone())) + db.get_client_key_value(client_key_values[0].0.clone()) .unwrap() .unwrap(), "updated".to_string() ); - assert!(!runtime.block_on(db.clear_client_value("wrong".to_string())).unwrap()); + assert!(!db.clear_client_value("wrong".to_string()).unwrap()); - assert!(runtime - .block_on(db.clear_client_value(client_key_values[0].0.clone())) - .unwrap()); + assert!(db.clear_client_value(client_key_values[0].0.clone()).unwrap()); - assert!(!runtime - .block_on(db.clear_client_value(client_key_values[0].0.clone())) - .unwrap()); + assert!(!db.clear_client_value(client_key_values[0].0.clone()).unwrap()); } } diff --git a/base_layer/wallet/src/storage/sqlite_utilities/mod.rs b/base_layer/wallet/src/storage/sqlite_utilities/mod.rs index 9802aa13c7..06984ced8d 100644 --- a/base_layer/wallet/src/storage/sqlite_utilities/mod.rs +++ b/base_layer/wallet/src/storage/sqlite_utilities/mod.rs @@ -71,7 +71,7 @@ pub fn run_migration_and_create_sqlite_connection>( /// This function will copy a wallet database to the provided path and then clear the Master Private Key from the /// database. 
-pub async fn partial_wallet_backup>(current_db: P, backup_path: P) -> Result<(), WalletStorageError> { +pub fn partial_wallet_backup>(current_db: P, backup_path: P) -> Result<(), WalletStorageError> { // Copy the current db to the backup path let db_path = current_db .as_ref() @@ -87,7 +87,7 @@ pub async fn partial_wallet_backup>(current_db: P, backup_path: P // open a connection and clear the Master Secret Key let connection = run_migration_and_create_sqlite_connection(backup_path, 16)?; let db = WalletDatabase::new(WalletSqliteDatabase::new(connection, None)?); - db.clear_master_seed().await?; + db.clear_master_seed()?; Ok(()) } diff --git a/base_layer/wallet/src/transaction_service/error.rs b/base_layer/wallet/src/transaction_service/error.rs index eb934ecb75..fe5107bb3e 100644 --- a/base_layer/wallet/src/transaction_service/error.rs +++ b/base_layer/wallet/src/transaction_service/error.rs @@ -94,6 +94,8 @@ pub enum TransactionServiceError { AttemptedToBroadcastCoinbaseTransaction(TxId), #[error("No Base Node public keys are provided for Base chain broadcast and monitoring")] NoBaseNodeKeysProvided, + #[error("Base node changed during {task_name}")] + BaseNodeChanged { task_name: &'static str }, #[error("Error sending data to Protocol via registered channels")] ProtocolChannelError, #[error("Transaction detected as rejected by mempool")] diff --git a/base_layer/wallet/src/transaction_service/handle.rs b/base_layer/wallet/src/transaction_service/handle.rs index 86ff23093c..64f16b2500 100644 --- a/base_layer/wallet/src/transaction_service/handle.rs +++ b/base_layer/wallet/src/transaction_service/handle.rs @@ -48,6 +48,7 @@ use tokio::sync::broadcast; use tower::Service; use crate::{ + output_manager_service::UtxoSelectionCriteria, transaction_service::{ error::TransactionServiceError, storage::models::{ @@ -76,12 +77,14 @@ pub enum TransactionServiceRequest { SendTransaction { dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: 
UtxoSelectionCriteria, output_features: Box, fee_per_gram: MicroTari, message: String, }, BurnTari { amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, message: String, }, @@ -94,6 +97,7 @@ pub enum TransactionServiceRequest { SendOneSidedTransaction { dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: Box, fee_per_gram: MicroTari, message: String, @@ -101,11 +105,12 @@ pub enum TransactionServiceRequest { SendOneSidedToStealthAddressTransaction { dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: Box, fee_per_gram: MicroTari, message: String, }, - SendShaAtomicSwapTransaction(CommsPublicKey, MicroTari, MicroTari, String), + SendShaAtomicSwapTransaction(CommsPublicKey, MicroTari, UtxoSelectionCriteria, MicroTari, String), CancelTransaction(TxId), ImportUtxoWithStatus { amount: MicroTari, @@ -185,7 +190,7 @@ impl fmt::Display for TransactionServiceRequest { amount, message )), - Self::SendShaAtomicSwapTransaction(k, v, _, msg) => { + Self::SendShaAtomicSwapTransaction(k, _, v, _, msg) => { f.write_str(&format!("SendShaAtomicSwapTransaction (to {}, {}, {})", k, v, msg)) }, Self::CancelTransaction(t) => f.write_str(&format!("CancelTransaction ({})", t)), @@ -440,6 +445,7 @@ impl TransactionServiceHandle { &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, message: String, @@ -449,6 +455,7 @@ impl TransactionServiceHandle { .call(TransactionServiceRequest::SendTransaction { dest_pubkey, amount, + selection_criteria, output_features: Box::new(output_features), fee_per_gram, message, @@ -486,6 +493,7 @@ impl TransactionServiceHandle { &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, message: 
String, @@ -495,6 +503,7 @@ impl TransactionServiceHandle { .call(TransactionServiceRequest::SendOneSidedTransaction { dest_pubkey, amount, + selection_criteria, output_features: Box::new(output_features), fee_per_gram, message, @@ -510,6 +519,7 @@ impl TransactionServiceHandle { pub async fn burn_tari( &mut self, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, message: String, ) -> Result { @@ -517,6 +527,7 @@ impl TransactionServiceHandle { .handle .call(TransactionServiceRequest::BurnTari { amount, + selection_criteria, fee_per_gram, message, }) @@ -531,6 +542,7 @@ impl TransactionServiceHandle { &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, message: String, @@ -540,6 +552,7 @@ impl TransactionServiceHandle { .call(TransactionServiceRequest::SendOneSidedToStealthAddressTransaction { dest_pubkey, amount, + selection_criteria, output_features: Box::new(output_features), fee_per_gram, message, @@ -844,6 +857,7 @@ impl TransactionServiceHandle { &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, message: String, ) -> Result<(TxId, PublicKey, TransactionOutput), TransactionServiceError> { @@ -852,6 +866,7 @@ impl TransactionServiceHandle { .call(TransactionServiceRequest::SendShaAtomicSwapTransaction( dest_pubkey, amount, + selection_criteria, fee_per_gram, message, )) diff --git a/base_layer/wallet/src/transaction_service/protocols/transaction_broadcast_protocol.rs b/base_layer/wallet/src/transaction_service/protocols/transaction_broadcast_protocol.rs index 5ea7cb7338..a3c9509728 100644 --- a/base_layer/wallet/src/transaction_service/protocols/transaction_broadcast_protocol.rs +++ b/base_layer/wallet/src/transaction_service/protocols/transaction_broadcast_protocol.rs @@ -99,7 +99,7 @@ where .await .ok_or_else(|| 
TransactionServiceProtocolError::new(self.tx_id, TransactionServiceError::Shutdown))?; - let completed_tx = match self.resources.db.get_completed_transaction(self.tx_id).await { + let completed_tx = match self.resources.db.get_completed_transaction(self.tx_id) { Ok(tx) => tx, Err(e) => { error!( @@ -275,7 +275,6 @@ where self.resources .db .broadcast_completed_transaction(self.tx_id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.tx_id, TransactionServiceError::from(e)))?; let _size = self .resources @@ -430,7 +429,7 @@ where "Failed to Cancel outputs for TxId: {} after failed sending attempt with error {:?}", self.tx_id, e ); } - if let Err(e) = self.resources.db.reject_completed_transaction(self.tx_id, reason).await { + if let Err(e) = self.resources.db.reject_completed_transaction(self.tx_id, reason) { warn!( target: LOG_TARGET, "Failed to Cancel TxId: {} after failed sending attempt with error {:?}", self.tx_id, e diff --git a/base_layer/wallet/src/transaction_service/protocols/transaction_receive_protocol.rs b/base_layer/wallet/src/transaction_service/protocols/transaction_receive_protocol.rs index 43acb079cd..4eb3559efe 100644 --- a/base_layer/wallet/src/transaction_service/protocols/transaction_receive_protocol.rs +++ b/base_layer/wallet/src/transaction_service/protocols/transaction_receive_protocol.rs @@ -131,7 +131,6 @@ where .resources .db .transaction_exists(data.tx_id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))? 
{ trace!( @@ -167,7 +166,6 @@ where self.resources .db .add_pending_inbound_transaction(inbound_transaction.tx_id, inbound_transaction.clone()) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; let send_result = send_transaction_reply( @@ -182,7 +180,6 @@ where self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; if send_result { @@ -237,7 +234,7 @@ where .ok_or_else(|| TransactionServiceProtocolError::new(self.id, TransactionServiceError::InvalidStateError))? .fuse(); - let inbound_tx = match self.resources.db.get_pending_inbound_transaction(self.id).await { + let inbound_tx = match self.resources.db.get_pending_inbound_transaction(self.id) { Ok(tx) => tx, Err(_e) => { debug!( @@ -295,7 +292,6 @@ where self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; } @@ -339,7 +335,6 @@ where Ok(_) => self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?, Err(e) => warn!( target: LOG_TARGET, @@ -456,8 +451,7 @@ where self.resources .db - .complete_inbound_transaction(self.id, completed_transaction.clone()) - .await + .complete_inbound_transaction(self.id, completed_transaction) .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; info!( @@ -486,17 +480,13 @@ where "Cancelling Transaction Receive Protocol (TxId: {}) due to timeout after no counterparty response", self.id ); - self.resources - .db - .cancel_pending_transaction(self.id) - .await - .map_err(|e| { - warn!( - target: LOG_TARGET, - "Pending Transaction does not exist and could not be cancelled: {:?}", e - ); - TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)) - })?; + 
self.resources.db.cancel_pending_transaction(self.id).map_err(|e| { + warn!( + target: LOG_TARGET, + "Pending Transaction does not exist and could not be cancelled: {:?}", e + ); + TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)) + })?; self.resources .output_manager_service diff --git a/base_layer/wallet/src/transaction_service/protocols/transaction_send_protocol.rs b/base_layer/wallet/src/transaction_service/protocols/transaction_send_protocol.rs index 6076272dbf..7d498b139e 100644 --- a/base_layer/wallet/src/transaction_service/protocols/transaction_send_protocol.rs +++ b/base_layer/wallet/src/transaction_service/protocols/transaction_send_protocol.rs @@ -29,7 +29,7 @@ use tari_common_types::{ transaction::{TransactionDirection, TransactionStatus, TxId}, types::HashOutput, }; -use tari_comms::{peer_manager::NodeId, types::CommsPublicKey}; +use tari_comms::types::CommsPublicKey; use tari_comms_dht::{ domain_message::OutboundDomainMessage, outbound::{OutboundEncryption, SendMessageResponse}, @@ -317,7 +317,6 @@ where .resources .db .transaction_exists(tx_id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))? 
{ let fee = sender_protocol @@ -337,14 +336,12 @@ where self.resources .db .add_pending_outbound_transaction(outbound_tx.tx_id, outbound_tx) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; } if transaction_status == TransactionStatus::Pending { self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; } @@ -394,7 +391,6 @@ where .resources .db .get_pending_outbound_transaction(tx_id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; if !outbound_tx.sender_protocol.is_collecting_single_signature() { @@ -452,7 +448,6 @@ where self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, e.into()))? } }, @@ -499,7 +494,6 @@ where self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new( self.id, TransactionServiceError::from(e)) )?; @@ -521,7 +515,6 @@ where self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new( self.id, TransactionServiceError::from(e)) )? 
@@ -594,7 +587,6 @@ where self.resources .db .complete_outbound_transaction(tx_id, completed_transaction.clone()) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; info!( target: LOG_TARGET, @@ -615,7 +607,6 @@ where self.resources .db .increment_send_count(tx_id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; let _size = self @@ -688,6 +679,7 @@ where .send_direct( self.dest_pubkey.clone(), OutboundDomainMessage::new(&TariMessageType::SenderPartialTransaction, proto_message.clone()), + "transaction send".to_string(), ) .await { @@ -828,7 +820,7 @@ where .resources .outbound_message_service .closest_broadcast( - NodeId::from_public_key(&self.dest_pubkey), + self.dest_pubkey.clone(), OutboundEncryption::encrypt_for(self.dest_pubkey.clone()), vec![], OutboundDomainMessage::new(&TariMessageType::SenderPartialTransaction, proto_message), @@ -905,20 +897,15 @@ where self.resources .db .increment_send_count(self.id) - .await .map_err(|e| TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)))?; - self.resources - .db - .cancel_pending_transaction(self.id) - .await - .map_err(|e| { - warn!( - target: LOG_TARGET, - "Pending Transaction does not exist and could not be cancelled: {:?}", e - ); - TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)) - })?; + self.resources.db.cancel_pending_transaction(self.id).map_err(|e| { + warn!( + target: LOG_TARGET, + "Pending Transaction does not exist and could not be cancelled: {:?}", e + ); + TransactionServiceProtocolError::new(self.id, TransactionServiceError::from(e)) + })?; self.resources .output_manager_service diff --git a/base_layer/wallet/src/transaction_service/protocols/transaction_validation_protocol.rs b/base_layer/wallet/src/transaction_service/protocols/transaction_validation_protocol.rs index e6f4f50265..e6bbf2a64b 100644 --- 
a/base_layer/wallet/src/transaction_service/protocols/transaction_validation_protocol.rs +++ b/base_layer/wallet/src/transaction_service/protocols/transaction_validation_protocol.rs @@ -29,7 +29,7 @@ use std::{ use log::*; use tari_common_types::{ transaction::{TransactionStatus, TxId}, - types::BlockHash, + types::{BlockHash, Signature}, }; use tari_comms::protocol::rpc::{RpcError::RequestFailed, RpcStatusCode::NotFound}; use tari_core::{ @@ -51,6 +51,7 @@ use crate::{ handle::{TransactionEvent, TransactionEventSender}, storage::{ database::{TransactionBackend, TransactionDatabase}, + models::TxCancellationReason, sqlite_db::UnconfirmedTransactionInfo, }, }, @@ -67,9 +68,6 @@ pub struct TransactionValidationProtocol TransactionValidationProtocol @@ -112,7 +110,6 @@ where let unconfirmed_transactions = self .db .fetch_unconfirmed_transactions_info() - .await .for_protocol(self.operation_id) .unwrap(); @@ -216,7 +213,7 @@ where self.operation_id ); let op_id = self.operation_id; - while let Some(last_mined_transaction) = self.db.fetch_last_mined_transaction().await.for_protocol(op_id)? { + while let Some(last_mined_transaction) = self.db.fetch_last_mined_transaction().for_protocol(op_id)? { let mined_height = last_mined_transaction .mined_height .ok_or_else(|| { @@ -414,7 +411,6 @@ where num_confirmations >= self.config.num_confirmations_required, status.is_faux(), ) - .await .for_protocol(self.operation_id)?; if num_confirmations >= self.config.num_confirmations_required { @@ -461,6 +457,23 @@ where mined_timestamp: u64, num_confirmations: u64, ) -> Result<(), TransactionServiceProtocolError> { + // This updates the OMS first before we update the TMS. If we update the TMS first and operation fail inside of + // the OMS, we have two databases that are out of sync, as the TMS would have been updated and OMS will be stuck + // forever as pending_incoming. 
+ self.output_manager_handle + .set_coinbase_abandoned(tx_id, true) + .await + .map_err(|e| { + warn!( + target: LOG_TARGET, + "Could not mark coinbase output for TxId: {} as abandoned: {} (Operation ID: {})", + tx_id, + e, + self.operation_id + ); + e + }) + .for_protocol(self.operation_id)?; self.db .set_transaction_mined_height( tx_id, @@ -471,23 +484,12 @@ where num_confirmations >= self.config.num_confirmations_required, false, ) - .await .for_protocol(self.operation_id)?; self.db .abandon_coinbase_transaction(tx_id) - .await .for_protocol(self.operation_id)?; - if let Err(e) = self.output_manager_handle.set_coinbase_abandoned(tx_id, true).await { - warn!( - target: LOG_TARGET, - "Could not mark coinbase output for TxId: {} as abandoned: {} (Operation ID: {})", - tx_id, - e, - self.operation_id - ); - }; self.publish_event(TransactionEvent::TransactionCancelled( tx_id, TxCancellationReason::AbandonedCoinbase, @@ -500,11 +502,6 @@ where tx_id: TxId, status: &TransactionStatus, ) -> Result<(), TransactionServiceProtocolError> { - self.db - .set_transaction_as_unmined(tx_id) - .await - .for_protocol(self.operation_id)?; - if *status == TransactionStatus::Coinbase { if let Err(e) = self.output_manager_handle.set_coinbase_abandoned(tx_id, false).await { warn!( @@ -517,6 +514,10 @@ where }; } + self.db + .set_transaction_as_unmined(tx_id) + .for_protocol(self.operation_id)?; + self.publish_event(TransactionEvent::TransactionBroadcast(tx_id)); Ok(()) } diff --git a/base_layer/wallet/src/transaction_service/service.rs b/base_layer/wallet/src/transaction_service/service.rs index 8d9ea4e299..f4a999e65e 100644 --- a/base_layer/wallet/src/transaction_service/service.rs +++ b/base_layer/wallet/src/transaction_service/service.rs @@ -379,7 +379,7 @@ where trace!(target: LOG_TARGET, "Handling Transaction Message, Trace: {}", msg.dht_header.message_tag); let result = self.accept_transaction(origin_public_key, inner_msg, - msg.dht_header.message_tag.as_value(), &mut 
receive_transaction_protocol_handles).await; + msg.dht_header.message_tag.as_value(), &mut receive_transaction_protocol_handles); match result { Err(TransactionServiceError::RepeatedMessageError) => { @@ -506,7 +506,7 @@ where Ok(join_result_inner) => self.complete_send_transaction_protocol( join_result_inner, &mut transaction_broadcast_protocol_handles - ).await, + ), Err(e) => error!(target: LOG_TARGET, "Error resolving Send Transaction Protocol: {:?}", e), }; } @@ -516,14 +516,14 @@ where Ok(join_result_inner) => self.complete_receive_transaction_protocol( join_result_inner, &mut transaction_broadcast_protocol_handles - ).await, + ), Err(e) => error!(target: LOG_TARGET, "Error resolving Send Transaction Protocol: {:?}", e), }; } Some(join_result) = transaction_broadcast_protocol_handles.next() => { trace!(target: LOG_TARGET, "Transaction Broadcast protocol has ended with result {:?}", join_result); match join_result { - Ok(join_result_inner) => self.complete_transaction_broadcast_protocol(join_result_inner).await, + Ok(join_result_inner) => self.complete_transaction_broadcast_protocol(join_result_inner), Err(e) => error!(target: LOG_TARGET, "Error resolving Broadcast Protocol: {:?}", e), }; } @@ -533,7 +533,7 @@ where Ok(join_result_inner) => self.complete_transaction_validation_protocol( join_result_inner, &mut transaction_broadcast_protocol_handles, - ).await, + ), Err(e) => error!(target: LOG_TARGET, "Error resolving Transaction Validation protocol: {:?}", e), }; } @@ -573,6 +573,7 @@ where TransactionServiceRequest::SendTransaction { dest_pubkey, amount, + selection_criteria, output_features, fee_per_gram, message, @@ -581,6 +582,7 @@ where self.send_transaction( dest_pubkey, amount, + selection_criteria, *output_features, fee_per_gram, message, @@ -595,6 +597,7 @@ where TransactionServiceRequest::SendOneSidedTransaction { dest_pubkey, amount, + selection_criteria, output_features, fee_per_gram, message, @@ -602,6 +605,7 @@ where .send_one_sided_transaction( 
dest_pubkey, amount, + selection_criteria, *output_features, fee_per_gram, message, @@ -612,6 +616,7 @@ where TransactionServiceRequest::SendOneSidedToStealthAddressTransaction { dest_pubkey, amount, + selection_criteria, output_features, fee_per_gram, message, @@ -619,6 +624,7 @@ where .send_one_sided_to_stealth_address_transaction( dest_pubkey, amount, + selection_criteria, *output_features, fee_per_gram, message, @@ -628,10 +634,17 @@ where .map(TransactionServiceResponse::TransactionSent), TransactionServiceRequest::BurnTari { amount, + selection_criteria, fee_per_gram, message, } => self - .burn_tari(amount, fee_per_gram, message, transaction_broadcast_join_handles) + .burn_tari( + amount, + selection_criteria, + fee_per_gram, + message, + transaction_broadcast_join_handles, + ) .await .map(TransactionServiceResponse::TransactionSent), TransactionServiceRequest::RegisterValidatorNode { @@ -653,58 +666,55 @@ where .await?; return Ok(()); }, - TransactionServiceRequest::SendShaAtomicSwapTransaction(dest_pubkey, amount, fee_per_gram, message) => { - Ok(TransactionServiceResponse::ShaAtomicSwapTransactionSent( - self.send_sha_atomic_swap_transaction( - dest_pubkey, - amount, - fee_per_gram, - message, - transaction_broadcast_join_handles, - ) - .await?, - )) - }, + TransactionServiceRequest::SendShaAtomicSwapTransaction( + dest_pubkey, + amount, + selection_criteria, + fee_per_gram, + message, + ) => Ok(TransactionServiceResponse::ShaAtomicSwapTransactionSent( + self.send_sha_atomic_swap_transaction( + dest_pubkey, + amount, + selection_criteria, + fee_per_gram, + message, + transaction_broadcast_join_handles, + ) + .await?, + )), TransactionServiceRequest::CancelTransaction(tx_id) => self .cancel_pending_transaction(tx_id) .await .map(|_| TransactionServiceResponse::TransactionCancelled), - TransactionServiceRequest::GetPendingInboundTransactions => { - Ok(TransactionServiceResponse::PendingInboundTransactions( - self.db.get_pending_inbound_transactions().await?, 
- )) - }, - TransactionServiceRequest::GetPendingOutboundTransactions => { - Ok(TransactionServiceResponse::PendingOutboundTransactions( - self.db.get_pending_outbound_transactions().await?, - )) - }, + TransactionServiceRequest::GetPendingInboundTransactions => Ok( + TransactionServiceResponse::PendingInboundTransactions(self.db.get_pending_inbound_transactions()?), + ), + TransactionServiceRequest::GetPendingOutboundTransactions => Ok( + TransactionServiceResponse::PendingOutboundTransactions(self.db.get_pending_outbound_transactions()?), + ), TransactionServiceRequest::GetCompletedTransactions => Ok( - TransactionServiceResponse::CompletedTransactions(self.db.get_completed_transactions().await?), + TransactionServiceResponse::CompletedTransactions(self.db.get_completed_transactions()?), ), TransactionServiceRequest::GetCancelledPendingInboundTransactions => { Ok(TransactionServiceResponse::PendingInboundTransactions( - self.db.get_cancelled_pending_inbound_transactions().await?, + self.db.get_cancelled_pending_inbound_transactions()?, )) }, TransactionServiceRequest::GetCancelledPendingOutboundTransactions => { Ok(TransactionServiceResponse::PendingOutboundTransactions( - self.db.get_cancelled_pending_outbound_transactions().await?, + self.db.get_cancelled_pending_outbound_transactions()?, )) }, - TransactionServiceRequest::GetCancelledCompletedTransactions => { - Ok(TransactionServiceResponse::CompletedTransactions( - self.db.get_cancelled_completed_transactions().await?, - )) - }, - TransactionServiceRequest::GetCompletedTransaction(tx_id) => { - Ok(TransactionServiceResponse::CompletedTransaction(Box::new( - self.db.get_completed_transaction(tx_id).await?, - ))) - }, + TransactionServiceRequest::GetCancelledCompletedTransactions => Ok( + TransactionServiceResponse::CompletedTransactions(self.db.get_cancelled_completed_transactions()?), + ), + TransactionServiceRequest::GetCompletedTransaction(tx_id) => Ok( + 
TransactionServiceResponse::CompletedTransaction(Box::new(self.db.get_completed_transaction(tx_id)?)), + ), TransactionServiceRequest::GetAnyTransaction(tx_id) => Ok(TransactionServiceResponse::AnyTransaction( - Box::new(self.db.get_any_transaction(tx_id).await?), + Box::new(self.db.get_any_transaction(tx_id)?), )), TransactionServiceRequest::ImportUtxoWithStatus { amount, @@ -726,11 +736,9 @@ where current_height, mined_timestamp, ) - .await .map(TransactionServiceResponse::UtxoImported), TransactionServiceRequest::SubmitTransactionToSelf(tx_id, tx, fee, amount, message) => self .submit_transaction_to_self(transaction_broadcast_join_handles, tx_id, tx, fee, amount, message) - .await .map(|_| TransactionServiceResponse::TransactionSubmitted), TransactionServiceRequest::GenerateCoinbaseTransaction(reward, fees, block_height) => self .generate_coinbase_transaction(reward, fees, block_height) @@ -747,13 +755,11 @@ where TransactionServiceRequest::ApplyEncryption(cipher) => self .db .apply_encryption(*cipher) - .await .map(|_| TransactionServiceResponse::EncryptionApplied) .map_err(TransactionServiceError::TransactionStorageError), TransactionServiceRequest::RemoveEncryption => self .db .remove_encryption() - .await .map(|_| TransactionServiceResponse::EncryptionRemoved) .map_err(TransactionServiceError::TransactionStorageError), TransactionServiceRequest::RestartTransactionProtocols => self @@ -761,11 +767,9 @@ where send_transaction_join_handles, receive_transaction_join_handles, ) - .await .map(|_| TransactionServiceResponse::ProtocolsRestarted), TransactionServiceRequest::RestartBroadcastProtocols => self .restart_broadcast_protocols(transaction_broadcast_join_handles) - .await .map(|_| TransactionServiceResponse::ProtocolsRestarted), TransactionServiceRequest::GetNumConfirmationsRequired => Ok( TransactionServiceResponse::NumConfirmationsRequired(self.resources.config.num_confirmations_required), @@ -875,7 +879,7 @@ where if let 
OutputManagerEvent::TxoValidationSuccess(_) = (*event).clone() { let db = self.db.clone(); let output_manager_handle = self.output_manager_service.clone(); - let metadata = match self.wallet_db.get_chain_metadata().await { + let metadata = match self.wallet_db.get_chain_metadata() { Ok(data) => data, Err(_) => None, }; @@ -902,6 +906,7 @@ where &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, message: String, @@ -928,8 +933,7 @@ where .create_pay_to_self_transaction( tx_id, amount, - // TODO: allow customization of selected inputs and outputs - UtxoSelectionCriteria::default(), + selection_criteria, output_features, fee_per_gram, None, @@ -959,8 +963,7 @@ where None, None, ), - ) - .await?; + )?; let _result = reply_channel .send(Ok(TransactionServiceResponse::TransactionSent(tx_id))) @@ -1010,6 +1013,7 @@ where &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, message: String, transaction_broadcast_join_handles: &mut FuturesUnordered< @@ -1045,7 +1049,7 @@ where .prepare_transaction_to_send( tx_id, amount, - UtxoSelectionCriteria::default(), + selection_criteria, OutputFeatures::default(), fee_per_gram, TransactionMetadata::default(), @@ -1181,8 +1185,7 @@ where None, None, ), - ) - .await?; + )?; Ok(Box::new((tx_id, pre_image, output))) } @@ -1191,6 +1194,7 @@ where &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, message: String, @@ -1207,7 +1211,7 @@ where .prepare_transaction_to_send( tx_id, amount, - UtxoSelectionCriteria::default(), + selection_criteria, output_features, fee_per_gram, TransactionMetadata::default(), @@ -1237,7 +1241,7 @@ where .get_recipient_sender_offset_private_key(0) .map_err(|e| TransactionServiceProtocolError::new(tx_id, e.into()))?; let 
spend_key = PrivateKey::from_bytes( - CommsPublicKey::shared_secret(&sender_offset_private_key.clone(), &dest_pubkey.clone()).as_bytes(), + CommsPublicKey::shared_secret(&sender_offset_private_key, &dest_pubkey.clone()).as_bytes(), ) .map_err(|e| TransactionServiceProtocolError::new(tx_id, e.into()))?; @@ -1245,8 +1249,8 @@ where let rewind_blinding_key = PrivateKey::from_bytes(&hash_secret_key(&spend_key))?; let encryption_key = PrivateKey::from_bytes(&hash_secret_key(&rewind_blinding_key))?; let rewind_data = RewindData { - rewind_blinding_key: rewind_blinding_key.clone(), - encryption_key: encryption_key.clone(), + rewind_blinding_key, + encryption_key, }; let rtp = ReceiverTransactionProtocol::new_with_rewindable_output( @@ -1311,8 +1315,7 @@ where None, None, ), - ) - .await?; + )?; Ok(tx_id) } @@ -1326,6 +1329,7 @@ where &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, message: String, @@ -1342,6 +1346,7 @@ where self.send_one_sided_or_stealth( dest_pubkey.clone(), amount, + selection_criteria, output_features, fee_per_gram, message, @@ -1358,6 +1363,7 @@ where pub async fn burn_tari( &mut self, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, fee_per_gram: MicroTari, message: String, transaction_broadcast_join_handles: &mut FuturesUnordered< @@ -1373,7 +1379,7 @@ where .prepare_transaction_to_send( tx_id, amount, - UtxoSelectionCriteria::default(), + selection_criteria, output_features, fee_per_gram, tx_meta, @@ -1457,8 +1463,7 @@ where None, None, ), - ) - .await?; + )?; Ok(tx_id) } @@ -1504,6 +1509,7 @@ where &mut self, dest_pubkey: CommsPublicKey, amount: MicroTari, + selection_criteria: UtxoSelectionCriteria, output_features: OutputFeatures, fee_per_gram: MicroTari, message: String, @@ -1530,6 +1536,7 @@ where self.send_one_sided_or_stealth( dest_pubkey, amount, + selection_criteria, output_features, fee_per_gram, message, @@ 
-1549,7 +1556,7 @@ where recipient_reply: proto::RecipientSignedMessage, ) -> Result<(), TransactionServiceError> { // Check if a wallet recovery is in progress, if it is we will ignore this request - self.check_recovery_status().await?; + self.check_recovery_status()?; let recipient_reply: RecipientSignedMessage = recipient_reply .try_into() @@ -1558,8 +1565,8 @@ where let tx_id = recipient_reply.tx_id; // First we check if this Reply is for a cancelled Pending Outbound Tx or a Completed Tx - let cancelled_outbound_tx = self.db.get_cancelled_pending_outbound_transaction(tx_id).await; - let completed_tx = self.db.get_completed_transaction_cancelled_or_not(tx_id).await; + let cancelled_outbound_tx = self.db.get_cancelled_pending_outbound_transaction(tx_id); + let completed_tx = self.db.get_completed_transaction_cancelled_or_not(tx_id); // This closure will check if the timestamps are beyond the cooldown period let check_cooldown = |timestamp: Option| { @@ -1599,7 +1606,7 @@ where ); tokio::spawn(send_transaction_cancelled_message( tx_id, - source_pubkey.clone(), + source_pubkey, self.resources.outbound_message_service.clone(), )); } else { @@ -1611,14 +1618,14 @@ where tokio::spawn(send_finalized_transaction_message( tx_id, ctx.transaction, - source_pubkey.clone(), + source_pubkey, self.resources.outbound_message_service.clone(), self.resources.config.direct_send_timeout, self.resources.config.transaction_routing_mechanism, )); } - if let Err(e) = self.resources.db.increment_send_count(tx_id).await { + if let Err(e) = self.resources.db.increment_send_count(tx_id) { warn!( target: LOG_TARGET, "Could not increment send count for completed transaction TxId {}: {:?}", tx_id, e @@ -1645,11 +1652,11 @@ where ); tokio::spawn(send_transaction_cancelled_message( tx_id, - source_pubkey.clone(), + source_pubkey, self.resources.outbound_message_service.clone(), )); - if let Err(e) = self.resources.db.increment_send_count(tx_id).await { + if let Err(e) = 
self.resources.db.increment_send_count(tx_id) { warn!( target: LOG_TARGET, "Could not increment send count for completed transaction TxId {}: {:?}", tx_id, e @@ -1673,7 +1680,7 @@ where } /// Handle the final clean up after a Send Transaction protocol completes - async fn complete_send_transaction_protocol( + fn complete_send_transaction_protocol( &mut self, join_result: Result>, transaction_broadcast_join_handles: &mut FuturesUnordered< @@ -1685,7 +1692,7 @@ where if val.transaction_status != TransactionStatus::Queued { let _sender = self.pending_transaction_reply_senders.remove(&val.tx_id); let _sender = self.send_transaction_cancellation_senders.remove(&val.tx_id); - let completed_tx = match self.db.get_completed_transaction(val.tx_id).await { + let completed_tx = match self.db.get_completed_transaction(val.tx_id) { Ok(v) => v, Err(e) => { error!( @@ -1697,7 +1704,6 @@ where }; let _result = self .broadcast_completed_transaction(completed_tx, transaction_broadcast_join_handles) - .await .map_err(|resp| { error!( target: LOG_TARGET, @@ -1734,7 +1740,7 @@ where /// Cancel a pending transaction async fn cancel_pending_transaction(&mut self, tx_id: TxId) -> Result<(), TransactionServiceError> { - self.db.cancel_pending_transaction(tx_id).await.map_err(|e| { + self.db.cancel_pending_transaction(tx_id).map_err(|e| { warn!( target: LOG_TARGET, "Pending Transaction does not exist and could not be cancelled: {:?}", e @@ -1784,7 +1790,7 @@ where // Check that an inbound transaction exists to be cancelled and that the Source Public key for that transaction // is the same as the cancellation message - if let Ok(inbound_tx) = self.db.get_pending_inbound_transaction(tx_id).await { + if let Ok(inbound_tx) = self.db.get_pending_inbound_transaction(tx_id) { if inbound_tx.source_public_key == source_pubkey { self.cancel_pending_transaction(tx_id).await?; } else { @@ -1800,13 +1806,13 @@ where } #[allow(clippy::map_entry)] - async fn restart_all_send_transaction_protocols( + fn 
restart_all_send_transaction_protocols( &mut self, join_handles: &mut FuturesUnordered< JoinHandle>>, >, ) -> Result<(), TransactionServiceError> { - let outbound_txs = self.db.get_pending_outbound_transactions().await?; + let outbound_txs = self.db.get_pending_outbound_transactions()?; for (tx_id, tx) in outbound_txs { let (sender_protocol, stage) = if tx.send_count > 0 { (None, TransactionSendProtocolStage::WaitForReply) @@ -1870,7 +1876,7 @@ where /// 'source_pubkey' - The pubkey from which the message was sent and to which the reply will be sent. /// 'sender_message' - Message from a sender containing the setup of the transaction being sent to you #[allow(clippy::too_many_lines)] - pub async fn accept_transaction( + pub fn accept_transaction( &mut self, source_pubkey: CommsPublicKey, sender_message: proto::TransactionSenderMessage, @@ -1878,7 +1884,7 @@ where join_handles: &mut FuturesUnordered>>>, ) -> Result<(), TransactionServiceError> { // Check if a wallet recovery is in progress, if it is we will ignore this request - self.check_recovery_status().await?; + self.check_recovery_status()?; let sender_message: TransactionSenderMessage = sender_message .try_into() @@ -1895,7 +1901,7 @@ where ); // Check if this transaction has already been received and cancelled. - if let Ok(Some(any_tx)) = self.db.get_any_cancelled_transaction(data.tx_id).await { + if let Ok(Some(any_tx)) = self.db.get_any_cancelled_transaction(data.tx_id) { let tx = CompletedTransaction::from(any_tx); if tx.source_public_key != source_pubkey { @@ -1916,7 +1922,7 @@ where } // Check if this transaction has already been received. 
- if let Ok(inbound_tx) = self.db.get_pending_inbound_transaction(data.clone().tx_id).await { + if let Ok(inbound_tx) = self.db.get_pending_inbound_transaction(data.tx_id) { // Check that it is from the same person if inbound_tx.source_public_key != source_pubkey { return Err(TransactionServiceError::InvalidSourcePublicKey); @@ -1946,7 +1952,7 @@ where self.resources.config.direct_send_timeout, self.resources.config.transaction_routing_mechanism, )); - if let Err(e) = self.resources.db.increment_send_count(tx_id).await { + if let Err(e) = self.resources.db.increment_send_count(tx_id) { warn!( target: LOG_TARGET, "Could not increment send count for inbound transaction TxId {}: {:?}", tx_id, e @@ -2006,7 +2012,7 @@ where join_handles: &mut FuturesUnordered>>>, ) -> Result<(), TransactionServiceError> { // Check if a wallet recovery is in progress, if it is we will ignore this request - self.check_recovery_status().await?; + self.check_recovery_status()?; let tx_id = finalized_transaction.tx_id.into(); let transaction: Transaction = finalized_transaction @@ -2026,7 +2032,7 @@ where let sender = match self.finalized_transaction_senders.get_mut(&tx_id) { None => { // First check if perhaps we know about this inbound transaction but it was cancelled - match self.db.get_cancelled_pending_inbound_transaction(tx_id).await { + match self.db.get_cancelled_pending_inbound_transaction(tx_id) { Ok(t) => { if t.source_public_key != source_pubkey { debug!( @@ -2043,7 +2049,7 @@ where Restarting protocol", tx_id ); - self.db.uncancel_pending_transaction(tx_id).await?; + self.db.uncancel_pending_transaction(tx_id)?; self.output_manager_service .reinstate_cancelled_inbound_transaction_outputs(tx_id) .await?; @@ -2069,7 +2075,7 @@ where } /// Handle the final clean up after a Send Transaction protocol completes - async fn complete_receive_transaction_protocol( + fn complete_receive_transaction_protocol( &mut self, join_result: Result>, transaction_broadcast_join_handles: &mut 
FuturesUnordered< @@ -2081,7 +2087,7 @@ where let _public_key = self.finalized_transaction_senders.remove(&id); let _result = self.receiver_transaction_cancellation_senders.remove(&id); - let completed_tx = match self.db.get_completed_transaction(id).await { + let completed_tx = match self.db.get_completed_transaction(id) { Ok(v) => v, Err(e) => { warn!( @@ -2093,7 +2099,6 @@ where }; let _result = self .broadcast_completed_transaction(completed_tx, transaction_broadcast_join_handles) - .await .map_err(|e| { warn!( target: LOG_TARGET, @@ -2134,11 +2139,11 @@ where } } - async fn restart_all_receive_transaction_protocols( + fn restart_all_receive_transaction_protocols( &mut self, join_handles: &mut FuturesUnordered>>>, ) -> Result<(), TransactionServiceError> { - let inbound_txs = self.db.get_pending_inbound_transaction_sender_info().await?; + let inbound_txs = self.db.get_pending_inbound_transaction_sender_info()?; for txn in inbound_txs { self.restart_receive_transaction_protocol(txn.tx_id, txn.source_public_key, join_handles); } @@ -2179,7 +2184,7 @@ where } } - async fn restart_transaction_negotiation_protocols( + fn restart_transaction_negotiation_protocols( &mut self, send_transaction_join_handles: &mut FuturesUnordered< JoinHandle>>, @@ -2190,7 +2195,6 @@ where ) -> Result<(), TransactionServiceError> { trace!(target: LOG_TARGET, "Restarting transaction negotiation protocols"); self.restart_all_send_transaction_protocols(send_transaction_join_handles) - .await .map_err(|resp| { error!( target: LOG_TARGET, @@ -2200,7 +2204,6 @@ where })?; self.restart_all_receive_transaction_protocols(receive_transaction_join_handles) - .await .map_err(|resp| { error!( target: LOG_TARGET, @@ -2218,7 +2221,7 @@ where JoinHandle>>, >, ) -> Result { - self.resources.db.mark_all_transactions_as_unvalidated().await?; + self.resources.db.mark_all_transactions_as_unvalidated()?; self.start_transaction_validation_protocol(join_handles).await } @@ -2228,9 +2231,12 @@ where 
JoinHandle>>, >, ) -> Result { - if !self.connectivity().is_base_node_set() { - return Err(TransactionServiceError::NoBaseNodeKeysProvided); - } + let current_base_node = self + .resources + .connectivity + .get_current_base_node_id() + .ok_or(TransactionServiceError::NoBaseNodeKeysProvided)?; + trace!(target: LOG_TARGET, "Starting transaction validation protocol"); let id = OperationId::new_random(); @@ -2243,14 +2249,36 @@ where self.resources.output_manager_service.clone(), ); - let join_handle = tokio::spawn(protocol.execute()); + let mut base_node_watch = self.connectivity().get_current_base_node_watcher(); + + let join_handle = tokio::spawn(async move { + let exec_fut = protocol.execute(); + tokio::pin!(exec_fut); + loop { + tokio::select! { + result = &mut exec_fut => { + return result; + }, + _ = base_node_watch.changed() => { + if let Some(peer) = base_node_watch.borrow().as_ref() { + if peer.node_id != current_base_node { + debug!(target: LOG_TARGET, "Base node changed, exiting transaction validation protocol"); + return Err(TransactionServiceProtocolError::new(id, TransactionServiceError::BaseNodeChanged { + task_name: "transaction validation_protocol", + })); + } + } + } + } + } + }); join_handles.push(join_handle); Ok(id) } /// Handle the final clean up after a Transaction Validation protocol completes - async fn complete_transaction_validation_protocol( + fn complete_transaction_validation_protocol( &mut self, join_result: Result>, transaction_broadcast_join_handles: &mut FuturesUnordered< @@ -2266,7 +2294,6 @@ where // Restart broadcast protocols for any transactions that were found to be no longer mined. 
let _ = self .restart_broadcast_protocols(transaction_broadcast_join_handles) - .await .map_err(|e| warn!(target: LOG_TARGET, "Error restarting broadcast protocols: {}", e)); }, Err(TransactionServiceProtocolError { id, error }) => { @@ -2284,7 +2311,7 @@ where } } - async fn restart_broadcast_protocols( + fn restart_broadcast_protocols( &mut self, broadcast_join_handles: &mut FuturesUnordered>>>, ) -> Result<(), TransactionServiceError> { @@ -2294,7 +2321,6 @@ where trace!(target: LOG_TARGET, "Restarting transaction broadcast protocols"); self.broadcast_completed_and_broadcast_transactions(broadcast_join_handles) - .await .map_err(|resp| { error!( target: LOG_TARGET, @@ -2309,7 +2335,7 @@ where } /// Start to protocol to Broadcast the specified Completed Transaction to the Base Node. - async fn broadcast_completed_transaction( + fn broadcast_completed_transaction( &mut self, completed_tx: CompletedTransaction, join_handles: &mut FuturesUnordered>>>, @@ -2354,7 +2380,7 @@ where /// Broadcast all valid and not cancelled completed transactions with status 'Completed' and 'Broadcast' to the base /// node. 
- async fn broadcast_completed_and_broadcast_transactions( + fn broadcast_completed_and_broadcast_transactions( &mut self, join_handles: &mut FuturesUnordered>>>, ) -> Result<(), TransactionServiceError> { @@ -2363,17 +2389,16 @@ where "Attempting to Broadcast all valid and not cancelled Completed Transactions with status 'Completed' and \ 'Broadcast'" ); - let txn_list = self.db.get_transactions_to_be_broadcast().await?; + let txn_list = self.db.get_transactions_to_be_broadcast()?; for completed_txn in txn_list { - self.broadcast_completed_transaction(completed_txn, join_handles) - .await?; + self.broadcast_completed_transaction(completed_txn, join_handles)?; } Ok(()) } /// Handle the final clean up after a Transaction Broadcast protocol completes - async fn complete_transaction_broadcast_protocol( + fn complete_transaction_broadcast_protocol( &mut self, join_result: Result>, ) { @@ -2437,7 +2462,7 @@ where } /// Add a completed transaction to the Transaction Manager to record directly importing a spendable UTXO. 
- pub async fn add_utxo_import_transaction_with_status( + pub fn add_utxo_import_transaction_with_status( &mut self, value: MicroTari, source_public_key: CommsPublicKey, @@ -2449,19 +2474,17 @@ where mined_timestamp: Option, ) -> Result { let tx_id = if let Some(id) = tx_id { id } else { TxId::new_random() }; - self.db - .add_utxo_import_transaction_with_status( - tx_id, - value, - source_public_key, - self.node_identity.public_key().clone(), - message, - maturity, - import_status.clone(), - current_height, - mined_timestamp, - ) - .await?; + self.db.add_utxo_import_transaction_with_status( + tx_id, + value, + source_public_key, + self.node_identity.public_key().clone(), + message, + maturity, + import_status.clone(), + current_height, + mined_timestamp, + )?; let transaction_event = match import_status { ImportStatus::Imported => TransactionEvent::TransactionImported(tx_id), ImportStatus::FauxUnconfirmed => TransactionEvent::FauxTransactionUnconfirmed { @@ -2469,7 +2492,9 @@ where num_confirmations: 0, is_valid: true, }, - ImportStatus::FauxConfirmed => TransactionEvent::FauxTransactionConfirmed { tx_id, is_valid: true }, + ImportStatus::FauxConfirmed | ImportStatus::Coinbase => { + TransactionEvent::FauxTransactionConfirmed { tx_id, is_valid: true } + }, }; let _size = self.event_publisher.send(Arc::new(transaction_event)).map_err(|e| { trace!( @@ -2483,7 +2508,7 @@ where } /// Submit a completed transaction to the Transaction Manager - async fn submit_transaction( + fn submit_transaction( &mut self, transaction_broadcast_join_handles: &mut FuturesUnordered< JoinHandle>>, @@ -2492,9 +2517,7 @@ where ) -> Result<(), TransactionServiceError> { let tx_id = completed_transaction.tx_id; trace!(target: LOG_TARGET, "Submit transaction ({}) to db.", tx_id); - self.db - .insert_completed_transaction(tx_id, completed_transaction) - .await?; + self.db.insert_completed_transaction(tx_id, completed_transaction)?; trace!( target: LOG_TARGET, "Launch the transaction broadcast 
protocol for submitted transaction ({}).", @@ -2506,14 +2529,13 @@ where transaction_status: TransactionStatus::Completed, }), transaction_broadcast_join_handles, - ) - .await; + ); Ok(()) } /// Submit a completed coin split transaction to the Transaction Manager. This is different from /// `submit_transaction` in that it will expose less information about the completed transaction. - pub async fn submit_transaction_to_self( + pub fn submit_transaction_to_self( &mut self, transaction_broadcast_join_handles: &mut FuturesUnordered< JoinHandle>>, @@ -2541,8 +2563,7 @@ where None, None, ), - ) - .await?; + )?; Ok(()) } @@ -2557,8 +2578,7 @@ where // first check if we already have a coinbase tx for this height and amount let find_result = self .db - .find_coinbase_transaction_at_block_height(block_height, amount) - .await?; + .find_coinbase_transaction_at_block_height(block_height, amount)?; let completed_transaction = match find_result { Some(completed_tx) => { @@ -2579,32 +2599,24 @@ where .output_manager_service .get_coinbase_transaction(tx_id, reward, fees, block_height) .await?; - - // Cancel existing unmined coinbase transactions for this blockheight - self.db - .cancel_coinbase_transaction_at_block_height(block_height) - .await?; - - self.db - .insert_completed_transaction( + self.db.insert_completed_transaction( + tx_id, + CompletedTransaction::new( tx_id, - CompletedTransaction::new( - tx_id, - self.node_identity.public_key().clone(), - self.node_identity.public_key().clone(), - amount, - MicroTari::from(0), - tx.clone(), - TransactionStatus::Coinbase, - format!("Coinbase Transaction for Block #{}", block_height), - Utc::now().naive_utc(), - TransactionDirection::Inbound, - Some(block_height), - None, - None, - ), - ) - .await?; + self.node_identity.public_key().clone(), + self.node_identity.public_key().clone(), + amount, + MicroTari::from(0), + tx.clone(), + TransactionStatus::Coinbase, + format!("Coinbase Transaction for Block #{}", block_height), + 
Utc::now().naive_utc(), + TransactionDirection::Inbound, + Some(block_height), + None, + None, + ), + )?; let _size = self .resources @@ -2632,8 +2644,8 @@ where /// Check if a Recovery Status is currently stored in the databse, this indicates that a wallet recovery is in /// progress - async fn check_recovery_status(&self) -> Result<(), TransactionServiceError> { - let value = self.wallet_db.get_client_key_value(RECOVERY_KEY.to_owned()).await?; + fn check_recovery_status(&self) -> Result<(), TransactionServiceError> { + let value = self.wallet_db.get_client_key_value(RECOVERY_KEY.to_owned())?; match value { None => Ok(()), Some(_) => Err(TransactionServiceError::WalletRecoveryInProgress), diff --git a/base_layer/wallet/src/transaction_service/storage/database.rs b/base_layer/wallet/src/transaction_service/storage/database.rs index 6fc21c2354..f018ba3088 100644 --- a/base_layer/wallet/src/transaction_service/storage/database.rs +++ b/base_layer/wallet/src/transaction_service/storage/database.rs @@ -280,173 +280,136 @@ where T: TransactionBackend + 'static Self { db: Arc::new(db) } } - pub async fn add_pending_inbound_transaction( + pub fn add_pending_inbound_transaction( &self, tx_id: TxId, inbound_tx: InboundTransaction, ) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Insert(DbKeyValuePair::PendingInboundTransaction( + self.db + .write(WriteOperation::Insert(DbKeyValuePair::PendingInboundTransaction( tx_id, Box::new(inbound_tx), - ))) - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - + )))?; Ok(()) } - pub async fn add_pending_outbound_transaction( + pub fn add_pending_outbound_transaction( &self, tx_id: TxId, outbound_tx: OutboundTransaction, ) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || { - 
db_clone.write(WriteOperation::Insert(DbKeyValuePair::PendingOutboundTransaction( + self.db + .write(WriteOperation::Insert(DbKeyValuePair::PendingOutboundTransaction( tx_id, Box::new(outbound_tx), - ))) - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + )))?; Ok(()) } - pub async fn remove_pending_outbound_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Remove(DbKey::PendingOutboundTransaction(tx_id))) - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn remove_pending_outbound_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db + .write(WriteOperation::Remove(DbKey::PendingOutboundTransaction(tx_id)))?; Ok(()) } /// Check if a transaction with the specified TxId exists in any of the collections - pub async fn transaction_exists(&self, tx_id: TxId) -> Result { - let db_clone = self.db.clone(); - let tx_id_clone = tx_id; - tokio::task::spawn_blocking(move || db_clone.transaction_exists(tx_id_clone)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn transaction_exists(&self, tx_id: TxId) -> Result { + self.db.transaction_exists(tx_id) } - pub async fn insert_completed_transaction( + pub fn insert_completed_transaction( &self, tx_id: TxId, transaction: CompletedTransaction, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Insert(DbKeyValuePair::CompletedTransaction( + self.db + .write(WriteOperation::Insert(DbKeyValuePair::CompletedTransaction( tx_id, Box::new(transaction), ))) - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) 
} - pub async fn get_pending_outbound_transaction( + pub fn get_pending_outbound_transaction( &self, tx_id: TxId, ) -> Result { - self.get_pending_outbound_transaction_by_cancelled(tx_id, false).await + self.get_pending_outbound_transaction_by_cancelled(tx_id, false) } - pub async fn get_cancelled_pending_outbound_transaction( + pub fn get_cancelled_pending_outbound_transaction( &self, tx_id: TxId, ) -> Result { - self.get_pending_outbound_transaction_by_cancelled(tx_id, true).await + self.get_pending_outbound_transaction_by_cancelled(tx_id, true) } - pub async fn get_pending_outbound_transaction_by_cancelled( + pub fn get_pending_outbound_transaction_by_cancelled( &self, tx_id: TxId, cancelled: bool, ) -> Result { - let db_clone = self.db.clone(); let key = if cancelled { DbKey::CancelledPendingOutboundTransaction(tx_id) } else { DbKey::PendingOutboundTransaction(tx_id) }; - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&key) { + let t = match self.db.fetch(&key) { Ok(None) => Err(TransactionStorageError::ValueNotFound(key)), Ok(Some(DbValue::PendingOutboundTransaction(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(*t) } - pub async fn get_pending_inbound_transaction( - &self, - tx_id: TxId, - ) -> Result { - self.get_pending_inbound_transaction_by_cancelled(tx_id, false).await + pub fn get_pending_inbound_transaction(&self, tx_id: TxId) -> Result { + self.get_pending_inbound_transaction_by_cancelled(tx_id, false) } - pub async fn get_cancelled_pending_inbound_transaction( + pub fn get_cancelled_pending_inbound_transaction( &self, tx_id: TxId, ) -> Result { - self.get_pending_inbound_transaction_by_cancelled(tx_id, true).await + self.get_pending_inbound_transaction_by_cancelled(tx_id, true) } - pub async fn get_pending_inbound_transaction_by_cancelled( + pub fn 
get_pending_inbound_transaction_by_cancelled( &self, tx_id: TxId, cancelled: bool, ) -> Result { - let db_clone = self.db.clone(); let key = if cancelled { DbKey::CancelledPendingInboundTransaction(tx_id) } else { DbKey::PendingInboundTransaction(tx_id) }; - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&key) { + let t = match self.db.fetch(&key) { Ok(None) => Err(TransactionStorageError::ValueNotFound(key)), Ok(Some(DbValue::PendingInboundTransaction(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(*t) } - pub async fn get_completed_transaction( - &self, - tx_id: TxId, - ) -> Result { - self.get_completed_transaction_by_cancelled(tx_id, false).await + pub fn get_completed_transaction(&self, tx_id: TxId) -> Result { + self.get_completed_transaction_by_cancelled(tx_id, false) } - pub async fn get_cancelled_completed_transaction( + pub fn get_cancelled_completed_transaction( &self, tx_id: TxId, ) -> Result { - self.get_completed_transaction_by_cancelled(tx_id, true).await + self.get_completed_transaction_by_cancelled(tx_id, true) } - pub async fn get_completed_transaction_by_cancelled( + pub fn get_completed_transaction_by_cancelled( &self, tx_id: TxId, cancelled: bool, ) -> Result { - let db_clone = self.db.clone(); let key = DbKey::CompletedTransaction(tx_id); - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::CompletedTransaction(tx_id)) { + let t = match self.db.fetch(&DbKey::CompletedTransaction(tx_id)) { Ok(None) => Err(TransactionStorageError::ValueNotFound(key)), Ok(Some(DbValue::CompletedTransaction(pt))) => { if (pt.cancelled.is_some()) == cancelled { @@ -457,99 +420,81 @@ where T: TransactionBackend + 'static }, Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| 
TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(*t) } - pub async fn get_imported_transactions(&self) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - let t = tokio::task::spawn_blocking(move || db_clone.fetch_imported_transactions()) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn get_imported_transactions(&self) -> Result, TransactionStorageError> { + let t = self.db.fetch_imported_transactions()?; Ok(t) } - pub async fn get_unconfirmed_faux_transactions( - &self, - ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - let t = tokio::task::spawn_blocking(move || db_clone.fetch_unconfirmed_faux_transactions()) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + pub fn get_unconfirmed_faux_transactions(&self) -> Result, TransactionStorageError> { + let t = self.db.fetch_unconfirmed_faux_transactions()?; Ok(t) } - pub async fn get_confirmed_faux_transactions_from_height( + pub fn get_confirmed_faux_transactions_from_height( &self, height: u64, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - let t = tokio::task::spawn_blocking(move || db_clone.fetch_confirmed_faux_transactions_from_height(height)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + let t = self.db.fetch_confirmed_faux_transactions_from_height(height)?; Ok(t) } - pub async fn fetch_last_mined_transaction(&self) -> Result, TransactionStorageError> { + pub fn fetch_last_mined_transaction(&self) -> Result, TransactionStorageError> { self.db.fetch_last_mined_transaction() } /// Light weight method to return completed but unconfirmed transactions that were not imported - pub async fn fetch_unconfirmed_transactions_info( + pub fn fetch_unconfirmed_transactions_info( &self, ) -> Result, TransactionStorageError> { self.db.fetch_unconfirmed_transactions_info() } 
/// This method returns all completed transactions that must be broadcast - pub async fn get_transactions_to_be_broadcast(&self) -> Result, TransactionStorageError> { + pub fn get_transactions_to_be_broadcast(&self) -> Result, TransactionStorageError> { self.db.get_transactions_to_be_broadcast() } - pub async fn get_completed_transaction_cancelled_or_not( + pub fn get_completed_transaction_cancelled_or_not( &self, tx_id: TxId, ) -> Result { - let db_clone = self.db.clone(); let key = DbKey::CompletedTransaction(tx_id); - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::CompletedTransaction(tx_id)) { + let t = match self.db.fetch(&DbKey::CompletedTransaction(tx_id)) { Ok(None) => Err(TransactionStorageError::ValueNotFound(key)), Ok(Some(DbValue::CompletedTransaction(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(*t) } - pub async fn get_pending_inbound_transactions( + pub fn get_pending_inbound_transactions( &self, ) -> Result, TransactionStorageError> { - self.get_pending_inbound_transactions_by_cancelled(false).await + self.get_pending_inbound_transactions_by_cancelled(false) } - pub async fn get_cancelled_pending_inbound_transactions( + pub fn get_cancelled_pending_inbound_transactions( &self, ) -> Result, TransactionStorageError> { - self.get_pending_inbound_transactions_by_cancelled(true).await + self.get_pending_inbound_transactions_by_cancelled(true) } - async fn get_pending_inbound_transactions_by_cancelled( + fn get_pending_inbound_transactions_by_cancelled( &self, cancelled: bool, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - let key = if cancelled { DbKey::CancelledPendingInboundTransactions } else { DbKey::PendingInboundTransactions }; - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&key) { + let t = match self.db.fetch(&key) { 
Ok(None) => log_error( key, TransactionStorageError::UnexpectedResult( @@ -559,37 +504,33 @@ where T: TransactionBackend + 'static Ok(Some(DbValue::PendingInboundTransactions(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(t) } - pub async fn get_pending_outbound_transactions( + pub fn get_pending_outbound_transactions( &self, ) -> Result, TransactionStorageError> { - self.get_pending_outbound_transactions_by_cancelled(false).await + self.get_pending_outbound_transactions_by_cancelled(false) } - pub async fn get_cancelled_pending_outbound_transactions( + pub fn get_cancelled_pending_outbound_transactions( &self, ) -> Result, TransactionStorageError> { - self.get_pending_outbound_transactions_by_cancelled(true).await + self.get_pending_outbound_transactions_by_cancelled(true) } - async fn get_pending_outbound_transactions_by_cancelled( + fn get_pending_outbound_transactions_by_cancelled( &self, cancelled: bool, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - let key = if cancelled { DbKey::CancelledPendingOutboundTransactions } else { DbKey::PendingOutboundTransactions }; - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&key) { + let t = match self.db.fetch(&key) { Ok(None) => log_error( key, TransactionStorageError::UnexpectedResult( @@ -599,75 +540,58 @@ where T: TransactionBackend + 'static Ok(Some(DbValue::PendingOutboundTransactions(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(t) } - pub async fn get_pending_transaction_counterparty_pub_key_by_tx_id( + pub fn get_pending_transaction_counterparty_pub_key_by_tx_id( &mut self, tx_id: TxId, ) -> Result { - let db_clone = self.db.clone(); - let pub_key = - 
tokio::task::spawn_blocking(move || db_clone.get_pending_transaction_counterparty_pub_key_by_tx_id(tx_id)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + let pub_key = self.db.get_pending_transaction_counterparty_pub_key_by_tx_id(tx_id)?; Ok(pub_key) } - pub async fn get_completed_transactions( - &self, - ) -> Result, TransactionStorageError> { - self.get_completed_transactions_by_cancelled(false).await + pub fn get_completed_transactions(&self) -> Result, TransactionStorageError> { + self.get_completed_transactions_by_cancelled(false) } - pub async fn get_cancelled_completed_transactions( + pub fn get_cancelled_completed_transactions( &self, ) -> Result, TransactionStorageError> { - self.get_completed_transactions_by_cancelled(true).await + self.get_completed_transactions_by_cancelled(true) } - pub async fn get_any_transaction(&self, tx_id: TxId) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); + pub fn get_any_transaction(&self, tx_id: TxId) -> Result, TransactionStorageError> { let key = DbKey::AnyTransaction(tx_id); - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&key) { + let t = match self.db.fetch(&key) { Ok(None) => Ok(None), Ok(Some(DbValue::WalletTransaction(pt))) => Ok(Some(*pt)), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(t) } - pub async fn get_any_cancelled_transaction( + pub fn get_any_cancelled_transaction( &self, tx_id: TxId, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - - let tx = tokio::task::spawn_blocking(move || db_clone.fetch_any_cancelled_transaction(tx_id)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + let tx = self.db.fetch_any_cancelled_transaction(tx_id)?; Ok(tx) } - async fn get_completed_transactions_by_cancelled( + fn 
get_completed_transactions_by_cancelled( &self, cancelled: bool, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - let key = if cancelled { DbKey::CancelledCompletedTransactions } else { DbKey::CompletedTransactions }; - let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&key) { + let t = match self.db.fetch(&key) { Ok(None) => log_error( key, TransactionStorageError::UnexpectedResult("Could not retrieve completed transactions".to_string()), @@ -675,88 +599,55 @@ where T: TransactionBackend + 'static Ok(Some(DbValue::CompletedTransactions(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(t) } /// This method moves a `PendingOutboundTransaction` to the `CompleteTransaction` collection. - pub async fn complete_outbound_transaction( + pub fn complete_outbound_transaction( &self, tx_id: TxId, transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.complete_outbound_transaction(tx_id, transaction)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + self.db.complete_outbound_transaction(tx_id, transaction) } /// This method moves a `PendingInboundTransaction` to the `CompleteTransaction` collection. 
- pub async fn complete_inbound_transaction( + pub fn complete_inbound_transaction( &self, tx_id: TxId, transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.complete_inbound_transaction(tx_id, transaction)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + self.db.complete_inbound_transaction(tx_id, transaction) } - pub async fn reject_completed_transaction( + pub fn reject_completed_transaction( &self, tx_id: TxId, reason: TxCancellationReason, ) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.reject_completed_transaction(tx_id, reason)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + self.db.reject_completed_transaction(tx_id, reason) } - pub async fn cancel_pending_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.set_pending_transaction_cancellation_status(tx_id, true)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn cancel_pending_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db.set_pending_transaction_cancellation_status(tx_id, true) } - pub async fn uncancel_pending_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.set_pending_transaction_cancellation_status(tx_id, false)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn uncancel_pending_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db.set_pending_transaction_cancellation_status(tx_id, false) 
} - pub async fn mark_direct_send_success(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.mark_direct_send_success(tx_id)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn mark_direct_send_success(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db.mark_direct_send_success(tx_id) } /// Indicated that the specified completed transaction has been broadcast into the mempool - pub async fn broadcast_completed_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.broadcast_completed_transaction(tx_id)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn broadcast_completed_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db.broadcast_completed_transaction(tx_id) } /// Faux transaction added to the database with imported status - pub async fn add_utxo_import_transaction_with_status( + pub fn add_utxo_import_transaction_with_status( &self, tx_id: TxId, amount: MicroTari, @@ -770,8 +661,8 @@ where T: TransactionBackend + 'static ) -> Result<(), TransactionStorageError> { let transaction = CompletedTransaction::new( tx_id, - source_public_key.clone(), - comms_public_key.clone(), + source_public_key, + comms_public_key, amount, MicroTari::from(0), Transaction::new( @@ -790,84 +681,50 @@ where T: TransactionBackend + 'static mined_timestamp, ); - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || { - db_clone.write(WriteOperation::Insert(DbKeyValuePair::CompletedTransaction( + self.db + .write(WriteOperation::Insert(DbKeyValuePair::CompletedTransaction( tx_id, Box::new(transaction), - ))) - }) - .await - .map_err(|err| 
TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + )))?; Ok(()) } - pub async fn cancel_coinbase_transaction_at_block_height( + pub fn cancel_coinbase_transaction_at_block_height( &self, block_height: u64, ) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.cancel_coinbase_transaction_at_block_height(block_height)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + self.db.cancel_coinbase_transaction_at_block_height(block_height) } - pub async fn find_coinbase_transaction_at_block_height( + pub fn find_coinbase_transaction_at_block_height( &self, block_height: u64, amount: MicroTari, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - - tokio::task::spawn_blocking(move || db_clone.find_coinbase_transaction_at_block_height(block_height, amount)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + self.db.find_coinbase_transaction_at_block_height(block_height, amount) } - pub async fn apply_encryption(&self, cipher: XChaCha20Poly1305) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.apply_encryption(cipher)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn apply_encryption(&self, cipher: XChaCha20Poly1305) -> Result<(), TransactionStorageError> { + self.db.apply_encryption(cipher) } - pub async fn remove_encryption(&self) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.remove_encryption()) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string())) - .and_then(|inner_result| inner_result) + pub fn 
remove_encryption(&self) -> Result<(), TransactionStorageError> { + self.db.remove_encryption() } - pub async fn increment_send_count(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.increment_send_count(tx_id)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn increment_send_count(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db.increment_send_count(tx_id) } - pub async fn set_transaction_as_unmined(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.set_transaction_as_unmined(tx_id)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn set_transaction_as_unmined(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db.set_transaction_as_unmined(tx_id) } - pub async fn mark_all_transactions_as_unvalidated(&self) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.mark_all_transactions_as_unvalidated()) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn mark_all_transactions_as_unvalidated(&self) -> Result<(), TransactionStorageError> { + self.db.mark_all_transactions_as_unvalidated() } - pub async fn set_transaction_mined_height( + pub fn set_transaction_mined_height( &self, tx_id: TxId, mined_height: u64, @@ -877,43 +734,29 @@ where T: TransactionBackend + 'static is_confirmed: bool, is_faux: bool, ) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || { - db_clone.update_mined_height( - tx_id, - mined_height, - mined_in_block, - mined_timestamp, - num_confirmations, - is_confirmed, - is_faux, - ) - }) - .await - .map_err(|err| 
TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + self.db.update_mined_height( + tx_id, + mined_height, + mined_in_block, + mined_timestamp, + num_confirmations, + is_confirmed, + is_faux, + ) } - pub async fn get_pending_inbound_transaction_sender_info( + pub fn get_pending_inbound_transaction_sender_info( &self, ) -> Result, TransactionStorageError> { - let db_clone = self.db.clone(); - - let t = tokio::task::spawn_blocking(move || match db_clone.get_pending_inbound_transaction_sender_info() { + let t = match self.db.get_pending_inbound_transaction_sender_info() { Ok(v) => Ok(v), Err(e) => log_error(DbKey::PendingInboundTransactions, e), - }) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; + }?; Ok(t) } - pub async fn abandon_coinbase_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { - let db_clone = self.db.clone(); - tokio::task::spawn_blocking(move || db_clone.abandon_coinbase_transaction(tx_id)) - .await - .map_err(|err| TransactionStorageError::BlockingTaskSpawnError(err.to_string()))??; - Ok(()) + pub fn abandon_coinbase_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { + self.db.abandon_coinbase_transaction(tx_id) } } diff --git a/base_layer/wallet/src/transaction_service/tasks/check_faux_transaction_status.rs b/base_layer/wallet/src/transaction_service/tasks/check_faux_transaction_status.rs index b92ca86a65..17542e8595 100644 --- a/base_layer/wallet/src/transaction_service/tasks/check_faux_transaction_status.rs +++ b/base_layer/wallet/src/transaction_service/tasks/check_faux_transaction_status.rs @@ -49,14 +49,14 @@ pub async fn check_faux_transactions( event_publisher: TransactionEventSender, tip_height: u64, ) { - let mut all_faux_transactions: Vec = match db.get_imported_transactions().await { + let mut all_faux_transactions: Vec = match db.get_imported_transactions() { Ok(txs) => txs, Err(e) => { error!(target: LOG_TARGET, 
"Problem retrieving imported transactions: {}", e); return; }, }; - let mut unconfirmed_faux = match db.get_unconfirmed_faux_transactions().await { + let mut unconfirmed_faux = match db.get_unconfirmed_faux_transactions() { Ok(txs) => txs, Err(e) => { error!( @@ -69,7 +69,7 @@ pub async fn check_faux_transactions( all_faux_transactions.append(&mut unconfirmed_faux); // Reorged faux transactions cannot be detected by excess signature, thus use last known confirmed transaction // height or current tip height with safety margin to determine if these should be returned - let last_mined_transaction = match db.fetch_last_mined_transaction().await { + let last_mined_transaction = match db.fetch_last_mined_transaction() { Ok(tx) => tx, Err(_) => None, }; @@ -79,7 +79,7 @@ pub async fn check_faux_transactions( } else { height_with_margin }; - let mut confirmed_faux = match db.get_confirmed_faux_transactions_from_height(check_height).await { + let mut confirmed_faux = match db.get_confirmed_faux_transactions_from_height(check_height) { Ok(txs) => txs, Err(e) => { error!( @@ -134,17 +134,15 @@ pub async fn check_faux_transactions( num_confirmations, is_valid, ); - let result = db - .set_transaction_mined_height( - tx.tx_id, - mined_height, - mined_in_block, - 0, - num_confirmations, - is_confirmed, - is_valid, - ) - .await; + let result = db.set_transaction_mined_height( + tx.tx_id, + mined_height, + mined_in_block, + 0, + num_confirmations, + is_confirmed, + is_valid, + ); if let Err(e) = result { error!( target: LOG_TARGET, diff --git a/base_layer/wallet/src/transaction_service/tasks/send_finalized_transaction.rs b/base_layer/wallet/src/transaction_service/tasks/send_finalized_transaction.rs index 094b482057..66ca04aab2 100644 --- a/base_layer/wallet/src/transaction_service/tasks/send_finalized_transaction.rs +++ b/base_layer/wallet/src/transaction_service/tasks/send_finalized_transaction.rs @@ -25,7 +25,7 @@ use std::{convert::TryInto, time::Duration}; use log::*; use 
tari_common_types::transaction::TxId; -use tari_comms::{peer_manager::NodeId, types::CommsPublicKey}; +use tari_comms::types::CommsPublicKey; use tari_comms_dht::{ domain_message::OutboundDomainMessage, outbound::{OutboundEncryption, OutboundMessageRequester, SendMessageResponse}, @@ -114,6 +114,7 @@ pub async fn send_finalized_transaction_message_direct( &TariMessageType::TransactionFinalized, finalized_transaction_message.clone(), ), + "transaction finalized".to_string(), ) .await { @@ -222,7 +223,7 @@ async fn send_transaction_finalized_message_store_and_forward( ) -> Result { match outbound_message_service .closest_broadcast( - NodeId::from_public_key(&destination_pubkey), + destination_pubkey.clone(), OutboundEncryption::encrypt_for(destination_pubkey.clone()), vec![], OutboundDomainMessage::new(&TariMessageType::TransactionFinalized, msg.clone()), diff --git a/base_layer/wallet/src/transaction_service/tasks/send_transaction_cancelled.rs b/base_layer/wallet/src/transaction_service/tasks/send_transaction_cancelled.rs index df08c324e9..5cac558ee4 100644 --- a/base_layer/wallet/src/transaction_service/tasks/send_transaction_cancelled.rs +++ b/base_layer/wallet/src/transaction_service/tasks/send_transaction_cancelled.rs @@ -20,7 +20,7 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
use tari_common_types::transaction::TxId; -use tari_comms::{peer_manager::NodeId, types::CommsPublicKey}; +use tari_comms::types::CommsPublicKey; use tari_comms_dht::{ domain_message::OutboundDomainMessage, outbound::{OutboundEncryption, OutboundMessageRequester}, @@ -43,12 +43,13 @@ pub async fn send_transaction_cancelled_message( .send_direct( destination_public_key.clone(), OutboundDomainMessage::new(&TariMessageType::TransactionCancelled, proto_message.clone()), + "transaction cancelled".to_string(), ) .await?; let _message_send_state = outbound_message_service .closest_broadcast( - NodeId::from_public_key(&destination_public_key), + destination_public_key.clone(), OutboundEncryption::encrypt_for(destination_public_key), vec![], OutboundDomainMessage::new(&TariMessageType::SenderPartialTransaction, proto_message), diff --git a/base_layer/wallet/src/transaction_service/tasks/send_transaction_reply.rs b/base_layer/wallet/src/transaction_service/tasks/send_transaction_reply.rs index 5ca4409e2c..9c81ba5255 100644 --- a/base_layer/wallet/src/transaction_service/tasks/send_transaction_reply.rs +++ b/base_layer/wallet/src/transaction_service/tasks/send_transaction_reply.rs @@ -24,7 +24,7 @@ use std::time::Duration; use log::*; use tari_common_types::transaction::TxId; -use tari_comms::{peer_manager::NodeId, types::CommsPublicKey}; +use tari_comms::types::CommsPublicKey; use tari_comms_dht::{ domain_message::OutboundDomainMessage, outbound::{OutboundEncryption, OutboundMessageRequester, SendMessageResponse}, @@ -95,6 +95,7 @@ pub async fn send_transaction_reply_direct( .send_direct( inbound_transaction.source_public_key.clone(), OutboundDomainMessage::new(&TariMessageType::ReceiverPartialTransactionReply, proto_message.clone()), + "wallet transaction reply".to_string(), ) .await { @@ -200,7 +201,7 @@ async fn send_transaction_reply_store_and_forward( ) -> Result { match outbound_message_service .closest_broadcast( - NodeId::from_public_key(&destination_pubkey), + 
destination_pubkey.clone(), OutboundEncryption::encrypt_for(destination_pubkey.clone()), vec![], OutboundDomainMessage::new(&TariMessageType::ReceiverPartialTransactionReply, msg), diff --git a/base_layer/wallet/src/utxo_scanner_service/error.rs b/base_layer/wallet/src/utxo_scanner_service/error.rs index d3414b6c0d..e3f2f4b485 100644 --- a/base_layer/wallet/src/utxo_scanner_service/error.rs +++ b/base_layer/wallet/src/utxo_scanner_service/error.rs @@ -61,4 +61,6 @@ pub enum UtxoScannerError { OverflowError, #[error("FixedHash size error: `{0}`")] FixedHashSizeError(#[from] FixedHashSizeError), + #[error("Connectivity has shut down")] + ConnectivityShutdown, } diff --git a/base_layer/wallet/src/utxo_scanner_service/initializer.rs b/base_layer/wallet/src/utxo_scanner_service/initializer.rs index f693ce4511..d53cfaa5a3 100644 --- a/base_layer/wallet/src/utxo_scanner_service/initializer.rs +++ b/base_layer/wallet/src/utxo_scanner_service/initializer.rs @@ -31,7 +31,7 @@ use tokio::sync::broadcast; use crate::{ base_node_service::handle::BaseNodeServiceHandle, - connectivity_service::{WalletConnectivityHandle, WalletConnectivityInterface}, + connectivity_service::WalletConnectivityHandle, output_manager_service::handle::OutputManagerHandle, storage::database::{WalletBackend, WalletDatabase}, transaction_service::handle::TransactionServiceHandle, @@ -97,14 +97,14 @@ where T: WalletBackend + 'static let wallet_connectivity = handles.expect_handle::(); let base_node_service_handle = handles.expect_handle::(); - let scanning_service = UtxoScannerService::::builder() + let scanning_service = UtxoScannerService::::builder() .with_peers(vec![]) .with_retry_limit(2) .with_mode(UtxoScannerMode::Scanning) .build_with_resources( backend, comms_connectivity, - wallet_connectivity.get_current_base_node_watcher(), + wallet_connectivity.clone(), output_manager_service, transaction_service, node_identity, diff --git a/base_layer/wallet/src/utxo_scanner_service/service.rs 
b/base_layer/wallet/src/utxo_scanner_service/service.rs index 406826d02e..29a5f6dbe9 100644 --- a/base_layer/wallet/src/utxo_scanner_service/service.rs +++ b/base_layer/wallet/src/utxo_scanner_service/service.rs @@ -36,6 +36,7 @@ use tokio::{ use crate::{ base_node_service::handle::{BaseNodeEvent, BaseNodeServiceHandle}, + connectivity_service::WalletConnectivityInterface, error::WalletError, output_manager_service::handle::OutputManagerHandle, storage::database::{WalletBackend, WalletDatabase}, @@ -55,10 +56,8 @@ pub const LOG_TARGET: &str = "wallet::utxo_scanning"; // this only samples 1 header per new block. A ticket has been added to the backlog to think about this #LOGGED pub const SCANNED_BLOCK_CACHE_SIZE: u64 = 720; -pub struct UtxoScannerService -where TBackend: WalletBackend + 'static -{ - pub(crate) resources: UtxoScannerResources, +pub struct UtxoScannerService { + pub(crate) resources: UtxoScannerResources, pub(crate) retry_limit: usize, pub(crate) peer_seeds: Vec, pub(crate) mode: UtxoScannerMode, @@ -69,14 +68,16 @@ where TBackend: WalletBackend + 'static recovery_message_watch: watch::Receiver, } -impl UtxoScannerService -where TBackend: WalletBackend + 'static +impl UtxoScannerService +where + TBackend: WalletBackend + 'static, + TWalletConnectivity: WalletConnectivityInterface, { pub fn new( peer_seeds: Vec, retry_limit: usize, mode: UtxoScannerMode, - resources: UtxoScannerResources, + resources: UtxoScannerResources, shutdown_signal: ShutdownSignal, event_sender: broadcast::Sender, base_node_service: BaseNodeServiceHandle, @@ -96,7 +97,7 @@ where TBackend: WalletBackend + 'static } } - fn create_task(&self, shutdown_signal: ShutdownSignal) -> UtxoScannerTask { + fn create_task(&self, shutdown_signal: ShutdownSignal) -> UtxoScannerTask { UtxoScannerTask { resources: self.resources.clone(), peer_seeds: self.peer_seeds.clone(), @@ -190,9 +191,10 @@ where TBackend: WalletBackend + 'static } #[derive(Clone)] -pub struct UtxoScannerResources { +pub 
struct UtxoScannerResources { pub db: WalletDatabase, pub comms_connectivity: ConnectivityRequester, + pub wallet_connectivity: TWalletConnectivity, pub current_base_node_watcher: watch::Receiver>, pub output_manager_service: OutputManagerHandle, pub transaction_service: TransactionServiceHandle, diff --git a/base_layer/wallet/src/utxo_scanner_service/utxo_scanner_task.rs b/base_layer/wallet/src/utxo_scanner_service/utxo_scanner_task.rs index 2d9ceb9c65..3366b311d2 100644 --- a/base_layer/wallet/src/utxo_scanner_service/utxo_scanner_task.rs +++ b/base_layer/wallet/src/utxo_scanner_service/utxo_scanner_task.rs @@ -32,7 +32,13 @@ use tari_common_types::{ transaction::{ImportStatus, TxId}, types::HashOutput, }; -use tari_comms::{peer_manager::NodeId, traits::OrOptional, types::CommsPublicKey, PeerConnection}; +use tari_comms::{ + peer_manager::NodeId, + protocol::rpc::RpcClientLease, + traits::OrOptional, + types::CommsPublicKey, + PeerConnection, +}; use tari_core::{ base_node::rpc::BaseNodeWalletRpcClient, blocks::BlockHeader, @@ -47,6 +53,7 @@ use tari_utilities::hex::Hex; use tokio::sync::broadcast; use crate::{ + connectivity_service::WalletConnectivityInterface, error::WalletError, storage::database::WalletBackend, transaction_service::error::{TransactionServiceError, TransactionStorageError}, @@ -61,10 +68,8 @@ use crate::{ pub const LOG_TARGET: &str = "wallet::utxo_scanning"; -pub struct UtxoScannerTask -where TBackend: WalletBackend + 'static -{ - pub(crate) resources: UtxoScannerResources, +pub struct UtxoScannerTask { + pub(crate) resources: UtxoScannerResources, pub(crate) event_sender: broadcast::Sender, pub(crate) retry_limit: usize, pub(crate) num_retries: usize, @@ -73,14 +78,16 @@ where TBackend: WalletBackend + 'static pub(crate) mode: UtxoScannerMode, pub(crate) shutdown_signal: ShutdownSignal, } -impl UtxoScannerTask -where TBackend: WalletBackend + 'static +impl UtxoScannerTask +where + TBackend: WalletBackend + 'static, + TWalletConnectivity: 
WalletConnectivityInterface, { pub async fn run(mut self) -> Result<(), UtxoScannerError> { if self.mode == UtxoScannerMode::Recovery { - self.set_recovery_mode().await?; + self.set_recovery_mode()?; } else { - let in_progress = self.check_recovery_mode().await?; + let in_progress = self.check_recovery_mode()?; if in_progress { warn!( target: LOG_TARGET, @@ -98,8 +105,7 @@ where TBackend: WalletBackend + 'static Some(peer) => match self.attempt_sync(peer.clone()).await { Ok((num_outputs_recovered, final_height, final_amount, elapsed)) => { debug!(target: LOG_TARGET, "Scanned to height #{}", final_height); - self.finalize(num_outputs_recovered, final_height, final_amount, elapsed) - .await?; + self.finalize(num_outputs_recovered, final_height, final_amount, elapsed)?; return Ok(()); }, Err(e) => { @@ -125,9 +131,8 @@ where TBackend: WalletBackend + 'static if self.num_retries >= self.retry_limit { self.publish_event(UtxoScannerEvent::ScanningFailed); return Err(UtxoScannerError::UtxoScanningError(format!( - "Failed to scan UTXO's after {} attempt(s) using all {} sync peer(s). Aborting...", + "Failed to scan UTXO's after {} attempt(s) using sync peer(s). Aborting...", self.num_retries, - self.peer_seeds.len() ))); } @@ -139,7 +144,7 @@ where TBackend: WalletBackend + 'static } } - async fn finalize( + fn finalize( &self, num_outputs_recovered: u64, final_height: u64, @@ -159,13 +164,12 @@ where TBackend: WalletBackend + 'static // Presence of scanning keys are used to determine if a wallet is busy with recovery or not. 
if self.mode == UtxoScannerMode::Recovery { - self.clear_recovery_mode().await?; + self.clear_recovery_mode()?; } Ok(()) } async fn connect_to_peer(&mut self, peer: NodeId) -> Result { - self.publish_event(UtxoScannerEvent::ConnectingToBaseNode(peer.clone())); debug!( target: LOG_TARGET, "Attempting UTXO sync with seed peer {} ({})", self.peer_index, peer, @@ -192,11 +196,19 @@ where TBackend: WalletBackend + 'static } async fn attempt_sync(&mut self, peer: NodeId) -> Result<(u64, u64, MicroTari, Duration), UtxoScannerError> { - let mut connection = self.connect_to_peer(peer.clone()).await?; + self.publish_event(UtxoScannerEvent::ConnectingToBaseNode(peer.clone())); + let selected_peer = self.resources.wallet_connectivity.get_current_base_node_id(); - let mut client = connection - .connect_rpc_using_builder(BaseNodeWalletRpcClient::builder().with_deadline(Duration::from_secs(60))) - .await?; + let mut client = if selected_peer.map(|p| p == peer).unwrap_or(false) { + // Use the wallet connectivity service so that RPC pools are correctly managed + self.resources + .wallet_connectivity + .obtain_base_node_wallet_rpc_client() + .await + .ok_or(UtxoScannerError::ConnectivityShutdown)? + } else { + self.establish_new_rpc_connection(&peer).await? 
+ }; let latency = client.get_last_request_latency(); self.publish_event(UtxoScannerEvent::ConnectedToBaseNode( @@ -243,7 +255,7 @@ where TBackend: WalletBackend + 'static } else { // The node does not know of any of our cached headers so we will start the scan anew from the // wallet birthday - self.resources.db.clear_scanned_blocks().await?; + self.resources.db.clear_scanned_blocks()?; let birthday_height_hash = self.get_birthday_header_height_hash(&mut client).await?; ScannedBlock { @@ -297,6 +309,17 @@ where TBackend: WalletBackend + 'static } } + async fn establish_new_rpc_connection( + &mut self, + peer: &NodeId, + ) -> Result, UtxoScannerError> { + let mut connection = self.connect_to_peer(peer.clone()).await?; + let client = connection + .connect_rpc_using_builder(BaseNodeWalletRpcClient::builder().with_deadline(Duration::from_secs(60))) + .await?; + Ok(RpcClientLease::new(client)) + } + async fn get_chain_tip_header( &self, client: &mut BaseNodeWalletRpcClient, @@ -314,7 +337,7 @@ where TBackend: WalletBackend + 'static current_tip_height: u64, client: &mut BaseNodeWalletRpcClient, ) -> Result, UtxoScannerError> { - let scanned_blocks = self.resources.db.get_scanned_blocks().await?; + let scanned_blocks = self.resources.db.get_scanned_blocks()?; debug!( target: LOG_TARGET, "Found {} cached previously scanned blocks", @@ -376,10 +399,7 @@ where TBackend: WalletBackend + 'static target: LOG_TARGET, "Reorg detected on base node. 
Removing scanned blocks from height {}", block.height ); - self.resources - .db - .clear_scanned_blocks_from_and_higher(block.height) - .await?; + self.resources.db.clear_scanned_blocks_from_and_higher(block.height)?; } if let Some(sb) = found_scanned_block { @@ -466,21 +486,17 @@ where TBackend: WalletBackend + 'static .import_utxos_to_transaction_service(found_outputs, current_height, mined_timestamp) .await?; let block_hash = current_header_hash.try_into()?; - self.resources - .db - .save_scanned_block(ScannedBlock { - header_hash: block_hash, - height: current_height, - num_outputs: Some(count), - amount: Some(amount), - timestamp: Utc::now().naive_utc(), - }) - .await?; + self.resources.db.save_scanned_block(ScannedBlock { + header_hash: block_hash, + height: current_height, + num_outputs: Some(count), + amount: Some(amount), + timestamp: Utc::now().naive_utc(), + })?; self.resources .db - .clear_scanned_blocks_before_height(current_height.saturating_sub(SCANNED_BLOCK_CACHE_SIZE), true) - .await?; + .clear_scanned_blocks_before_height(current_height.saturating_sub(SCANNED_BLOCK_CACHE_SIZE), true)?; if current_height % PROGRESS_REPORT_INTERVAL == 0 { debug!( @@ -525,12 +541,12 @@ where TBackend: WalletBackend + 'static .await? .into_iter() .map(|ro| { - ( - ro.output, - self.resources.recovery_message.clone(), - ImportStatus::Imported, - ro.tx_id, - ) + let status = if ro.output.features.is_coinbase() { + ImportStatus::Coinbase + } else { + ImportStatus::Imported + }; + (ro.output, self.resources.recovery_message.clone(), status, ro.tx_id) }) .collect(), ); @@ -563,15 +579,22 @@ where TBackend: WalletBackend + 'static ) -> Result<(u64, MicroTari), UtxoScannerError> { let mut num_recovered = 0u64; let mut total_amount = MicroTari::from(0); - // Because we do not know the source public key we are making it the default key of zeroes to make it clear this - // value is a placeholder. 
- let source_public_key = CommsPublicKey::default(); + let default_key = CommsPublicKey::default(); + let self_key = self.resources.node_identity.public_key().clone(); for (uo, message, import_status, tx_id) in utxos { + let source_public_key = if uo.features.is_coinbase() { + // its a coinbase, so we know we mined it and it comes from us. + &self_key + } else { + // Because we do not know the source public key we are making it the default key of zeroes to make it + // clear this value is a placeholder. + &default_key + }; match self .import_unblinded_utxo_to_transaction_service( uo.clone(), - &source_public_key, + source_public_key, message, import_status, tx_id, @@ -600,25 +623,23 @@ where TBackend: WalletBackend + 'static Ok((num_recovered, total_amount)) } - async fn set_recovery_mode(&self) -> Result<(), UtxoScannerError> { + fn set_recovery_mode(&self) -> Result<(), UtxoScannerError> { self.resources .db - .set_client_key_value(RECOVERY_KEY.to_owned(), Utc::now().to_string()) - .await?; + .set_client_key_value(RECOVERY_KEY.to_owned(), Utc::now().to_string())?; Ok(()) } - async fn check_recovery_mode(&self) -> Result { + fn check_recovery_mode(&self) -> Result { self.resources .db .get_client_key_from_str::(RECOVERY_KEY.to_owned()) - .await .map(|x| x.is_some()) .map_err(UtxoScannerError::from) // in case if `get_client_key_from_str` returns not exactly that type } - async fn clear_recovery_mode(&self) -> Result<(), UtxoScannerError> { - let _ = self.resources.db.clear_client_value(RECOVERY_KEY.to_owned()).await?; + fn clear_recovery_mode(&self) -> Result<(), UtxoScannerError> { + let _ = self.resources.db.clear_client_value(RECOVERY_KEY.to_owned())?; Ok(()) } @@ -646,7 +667,7 @@ where TBackend: WalletBackend + 'static source_public_key.clone(), message, Some(unblinded_output.features.maturity), - import_status, + import_status.clone(), Some(tx_id), Some(current_height), Some(mined_timestamp), @@ -655,12 +676,13 @@ where TBackend: WalletBackend + 'static 
info!( target: LOG_TARGET, - "UTXO (Commitment: {}) imported into wallet as 'ImportStatus::FauxUnconfirmed'", + "UTXO (Commitment: {}) imported into wallet as 'ImportStatus::{}'", unblinded_output .as_transaction_input(&self.resources.factories.commitment)? .commitment() .map_err(WalletError::TransactionError)? .to_hex(), + import_status ); Ok(tx_id) @@ -676,7 +698,7 @@ where TBackend: WalletBackend + 'static &self, client: &mut BaseNodeWalletRpcClient, ) -> Result { - let birthday = self.resources.db.get_wallet_birthday().await?; + let birthday = self.resources.db.get_wallet_birthday()?; // Calculate the unix epoch time of two days before the wallet birthday. This is to avoid any weird time zone // issues let epoch_time = u64::from(birthday.saturating_sub(2)) * 60 * 60 * 24; diff --git a/base_layer/wallet/src/utxo_scanner_service/uxto_scanner_service_builder.rs b/base_layer/wallet/src/utxo_scanner_service/uxto_scanner_service_builder.rs index 73a9dff018..ac80c30ae3 100644 --- a/base_layer/wallet/src/utxo_scanner_service/uxto_scanner_service_builder.rs +++ b/base_layer/wallet/src/utxo_scanner_service/uxto_scanner_service_builder.rs @@ -22,14 +22,14 @@ use std::sync::Arc; -use tari_comms::{connectivity::ConnectivityRequester, peer_manager::Peer, types::CommsPublicKey, NodeIdentity}; +use tari_comms::{connectivity::ConnectivityRequester, types::CommsPublicKey, NodeIdentity}; use tari_core::transactions::CryptoFactories; use tari_shutdown::ShutdownSignal; use tokio::sync::{broadcast, watch}; use crate::{ base_node_service::handle::BaseNodeServiceHandle, - connectivity_service::WalletConnectivityInterface, + connectivity_service::{WalletConnectivityHandle, WalletConnectivityInterface}, output_manager_service::handle::OutputManagerHandle, storage::{ database::{WalletBackend, WalletDatabase}, @@ -108,10 +108,11 @@ impl UtxoScannerServiceBuilder { &mut self, wallet: &WalletSqlite, shutdown_signal: ShutdownSignal, - ) -> UtxoScannerService { + ) -> UtxoScannerService { let 
resources = UtxoScannerResources { db: wallet.db.clone(), comms_connectivity: wallet.comms.connectivity(), + wallet_connectivity: wallet.wallet_connectivity.clone(), current_base_node_watcher: wallet.wallet_connectivity.get_current_base_node_watcher(), output_manager_service: wallet.output_manager_service.clone(), transaction_service: wallet.transaction_service.clone(), @@ -136,11 +137,11 @@ impl UtxoScannerServiceBuilder { ) } - pub fn build_with_resources( + pub fn build_with_resources( &mut self, db: WalletDatabase, comms_connectivity: ConnectivityRequester, - base_node_watcher: watch::Receiver>, + wallet_connectivity: TWalletConnectivity, output_manager_service: OutputManagerHandle, transaction_service: TransactionServiceHandle, node_identity: Arc, @@ -150,11 +151,12 @@ impl UtxoScannerServiceBuilder { base_node_service: BaseNodeServiceHandle, one_sided_message_watch: watch::Receiver, recovery_message_watch: watch::Receiver, - ) -> UtxoScannerService { + ) -> UtxoScannerService { let resources = UtxoScannerResources { db, comms_connectivity, - current_base_node_watcher: base_node_watcher, + current_base_node_watcher: wallet_connectivity.get_current_base_node_watcher(), + wallet_connectivity, output_manager_service, transaction_service, node_identity, diff --git a/base_layer/wallet/src/wallet.rs b/base_layer/wallet/src/wallet.rs index 80bb177614..a027e57b22 100644 --- a/base_layer/wallet/src/wallet.rs +++ b/base_layer/wallet/src/wallet.rs @@ -267,15 +267,11 @@ where // Persist the comms node address and features after it has been spawned to capture any modifications made // during comms startup. 
In the case of a Tor Transport the public address could have been generated - wallet_database - .set_node_address(comms.node_identity().public_address()) - .await?; - wallet_database - .set_node_features(comms.node_identity().features()) - .await?; + wallet_database.set_node_address(comms.node_identity().public_address())?; + wallet_database.set_node_features(comms.node_identity().features())?; let identity_sig = comms.node_identity().identity_signature_read().as_ref().cloned(); if let Some(identity_sig) = identity_sig { - wallet_database.set_comms_identity_signature(identity_sig).await?; + wallet_database.set_comms_identity_signature(identity_sig)?; } Ok(Self { @@ -678,7 +674,7 @@ where /// in which case this will fail. pub async fn apply_encryption(&mut self, passphrase: SafePassword) -> Result<(), WalletError> { debug!(target: LOG_TARGET, "Applying wallet encryption."); - let cipher = self.db.apply_encryption(passphrase).await?; + let cipher = self.db.apply_encryption(passphrase)?; self.output_manager_service.apply_encryption(cipher.clone()).await?; self.transaction_service.apply_encryption(cipher.clone()).await?; self.key_manager_service.apply_encryption(cipher).await?; @@ -691,18 +687,18 @@ where self.output_manager_service.remove_encryption().await?; self.transaction_service.remove_encryption().await?; self.key_manager_service.remove_encryption().await?; - self.db.remove_encryption().await?; + self.db.remove_encryption()?; Ok(()) } /// Utility function to find out if there is data in the database indicating that there is an incomplete recovery /// process in progress - pub async fn is_recovery_in_progress(&self) -> Result { - Ok(self.db.get_client_key_value(RECOVERY_KEY.to_string()).await?.is_some()) + pub fn is_recovery_in_progress(&self) -> Result { + Ok(self.db.get_client_key_value(RECOVERY_KEY.to_string())?.is_some()) } - pub async fn get_seed_words(&self, language: &MnemonicLanguage) -> Result, WalletError> { - let master_seed = 
self.db.get_master_seed().await?.ok_or_else(|| { + pub fn get_seed_words(&self, language: &MnemonicLanguage) -> Result, WalletError> { + let master_seed = self.db.get_master_seed()?.ok_or_else(|| { WalletError::WalletStorageError(WalletStorageError::RecoverySeedError( "Cipher Seed not found".to_string(), )) @@ -713,24 +709,24 @@ where } } -pub async fn read_or_create_master_seed( +pub fn read_or_create_master_seed( recovery_seed: Option, db: &WalletDatabase, ) -> Result { - let db_master_seed = db.get_master_seed().await?; + let db_master_seed = db.get_master_seed()?; let master_seed = match recovery_seed { None => match db_master_seed { None => { let seed = CipherSeed::new(); - db.set_master_seed(seed.clone()).await?; + db.set_master_seed(seed.clone())?; seed }, Some(seed) => seed, }, Some(recovery_seed) => { if db_master_seed.is_none() { - db.set_master_seed(recovery_seed.clone()).await?; + db.set_master_seed(recovery_seed.clone())?; recovery_seed } else { error!( diff --git a/base_layer/wallet/tests/contacts_service.rs b/base_layer/wallet/tests/contacts_service.rs index 62520c7471..e31f5e5cd4 100644 --- a/base_layer/wallet/tests/contacts_service.rs +++ b/base_layer/wallet/tests/contacts_service.rs @@ -83,7 +83,6 @@ pub fn setup_contacts_service( peer_database_name: random::string(8), max_concurrent_inbound_tasks: 10, max_concurrent_outbound_tasks: 10, - outbound_buffer_size: 100, dht: DhtConfig { discovery_request_timeout: Duration::from_secs(1), auto_join: true, diff --git a/base_layer/wallet/tests/output_manager_service_tests/service.rs b/base_layer/wallet/tests/output_manager_service_tests/service.rs index efab1a6d0f..bb2d5e5e30 100644 --- a/base_layer/wallet/tests/output_manager_service_tests/service.rs +++ b/base_layer/wallet/tests/output_manager_service_tests/service.rs @@ -146,17 +146,20 @@ async fn setup_output_manager_service() * amount; - let err = oms - .fee_estimate(spendable_amount, fee_per_gram, 1, 2) + let fee = oms + 
.fee_estimate(spendable_amount, UtxoSelectionCriteria::default(), fee_per_gram, 1, 2) .await - .unwrap_err(); - assert!(matches!(err, OutputManagerError::FundsPending)); + .unwrap(); + assert_eq!(fee, MicroTari::from(250)); - // test not enough funds let broke_amount = spendable_amount + MicroTari::from(2000); - let err = oms.fee_estimate(broke_amount, fee_per_gram, 1, 2).await.unwrap_err(); - assert!(matches!(err, OutputManagerError::NotEnoughFunds)); + let fee = oms + .fee_estimate(broke_amount, UtxoSelectionCriteria::default(), fee_per_gram, 1, 2) + .await + .unwrap(); + assert_eq!(fee, MicroTari::from(250)); // coin split uses the "Largest" selection strategy let (_, tx, utxos_total_value) = oms.create_coin_split(vec![], amount, 5, fee_per_gram).await.unwrap(); let expected_fee = fee_calc.calculate(fee_per_gram, 1, 1, 6, default_metadata_byte_size() * 6); assert_eq!(tx.body.get_total_fee(), expected_fee); - assert_eq!(utxos_total_value, MicroTari::from(10_000)); + assert_eq!(utxos_total_value, MicroTari::from(5_000)); // test that largest utxo was encumbered let utxos = oms.get_unspent_outputs().await.unwrap(); @@ -507,7 +529,6 @@ async fn test_utxo_selection_no_chain_metadata() { #[tokio::test] #[allow(clippy::identity_op)] #[allow(clippy::too_many_lines)] -#[ignore] async fn test_utxo_selection_with_chain_metadata() { let factories = CryptoFactories::default(); let (connection, _tempdir) = get_temp_sqlite_database_connection(); @@ -561,22 +582,23 @@ async fn test_utxo_selection_with_chain_metadata() { assert_eq!(utxos.len(), 10); // test fee estimates - let fee = oms.fee_estimate(amount, fee_per_gram, 1, 2).await.unwrap(); + let fee = oms + .fee_estimate(amount, UtxoSelectionCriteria::default(), fee_per_gram, 1, 2) + .await + .unwrap(); let expected_fee = fee_calc.calculate(fee_per_gram, 1, 2, 3, default_metadata_byte_size() * 3); assert_eq!(fee, expected_fee); - // test fee estimates are maturity aware - // even though we have utxos for the fee, they can't 
be spent because they are not mature yet let spendable_amount = (1..=6).sum::() * amount; - let err = oms - .fee_estimate(spendable_amount, fee_per_gram, 1, 2) + let fee = oms + .fee_estimate(spendable_amount, UtxoSelectionCriteria::default(), fee_per_gram, 1, 2) .await - .unwrap_err(); - assert!(matches!(err, OutputManagerError::NotEnoughFunds)); + .unwrap(); + assert_eq!(fee, MicroTari::from(250)); // test coin split is maturity aware let (_, tx, utxos_total_value) = oms.create_coin_split(vec![], amount, 5, fee_per_gram).await.unwrap(); - assert_eq!(utxos_total_value, MicroTari::from(6_000)); + assert_eq!(utxos_total_value, MicroTari::from(5_000)); let expected_fee = fee_calc.calculate(fee_per_gram, 1, 1, 6, default_metadata_byte_size() * 6); assert_eq!(tx.body.get_total_fee(), expected_fee); @@ -1113,7 +1135,6 @@ async fn sending_transaction_persisted_while_offline() { } #[tokio::test] -#[ignore] async fn coin_split_with_change() { let factories = CryptoFactories::default(); let (connection, _tempdir) = get_temp_sqlite_database_connection(); @@ -1203,10 +1224,8 @@ async fn handle_coinbase_with_bulletproofs_rewinding() { let reward1 = MicroTari::from(1000); let fees1 = MicroTari::from(500); - let value1 = reward1 + fees1; let reward2 = MicroTari::from(2000); let fees2 = MicroTari::from(500); - let value2 = reward2 + fees2; let reward3 = MicroTari::from(3000); let fees3 = MicroTari::from(500); let value3 = reward3 + fees3; @@ -1217,13 +1236,14 @@ async fn handle_coinbase_with_bulletproofs_rewinding() { .await .unwrap(); assert_eq!(oms.output_manager_handle.get_unspent_outputs().await.unwrap().len(), 0); + // pending coinbases should not show up as pending incoming assert_eq!( oms.output_manager_handle .get_balance() .await .unwrap() .pending_incoming_balance, - value1 + MicroTari::from(0) ); let _tx2 = oms @@ -1238,7 +1258,7 @@ async fn handle_coinbase_with_bulletproofs_rewinding() { .await .unwrap() .pending_incoming_balance, - value1 + value2 + 
MicroTari::from(0) ); let tx3 = oms .output_manager_handle @@ -1252,7 +1272,7 @@ async fn handle_coinbase_with_bulletproofs_rewinding() { .await .unwrap() .pending_incoming_balance, - value1 + value2 + value3 + MicroTari::from(0) ); let output = tx3.body.outputs()[0].clone(); @@ -1278,7 +1298,6 @@ async fn test_txo_validation() { let mut oms = setup_output_manager_service(backend, ks_backend, true).await; - oms.wallet_connectivity_mock.notify_base_node_set(oms.node_id.to_peer()); // Now we add the connection let mut connection = oms .mock_rpc_service @@ -1458,8 +1477,7 @@ async fn test_txo_validation() { MicroTari::from(output1_value) - MicroTari::from(900_000) - MicroTari::from(1260) + //Output4 = output 1 -900_000 and 1260 for fees - MicroTari::from(8_000_000) + - MicroTari::from(16_000_000) + MicroTari::from(8_000_000) ); // Output 1: Spent in Block 5 - Unconfirmed @@ -1830,7 +1848,6 @@ async fn test_txo_revalidation() { let mut oms = setup_output_manager_service(backend, ks_backend, true).await; - oms.wallet_connectivity_mock.notify_base_node_set(oms.node_id.to_peer()); // Now we add the connection let mut connection = oms .mock_rpc_service diff --git a/base_layer/wallet/tests/output_manager_service_tests/storage.rs b/base_layer/wallet/tests/output_manager_service_tests/storage.rs index a1dd6958b3..9c46c6301e 100644 --- a/base_layer/wallet/tests/output_manager_service_tests/storage.rs +++ b/base_layer/wallet/tests/output_manager_service_tests/storage.rs @@ -33,6 +33,7 @@ use tari_wallet::output_manager_service::{ database::{OutputManagerBackend, OutputManagerDatabase}, models::DbUnblindedOutput, sqlite_db::OutputManagerSqliteDatabase, + OutputSource, }, }; use tokio::runtime::Runtime; @@ -54,7 +55,7 @@ pub fn test_db_backend(backend: T) { MicroTari::from(100 + OsRng.next_u64() % 1000), &factories.commitment, )); - let mut uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let mut uo = DbUnblindedOutput::from_unblinded_output(uo, 
&factories, None, OutputSource::Unknown).unwrap(); uo.unblinded_output.features.maturity = i; db.add_unspent_output(uo.clone()).unwrap(); unspent_outputs.push(uo); @@ -101,7 +102,7 @@ pub fn test_db_backend(backend: T) { MicroTari::from(100 + OsRng.next_u64() % 1000), &factories.commitment, )); - let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); db.add_unspent_output(uo.clone()).unwrap(); pending_tx.outputs_to_be_spent.push(uo); } @@ -111,7 +112,7 @@ pub fn test_db_backend(backend: T) { MicroTari::from(100 + OsRng.next_u64() % 1000), &factories.commitment, )); - let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); pending_tx.outputs_to_be_received.push(uo); } db.encumber_outputs( @@ -246,7 +247,8 @@ pub fn test_db_backend(backend: T) { MicroTari::from(100 + OsRng.next_u64() % 1000), &factories.commitment, )); - let output_to_be_received = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let output_to_be_received = + DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); db.add_output_to_be_received(TxId::from(11u64), output_to_be_received.clone(), None) .unwrap(); pending_incoming_balance += output_to_be_received.unblinded_output.value; @@ -347,7 +349,7 @@ pub async fn test_short_term_encumberance() { &factories.commitment, ) .await; - let mut uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let mut uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); uo.unblinded_output.features.maturity = i; db.add_unspent_output(uo.clone()).unwrap(); unspent_outputs.push(uo); @@ -398,7 +400,7 @@ pub async fn test_no_duplicate_outputs() { // create an output let 
(_ti, uo) = make_input(&mut OsRng, MicroTari::from(1000), &factories.commitment).await; - let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap(); + let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); // add it to the database let result = db.add_unspent_output(uo.clone()); @@ -422,3 +424,28 @@ pub async fn test_no_duplicate_outputs() { let outputs = db.fetch_mined_unspent_outputs().unwrap(); assert_eq!(outputs.len(), 1); } + +#[tokio::test] +pub async fn test_mark_as_unmined() { + let factories = CryptoFactories::default(); + let (connection, _tempdir) = get_temp_sqlite_database_connection(); + let backend = OutputManagerSqliteDatabase::new(connection, None); + let db = OutputManagerDatabase::new(backend); + + // create an output + let (_ti, uo) = make_input(&mut OsRng, MicroTari::from(1000), &factories.commitment).await; + let uo = DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap(); + + // add it to the database + db.add_unspent_output(uo.clone()).unwrap(); + db.set_received_output_mined_height(uo.hash, 1, FixedHash::zero(), 1, true, 0) + .unwrap(); + let o = db.get_last_mined_output().unwrap().unwrap(); + assert_eq!(o.hash, uo.hash); + db.set_output_to_unmined_and_invalid(uo.hash).unwrap(); + assert!(db.get_last_mined_output().unwrap().is_none()); + let o = db.get_invalid_outputs().unwrap().pop().unwrap(); + assert_eq!(o.hash, uo.hash); + assert!(o.mined_height.is_none()); + assert!(o.mined_in_block.is_none()); +} diff --git a/base_layer/wallet/tests/transaction_service_tests/service.rs b/base_layer/wallet/tests/transaction_service_tests/service.rs index 16de95db39..0227ac7053 100644 --- a/base_layer/wallet/tests/transaction_service_tests/service.rs +++ b/base_layer/wallet/tests/transaction_service_tests/service.rs @@ -182,7 +182,7 @@ async fn setup_transaction_service>( let db = 
WalletDatabase::new(WalletSqliteDatabase::new(db_connection.clone(), None).unwrap()); let metadata = ChainMetadata::new(std::i64::MAX as u64, FixedHash::zero(), 0, 0, 0, 0); - db.set_chain_metadata(metadata).await.unwrap(); + db.set_chain_metadata(metadata).unwrap(); let ts_backend = TransactionServiceSqliteDatabase::new(db_connection.clone(), None); let oms_backend = OutputManagerSqliteDatabase::new(db_connection.clone(), None); @@ -292,7 +292,7 @@ async fn setup_transaction_service_no_comms( mock_rpc_server.serve(); - let wallet_connectivity_service_mock = create_wallet_connectivity_mock(); + let mut wallet_connectivity_service_mock = create_wallet_connectivity_mock(); let mut rpc_server_connection = mock_rpc_server .create_connection(base_node_identity.to_peer(), protocol_name.into()) @@ -300,6 +300,8 @@ async fn setup_transaction_service_no_comms( wallet_connectivity_service_mock .set_base_node_wallet_rpc_client(connect_rpc_client(&mut rpc_server_connection).await); + wallet_connectivity_service_mock.set_base_node(base_node_identity.to_peer()); + wallet_connectivity_service_mock.base_node_changed().await; let constants = ConsensusConstantsBuilder::new(Network::Weatherwax).build(); @@ -478,7 +480,7 @@ async fn manage_single_transaction() { let (bob_connection, _tempdir) = make_wallet_database_connection(Some(database_path.clone())); let shutdown = Shutdown::new(); - let (mut alice_ts, mut alice_oms, _alice_comms, mut alice_connectivity) = setup_transaction_service( + let (mut alice_ts, mut alice_oms, _alice_comms, _alice_connectivity) = setup_transaction_service( alice_node_identity.clone(), vec![], factories.clone(), @@ -489,13 +491,11 @@ async fn manage_single_transaction() { ) .await; - alice_connectivity.set_base_node(base_node_identity.to_peer()); - let mut alice_event_stream = alice_ts.get_event_stream(); sleep(Duration::from_secs(2)).await; - let (mut bob_ts, mut bob_oms, bob_comms, mut bob_connectivity) = setup_transaction_service( + let (mut bob_ts, mut 
bob_oms, bob_comms, _bob_connectivity) = setup_transaction_service( bob_node_identity.clone(), vec![alice_node_identity.clone()], factories.clone(), @@ -505,7 +505,6 @@ async fn manage_single_transaction() { shutdown.to_signal(), ) .await; - bob_connectivity.set_base_node(base_node_identity.to_peer()); let mut bob_event_stream = bob_ts.get_event_stream(); @@ -522,6 +521,7 @@ async fn manage_single_transaction() { .send_transaction( bob_node_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(4), "".to_string() @@ -535,6 +535,7 @@ async fn manage_single_transaction() { .send_transaction( bob_node_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(4), message, @@ -618,7 +619,7 @@ async fn single_transaction_to_self() { let (db_connection, _tempdir) = make_wallet_database_connection(Some(database_path.clone())); let shutdown = Shutdown::new(); - let (mut alice_ts, mut alice_oms, _alice_comms, mut alice_connectivity) = setup_transaction_service( + let (mut alice_ts, mut alice_oms, _alice_comms, _alice_connectivity) = setup_transaction_service( alice_node_identity.clone(), vec![], factories.clone(), @@ -629,8 +630,6 @@ async fn single_transaction_to_self() { ) .await; - alice_connectivity.set_base_node(base_node_identity.to_peer()); - let initial_wallet_value = 25000.into(); let (_utxo, uo1) = make_input(&mut OsRng, initial_wallet_value, &factories.commitment).await; @@ -641,6 +640,7 @@ async fn single_transaction_to_self() { .send_transaction( alice_node_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 20.into(), message.clone(), @@ -697,7 +697,7 @@ async fn send_one_sided_transaction_to_other() { let (db_connection, _tempdir) = make_wallet_database_connection(Some(database_path.clone())); let shutdown = Shutdown::new(); - let (mut alice_ts, mut alice_oms, _alice_comms, mut 
alice_connectivity) = setup_transaction_service( + let (mut alice_ts, mut alice_oms, _alice_comms, _alice_connectivity) = setup_transaction_service( alice_node_identity, vec![], factories.clone(), @@ -710,8 +710,6 @@ async fn send_one_sided_transaction_to_other() { let mut alice_event_stream = alice_ts.get_event_stream(); - alice_connectivity.set_base_node(base_node_identity.to_peer()); - let initial_wallet_value = 25000.into(); let (_utxo, uo1) = make_input(&mut OsRng, initial_wallet_value, &factories.commitment).await; let mut alice_oms_clone = alice_oms.clone(); @@ -724,6 +722,7 @@ async fn send_one_sided_transaction_to_other() { .send_one_sided_transaction( bob_node_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 20.into(), message.clone(), @@ -803,7 +802,7 @@ async fn recover_one_sided_transaction() { let (bob_connection, _tempdir) = make_wallet_database_connection(Some(database_path2.clone())); let shutdown = Shutdown::new(); - let (mut alice_ts, alice_oms, _alice_comms, mut alice_connectivity) = setup_transaction_service( + let (mut alice_ts, alice_oms, _alice_comms, _alice_connectivity) = setup_transaction_service( alice_node_identity, vec![], factories.clone(), @@ -835,8 +834,6 @@ async fn recover_one_sided_transaction() { let mut cloned_bob_oms = bob_oms.clone(); cloned_bob_oms.add_known_script(known_script).await.unwrap(); - alice_connectivity.set_base_node(base_node_identity.to_peer()); - let initial_wallet_value = 25000.into(); let (_utxo, uo1) = make_input(&mut OsRng, initial_wallet_value, &factories.commitment).await; let mut alice_oms_clone = alice_oms; @@ -849,6 +846,7 @@ async fn recover_one_sided_transaction() { .send_one_sided_transaction( bob_node_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 20.into(), message.clone(), @@ -908,7 +906,7 @@ async fn test_htlc_send_and_claim() { let bob_connection = 
run_migration_and_create_sqlite_connection(&bob_db_path, 16).unwrap(); let shutdown = Shutdown::new(); - let (mut alice_ts, mut alice_oms, _alice_comms, mut alice_connectivity) = setup_transaction_service( + let (mut alice_ts, mut alice_oms, _alice_comms, _alice_connectivity) = setup_transaction_service( alice_node_identity, vec![], factories.clone(), @@ -928,8 +926,6 @@ async fn test_htlc_send_and_claim() { let mut alice_event_stream = alice_ts.get_event_stream(); - alice_connectivity.set_base_node(base_node_identity.to_peer()); - let initial_wallet_value = 25000.into(); let (_utxo, uo1) = make_input(&mut OsRng, initial_wallet_value, &factories.commitment).await; let mut alice_oms_clone = alice_oms.clone(); @@ -940,7 +936,13 @@ async fn test_htlc_send_and_claim() { let mut alice_ts_clone = alice_ts.clone(); let bob_pubkey = bob_ts_interface.base_node_identity.public_key().clone(); let (tx_id, pre_image, output) = alice_ts_clone - .send_sha_atomic_swap_transaction(bob_pubkey, value, 20.into(), message.clone()) + .send_sha_atomic_swap_transaction( + bob_pubkey, + value, + UtxoSelectionCriteria::default(), + 20.into(), + message.clone(), + ) .await .expect("Alice sending HTLC transaction"); @@ -1024,7 +1026,7 @@ async fn send_one_sided_transaction_to_self() { let (alice_connection, _tempdir) = make_wallet_database_connection(Some(database_path.clone())); let shutdown = Shutdown::new(); - let (alice_ts, alice_oms, _alice_comms, mut alice_connectivity) = setup_transaction_service( + let (alice_ts, alice_oms, _alice_comms, _alice_connectivity) = setup_transaction_service( alice_node_identity.clone(), vec![], factories.clone(), @@ -1035,8 +1037,6 @@ async fn send_one_sided_transaction_to_self() { ) .await; - alice_connectivity.set_base_node(base_node_identity.to_peer()); - let initial_wallet_value = 2500.into(); let (_utxo, uo1) = make_input(&mut OsRng, initial_wallet_value, &factories.commitment).await; let mut alice_oms_clone = alice_oms; @@ -1049,6 +1049,7 @@ async fn 
send_one_sided_transaction_to_self() { .send_one_sided_transaction( alice_node_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 20.into(), message.clone(), @@ -1066,7 +1067,6 @@ async fn send_one_sided_transaction_to_self() { #[tokio::test] async fn manage_multiple_transactions() { - env_logger::init(); let factories = CryptoFactories::default(); // Alice's parameters let alice_node_identity = Arc::new(NodeIdentity::random( @@ -1187,6 +1187,7 @@ async fn manage_multiple_transactions() { .send_transaction( bob_node_identity.public_key().clone(), value_a_to_b_1, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(20), "a to b 1".to_string(), @@ -1199,6 +1200,7 @@ async fn manage_multiple_transactions() { .send_transaction( carol_node_identity.public_key().clone(), value_a_to_c_1, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(20), "a to c 1".to_string(), @@ -1213,6 +1215,7 @@ async fn manage_multiple_transactions() { .send_transaction( alice_node_identity.public_key().clone(), value_b_to_a_1, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(20), "b to a 1".to_string(), @@ -1223,6 +1226,7 @@ async fn manage_multiple_transactions() { .send_transaction( bob_node_identity.public_key().clone(), value_a_to_b_2, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(20), "a to b 2".to_string(), @@ -1349,6 +1353,7 @@ async fn test_accepting_unknown_tx_id_and_malformed_reply() { .send_transaction( bob_node_identity.public_key().clone(), MicroTari::from(5000), + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(20), "".to_string(), @@ -1732,6 +1737,7 @@ async fn discovery_async_return_test() { .send_transaction( bob_node_identity.public_key().clone(), value_a_to_c_1, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(20), "Discovery Tx!".to_string(), @@ -1768,6 
+1774,7 @@ async fn discovery_async_return_test() { .send_transaction( carol_node_identity.public_key().clone(), value_a_to_c_1, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(20), "Discovery Tx2!".to_string(), @@ -2037,6 +2044,7 @@ async fn test_transaction_cancellation() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message".to_string(), @@ -2358,6 +2366,7 @@ async fn test_direct_vs_saf_send_of_tx_reply_and_finalize() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message".to_string(), @@ -2542,6 +2551,7 @@ async fn test_direct_vs_saf_send_of_tx_reply_and_finalize() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message".to_string(), @@ -2674,6 +2684,7 @@ async fn test_tx_direct_send_behaviour() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message1".to_string(), @@ -2717,6 +2728,7 @@ async fn test_tx_direct_send_behaviour() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message2".to_string(), @@ -2765,6 +2777,7 @@ async fn test_tx_direct_send_behaviour() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message3".to_string(), @@ -2813,6 +2826,7 @@ async fn test_tx_direct_send_behaviour() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message4".to_string(), @@ -3100,7 +3114,7 @@ async 
fn test_coinbase_transactions_rejection_same_hash_but_accept_on_same_heigh .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + MicroTari::from(0) ); // Create a second coinbase txn at the first block height, with same output hash as the previous one @@ -3128,7 +3142,7 @@ async fn test_coinbase_transactions_rejection_same_hash_but_accept_on_same_heigh .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + MicroTari::from(0) ); // Create another coinbase Txn at the same block height; the previous one should not be cancelled @@ -3142,7 +3156,7 @@ async fn test_coinbase_transactions_rejection_same_hash_but_accept_on_same_heigh .get_completed_transactions() .await .unwrap(); // Only one valid coinbase txn remains - assert_eq!(transactions.len(), 1); + assert_eq!(transactions.len(), 2); let _tx_id2 = transactions .values() .find(|tx| tx.amount == fees2 + reward2) @@ -3155,7 +3169,7 @@ async fn test_coinbase_transactions_rejection_same_hash_but_accept_on_same_heigh .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + fees2 + reward2 + MicroTari::from(0) ); // Create a third coinbase Txn at the second block height; all the three should be valid @@ -3169,7 +3183,7 @@ async fn test_coinbase_transactions_rejection_same_hash_but_accept_on_same_heigh .get_completed_transactions() .await .unwrap(); - assert_eq!(transactions.len(), 2); + assert_eq!(transactions.len(), 3); let _tx_id3 = transactions .values() .find(|tx| tx.amount == fees3 + reward3) @@ -3182,10 +3196,10 @@ async fn test_coinbase_transactions_rejection_same_hash_but_accept_on_same_heigh .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + fees2 + reward2 + fees3 + reward3 + MicroTari::from(0) ); - assert!(!transactions.values().any(|tx| tx.amount == fees1 + reward1)); + assert!(transactions.values().any(|tx| tx.amount == fees1 + reward1)); assert!(transactions.values().any(|tx| tx.amount == fees2 + reward2)); assert!(transactions.values().any(|tx| tx.amount == fees3 + 
reward3)); } @@ -3237,7 +3251,7 @@ async fn test_coinbase_generation_and_monitoring() { .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + MicroTari::from(0) ); // Create another coinbase Txn at the next block height @@ -3264,7 +3278,7 @@ async fn test_coinbase_generation_and_monitoring() { .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + fees2 + reward2 + MicroTari::from(0) ); // Take out a second one at the second height which should not overwrite the initial one @@ -3278,7 +3292,7 @@ async fn test_coinbase_generation_and_monitoring() { .get_completed_transactions() .await .unwrap(); - assert_eq!(transactions.len(), 2); + assert_eq!(transactions.len(), 3); let tx_id2b = transactions .values() .find(|tx| tx.amount == fees2b + reward2) @@ -3291,17 +3305,12 @@ async fn test_coinbase_generation_and_monitoring() { .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + fees2b + reward2 + fees2 + reward2 + MicroTari::from(0) ); assert!(transactions.values().any(|tx| tx.amount == fees1 + reward1)); assert!(transactions.values().any(|tx| tx.amount == fees2b + reward2)); - // Start the transaction protocols - alice_ts_interface - .wallet_connectivity_service_mock - .set_base_node(alice_ts_interface.base_node_identity.to_peer()); - let delay = sleep(Duration::from_secs(30)); tokio::pin!(delay); let mut count = 0usize; @@ -3328,8 +3337,8 @@ async fn test_coinbase_generation_and_monitoring() { ); // Now we will test validation where tx1 will not be found but tx2b will be unconfirmed, then confirmed. 
- let tx1 = db.get_completed_transaction(tx_id1).await.unwrap(); - let tx2b = db.get_completed_transaction(tx_id2b).await.unwrap(); + let tx1 = db.get_completed_transaction(tx_id1).unwrap(); + let tx2b = db.get_completed_transaction(tx_id2b).unwrap(); let mut block_headers = HashMap::new(); for i in 0..=4 { @@ -3374,10 +3383,6 @@ async fn test_coinbase_generation_and_monitoring() { .base_node_rpc_mock_state .set_transaction_query_batch_responses(batch_query_response); - alice_ts_interface - .wallet_connectivity_service_mock - .set_base_node(alice_ts_interface.base_node_identity.to_peer()); - alice_ts_interface .transaction_service_handle .validate_transactions() @@ -3386,7 +3391,7 @@ async fn test_coinbase_generation_and_monitoring() { let _tx_batch_query_calls = alice_ts_interface .base_node_rpc_mock_state - .wait_pop_transaction_batch_query_calls(1, Duration::from_secs(30)) + .wait_pop_transaction_batch_query_calls(2, Duration::from_secs(30)) .await .unwrap(); @@ -3396,7 +3401,7 @@ async fn test_coinbase_generation_and_monitoring() { .await .unwrap(); - assert_eq!(completed_txs.len(), 2); + assert_eq!(completed_txs.len(), 3); let tx = completed_txs.get(&tx_id1).unwrap(); assert_eq!(tx.status, TransactionStatus::Coinbase); @@ -3436,7 +3441,8 @@ async fn test_coinbase_generation_and_monitoring() { let _tx_batch_query_calls = alice_ts_interface .base_node_rpc_mock_state - .wait_pop_transaction_batch_query_calls(1, Duration::from_secs(30)) + // TODO: This is a flaky test; changing the pop count = 3 below makes the test fail often + .wait_pop_transaction_batch_query_calls(2, Duration::from_secs(30)) .await .unwrap(); @@ -3488,7 +3494,7 @@ async fn test_coinbase_abandoned() { .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + MicroTari::from(0) ); let transaction_query_batch_responses = vec![TxQueryBatchResponseProto { @@ -3512,17 +3518,12 @@ async fn test_coinbase_abandoned() { .base_node_rpc_mock_state 
.set_transaction_query_batch_responses(batch_query_response); - // Start the transaction protocols - alice_ts_interface - .wallet_connectivity_service_mock - .set_base_node(alice_ts_interface.base_node_identity.to_peer()); - let balance = alice_ts_interface .output_manager_service_handle .get_balance() .await .unwrap(); - assert_eq!(balance.pending_incoming_balance, fees1 + reward1); + assert_eq!(balance.pending_incoming_balance, MicroTari::from(0)); let validation_id = alice_ts_interface .transaction_service_handle @@ -3617,7 +3618,7 @@ async fn test_coinbase_abandoned() { .await .unwrap() .pending_incoming_balance, - fees2 + reward2 + MicroTari::from(0) ); let transaction_query_batch_responses = vec![ @@ -3937,10 +3938,11 @@ async fn test_coinbase_transaction_reused_for_same_height() { .unwrap(); assert_eq!(transactions.len(), 1); + let mut amount = MicroTari::zero(); for tx in transactions.values() { - assert_eq!(tx.amount, fees1 + reward1); + amount += tx.amount; } - // balance should be fees1 + reward1, not double + assert_eq!(amount, fees1 + reward1); assert_eq!( ts_interface .output_manager_service_handle @@ -3948,7 +3950,7 @@ async fn test_coinbase_transaction_reused_for_same_height() { .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + MicroTari::from(0) ); // a requested coinbase transaction for the same height but new amount should be different @@ -3964,10 +3966,12 @@ async fn test_coinbase_transaction_reused_for_same_height() { .get_completed_transactions() .await .unwrap(); - assert_eq!(transactions.len(), 1); // tx1 and tx2 should be cancelled + assert_eq!(transactions.len(), 2); + let mut amount = MicroTari::zero(); for tx in transactions.values() { - assert_eq!(tx.amount, fees2 + reward2); + amount += tx.amount; } + assert_eq!(amount, fees1 + reward1 + fees2 + reward2); assert_eq!( ts_interface .output_manager_service_handle @@ -3975,7 +3979,7 @@ async fn test_coinbase_transaction_reused_for_same_height() { .await .unwrap() 
.pending_incoming_balance, - fees1 + reward1 + fees2 + reward2 + MicroTari::from(0) ); // a requested coinbase transaction for a new height should be different @@ -3991,10 +3995,12 @@ async fn test_coinbase_transaction_reused_for_same_height() { .get_completed_transactions() .await .unwrap(); - assert_eq!(transactions.len(), 2); + assert_eq!(transactions.len(), 3); + let mut amount = MicroTari::zero(); for tx in transactions.values() { - assert_eq!(tx.amount, fees2 + reward2); + amount += tx.amount; } + assert_eq!(amount, fees1 + reward1 + fees2 + reward2 + fees2 + reward2); assert_eq!( ts_interface .output_manager_service_handle @@ -4002,7 +4008,7 @@ async fn test_coinbase_transaction_reused_for_same_height() { .await .unwrap() .pending_incoming_balance, - fees1 + reward1 + 2 * (fees2 + reward2) + MicroTari::from(0) ); } @@ -4044,6 +4050,7 @@ async fn test_transaction_resending() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message".to_string(), @@ -4179,7 +4186,7 @@ async fn test_transaction_resending() { assert!(alice_ts_interface .outbound_service_mock_state - .wait_call_count(1, Duration::from_secs(5)) + .wait_call_count(1, Duration::from_secs(8)) .await .is_err()); @@ -4531,6 +4538,7 @@ async fn test_replying_to_cancelled_tx() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * uT, "Testing Message".to_string(), @@ -4653,6 +4661,7 @@ async fn test_transaction_timeout_cancellation() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 20 * uT, "Testing Message".to_string(), @@ -4907,6 +4916,7 @@ async fn transaction_service_tx_broadcast() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent1, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 100 * 
uT, "Testing Message".to_string(), @@ -4967,6 +4977,7 @@ async fn transaction_service_tx_broadcast() { .send_transaction( bob_node_identity.public_key().clone(), amount_sent2, + UtxoSelectionCriteria::default(), OutputFeatures::default(), 20 * uT, "Testing Message2".to_string(), @@ -5062,7 +5073,10 @@ async fn transaction_service_tx_broadcast() { let tx1_fee = alice_completed_tx1.fee; - assert_eq!(alice_completed_tx1.status, TransactionStatus::Completed); + assert!( + alice_completed_tx1.status == TransactionStatus::Completed || + alice_completed_tx1.status == TransactionStatus::Broadcast + ); let _transactions = alice_ts_interface .base_node_rpc_mock_state @@ -5163,7 +5177,10 @@ async fn transaction_service_tx_broadcast() { .remove(&tx_id2) .expect("Transaction must be in collection"); - assert_eq!(alice_completed_tx2.status, TransactionStatus::Completed); + assert!( + alice_completed_tx2.status == TransactionStatus::Completed || + alice_completed_tx2.status == TransactionStatus::Broadcast + ); let _transactions = alice_ts_interface .base_node_rpc_mock_state @@ -5289,23 +5306,15 @@ async fn broadcast_all_completed_transactions_on_startup() { mined_timestamp: None, }); - assert!(alice_ts_interface - .transaction_service_handle - .restart_broadcast_protocols() - .await - .is_err()); - - alice_ts_interface - .wallet_connectivity_service_mock - .set_base_node(alice_ts_interface.base_node_identity.to_peer()); - + // Note: The event stream has to be assigned before the broadcast protocol is restarted otherwise the events will be + // dropped + let mut event_stream = alice_ts_interface.transaction_service_handle.get_event_stream(); assert!(alice_ts_interface .transaction_service_handle .restart_broadcast_protocols() .await .is_ok()); - let mut event_stream = alice_ts_interface.transaction_service_handle.get_event_stream(); let delay = sleep(Duration::from_secs(60)); tokio::pin!(delay); let mut found1 = false; diff --git 
a/base_layer/wallet/tests/transaction_service_tests/storage.rs b/base_layer/wallet/tests/transaction_service_tests/storage.rs index 6b0da3b13b..236e9e2ca3 100644 --- a/base_layer/wallet/tests/transaction_service_tests/storage.rs +++ b/base_layer/wallet/tests/transaction_service_tests/storage.rs @@ -60,10 +60,8 @@ use tari_wallet::{ }, }; use tempfile::tempdir; -use tokio::runtime::Runtime; pub fn test_db_backend(backend: T) { - let runtime = Runtime::new().unwrap(); let mut db = TransactionDatabase::new(backend); let factories = CryptoFactories::default(); let input = create_unblinded_output( @@ -123,25 +121,18 @@ pub fn test_db_backend(backend: T) { send_count: 0, last_send_timestamp: None, }); - assert!( - !runtime.block_on(db.transaction_exists(tx_id)).unwrap(), - "TxId should not exist" - ); + assert!(!db.transaction_exists(tx_id).unwrap(), "TxId should not exist"); - runtime - .block_on(db.add_pending_outbound_transaction(outbound_txs[i].tx_id, outbound_txs[i].clone())) + db.add_pending_outbound_transaction(outbound_txs[i].tx_id, outbound_txs[i].clone()) .unwrap(); - assert!( - runtime.block_on(db.transaction_exists(tx_id)).unwrap(), - "TxId should exist" - ); + assert!(db.transaction_exists(tx_id).unwrap(), "TxId should exist"); } - let retrieved_outbound_txs = runtime.block_on(db.get_pending_outbound_transactions()).unwrap(); + let retrieved_outbound_txs = db.get_pending_outbound_transactions().unwrap(); assert_eq!(outbound_txs.len(), messages.len()); for i in outbound_txs.iter().take(messages.len()) { - let retrieved_outbound_tx = runtime.block_on(db.get_pending_outbound_transaction(i.tx_id)).unwrap(); + let retrieved_outbound_tx = db.get_pending_outbound_transaction(i.tx_id).unwrap(); assert_eq!(&retrieved_outbound_tx, i); assert_eq!(retrieved_outbound_tx.send_count, 0); assert!(retrieved_outbound_tx.last_send_timestamp.is_none()); @@ -149,19 +140,12 @@ pub fn test_db_backend(backend: T) { assert_eq!(&retrieved_outbound_txs.get(&i.tx_id).unwrap(), &i); } - 
runtime - .block_on(db.increment_send_count(outbound_txs[0].tx_id)) - .unwrap(); - let retrieved_outbound_tx = runtime - .block_on(db.get_pending_outbound_transaction(outbound_txs[0].tx_id)) - .unwrap(); + db.increment_send_count(outbound_txs[0].tx_id).unwrap(); + let retrieved_outbound_tx = db.get_pending_outbound_transaction(outbound_txs[0].tx_id).unwrap(); assert_eq!(retrieved_outbound_tx.send_count, 1); assert!(retrieved_outbound_tx.last_send_timestamp.is_some()); - let any_outbound_tx = runtime - .block_on(db.get_any_transaction(outbound_txs[0].tx_id)) - .unwrap() - .unwrap(); + let any_outbound_tx = db.get_any_transaction(outbound_txs[0].tx_id).unwrap().unwrap(); if let WalletTransaction::PendingOutbound(tx) = any_outbound_tx { assert_eq!(tx, retrieved_outbound_tx); } else { @@ -192,20 +176,13 @@ pub fn test_db_backend(backend: T) { send_count: 0, last_send_timestamp: None, }); - assert!( - !runtime.block_on(db.transaction_exists(tx_id)).unwrap(), - "TxId should not exist" - ); - runtime - .block_on(db.add_pending_inbound_transaction(tx_id, inbound_txs[i].clone())) + assert!(!db.transaction_exists(tx_id).unwrap(), "TxId should not exist"); + db.add_pending_inbound_transaction(tx_id, inbound_txs[i].clone()) .unwrap(); - assert!( - runtime.block_on(db.transaction_exists(tx_id)).unwrap(), - "TxId should exist" - ); + assert!(db.transaction_exists(tx_id).unwrap(), "TxId should exist"); } - let retrieved_inbound_txs = runtime.block_on(db.get_pending_inbound_transactions()).unwrap(); + let retrieved_inbound_txs = db.get_pending_inbound_transactions().unwrap(); assert_eq!(inbound_txs.len(), messages.len()); for i in inbound_txs.iter().take(messages.len()) { let retrieved_tx = retrieved_inbound_txs.get(&i.tx_id).unwrap(); @@ -214,34 +191,29 @@ pub fn test_db_backend(backend: T) { assert!(retrieved_tx.last_send_timestamp.is_none()); } - runtime.block_on(db.increment_send_count(inbound_txs[0].tx_id)).unwrap(); - let retrieved_inbound_tx = runtime - 
.block_on(db.get_pending_inbound_transaction(inbound_txs[0].tx_id)) - .unwrap(); + db.increment_send_count(inbound_txs[0].tx_id).unwrap(); + let retrieved_inbound_tx = db.get_pending_inbound_transaction(inbound_txs[0].tx_id).unwrap(); assert_eq!(retrieved_inbound_tx.send_count, 1); assert!(retrieved_inbound_tx.last_send_timestamp.is_some()); - let any_inbound_tx = runtime - .block_on(db.get_any_transaction(inbound_txs[0].tx_id)) - .unwrap() - .unwrap(); + let any_inbound_tx = db.get_any_transaction(inbound_txs[0].tx_id).unwrap().unwrap(); if let WalletTransaction::PendingInbound(tx) = any_inbound_tx { assert_eq!(tx, retrieved_inbound_tx); } else { panic!("Should have found inbound tx"); } - let inbound_pub_key = runtime - .block_on(db.get_pending_transaction_counterparty_pub_key_by_tx_id(inbound_txs[0].tx_id)) + let inbound_pub_key = db + .get_pending_transaction_counterparty_pub_key_by_tx_id(inbound_txs[0].tx_id) .unwrap(); assert_eq!(inbound_pub_key, inbound_txs[0].source_public_key); - assert!(runtime - .block_on(db.get_pending_transaction_counterparty_pub_key_by_tx_id(100u64.into())) + assert!(db + .get_pending_transaction_counterparty_pub_key_by_tx_id(100u64.into()) .is_err()); - let outbound_pub_key = runtime - .block_on(db.get_pending_transaction_counterparty_pub_key_by_tx_id(outbound_txs[0].tx_id)) + let outbound_pub_key = db + .get_pending_transaction_counterparty_pub_key_by_tx_id(outbound_txs[0].tx_id) .unwrap(); assert_eq!(outbound_pub_key, outbound_txs[0].destination_public_key); @@ -281,20 +253,16 @@ pub fn test_db_backend(backend: T) { mined_in_block: None, mined_timestamp: None, }); - runtime - .block_on(db.complete_outbound_transaction(outbound_txs[i].tx_id, completed_txs[i].clone())) - .unwrap(); - runtime - .block_on( - db.complete_inbound_transaction(inbound_txs[i].tx_id, CompletedTransaction { - tx_id: inbound_txs[i].tx_id, - ..completed_txs[i].clone() - }), - ) + db.complete_outbound_transaction(outbound_txs[i].tx_id, completed_txs[i].clone()) 
.unwrap(); + db.complete_inbound_transaction(inbound_txs[i].tx_id, CompletedTransaction { + tx_id: inbound_txs[i].tx_id, + ..completed_txs[i].clone() + }) + .unwrap(); } - let retrieved_completed_txs = runtime.block_on(db.get_completed_transactions()).unwrap(); + let retrieved_completed_txs = db.get_completed_transactions().unwrap(); assert_eq!(retrieved_completed_txs.len(), 2 * messages.len()); for i in 0..messages.len() { @@ -311,254 +279,165 @@ pub fn test_db_backend(backend: T) { ); } - runtime - .block_on(db.increment_send_count(completed_txs[0].tx_id)) - .unwrap(); - runtime - .block_on(db.increment_send_count(completed_txs[0].tx_id)) - .unwrap(); - let retrieved_completed_tx = runtime - .block_on(db.get_completed_transaction(completed_txs[0].tx_id)) - .unwrap(); + db.increment_send_count(completed_txs[0].tx_id).unwrap(); + db.increment_send_count(completed_txs[0].tx_id).unwrap(); + let retrieved_completed_tx = db.get_completed_transaction(completed_txs[0].tx_id).unwrap(); assert_eq!(retrieved_completed_tx.send_count, 2); assert!(retrieved_completed_tx.last_send_timestamp.is_some()); assert!(retrieved_completed_tx.confirmations.is_none()); - assert!(runtime.block_on(db.fetch_last_mined_transaction()).unwrap().is_none()); + assert!(db.fetch_last_mined_transaction().unwrap().is_none()); - runtime - .block_on(db.set_transaction_mined_height(completed_txs[0].tx_id, 10, FixedHash::zero(), 0, 5, true, false)) + db.set_transaction_mined_height(completed_txs[0].tx_id, 10, FixedHash::zero(), 0, 5, true, false) .unwrap(); assert_eq!( - runtime - .block_on(db.fetch_last_mined_transaction()) - .unwrap() - .unwrap() - .tx_id, + db.fetch_last_mined_transaction().unwrap().unwrap().tx_id, completed_txs[0].tx_id ); - let retrieved_completed_tx = runtime - .block_on(db.get_completed_transaction(completed_txs[0].tx_id)) - .unwrap(); + let retrieved_completed_tx = db.get_completed_transaction(completed_txs[0].tx_id).unwrap(); assert_eq!(retrieved_completed_tx.confirmations, 
Some(5)); - let any_completed_tx = runtime - .block_on(db.get_any_transaction(completed_txs[0].tx_id)) - .unwrap() - .unwrap(); + let any_completed_tx = db.get_any_transaction(completed_txs[0].tx_id).unwrap().unwrap(); if let WalletTransaction::Completed(tx) = any_completed_tx { assert_eq!(tx, retrieved_completed_tx); } else { panic!("Should have found completed tx"); } - let completed_txs_map = runtime.block_on(db.get_completed_transactions()).unwrap(); + let completed_txs_map = db.get_completed_transactions().unwrap(); let num_completed_txs = completed_txs_map.len(); - assert_eq!( - runtime - .block_on(db.get_cancelled_completed_transactions()) - .unwrap() - .len(), - 0 - ); + assert_eq!(db.get_cancelled_completed_transactions().unwrap().len(), 0); let cancelled_tx_id = completed_txs_map[&1u64.into()].tx_id; - assert!(runtime - .block_on(db.get_cancelled_completed_transaction(cancelled_tx_id)) - .is_err()); - runtime - .block_on(db.reject_completed_transaction(cancelled_tx_id, TxCancellationReason::Unknown)) + assert!(db.get_cancelled_completed_transaction(cancelled_tx_id).is_err()); + db.reject_completed_transaction(cancelled_tx_id, TxCancellationReason::Unknown) .unwrap(); - let completed_txs_map = runtime.block_on(db.get_completed_transactions()).unwrap(); + let completed_txs_map = db.get_completed_transactions().unwrap(); assert_eq!(completed_txs_map.len(), num_completed_txs - 1); - runtime - .block_on(db.get_cancelled_completed_transaction(cancelled_tx_id)) + db.get_cancelled_completed_transaction(cancelled_tx_id) .expect("Should find cancelled transaction"); - let mut cancelled_txs = runtime.block_on(db.get_cancelled_completed_transactions()).unwrap(); + let mut cancelled_txs = db.get_cancelled_completed_transactions().unwrap(); assert_eq!(cancelled_txs.len(), 1); assert!(cancelled_txs.remove(&cancelled_tx_id).is_some()); - let any_cancelled_completed_tx = runtime - .block_on(db.get_any_transaction(cancelled_tx_id)) - .unwrap() - .unwrap(); + let 
any_cancelled_completed_tx = db.get_any_transaction(cancelled_tx_id).unwrap().unwrap(); if let WalletTransaction::Completed(tx) = any_cancelled_completed_tx { assert_eq!(tx.tx_id, cancelled_tx_id); } else { panic!("Should have found cancelled completed tx"); } - runtime - .block_on(db.add_pending_inbound_transaction( + db.add_pending_inbound_transaction( + 999u64.into(), + InboundTransaction::new( 999u64.into(), - InboundTransaction::new( - 999u64.into(), - PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)), - 22 * uT, - rtp, - TransactionStatus::Pending, - "To be cancelled".to_string(), - Utc::now().naive_utc(), - ), - )) - .unwrap(); + PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)), + 22 * uT, + rtp, + TransactionStatus::Pending, + "To be cancelled".to_string(), + Utc::now().naive_utc(), + ), + ) + .unwrap(); - assert_eq!( - runtime - .block_on(db.get_cancelled_pending_inbound_transactions()) - .unwrap() - .len(), - 0 - ); + assert_eq!(db.get_cancelled_pending_inbound_transactions().unwrap().len(), 0); - assert_eq!( - runtime.block_on(db.get_pending_inbound_transactions()).unwrap().len(), - 1 - ); + assert_eq!(db.get_pending_inbound_transactions().unwrap().len(), 1); assert!( - !runtime - .block_on(db.get_pending_inbound_transaction(999u64.into())) + !db.get_pending_inbound_transaction(999u64.into()) .unwrap() .direct_send_success ); - runtime.block_on(db.mark_direct_send_success(999u64.into())).unwrap(); + db.mark_direct_send_success(999u64.into()).unwrap(); assert!( - runtime - .block_on(db.get_pending_inbound_transaction(999u64.into())) + db.get_pending_inbound_transaction(999u64.into()) .unwrap() .direct_send_success ); - assert!(runtime - .block_on(db.get_cancelled_pending_inbound_transaction(999u64.into())) - .is_err()); - runtime.block_on(db.cancel_pending_transaction(999u64.into())).unwrap(); - runtime - .block_on(db.get_cancelled_pending_inbound_transaction(999u64.into())) + 
assert!(db.get_cancelled_pending_inbound_transaction(999u64.into()).is_err()); + db.cancel_pending_transaction(999u64.into()).unwrap(); + db.get_cancelled_pending_inbound_transaction(999u64.into()) .expect("Should find cancelled inbound tx"); - assert_eq!( - runtime - .block_on(db.get_cancelled_pending_inbound_transactions()) - .unwrap() - .len(), - 1 - ); + assert_eq!(db.get_cancelled_pending_inbound_transactions().unwrap().len(), 1); - assert_eq!( - runtime.block_on(db.get_pending_inbound_transactions()).unwrap().len(), - 0 - ); + assert_eq!(db.get_pending_inbound_transactions().unwrap().len(), 0); - let any_cancelled_inbound_tx = runtime - .block_on(db.get_any_transaction(999u64.into())) - .unwrap() - .unwrap(); + let any_cancelled_inbound_tx = db.get_any_transaction(999u64.into()).unwrap().unwrap(); if let WalletTransaction::PendingInbound(tx) = any_cancelled_inbound_tx { assert_eq!(tx.tx_id, TxId::from(999u64)); } else { panic!("Should have found cancelled inbound tx"); } - let mut cancelled_txs = runtime - .block_on(db.get_cancelled_pending_inbound_transactions()) - .unwrap(); + let mut cancelled_txs = db.get_cancelled_pending_inbound_transactions().unwrap(); assert_eq!(cancelled_txs.len(), 1); assert!(cancelled_txs.remove(&999u64.into()).is_some()); - runtime - .block_on(db.add_pending_outbound_transaction( + db.add_pending_outbound_transaction( + 998u64.into(), + OutboundTransaction::new( 998u64.into(), - OutboundTransaction::new( - 998u64.into(), - PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)), - 22 * uT, - stp.get_fee_amount().unwrap(), - stp, - TransactionStatus::Pending, - "To be cancelled".to_string(), - Utc::now().naive_utc(), - false, - ), - )) - .unwrap(); + PublicKey::from_secret_key(&PrivateKey::random(&mut OsRng)), + 22 * uT, + stp.get_fee_amount().unwrap(), + stp, + TransactionStatus::Pending, + "To be cancelled".to_string(), + Utc::now().naive_utc(), + false, + ), + ) + .unwrap(); assert!( - !runtime - 
.block_on(db.get_pending_outbound_transaction(998u64.into())) + !db.get_pending_outbound_transaction(998u64.into()) .unwrap() .direct_send_success ); - runtime.block_on(db.mark_direct_send_success(998u64.into())).unwrap(); + db.mark_direct_send_success(998u64.into()).unwrap(); assert!( - runtime - .block_on(db.get_pending_outbound_transaction(998u64.into())) + db.get_pending_outbound_transaction(998u64.into()) .unwrap() .direct_send_success ); - assert_eq!( - runtime - .block_on(db.get_cancelled_pending_outbound_transactions()) - .unwrap() - .len(), - 0 - ); + assert_eq!(db.get_cancelled_pending_outbound_transactions().unwrap().len(), 0); - assert_eq!( - runtime.block_on(db.get_pending_outbound_transactions()).unwrap().len(), - 1 - ); + assert_eq!(db.get_pending_outbound_transactions().unwrap().len(), 1); - assert!(runtime - .block_on(db.get_cancelled_pending_outbound_transaction(998u64.into())) - .is_err()); + assert!(db.get_cancelled_pending_outbound_transaction(998u64.into()).is_err()); - runtime.block_on(db.cancel_pending_transaction(998u64.into())).unwrap(); - runtime - .block_on(db.get_cancelled_pending_outbound_transaction(998u64.into())) + db.cancel_pending_transaction(998u64.into()).unwrap(); + db.get_cancelled_pending_outbound_transaction(998u64.into()) .expect("Should find cancelled outbound tx"); - assert_eq!( - runtime - .block_on(db.get_cancelled_pending_outbound_transactions()) - .unwrap() - .len(), - 1 - ); + assert_eq!(db.get_cancelled_pending_outbound_transactions().unwrap().len(), 1); - assert_eq!( - runtime.block_on(db.get_pending_outbound_transactions()).unwrap().len(), - 0 - ); + assert_eq!(db.get_pending_outbound_transactions().unwrap().len(), 0); - let mut cancelled_txs = runtime - .block_on(db.get_cancelled_pending_outbound_transactions()) - .unwrap(); + let mut cancelled_txs = db.get_cancelled_pending_outbound_transactions().unwrap(); assert_eq!(cancelled_txs.len(), 1); assert!(cancelled_txs.remove(&998u64.into()).is_some()); - let 
any_cancelled_outbound_tx = runtime - .block_on(db.get_any_transaction(998u64.into())) - .unwrap() - .unwrap(); + let any_cancelled_outbound_tx = db.get_any_transaction(998u64.into()).unwrap().unwrap(); if let WalletTransaction::PendingOutbound(tx) = any_cancelled_outbound_tx { assert_eq!(tx.tx_id, TxId::from(998u64)); } else { panic!("Should have found cancelled outbound tx"); } - let unmined_txs = runtime.block_on(db.fetch_unconfirmed_transactions_info()).unwrap(); + let unmined_txs = db.fetch_unconfirmed_transactions_info().unwrap(); assert_eq!(unmined_txs.len(), 4); - runtime - .block_on(db.set_transaction_as_unmined(completed_txs[0].tx_id)) - .unwrap(); + db.set_transaction_as_unmined(completed_txs[0].tx_id).unwrap(); - let unmined_txs = runtime.block_on(db.fetch_unconfirmed_transactions_info()).unwrap(); + let unmined_txs = db.fetch_unconfirmed_transactions_info().unwrap(); assert_eq!(unmined_txs.len(), 5); } diff --git a/base_layer/wallet/tests/transaction_service_tests/transaction_protocols.rs b/base_layer/wallet/tests/transaction_service_tests/transaction_protocols.rs index 753fd3cf14..eb4c040aad 100644 --- a/base_layer/wallet/tests/transaction_service_tests/transaction_protocols.rs +++ b/base_layer/wallet/tests/transaction_service_tests/transaction_protocols.rs @@ -181,7 +181,7 @@ pub async fn add_transaction_to_database( ) { let factories = CryptoFactories::default(); let (_utxo, uo0) = make_input(&mut OsRng, 10 * amount, &factories.commitment).await; - let (txs1, _uou1) = schema_to_transaction(&[txn_schema!(from: vec![uo0.clone()], to: vec![amount])]); + let (txs1, _uou1) = schema_to_transaction(&[txn_schema!(from: vec![uo0], to: vec![amount])]); let tx1 = (*txs1[0]).clone(); let completed_tx1 = CompletedTransaction::new( tx_id, @@ -189,7 +189,7 @@ pub async fn add_transaction_to_database( CommsPublicKey::default(), amount, 200 * uT, - tx1.clone(), + tx1, status.unwrap_or(TransactionStatus::Completed), "Test".to_string(), Utc::now().naive_local(), @@ 
-198,7 +198,7 @@ pub async fn add_transaction_to_database( None, None, ); - db.insert_completed_transaction(tx_id, completed_tx1).await.unwrap(); + db.insert_completed_transaction(tx_id, completed_tx1).unwrap(); } /// Simple task that responds with a OutputManagerResponse::TransactionCancelled response to any request made on this @@ -210,6 +210,7 @@ pub async fn oms_reply_channel_task( let (request, reply_tx) = request_context.split(); let response = match request { OutputManagerRequest::CancelTransaction(_) => Ok(OutputManagerResponse::TransactionCancelled), + OutputManagerRequest::SetCoinbaseAbandoned(_, _) => Ok(OutputManagerResponse::CoinbaseAbandonedSet), _ => Err(OutputManagerError::InvalidResponseError( "Unhandled request type".to_string(), )), @@ -253,7 +254,7 @@ async fn tx_broadcast_protocol_submit_success() { add_transaction_to_database(1u64.into(), 1 * T, None, None, resources.db.clone()).await; - let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).await.unwrap(); + let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).unwrap(); assert!(db_completed_tx.confirmations.is_none()); let protocol = TransactionBroadcastProtocol::new(1u64.into(), resources.clone(), timeout_watch.get_receiver()); @@ -351,7 +352,7 @@ async fn tx_broadcast_protocol_submit_rejection() { } // Check transaction is cancelled in db - let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).await; + let db_completed_tx = resources.db.get_completed_transaction(1u64.into()); assert!(db_completed_tx.is_err()); // Check that the appropriate events were emitted @@ -460,7 +461,7 @@ async fn tx_broadcast_protocol_restart_protocol_as_query() { assert_eq!(result.unwrap(), TxId::from(1u64)); // Check transaction status is updated - let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).await.unwrap(); + let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).unwrap(); assert_eq!(db_completed_tx.status, 
TransactionStatus::Broadcast); } @@ -534,7 +535,7 @@ async fn tx_broadcast_protocol_submit_success_followed_by_rejection() { } // Check transaction is cancelled in db - let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).await; + let db_completed_tx = resources.db.get_completed_transaction(1u64.into()); assert!(db_completed_tx.is_err()); // Check that the appropriate events were emitted @@ -620,7 +621,7 @@ async fn tx_broadcast_protocol_submit_already_mined() { assert_eq!(result.unwrap(), 1); // Check transaction status is updated - let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).await.unwrap(); + let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).unwrap(); assert_eq!(db_completed_tx.status, TransactionStatus::Completed); } @@ -718,7 +719,7 @@ async fn tx_broadcast_protocol_submit_and_base_node_gets_changed() { assert_eq!(result.unwrap(), TxId::from(1u64)); // Check transaction status is updated - let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).await.unwrap(); + let db_completed_tx = resources.db.get_completed_transaction(1u64.into()).unwrap(); assert_eq!(db_completed_tx.status, TransactionStatus::Broadcast); } @@ -760,7 +761,7 @@ async fn tx_validation_protocol_tx_becomes_mined_unconfirmed_then_confirmed() { ) .await; - let tx2 = resources.db.get_completed_transaction(2u64.into()).await.unwrap(); + let tx2 = resources.db.get_completed_transaction(2u64.into()).unwrap(); let transaction_query_batch_responses = vec![TxQueryBatchResponseProto { signature: Some(SignatureProto::from( @@ -796,7 +797,7 @@ async fn tx_validation_protocol_tx_becomes_mined_unconfirmed_then_confirmed() { let result = join_handle.await.unwrap(); assert!(result.is_ok()); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); assert_eq!( completed_txs.get(&1u64.into()).unwrap().status, @@ -824,7 +825,7 
@@ async fn tx_validation_protocol_tx_becomes_mined_unconfirmed_then_confirmed() { let result = join_handle.await.unwrap(); assert!(result.is_ok()); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); assert_eq!( completed_txs.get(&1u64.into()).unwrap().status, @@ -870,7 +871,7 @@ async fn tx_validation_protocol_tx_becomes_mined_unconfirmed_then_confirmed() { let result = join_handle.await.unwrap(); assert!(result.is_ok()); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); assert_eq!( completed_txs.get(&2u64.into()).unwrap().status, @@ -916,7 +917,7 @@ async fn tx_revalidation() { ) .await; - let tx2 = resources.db.get_completed_transaction(2u64.into()).await.unwrap(); + let tx2 = resources.db.get_completed_transaction(2u64.into()).unwrap(); // set tx2 as fully mined let transaction_query_batch_responses = vec![TxQueryBatchResponseProto { @@ -953,7 +954,7 @@ async fn tx_revalidation() { let result = join_handle.await.unwrap(); assert!(result.is_ok()); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); assert_eq!( completed_txs.get(&2u64.into()).unwrap().status, @@ -982,8 +983,8 @@ async fn tx_revalidation() { rpc_service_state.set_transaction_query_batch_responses(batch_query_response.clone()); // revalidate sets all to unvalidated, so lets check that thay are - resources.db.mark_all_transactions_as_unvalidated().await.unwrap(); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + resources.db.mark_all_transactions_as_unvalidated().unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); assert_eq!( completed_txs.get(&2u64.into()).unwrap().status, TransactionStatus::MinedConfirmed @@ -1004,7 +1005,7 @@ 
async fn tx_revalidation() { let result = join_handle.await.unwrap(); assert!(result.is_ok()); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); // data should now be updated and changed assert_eq!( completed_txs.get(&2u64.into()).unwrap().status, @@ -1072,13 +1073,13 @@ async fn tx_validation_protocol_reorg() { } rpc_service_state.set_blocks(block_headers.clone()); - let tx1 = resources.db.get_completed_transaction(1u64.into()).await.unwrap(); - let tx2 = resources.db.get_completed_transaction(2u64.into()).await.unwrap(); - let tx3 = resources.db.get_completed_transaction(3u64.into()).await.unwrap(); - let tx4 = resources.db.get_completed_transaction(4u64.into()).await.unwrap(); - let tx5 = resources.db.get_completed_transaction(5u64.into()).await.unwrap(); - let coinbase_tx1 = resources.db.get_completed_transaction(6u64.into()).await.unwrap(); - let coinbase_tx2 = resources.db.get_completed_transaction(7u64.into()).await.unwrap(); + let tx1 = resources.db.get_completed_transaction(1u64.into()).unwrap(); + let tx2 = resources.db.get_completed_transaction(2u64.into()).unwrap(); + let tx3 = resources.db.get_completed_transaction(3u64.into()).unwrap(); + let tx4 = resources.db.get_completed_transaction(4u64.into()).unwrap(); + let tx5 = resources.db.get_completed_transaction(5u64.into()).unwrap(); + let coinbase_tx1 = resources.db.get_completed_transaction(6u64.into()).unwrap(); + let coinbase_tx2 = resources.db.get_completed_transaction(7u64.into()).unwrap(); let transaction_query_batch_responses = vec![ TxQueryBatchResponseProto { @@ -1176,7 +1177,7 @@ async fn tx_validation_protocol_reorg() { let result = join_handle.await.unwrap(); assert!(result.is_ok()); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); let mut unconfirmed_count = 0; let mut confirmed_count 
= 0; for tx in completed_txs.values() { @@ -1295,7 +1296,7 @@ async fn tx_validation_protocol_reorg() { assert_eq!(rpc_service_state.take_get_header_by_height_calls().len(), 0); - let completed_txs = resources.db.get_completed_transactions().await.unwrap(); + let completed_txs = resources.db.get_completed_transactions().unwrap(); assert_eq!( completed_txs.get(&4u64.into()).unwrap().status, TransactionStatus::Completed @@ -1316,7 +1317,7 @@ async fn tx_validation_protocol_reorg() { completed_txs.get(&7u64.into()).unwrap().status, TransactionStatus::Coinbase ); - let cancelled_completed_txs = resources.db.get_cancelled_completed_transactions().await.unwrap(); + let cancelled_completed_txs = resources.db.get_cancelled_completed_transactions().unwrap(); assert!(matches!( cancelled_completed_txs.get(&6u64.into()).unwrap().cancelled, diff --git a/base_layer/wallet/tests/utxo_scanner.rs b/base_layer/wallet/tests/utxo_scanner.rs index f145a7ae2a..bb245d0502 100644 --- a/base_layer/wallet/tests/utxo_scanner.rs +++ b/base_layer/wallet/tests/utxo_scanner.rs @@ -45,7 +45,7 @@ use tari_test_utils::random; use tari_utilities::{epoch_time::EpochTime, ByteArray}; use tari_wallet::{ base_node_service::handle::{BaseNodeEvent, BaseNodeServiceHandle}, - connectivity_service::{create_wallet_connectivity_mock, WalletConnectivityInterface, WalletConnectivityMock}, + connectivity_service::{create_wallet_connectivity_mock, WalletConnectivityMock}, output_manager_service::storage::models::DbUnblindedOutput, storage::{ database::WalletDatabase, @@ -76,6 +76,7 @@ use support::{ }; use tari_comms::types::CommsPublicKey; use tari_wallet::{ + output_manager_service::storage::OutputSource, transaction_service::handle::TransactionServiceRequest, util::watch::Watch, utxo_scanner_service::handle::UtxoScannerHandle, @@ -84,14 +85,13 @@ use tari_wallet::{ use crate::support::transaction_service_mock::TransactionServiceMockState; pub struct UtxoScannerTestInterface { - scanner_service: Option>, + 
scanner_service: Option>, scanner_handle: UtxoScannerHandle, wallet_db: WalletDatabase, base_node_service_event_publisher: broadcast::Sender>, rpc_service_state: BaseNodeWalletRpcMockState, _rpc_mock_server: MockRpcServer>, _comms_connectivity_mock_state: ConnectivityManagerMockState, - _wallet_connectivity_mock: WalletConnectivityMock, transaction_service_mock_state: TransactionServiceMockState, oms_mock_state: OutputManagerMockState, shutdown_signal: Shutdown, @@ -173,7 +173,7 @@ async fn setup( let scanner_handle = UtxoScannerHandle::new(event_sender.clone(), one_sided_message_watch, recovery_message_watch); - let mut scanner_service_builder = UtxoScannerService::::builder(); + let mut scanner_service_builder = UtxoScannerService::::builder(); scanner_service_builder .with_peers(vec![server_node_identity.public_key().clone()]) @@ -191,7 +191,7 @@ async fn setup( let scanner_service = scanner_service_builder.build_with_resources( wallet_db.clone(), comms_connectivity, - wallet_connectivity_mock.get_current_base_node_watcher(), + wallet_connectivity_mock, oms_handle, ts_handle, node_identity, @@ -211,7 +211,6 @@ async fn setup( rpc_service_state, _rpc_mock_server: mock_server, _comms_connectivity_mock_state: comms_connectivity_mock_state, - _wallet_connectivity_mock: wallet_connectivity_mock, transaction_service_mock_state, oms_mock_state, shutdown_signal: shutdown, @@ -288,7 +287,7 @@ async fn test_utxo_scanner_recovery() { let cipher_seed = CipherSeed::new(); let birthday_epoch_time = u64::from(cipher_seed.birthday() - 2) * 60 * 60 * 24; - test_interface.wallet_db.set_master_seed(cipher_seed).await.unwrap(); + test_interface.wallet_db.set_master_seed(cipher_seed).unwrap(); const NUM_BLOCKS: u64 = 11; const BIRTHDAY_OFFSET: u64 = 5; @@ -322,7 +321,8 @@ async fn test_utxo_scanner_recovery() { let mut total_amount_to_recover = MicroTari::from(0); for (h, outputs) in &unblinded_outputs { for output in outputs.iter().skip(outputs.len() / 2) { - let dbo = 
DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None).unwrap(); + let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None, OutputSource::Unknown) + .unwrap(); // Only the outputs in blocks after the birthday should be included in the recovered total if *h >= NUM_BLOCKS.saturating_sub(BIRTHDAY_OFFSET).saturating_sub(2) { total_outputs_to_recover += 1; @@ -370,7 +370,7 @@ async fn test_utxo_scanner_recovery_with_restart() { let cipher_seed = CipherSeed::new(); let birthday_epoch_time = u64::from(cipher_seed.birthday() - 2) * 60 * 60 * 24; - test_interface.wallet_db.set_master_seed(cipher_seed).await.unwrap(); + test_interface.wallet_db.set_master_seed(cipher_seed).unwrap(); test_interface .scanner_handle @@ -412,7 +412,8 @@ async fn test_utxo_scanner_recovery_with_restart() { let mut total_amount_to_recover = MicroTari::from(0); for (h, outputs) in &unblinded_outputs { for output in outputs.iter().skip(outputs.len() / 2) { - let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None).unwrap(); + let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None, OutputSource::Unknown) + .unwrap(); // Only the outputs in blocks after the birthday should be included in the recovered total if *h >= NUM_BLOCKS.saturating_sub(BIRTHDAY_OFFSET).saturating_sub(2) { total_outputs_to_recover += 1; @@ -533,7 +534,7 @@ async fn test_utxo_scanner_recovery_with_restart_and_reorg() { let cipher_seed = CipherSeed::new(); let birthday_epoch_time = u64::from(cipher_seed.birthday() - 2) * 60 * 60 * 24; - test_interface.wallet_db.set_master_seed(cipher_seed).await.unwrap(); + test_interface.wallet_db.set_master_seed(cipher_seed).unwrap(); const NUM_BLOCKS: u64 = 11; const BIRTHDAY_OFFSET: u64 = 5; @@ -566,7 +567,8 @@ async fn test_utxo_scanner_recovery_with_restart_and_reorg() { let mut db_unblinded_outputs = Vec::new(); for outputs in unblinded_outputs.values() { for output in 
outputs.iter().skip(outputs.len() / 2) { - let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None).unwrap(); + let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None, OutputSource::Unknown) + .unwrap(); db_unblinded_outputs.push(dbo); } } @@ -634,7 +636,8 @@ async fn test_utxo_scanner_recovery_with_restart_and_reorg() { let mut total_amount_to_recover = MicroTari::from(0); for (h, outputs) in &unblinded_outputs { for output in outputs.iter().skip(outputs.len() / 2) { - let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None).unwrap(); + let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None, OutputSource::Unknown) + .unwrap(); // Only the outputs in blocks after the birthday should be included in the recovered total if *h >= 4 { total_outputs_to_recover += 1; @@ -695,13 +698,12 @@ async fn test_utxo_scanner_scanned_block_cache_clearing() { .checked_sub_signed(ChronoDuration::days(1000)) .unwrap(), }) - .await .unwrap(); } let cipher_seed = CipherSeed::new(); let birthday_epoch_time = u64::from(cipher_seed.birthday() - 2) * 60 * 60 * 24; - test_interface.wallet_db.set_master_seed(cipher_seed).await.unwrap(); + test_interface.wallet_db.set_master_seed(cipher_seed).unwrap(); const NUM_BLOCKS: u64 = 11; const BIRTHDAY_OFFSET: u64 = 5; @@ -746,7 +748,6 @@ async fn test_utxo_scanner_scanned_block_cache_clearing() { amount: None, timestamp: Utc::now().naive_utc(), }) - .await .unwrap(); let mut scanner_event_stream = test_interface.scanner_handle.get_event_receiver(); @@ -771,7 +772,7 @@ async fn test_utxo_scanner_scanned_block_cache_clearing() { } } } - let scanned_blocks = test_interface.wallet_db.get_scanned_blocks().await.unwrap(); + let scanned_blocks = test_interface.wallet_db.get_scanned_blocks().unwrap(); use tari_wallet::utxo_scanner_service::service::SCANNED_BLOCK_CACHE_SIZE; let threshold = 800 + NUM_BLOCKS - 1 - SCANNED_BLOCK_CACHE_SIZE; @@ -804,7 
+805,7 @@ async fn test_utxo_scanner_one_sided_payments() { let cipher_seed = CipherSeed::new(); let birthday_epoch_time = u64::from(cipher_seed.birthday() - 2) * 60 * 60 * 24; - test_interface.wallet_db.set_master_seed(cipher_seed).await.unwrap(); + test_interface.wallet_db.set_master_seed(cipher_seed).unwrap(); const NUM_BLOCKS: u64 = 11; const BIRTHDAY_OFFSET: u64 = 5; @@ -838,7 +839,8 @@ async fn test_utxo_scanner_one_sided_payments() { let mut total_amount_to_recover = MicroTari::from(0); for (h, outputs) in &unblinded_outputs { for output in outputs.iter().skip(outputs.len() / 2) { - let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None).unwrap(); + let dbo = DbUnblindedOutput::from_unblinded_output(output.clone(), &factories, None, OutputSource::Unknown) + .unwrap(); // Only the outputs in blocks after the birthday should be included in the recovered total if *h >= NUM_BLOCKS.saturating_sub(BIRTHDAY_OFFSET).saturating_sub(2) { total_outputs_to_recover += 1; @@ -911,7 +913,8 @@ async fn test_utxo_scanner_one_sided_payments() { utxos_by_block.push(block11); block_headers.insert(NUM_BLOCKS, block_header11); - db_unblinded_outputs.push(DbUnblindedOutput::from_unblinded_output(uo, &factories, None).unwrap()); + db_unblinded_outputs + .push(DbUnblindedOutput::from_unblinded_output(uo, &factories, None, OutputSource::Unknown).unwrap()); test_interface .oms_mock_state .set_one_sided_payments(db_unblinded_outputs); diff --git a/base_layer/wallet/tests/wallet.rs b/base_layer/wallet/tests/wallet.rs index 5a6147eb44..a0cae8e830 100644 --- a/base_layer/wallet/tests/wallet.rs +++ b/base_layer/wallet/tests/wallet.rs @@ -95,7 +95,7 @@ use tempfile::tempdir; use tokio::{sync::mpsc, time::sleep}; pub mod support; -use tari_wallet::output_manager_service::storage::database::OutputManagerDatabase; +use tari_wallet::output_manager_service::{storage::database::OutputManagerDatabase, UtxoSelectionCriteria}; fn create_peer(public_key: CommsPublicKey, 
net_address: Multiaddr) -> Peer { Peer::new( @@ -129,7 +129,6 @@ async fn create_wallet( peer_database_name: random::string(8), max_concurrent_inbound_tasks: 10, max_concurrent_outbound_tasks: 10, - outbound_buffer_size: 100, dht: DhtConfig { discovery_request_timeout: Duration::from_secs(1), auto_join: true, @@ -175,7 +174,7 @@ async fn create_wallet( let _db_value = wallet_backend.write(WriteOperation::Insert(DbKeyValuePair::BaseNodeChainMetadata(metadata))); let wallet_db = WalletDatabase::new(wallet_backend); - let master_seed = read_or_create_master_seed(recovery_seed, &wallet_db).await?; + let master_seed = read_or_create_master_seed(recovery_seed, &wallet_db)?; let output_db = OutputManagerDatabase::new(output_manager_backend.clone()); @@ -274,6 +273,7 @@ async fn test_wallet() { .send_transaction( bob_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(5), "".to_string(), @@ -401,19 +401,17 @@ async fn test_wallet() { let alice_seed = CipherSeed::new(); - alice_wallet.db.set_master_seed(alice_seed).await.unwrap(); + alice_wallet.db.set_master_seed(alice_seed).unwrap(); shutdown_a.trigger(); alice_wallet.wait_until_shutdown().await; - partial_wallet_backup(current_wallet_path.clone(), backup_wallet_path.clone()) - .await - .unwrap(); + partial_wallet_backup(current_wallet_path.clone(), backup_wallet_path.clone()).unwrap(); let connection = run_migration_and_create_sqlite_connection(¤t_wallet_path, 16).expect("Could not open Sqlite db"); let wallet_db = WalletDatabase::new(WalletSqliteDatabase::new(connection.clone(), None).unwrap()); - let master_seed = wallet_db.get_master_seed().await.unwrap(); + let master_seed = wallet_db.get_master_seed().unwrap(); assert!(master_seed.is_some()); // Checking that the backup has had its Comms Private Key is cleared. 
let connection = run_migration_and_create_sqlite_connection(&backup_wallet_path, 16).expect( @@ -421,7 +419,7 @@ async fn test_wallet() { db", ); let backup_wallet_db = WalletDatabase::new(WalletSqliteDatabase::new(connection.clone(), None).unwrap()); - let master_seed = backup_wallet_db.get_master_seed().await.unwrap(); + let master_seed = backup_wallet_db.get_master_seed().unwrap(); assert!(master_seed.is_none()); shutdown_b.trigger(); @@ -591,6 +589,7 @@ async fn test_store_and_forward_send_tx() { .send_transaction( carol_identity.public_key().clone(), value, + UtxoSelectionCriteria::default(), OutputFeatures::default(), MicroTari::from(3), "Store and Forward!".to_string(), @@ -672,7 +671,6 @@ async fn test_import_utxo() { peer_database_name: random::string(8), max_concurrent_inbound_tasks: 10, max_concurrent_outbound_tasks: 10, - outbound_buffer_size: 10, dht: Default::default(), allow_test_addresses: true, listener_liveness_allowlist_cidrs: StringList::new(), @@ -811,7 +809,7 @@ async fn test_recovery_birthday() { .await .unwrap(); - let db_birthday = wallet.db.get_wallet_birthday().await.unwrap(); + let db_birthday = wallet.db.get_wallet_birthday().unwrap(); assert_eq!(birthday, db_birthday); } diff --git a/base_layer/wallet_ffi/Cargo.toml b/base_layer/wallet_ffi/Cargo.toml index 8fb35a8234..8971f49797 100644 --- a/base_layer/wallet_ffi/Cargo.toml +++ b/base_layer/wallet_ffi/Cargo.toml @@ -3,22 +3,22 @@ name = "tari_wallet_ffi" authors = ["The Tari Development Community"] description = "Tari cryptocurrency wallet C FFI bindings" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] -tari_core = { version = "^0.37", path = "../../base_layer/core", default-features = false, features = ["tari_mmr", "transactions"]} +tari_core = { version = "^0.38", path = "../../base_layer/core", default-features = false, features = ["tari_mmr", "transactions"]} tari_common = {path="../../common"} tari_common_types = 
{path="../common_types"} -tari_comms = { version = "^0.37", path = "../../comms/core", features = ["c_integration"]} -tari_comms_dht = { version = "^0.37", path = "../../comms/dht", default-features = false } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } -tari_key_manager = { version = "^0.37", path = "../key_manager" } -tari_p2p = { version = "^0.37", path = "../p2p" } +tari_comms = { version = "^0.38", path = "../../comms/core", features = ["c_integration"]} +tari_comms_dht = { version = "^0.38", path = "../../comms/dht", default-features = false } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } +tari_key_manager = { version = "^0.38", path = "../key_manager" } +tari_p2p = { version = "^0.38", path = "../p2p" } tari_script = { path = "../../infrastructure/tari_script" } -tari_shutdown = { version = "^0.37", path = "../../infrastructure/shutdown" } +tari_shutdown = { version = "^0.38", path = "../../infrastructure/shutdown" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } -tari_wallet = { version = "^0.37", path = "../wallet", features = ["c_integration"]} +tari_wallet = { version = "^0.38", path = "../wallet", features = ["c_integration"]} chrono = { version = "0.4.19", default-features = false, features = ["serde"] } futures = { version = "^0.3.1", features =["compat", "std"]} @@ -29,8 +29,7 @@ log4rs = {version = "1.0.0", features = ["console_appender", "file_appender", "y openssl = { version = "0.10.41", features = ["vendored"] } rand = "0.8" thiserror = "1.0.26" -tokio = "1.11" -env_logger = "0.7.0" +tokio = "1.20" num-traits = "0.2.15" itertools = "0.10.3" @@ -50,9 +49,9 @@ crate-type = ["staticlib","cdylib"] [dev-dependencies] tempfile = "3.1.0" lazy_static = "1.3.0" -tari_key_manager = { version = "^0.37", path = "../key_manager" } -tari_common_types = { version = "^0.37", path = "../../base_layer/common_types"} 
-tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils"} +tari_key_manager = { version = "^0.38", path = "../key_manager" } +tari_common_types = { version = "^0.38", path = "../../base_layer/common_types"} +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils"} tari_service_framework = { path = "../../base_layer/service_framework" } [build-dependencies] diff --git a/base_layer/wallet_ffi/build.rs b/base_layer/wallet_ffi/build.rs index 8c41d640db..29e32918c4 100644 --- a/base_layer/wallet_ffi/build.rs +++ b/base_layer/wallet_ffi/build.rs @@ -17,7 +17,7 @@ fn main() { parse: ParseConfig { parse_deps: true, include: Some(vec![ - // "tari_core".to_string(), + "tari_core".to_string(), "tari_common_types".to_string(), "tari_crypto".to_string(), "tari_p2p".to_string(), diff --git a/base_layer/wallet_ffi/src/callback_handler.rs b/base_layer/wallet_ffi/src/callback_handler.rs index d606f6ea71..4533ef0637 100644 --- a/base_layer/wallet_ffi/src/callback_handler.rs +++ b/base_layer/wallet_ffi/src/callback_handler.rs @@ -235,15 +235,15 @@ where TBackend: TransactionBackend + 'static trace!(target: LOG_TARGET, "Transaction Service Callback Handler event {:?}", msg); match (*msg).clone() { TransactionEvent::ReceivedTransaction(tx_id) => { - self.receive_transaction_event(tx_id).await; + self.receive_transaction_event(tx_id); self.trigger_balance_refresh().await; }, TransactionEvent::ReceivedTransactionReply(tx_id) => { - self.receive_transaction_reply_event(tx_id).await; + self.receive_transaction_reply_event(tx_id); self.trigger_balance_refresh().await; }, TransactionEvent::ReceivedFinalizedTransaction(tx_id) => { - self.receive_finalized_transaction_event(tx_id).await; + self.receive_finalized_transaction_event(tx_id); self.trigger_balance_refresh().await; }, TransactionEvent::TransactionSendResult(tx_id, status) => { @@ -251,27 +251,27 @@ where TBackend: TransactionBackend + 'static self.trigger_balance_refresh().await; }, 
TransactionEvent::TransactionCancelled(tx_id, reason) => { - self.receive_transaction_cancellation(tx_id, reason as u64).await; + self.receive_transaction_cancellation(tx_id, reason as u64); self.trigger_balance_refresh().await; }, TransactionEvent::TransactionBroadcast(tx_id) => { - self.receive_transaction_broadcast_event(tx_id).await; + self.receive_transaction_broadcast_event(tx_id); self.trigger_balance_refresh().await; }, TransactionEvent::TransactionMined{tx_id, is_valid: _} => { - self.receive_transaction_mined_event(tx_id).await; + self.receive_transaction_mined_event(tx_id); self.trigger_balance_refresh().await; }, TransactionEvent::TransactionMinedUnconfirmed{tx_id, num_confirmations, is_valid: _} => { - self.receive_transaction_mined_unconfirmed_event(tx_id, num_confirmations).await; + self.receive_transaction_mined_unconfirmed_event(tx_id, num_confirmations); self.trigger_balance_refresh().await; }, TransactionEvent::FauxTransactionConfirmed{tx_id, is_valid: _} => { - self.receive_faux_transaction_confirmed_event(tx_id).await; + self.receive_faux_transaction_confirmed_event(tx_id); self.trigger_balance_refresh().await; }, TransactionEvent::FauxTransactionUnconfirmed{tx_id, num_confirmations, is_valid: _} => { - self.receive_faux_transaction_unconfirmed_event(tx_id, num_confirmations).await; + self.receive_faux_transaction_unconfirmed_event(tx_id, num_confirmations); self.trigger_balance_refresh().await; }, TransactionEvent::TransactionValidationStateChanged(_request_key) => { @@ -358,8 +358,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_transaction_event(&mut self, tx_id: TxId) { - match self.db.get_pending_inbound_transaction(tx_id).await { + fn receive_transaction_event(&mut self, tx_id: TxId) { + match self.db.get_pending_inbound_transaction(tx_id) { Ok(tx) => { debug!( target: LOG_TARGET, @@ -377,8 +377,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_transaction_reply_event(&mut self, tx_id: TxId) 
{ - match self.db.get_completed_transaction(tx_id).await { + fn receive_transaction_reply_event(&mut self, tx_id: TxId) { + match self.db.get_completed_transaction(tx_id) { Ok(tx) => { debug!( target: LOG_TARGET, @@ -393,8 +393,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_finalized_transaction_event(&mut self, tx_id: TxId) { - match self.db.get_completed_transaction(tx_id).await { + fn receive_finalized_transaction_event(&mut self, tx_id: TxId) { + match self.db.get_completed_transaction(tx_id) { Ok(tx) => { debug!( target: LOG_TARGET, @@ -458,15 +458,15 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_transaction_cancellation(&mut self, tx_id: TxId, reason: u64) { + fn receive_transaction_cancellation(&mut self, tx_id: TxId, reason: u64) { let mut transaction = None; - if let Ok(tx) = self.db.get_cancelled_completed_transaction(tx_id).await { + if let Ok(tx) = self.db.get_cancelled_completed_transaction(tx_id) { transaction = Some(tx); - } else if let Ok(tx) = self.db.get_cancelled_pending_outbound_transaction(tx_id).await { + } else if let Ok(tx) = self.db.get_cancelled_pending_outbound_transaction(tx_id) { let mut outbound_tx = CompletedTransaction::from(tx); outbound_tx.source_public_key = self.comms_public_key.clone(); transaction = Some(outbound_tx); - } else if let Ok(tx) = self.db.get_cancelled_pending_inbound_transaction(tx_id).await { + } else if let Ok(tx) = self.db.get_cancelled_pending_inbound_transaction(tx_id) { let mut inbound_tx = CompletedTransaction::from(tx); inbound_tx.destination_public_key = self.comms_public_key.clone(); transaction = Some(inbound_tx); @@ -491,8 +491,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_transaction_broadcast_event(&mut self, tx_id: TxId) { - match self.db.get_completed_transaction(tx_id).await { + fn receive_transaction_broadcast_event(&mut self, tx_id: TxId) { + match self.db.get_completed_transaction(tx_id) { Ok(tx) => { debug!( target: 
LOG_TARGET, @@ -507,8 +507,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_transaction_mined_event(&mut self, tx_id: TxId) { - match self.db.get_completed_transaction(tx_id).await { + fn receive_transaction_mined_event(&mut self, tx_id: TxId) { + match self.db.get_completed_transaction(tx_id) { Ok(tx) => { debug!( target: LOG_TARGET, @@ -523,8 +523,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_transaction_mined_unconfirmed_event(&mut self, tx_id: TxId, confirmations: u64) { - match self.db.get_completed_transaction(tx_id).await { + fn receive_transaction_mined_unconfirmed_event(&mut self, tx_id: TxId, confirmations: u64) { + match self.db.get_completed_transaction(tx_id) { Ok(tx) => { debug!( target: LOG_TARGET, @@ -539,8 +539,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_faux_transaction_confirmed_event(&mut self, tx_id: TxId) { - match self.db.get_completed_transaction(tx_id).await { + fn receive_faux_transaction_confirmed_event(&mut self, tx_id: TxId) { + match self.db.get_completed_transaction(tx_id) { Ok(tx) => { debug!( target: LOG_TARGET, @@ -555,8 +555,8 @@ where TBackend: TransactionBackend + 'static } } - async fn receive_faux_transaction_unconfirmed_event(&mut self, tx_id: TxId, confirmations: u64) { - match self.db.get_completed_transaction(tx_id).await { + fn receive_faux_transaction_unconfirmed_event(&mut self, tx_id: TxId, confirmations: u64) { + match self.db.get_completed_transaction(tx_id) { Ok(tx) => { debug!( target: LOG_TARGET, diff --git a/base_layer/wallet_ffi/src/callback_handler_tests.rs b/base_layer/wallet_ffi/src/callback_handler_tests.rs index 7e14b2bf1a..9448dbd422 100644 --- a/base_layer/wallet_ffi/src/callback_handler_tests.rs +++ b/base_layer/wallet_ffi/src/callback_handler_tests.rs @@ -247,8 +247,7 @@ mod test { "1".to_string(), Utc::now().naive_utc(), ); - runtime - .block_on(db.add_pending_inbound_transaction(1u64.into(), inbound_tx.clone())) + 
db.add_pending_inbound_transaction(1u64.into(), inbound_tx.clone()) .unwrap(); let completed_tx = CompletedTransaction::new( @@ -272,8 +271,7 @@ mod test { None, None, ); - runtime - .block_on(db.insert_completed_transaction(2u64.into(), completed_tx.clone())) + db.insert_completed_transaction(2u64.into(), completed_tx.clone()) .unwrap(); let stp = SenderTransactionProtocol::new_placeholder(); @@ -288,29 +286,25 @@ mod test { Utc::now().naive_utc(), false, ); - runtime - .block_on(db.add_pending_outbound_transaction(3u64.into(), outbound_tx.clone())) + db.add_pending_outbound_transaction(3u64.into(), outbound_tx.clone()) .unwrap(); - runtime.block_on(db.cancel_pending_transaction(3u64.into())).unwrap(); + db.cancel_pending_transaction(3u64.into()).unwrap(); let inbound_tx_cancelled = InboundTransaction { tx_id: 4u64.into(), ..inbound_tx.clone() }; - runtime - .block_on(db.add_pending_inbound_transaction(4u64.into(), inbound_tx_cancelled)) + db.add_pending_inbound_transaction(4u64.into(), inbound_tx_cancelled) .unwrap(); - runtime.block_on(db.cancel_pending_transaction(4u64.into())).unwrap(); + db.cancel_pending_transaction(4u64.into()).unwrap(); let completed_tx_cancelled = CompletedTransaction { tx_id: 5u64.into(), ..completed_tx.clone() }; - runtime - .block_on(db.insert_completed_transaction(5u64.into(), completed_tx_cancelled.clone())) + db.insert_completed_transaction(5u64.into(), completed_tx_cancelled.clone()) .unwrap(); - runtime - .block_on(db.reject_completed_transaction(5u64.into(), TxCancellationReason::Unknown)) + db.reject_completed_transaction(5u64.into(), TxCancellationReason::Unknown) .unwrap(); let faux_unconfirmed_tx = CompletedTransaction::new( @@ -334,8 +328,7 @@ mod test { Some(2), Some(NaiveDateTime::from_timestamp(0, 0)), ); - runtime - .block_on(db.insert_completed_transaction(6u64.into(), faux_unconfirmed_tx.clone())) + db.insert_completed_transaction(6u64.into(), faux_unconfirmed_tx.clone()) .unwrap(); let faux_confirmed_tx = 
CompletedTransaction::new( @@ -359,8 +352,7 @@ mod test { Some(5), Some(NaiveDateTime::from_timestamp(0, 0)), ); - runtime - .block_on(db.insert_completed_transaction(7u64.into(), faux_confirmed_tx.clone())) + db.insert_completed_transaction(7u64.into(), faux_confirmed_tx.clone()) .unwrap(); let (transaction_event_sender, transaction_event_receiver) = broadcast::channel(20); diff --git a/base_layer/wallet_ffi/src/lib.rs b/base_layer/wallet_ffi/src/lib.rs index 161a79e7c7..9985c4c332 100644 --- a/base_layer/wallet_ffi/src/lib.rs +++ b/base_layer/wallet_ffi/src/lib.rs @@ -125,7 +125,7 @@ use tari_script::{inputs, script}; use tari_shutdown::Shutdown; use tari_utilities::{hex, hex::Hex, SafePassword}; use tari_wallet::{ - connectivity_service::WalletConnectivityInterface, + connectivity_service::{WalletConnectivityHandle, WalletConnectivityInterface}, contacts_service::storage::database::Contact, error::{WalletError, WalletStorageError}, output_manager_service::{ @@ -135,6 +135,7 @@ use tari_wallet::{ models::DbUnblindedOutput, OutputStatus, }, + UtxoSelectionCriteria, }, storage::{ database::WalletDatabase, @@ -1031,8 +1032,8 @@ pub unsafe extern "C" fn public_key_to_emoji_id(pk: *mut TariPublicKey, error_ou return CString::into_raw(result); } - let emoji = EmojiId::from_pubkey(&(*pk)); - result = CString::new(emoji.as_str()).expect("Emoji will not fail."); + let emoji_id = EmojiId::from_public_key(&(*pk)); + result = CString::new(emoji_id.to_emoji_string().as_str()).expect("Emoji will not fail."); CString::into_raw(result) } @@ -1060,10 +1061,10 @@ pub unsafe extern "C" fn emoji_id_to_public_key(emoji: *const c_char, error_out: match CStr::from_ptr(emoji) .to_str() - .map_err(|_| EmojiIdError) - .and_then(EmojiId::str_to_pubkey) + .map_err(|_| EmojiIdError::InvalidEmoji) + .and_then(EmojiId::from_emoji_string) { - Ok(pk) => Box::into_raw(Box::new(pk)), + Ok(emoji_id) => Box::into_raw(Box::new(emoji_id.to_public_key())), Err(_) => { error = 
LibWalletError::from(InterfaceError::InvalidEmojiId).code; ptr::swap(error_out, &mut error as *mut c_int); @@ -2251,6 +2252,7 @@ pub unsafe extern "C" fn liveness_data_get_message_type( /// | 0 | Online | /// | 1 | Offline | /// | 2 | NeverSeen | +/// | 3 | Banned | /// /// # Safety /// The ```liveness_data_destroy``` method must be called when finished with a TariContactsLivenessData to prevent a @@ -3905,7 +3907,6 @@ pub unsafe extern "C" fn comms_config_create( peer_database_name: database_name_string, max_concurrent_inbound_tasks: 25, max_concurrent_outbound_tasks: 50, - outbound_buffer_size: 50, dht: DhtConfig { discovery_request_timeout: Duration::from_secs(discovery_timeout_in_secs), database_url: DbConnectionUrl::File(dht_database_path), @@ -4293,23 +4294,21 @@ pub unsafe extern "C" fn wallet_create( // If the transport type is Tor then check if there is a stored TorID, if there is update the Transport Type let mut comms_config = (*config).clone(); if let TransportType::Tor = comms_config.transport.transport_type { - comms_config.transport.tor.identity = runtime.block_on(wallet_database.get_tor_id()).ok().flatten(); + comms_config.transport.tor.identity = wallet_database.get_tor_id().ok().flatten(); } let result = runtime.block_on(async { let master_seed = read_or_create_master_seed(recovery_seed, &wallet_database) - .await .map_err(|err| WalletStorageError::RecoverySeedError(err.to_string()))?; let comms_secret_key = derive_comms_secret_key(&master_seed) .map_err(|err| WalletStorageError::RecoverySeedError(err.to_string()))?; - let node_features = wallet_database.get_node_features().await?.unwrap_or_default(); + let node_features = wallet_database.get_node_features()?.unwrap_or_default(); let node_address = wallet_database - .get_node_address() - .await? + .get_node_address()? 
.or_else(|| comms_config.public_address.clone()) .unwrap_or_else(Multiaddr::empty); - let identity_sig = wallet_database.get_comms_identity_signature().await?; + let identity_sig = wallet_database.get_comms_identity_signature()?; // This checks if anything has changed by validating the previous signature and if invalid, setting identity_sig // to None @@ -4333,7 +4332,7 @@ pub unsafe extern "C" fn wallet_create( .as_ref() .expect("unreachable panic") .clone(); - wallet_database.set_comms_identity_signature(sig).await?; + wallet_database.set_comms_identity_signature(sig)?; } Ok((master_seed, node_identity)) }); @@ -4359,7 +4358,7 @@ pub unsafe extern "C" fn wallet_create( ..Default::default() }; - let mut recovery_lookup = match runtime.block_on(wallet_database.get_client_key_value(RECOVERY_KEY.to_owned())) { + let mut recovery_lookup = match wallet_database.get_client_key_value(RECOVERY_KEY.to_owned()) { Err(_) => false, Ok(None) => false, Ok(Some(_)) => true, @@ -4394,7 +4393,7 @@ pub unsafe extern "C" fn wallet_create( Ok(mut w) => { // lets ensure the wallet tor_id is saved, this could have been changed during wallet startup if let Some(hs) = w.comms.hidden_service() { - if let Err(e) = runtime.block_on(w.db.set_tor_identity(hs.tor_identity().clone())) { + if let Err(e) = w.db.set_tor_identity(hs.tor_identity().clone()) { warn!(target: LOG_TARGET, "Could not save tor identity to db: {:?}", e); } } @@ -5405,6 +5404,8 @@ pub unsafe extern "C" fn balance_destroy(balance: *mut TariBalance) { /// `wallet` - The TariWallet pointer /// `dest_public_key` - The TariPublicKey pointer of the peer /// `amount` - The amount +/// `commitments` - A `TariVector` of "strings", tagged as `TariTypeTag::String`, containing commitment's hex values +/// (see `Commitment::to_hex()`) /// `fee_per_gram` - The transaction fee /// `message` - The pointer to a char array /// `error_out` - Pointer to an int which will be modified to an error code should one occur, may not be null. 
Functions @@ -5420,6 +5421,7 @@ pub unsafe extern "C" fn wallet_send_transaction( wallet: *mut TariWallet, dest_public_key: *mut TariPublicKey, amount: c_ulonglong, + commitments: *mut TariVector, fee_per_gram: c_ulonglong, message: *const c_char, one_sided: bool, @@ -5439,6 +5441,18 @@ pub unsafe extern "C" fn wallet_send_transaction( return 0; } + let selection_criteria = match commitments.as_ref() { + None => UtxoSelectionCriteria::default(), + Some(cs) => match cs.to_commitment_vec() { + Ok(cs) => UtxoSelectionCriteria::specific(cs), + Err(e) => { + error!(target: LOG_TARGET, "failed to convert from tari vector: {:?}", e); + ptr::replace(error_out, LibWalletError::from(e).code as c_int); + return 0; + }, + }, + }; + let message_string; if message.is_null() { error = LibWalletError::from(InterfaceError::NullError("message".to_string())).code; @@ -5473,6 +5487,7 @@ pub unsafe extern "C" fn wallet_send_transaction( .send_one_sided_to_stealth_address_transaction( (*dest_public_key).clone(), MicroTari::from(amount), + selection_criteria, OutputFeatures::default(), MicroTari::from(fee_per_gram), message_string, @@ -5491,6 +5506,7 @@ pub unsafe extern "C" fn wallet_send_transaction( .block_on((*wallet).wallet.transaction_service.send_transaction( (*dest_public_key).clone(), MicroTari::from(amount), + selection_criteria, OutputFeatures::default(), MicroTari::from(fee_per_gram), message_string, @@ -5510,6 +5526,8 @@ pub unsafe extern "C" fn wallet_send_transaction( /// ## Arguments /// `wallet` - The TariWallet pointer /// `amount` - The amount +/// `commitments` - A `TariVector` of "strings", tagged as `TariTypeTag::String`, containing commitment's hex values +/// (see `Commitment::to_hex()`) /// `fee_per_gram` - The fee per gram /// `num_kernels` - The number of transaction kernels /// `num_outputs` - The number of outputs @@ -5525,6 +5543,7 @@ pub unsafe extern "C" fn wallet_send_transaction( pub unsafe extern "C" fn wallet_get_fee_estimate( wallet: *mut TariWallet, 
amount: c_ulonglong, + commitments: *mut TariVector, fee_per_gram: c_ulonglong, num_kernels: c_ulonglong, num_outputs: c_ulonglong, @@ -5538,10 +5557,23 @@ pub unsafe extern "C" fn wallet_get_fee_estimate( return 0; } + let selection_criteria = match commitments.as_ref() { + None => UtxoSelectionCriteria::default(), + Some(cs) => match cs.to_commitment_vec() { + Ok(cs) => UtxoSelectionCriteria::specific(cs), + Err(e) => { + error!(target: LOG_TARGET, "failed to convert from tari vector: {:?}", e); + ptr::replace(error_out, LibWalletError::from(e).code as c_int); + return 0; + }, + }, + }; + match (*wallet) .runtime .block_on((*wallet).wallet.output_manager_service.fee_estimate( MicroTari::from(amount), + selection_criteria, MicroTari::from(fee_per_gram), num_kernels as usize, num_outputs as usize, @@ -6663,10 +6695,7 @@ pub unsafe extern "C" fn wallet_get_seed_words(wallet: *mut TariWallet, error_ou return ptr::null_mut(); } - match (*wallet) - .runtime - .block_on((*wallet).wallet.get_seed_words(&MnemonicLanguage::English)) - { + match (*wallet).wallet.get_seed_words(&MnemonicLanguage::English) { Ok(seed_words) => Box::into_raw(Box::new(TariSeedWords(seed_words))), Err(e) => { error = LibWalletError::from(e).code; @@ -6865,10 +6894,7 @@ pub unsafe extern "C" fn wallet_set_key_value( } } - match (*wallet) - .runtime - .block_on((*wallet).wallet.db.set_client_key_value(key_string, value_string)) - { + match (*wallet).wallet.db.set_client_key_value(key_string, value_string) { Ok(_) => true, Err(e) => { error = LibWalletError::from(WalletError::WalletStorageError(e)).code; @@ -6925,10 +6951,7 @@ pub unsafe extern "C" fn wallet_get_value( } } - match (*wallet) - .runtime - .block_on((*wallet).wallet.db.get_client_key_value(key_string)) - { + match (*wallet).wallet.db.get_client_key_value(key_string) { Ok(result) => match result { None => { error = LibWalletError::from(WalletError::WalletStorageError(WalletStorageError::ValuesNotFound)).code; @@ -6995,10 +7018,7 @@ pub 
unsafe extern "C" fn wallet_clear_value( } } - match (*wallet) - .runtime - .block_on((*wallet).wallet.db.clear_client_value(key_string)) - { + match (*wallet).wallet.db.clear_client_value(key_string) { Ok(result) => result, Err(e) => { error = LibWalletError::from(WalletError::WalletStorageError(e)).code; @@ -7032,7 +7052,7 @@ pub unsafe extern "C" fn wallet_is_recovery_in_progress(wallet: *mut TariWallet, return false; } - match (*wallet).runtime.block_on((*wallet).wallet.is_recovery_in_progress()) { + match (*wallet).wallet.is_recovery_in_progress() { Ok(result) => result, Err(e) => { error = LibWalletError::from(e).code; @@ -7116,7 +7136,7 @@ pub unsafe extern "C" fn wallet_start_recovery( let shutdown_signal = (*wallet).shutdown.to_signal(); let peer_public_keys: Vec = vec![(*base_node_public_key).clone()]; - let mut recovery_task_builder = UtxoScannerService::::builder(); + let mut recovery_task_builder = UtxoScannerService::::builder(); if !recovered_output_message.is_null() { let message_str = match CStr::from_ptr(recovered_output_message).to_str() { @@ -7265,17 +7285,10 @@ pub unsafe extern "C" fn file_partial_backup( } let backup_path = PathBuf::from(backup_path_string); - let runtime = Runtime::new(); - match runtime { - Ok(runtime) => match runtime.block_on(partial_wallet_backup(original_path, backup_path)) { - Ok(_) => (), - Err(e) => { - error = LibWalletError::from(WalletError::WalletStorageError(e)).code; - ptr::swap(error_out, &mut error as *mut c_int); - }, - }, + match partial_wallet_backup(original_path, backup_path) { + Ok(_) => (), Err(e) => { - error = LibWalletError::from(InterfaceError::TokioError(e.to_string())).code; + error = LibWalletError::from(WalletError::WalletStorageError(e)).code; ptr::swap(error_out, &mut error as *mut c_int); }, } @@ -8190,7 +8203,7 @@ mod test { assert_ne!((*private_bytes), (*public_bytes)); let emoji = public_key_to_emoji_id(public_key, error_ptr) as *mut c_char; let emoji_str = 
CStr::from_ptr(emoji).to_str().unwrap(); - assert!(EmojiId::is_valid(emoji_str)); + assert!(EmojiId::from_emoji_string(emoji_str).is_ok()); let pk_emoji = emoji_id_to_public_key(emoji, error_ptr); assert_eq!((*public_key), (*pk_emoji)); private_key_destroy(private_key); @@ -8519,13 +8532,11 @@ mod test { error_ptr, ); - let runtime = Runtime::new().unwrap(); - let connection = run_migration_and_create_sqlite_connection(&sql_database_path, 16).expect("Could not open Sqlite db"); let wallet_backend = WalletDatabase::new(WalletSqliteDatabase::new(connection, None).unwrap()); - let stored_seed = runtime.block_on(wallet_backend.get_master_seed()).unwrap(); + let stored_seed = wallet_backend.get_master_seed().unwrap(); drop(wallet_backend); assert!(stored_seed.is_none(), "No key should be stored yet"); @@ -8564,7 +8575,7 @@ mod test { run_migration_and_create_sqlite_connection(&sql_database_path, 16).expect("Could not open Sqlite db"); let wallet_backend = WalletDatabase::new(WalletSqliteDatabase::new(connection, None).unwrap()); - let stored_seed1 = runtime.block_on(wallet_backend.get_master_seed()).unwrap().unwrap(); + let stored_seed1 = wallet_backend.get_master_seed().unwrap().unwrap(); drop(wallet_backend); @@ -8605,7 +8616,7 @@ mod test { run_migration_and_create_sqlite_connection(&sql_database_path, 16).expect("Could not open Sqlite db"); let wallet_backend = WalletDatabase::new(WalletSqliteDatabase::new(connection, None).unwrap()); - let stored_seed2 = runtime.block_on(wallet_backend.get_master_seed()).unwrap().unwrap(); + let stored_seed2 = wallet_backend.get_master_seed().unwrap().unwrap(); assert_eq!(stored_seed1, stored_seed2); @@ -8624,7 +8635,7 @@ mod test { run_migration_and_create_sqlite_connection(&sql_database_path, 16).expect("Could not open Sqlite db"); let wallet_backend = WalletDatabase::new(WalletSqliteDatabase::new(connection, None).unwrap()); - let stored_seed = runtime.block_on(wallet_backend.get_master_seed()).unwrap(); + let stored_seed = 
wallet_backend.get_master_seed().unwrap(); assert!(stored_seed.is_none(), "key should be cleared"); drop(wallet_backend); diff --git a/base_layer/wallet_ffi/wallet.h b/base_layer/wallet_ffi/wallet.h index 09b190b334..362d617c7d 100644 --- a/base_layer/wallet_ffi/wallet.h +++ b/base_layer/wallet_ffi/wallet.h @@ -8,6 +8,11 @@ #include #include +/** + * The number of unique fields available. This always matches the number of variants in `OutputField`. + */ +#define OutputFields_NUM_FIELDS 10 + enum TariTypeTag { Text = 0, Utxo = 1, @@ -60,14 +65,28 @@ struct Contact; struct ContactsLivenessData; +struct Covenant; + struct EmojiSet; +/** + * value: u64 + tag: [u8; 16] + */ +struct EncryptedValue; + +struct FeePerGramStat; + struct FeePerGramStatsResponse; struct InboundTransaction; struct OutboundTransaction; +/** + * Options for UTXO's + */ +struct OutputFeatures; + /** * Configuration for a comms node */ @@ -138,6 +157,15 @@ struct TariSeedWords; struct TariWallet; +/** + * The transaction kernel tracks the excess for a given transaction. For an explanation of what the excess is, and + * why it is necessary, refer to the + * [Mimblewimble TLU post](https://tlu.tarilabs.com/protocols/mimblewimble-1/sources/PITCHME.link.html?highlight=mimblewimble#mimblewimble). + * The kernel also tracks other transaction metadata, such as the lock height for the transaction (i.e. the earliest + * this transaction can be mined) and the transaction fee, in cleartext. 
+ */ +struct TransactionKernel; + struct TransactionSendStatus; struct TransportConfig; @@ -157,7 +185,7 @@ struct TariCoinPreview { uint64_t fee; }; -typedef TransactionKernel TariTransactionKernel; +typedef struct TransactionKernel TariTransactionKernel; /** * Define the explicit Public key implementation for the Tari base layer @@ -261,11 +289,11 @@ typedef RistrettoComSig ComSignature; typedef ComSignature TariCommitmentSignature; -typedef Covenant TariCovenant; +typedef struct Covenant TariCovenant; -typedef EncryptedValue TariEncryptedValue; +typedef struct EncryptedValue TariEncryptedValue; -typedef OutputFeatures TariOutputFeatures; +typedef struct OutputFeatures TariOutputFeatures; typedef struct Contact TariContact; @@ -287,7 +315,7 @@ typedef struct Balance TariBalance; typedef struct FeePerGramStatsResponse TariFeePerGramStats; -typedef FeePerGramStat TariFeePerGramStat; +typedef struct FeePerGramStat TariFeePerGramStat; struct TariUtxo { const char *commitment; @@ -1230,6 +1258,7 @@ int liveness_data_get_message_type(TariContactsLivenessData *liveness_data, * | 0 | Online | * | 1 | Offline | * | 2 | NeverSeen | + * | 3 | Banned | * * # Safety * The ```liveness_data_destroy``` method must be called when finished with a TariContactsLivenessData to prevent a @@ -2624,6 +2653,8 @@ void balance_destroy(TariBalance *balance); * `wallet` - The TariWallet pointer * `dest_public_key` - The TariPublicKey pointer of the peer * `amount` - The amount + * `commitments` - A `TariVector` of "strings", tagged as `TariTypeTag::String`, containing commitment's hex values + * (see `Commitment::to_hex()`) * `fee_per_gram` - The transaction fee * `message` - The pointer to a char array * `error_out` - Pointer to an int which will be modified to an error code should one occur, may not be null. 
Functions @@ -2638,6 +2669,7 @@ void balance_destroy(TariBalance *balance); unsigned long long wallet_send_transaction(struct TariWallet *wallet, TariPublicKey *dest_public_key, unsigned long long amount, + struct TariVector *commitments, unsigned long long fee_per_gram, const char *message, bool one_sided, @@ -2649,6 +2681,8 @@ unsigned long long wallet_send_transaction(struct TariWallet *wallet, * ## Arguments * `wallet` - The TariWallet pointer * `amount` - The amount + * `commitments` - A `TariVector` of "strings", tagged as `TariTypeTag::String`, containing commitment's hex values + * (see `Commitment::to_hex()`) * `fee_per_gram` - The fee per gram * `num_kernels` - The number of transaction kernels * `num_outputs` - The number of outputs @@ -2663,6 +2697,7 @@ unsigned long long wallet_send_transaction(struct TariWallet *wallet, */ unsigned long long wallet_get_fee_estimate(struct TariWallet *wallet, unsigned long long amount, + struct TariVector *commitments, unsigned long long fee_per_gram, unsigned long long num_kernels, unsigned long long num_outputs, diff --git a/changelog.md b/changelog.md index 25fe96c86c..d6eb470aa6 100644 --- a/changelog.md +++ b/changelog.md @@ -2,6 +2,90 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [0.38.3](https://github.com/tari-project/tari/compare/v0.38.2...v0.38.3) (2022-09-13) + + +### Bug Fixes + +* **ci:** libtor build on Ubuntu ([#4644](https://github.com/tari-project/tari/issues/4644)) ([6f69276](https://github.com/tari-project/tari/commit/6f692766d5cca5e9b393b2a06662c85fc7ca5aff)) +* **comms/messaging:** fix possible deadlock in outbound pipeline ([#4657](https://github.com/tari-project/tari/issues/4657)) ([3fcc6a0](https://github.com/tari-project/tari/commit/3fcc6a00c663dfab6ea7a196f56d689eda5990d2)) +* **core/sync:** handle deadline timeouts by changing peer ([#4649](https://github.com/tari-project/tari/issues/4649)) ([5ed997c](https://github.com/tari-project/tari/commit/5ed997cdf4ac29daa28f5e2654ace99a65ef0144)) +* fee estimate ([#4656](https://github.com/tari-project/tari/issues/4656)) ([d9de2e0](https://github.com/tari-project/tari/commit/d9de2e01c745afb7c876454510191982f1e9af68)) +* replace Luhn checksum with DammSum ([#4639](https://github.com/tari-project/tari/issues/4639)) ([c01471a](https://github.com/tari-project/tari/commit/c01471a663eae409d77ba703e40ecd2bb31df173)) + +### [0.38.2](https://github.com/tari-project/tari/compare/v0.38.1...v0.38.2) (2022-09-08) + + +### Bug Fixes + +* **comms/rpc:** detect early close in all cases ([#4647](https://github.com/tari-project/tari/issues/4647)) ([0125051](https://github.com/tari-project/tari/commit/0125051fe6d80dbf5fe65e91a2e47e9c89a09e5b)) +* exclude libtor from windows build ([#4631](https://github.com/tari-project/tari/issues/4631)) ([dffea23](https://github.com/tari-project/tari/commit/dffea2387b7f941eb798548b7eca819738f3e95e)) + +### [0.38.1](https://github.com/tari-project/tari/compare/v0.38.0...v0.38.1) (2022-09-07) + + +### Features + +* allow user to select specific UTXOs when sending transactions [#4514](https://github.com/tari-project/tari/issues/4514) ([#4523](https://github.com/tari-project/tari/issues/4523)) 
([4b40e61](https://github.com/tari-project/tari/commit/4b40e61154e5aa7ee32914ca48540f4f583c1d91)) +* attempt to recognize the source of a recovered output ([#4580](https://github.com/tari-project/tari/issues/4580)) ([095196b](https://github.com/tari-project/tari/commit/095196bb684546eba00a9fd2e35c02ddda172437)) +* **ci:** merge non-critical & long-running CI into one workflow ([#4614](https://github.com/tari-project/tari/issues/4614)) ([a81228c](https://github.com/tari-project/tari/commit/a81228c4a363035b68c09b49a4435b6fa982f3b7)) +* **comms:** update yamux and snow dependencies ([#4600](https://github.com/tari-project/tari/issues/4600)) ([541877a](https://github.com/tari-project/tari/commit/541877a78b85bff9bc540b6e6d465b9bbf41ef7d)) +* console and FFI should have setting to not choose outputs that reveal the address [#4403](https://github.com/tari-project/tari/issues/4403) ([#4516](https://github.com/tari-project/tari/issues/4516)) ([17bb64e](https://github.com/tari-project/tari/commit/17bb64e4174549c846aa6f39ad0235cfd4d013f1)) +* hide Coinbases that are in the process of being mined ([#4602](https://github.com/tari-project/tari/issues/4602)) ([c6c47fc](https://github.com/tari-project/tari/commit/c6c47fcdc8a12078e2e1210964bdd3977b8a57ca)) +* let sql in wal mode provide async db, not app level spawn blocking (transaction service) ([#4597](https://github.com/tari-project/tari/issues/4597)) ([e17c1f9](https://github.com/tari-project/tari/commit/e17c1f9696e3f4aaca73d1f711735bbdc5ffa0ec)) +* make sure duplication check happens first in mempool ([#4627](https://github.com/tari-project/tari/issues/4627)) ([23e4894](https://github.com/tari-project/tari/commit/23e4894ddc21f8099a102b22bfb540c6c9dcd13d)) +* remove spawn blocking calls from wallet db (wallet storage)([#4591](https://github.com/tari-project/tari/issues/4591)) ([77bb10d](https://github.com/tari-project/tari/commit/77bb10d42e8c004406d0ddd69b65575f0e111cd1)) + + +### Bug Fixes + +* add Grpc authentication to 
merge mining proxy (see issue [#4587](https://github.com/tari-project/tari/issues/4587)) ([#4592](https://github.com/tari-project/tari/issues/4592)) ([004c219](https://github.com/tari-project/tari/commit/004c219643ae42c0c1afcdb835542e53b581bfa3)) +* change wallet log target from error to trace (see issue [#4586](https://github.com/tari-project/tari/issues/4586)) ([183fa6e](https://github.com/tari-project/tari/commit/183fa6e22eabb43037605c03236cdc81ce0a7dae)) +* cleanup logs ([#4590](https://github.com/tari-project/tari/issues/4590)) ([66c8032](https://github.com/tari-project/tari/commit/66c80327db77a26f8370bc7bd972b8d5abcaf619)) +* **comms:** only reap when number of connections exceeds threshold ([#4607](https://github.com/tari-project/tari/issues/4607)) ([415f339](https://github.com/tari-project/tari/commit/415f33989ad55a55a04ca4afc3f4c115a9e930c1)) +* **console_wallet:** use cli.non_interactive instead of propmt to show seed words ([#4612](https://github.com/tari-project/tari/issues/4612)) ([8ad67ab](https://github.com/tari-project/tari/commit/8ad67ab5e8626157e475b2d57d4c68ad43df5108)) +* **dht:** updates to message padding ([#4594](https://github.com/tari-project/tari/issues/4594)) ([cf4f9bf](https://github.com/tari-project/tari/commit/cf4f9bf1b555755d8be6fd7a3bd401f6bc154fdd)) +* ffi wallet file for unknown type name ([#4589](https://github.com/tari-project/tari/issues/4589)) ([5cbf9aa](https://github.com/tari-project/tari/commit/5cbf9aa95a9b03e9e9a95c9b823dd12e43aa30f1)) +* **outbound:** reduce messaging protocol error to debug ([#4578](https://github.com/tari-project/tari/issues/4578)) ([99cef05](https://github.com/tari-project/tari/commit/99cef051a341e506420c2a70517122ff68c60dba)) +* reduces RPC error log to debug when domain-level RPC service returns an error (fixes [#4579](https://github.com/tari-project/tari/issues/4579)) ([#4611](https://github.com/tari-project/tari/issues/4611)) 
([86c030d](https://github.com/tari-project/tari/commit/86c030d7b3adbdf8b65394f6d3dc4ace61ba8c35)) +* remove unused dependencies ([#4624](https://github.com/tari-project/tari/issues/4624)) ([058f492](https://github.com/tari-project/tari/commit/058f492e7f61fec68583c3b0d08ffd4de470f27a)) +* remove window resize ([#4593](https://github.com/tari-project/tari/issues/4593)) ([896eff9](https://github.com/tari-project/tari/commit/896eff9b8df5b865fa511e3964231c983547e3a0)) +* stop race condition in output encumbrance ([#4613](https://github.com/tari-project/tari/issues/4613)) ([31e130a](https://github.com/tari-project/tari/commit/31e130a821cdba0daaa75da051c8c19237efbff0)) +* update cargo versions ([#4622](https://github.com/tari-project/tari/issues/4622)) ([07c1a29](https://github.com/tari-project/tari/commit/07c1a2949e07918a56fd00ba77698037e4212009)) +* use dht inbound error for decryption (Fixes [#4596](https://github.com/tari-project/tari/issues/4596)) ([#4601](https://github.com/tari-project/tari/issues/4601)) ([d9ef267](https://github.com/tari-project/tari/commit/d9ef2670df1a2e7c68e3751e0583f77eaf8bdf7c)) +* **wallet:** detect base node change during long-running protocols ([#4610](https://github.com/tari-project/tari/issues/4610)) ([2a2a8b6](https://github.com/tari-project/tari/commit/2a2a8b68ee2ff8bf2b4335288fd5fbff0d11ea92)) +* **wallet:** use RPC pool connections for non-recovery utxo scanning ([#4598](https://github.com/tari-project/tari/issues/4598)) ([7c9e22c](https://github.com/tari-project/tari/commit/7c9e22cb32ea9d8253dc11b45759a488c7ba1659)) + +## [0.38.0](https://github.com/tari-project/tari/compare/v0.37.0...v0.38.0) (2022-08-31) + + +### ⚠ BREAKING CHANGES + +* replace AES-GCM with XChaCha20-Poly1305 (#4550) + +### Features + +* **build:** multiple targeted build types with options for docker builds ([#4540](https://github.com/tari-project/tari/issues/4540)) ([7e7d053](https://github.com/tari-project/tari/commit/7e7d05351e157b8ca6d4d5b5e1e258a6281d6375)) 
+* **comms/rpc:** restrict rpc session per peer [#4497](https://github.com/tari-project/tari/issues/4497) ([#4549](https://github.com/tari-project/tari/issues/4549)) ([080bccf](https://github.com/tari-project/tari/commit/080bccf1a037f5574962704947d29d8f1218d42a)) +* **console-wallet:** detect local base node and prompt ([#4557](https://github.com/tari-project/tari/issues/4557)) ([887df88](https://github.com/tari-project/tari/commit/887df88d57fb4566b8383a3e33ad5caee4df762c)) +* remove spawn blocking calls from wallet db (contacts service) ([#4575](https://github.com/tari-project/tari/issues/4575)) ([7464581](https://github.com/tari-project/tari/commit/74645813ab836b19d9d722aaa189a2d190eb5c6e)) +* remove spawn blocking calls from wallet db (key manager service) ([#4564](https://github.com/tari-project/tari/issues/4564)) ([a5d5133](https://github.com/tari-project/tari/commit/a5d5133943bb11e8509a51aeb7f3d40b67bc065b)) +* update tor seed nodes for esmeralda network ([#4572](https://github.com/tari-project/tari/issues/4572)) ([c4cfc12](https://github.com/tari-project/tari/commit/c4cfc128f786be3806f51d699d89465756f97e7b)) +* upgrade to tokio 1.20.1 ([#4566](https://github.com/tari-project/tari/issues/4566)) ([777936a](https://github.com/tari-project/tari/commit/777936a0c2783635f77549d3f23520912b87b7bf)) + + +### Bug Fixes + +* **cucumber:** handles listHeaders response correctly ([#4551](https://github.com/tari-project/tari/issues/4551)) ([3958dde](https://github.com/tari-project/tari/commit/3958dde8114e4301c33a90073c1a2e3c973e0e5d)) +* deserializer for SafePassword ([#4565](https://github.com/tari-project/tari/issues/4565)) ([ee89960](https://github.com/tari-project/tari/commit/ee899606e0b9c9877c89fa35add3dc2fe54be30f)) +* ignored consensus tests (see issue [#4559](https://github.com/tari-project/tari/issues/4559)) ([#4571](https://github.com/tari-project/tari/issues/4571)) ([397fe67](https://github.com/tari-project/tari/commit/397fe673b3b47d57422db71523d8012381980e6c)) 
+* potential problem with not updating the OMS database ([#4563](https://github.com/tari-project/tari/issues/4563)) ([c867279](https://github.com/tari-project/tari/commit/c86727969ef3fffc124ab706d44c8845addbf415)) +* remove assets and tokens tabs from tari console wallet (see issue [#4543](https://github.com/tari-project/tari/issues/4543)) ([#4556](https://github.com/tari-project/tari/issues/4556)) ([11af787](https://github.com/tari-project/tari/commit/11af7875acfca85d82394d82852729952d638d98)) +* removed `seed_words` and `delete_seed_words` commands ([#4567](https://github.com/tari-project/tari/issues/4567)) ([0b2a155](https://github.com/tari-project/tari/commit/0b2a15585e88240c027175a24dd9757cca4218ac)) +* replace AES-GCM with XChaCha20-Poly1305 ([#4550](https://github.com/tari-project/tari/issues/4550)) ([85acc2f](https://github.com/tari-project/tari/commit/85acc2f1a06afa4e7b184e4577c2b081691783da)) +* resolve tests in output_manager_service_tests.rs (see issue [#4561](https://github.com/tari-project/tari/issues/4561)) ([#4577](https://github.com/tari-project/tari/issues/4577)) ([c69245b](https://github.com/tari-project/tari/commit/c69245bbf5e9f212c07bc1736cedd9351f4d6eef)) +* update rest of the crates to tokio 1.20 ([#4576](https://github.com/tari-project/tari/issues/4576)) ([ad24bf7](https://github.com/tari-project/tari/commit/ad24bf71714ffc091c9fce7c1fc224235e3666a9)) + ## [0.37.0](https://github.com/tari-project/tari/compare/v0.36.0...v0.37.0) (2022-08-25) diff --git a/common/Cargo.toml b/common/Cargo.toml index dd27b60571..687ac674fa 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -6,7 +6,7 @@ repository = "https://github.com/tari-project/tari" homepage = "https://tari.com" readme = "README.md" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [features] @@ -14,8 +14,8 @@ build = ["toml", "prost-build"] static-application-info = ["git2"] [dependencies] -tari_crypto = { git = 
"https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } -tari_common_types = { path = "../base_layer/common_types" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } + anyhow = "1.0.53" config = { version = "0.13.0", default_features = false, features = ["toml"] } derivative = "2.2.0" @@ -38,5 +38,5 @@ blake2 = "0.9.1" sha3 = "0.9.0" [dev-dependencies] -tari_test_utils = { version = "^0.37", path = "../infrastructure/test_utils"} +tari_test_utils = { version = "^0.38", path = "../infrastructure/test_utils"} toml = "0.5.8" diff --git a/common/config/presets/b_peer_seeds.toml b/common/config/presets/b_peer_seeds.toml index 7b07a2545c..cc512c5017 100644 --- a/common/config/presets/b_peer_seeds.toml +++ b/common/config/presets/b_peer_seeds.toml @@ -20,24 +20,24 @@ dns_seeds = ["seeds.esmeralda.tari.com"] # Custom specified peer seed nodes peer_seeds = [ - # 333388d1cbe3e2bd17453d052f - "c2eca9cf32261a1343e21ed718e79f25bfc74386e9305350b06f62047f519347::/onion3/6yxqk2ybo43u73ukfhyc42qn25echn4zegjpod2ccxzr2jd5atipwzqd:18141", - # 555575715a49fc242d756e52ca - "42fcde82b44af1de95a505d858cb31a422c56c4ac4747fbf3da47d648d4fc346::/onion3/2l3e7ysmihc23zybapdrsbcfg6omtjtfkvwj65dstnfxkwtai2fawtyd:18141", - # 77771f53be07fab4be5f1e1ff7 - "50e6aa8f6c50f1b9d9b3d438dfd2a29cfe1f3e3a650bd9e6b1e10f96b6c38f4d::/onion3/7s6y3cz5bnewlj5ypm7sekhgvqjyrq4bpaj5dyvvo7vxydj7hsmyf5ad:18141", - # 9999016f1f3a6162dddf5a45aa - "36a9df45e1423b5315ffa7a91521924210c8e1d1537ad0968450f20f21e5200d::/onion3/v24qfheti2rztlwzgk6v4kdbes3ra7mo3i2fobacqkbfrk656e3uvnid:18141", - # bbbb8358387d81c388fadb4649 - "be128d570e8ec7b15c101ee1a56d6c56dd7d109199f0bd02f182b71142b8675f::/onion3/ha422qsy743ayblgolui5pg226u42wfcklhc5p7nbhiytlsp4ir2syqd:18141", - # eeeeb0a943ed143e613a135392 - "3e0321c0928ca559ab3c0a396272dfaea705efce88440611a38ff3898b097217::/onion3/sl5ledjoaisst6d4fh7kde746dwweuge4m4mf5nkzdhmy57uwgtb7qqd:18141", - # 66664a0f95ce468941bb9de228 - 
"b0f797e7413b39b6646fa370e8394d3993ead124b8ba24325c3c07a05e980e7e::/ip4/35.177.93.69/tcp/18189", - # 22221bf814d5e524fce9ba5787 - "0eefb45a4de9484eca74846a4f47d2c8d38e76be1fec63b0112bd00d297c0928::/ip4/13.40.98.39/tcp/18189", - # 4444a0efd8388739d563bdd979 - "544ed2baed414307e119d12894e27f9ddbdfa2fd5b6528dc843f27903e951c30::/ip4/13.40.189.176/tcp/18189" + # 7777773c100a094c4feaa686cf + "d2cc8ad88271f075d7c3896179dc867a79115a136c9d9e175fe4ea774dafc75c::/onion3/atlitn6ewryimdviu4kjkjos3ift5v3ykvosgtnfgjocpdmondhykqid:18141", + # 222222c9629a9fcf5a71a18838 + "78b2c0bda70fd12a9987757ffc2851e197080af804353e8e025d28c785b6b447::/onion3/ysj76foyp7qkl7d5x63hyocmp5ydwcgkb25oalo23kj2vvx7zjvofqad:18141", + # 3333334aee7f7bfde22e77af02 + "8648575c606269b032f43cd0d54728628ddb911e636bd65ea36e867a5ffd3643::/onion3/5d2owx6uoqcsoapprattb4fmektm3rcpfyzmmwmf64dsu55mhcqef2yd:18141", + # 888888fe452d7db3e87224cafb + "083ff333ad7e0e9f3678b67378ec339074474342a6357de64a76bdf15e4c955b::/onion3/ldgdytcrwzfbmbpz3dmyi6yzqzqbeamitpb2saxzxmp52qywlmsg4vyd:18141", + # 555555cf2a79f8da9a6b1fecb3 + "ea420ae2948739bc35907b8ab5a2d41526ccef22ec92f8f8e2bb398500bf435a::/onion3/uybnlnzve4j4w2lj5bdoe2uurwsbjm73ck2cotlnknhu2l7msn26oeyd:18141", + # 444444f30fe3a4bf8e5937773e + "f688c69f2397dc0d4ad18168cd6ad13f93241a665acf19ab7f358fd661ac3d1c::/onion3/qejny5yprzidxt4rhstjmhsyfmeq4yb4r6tnn3pqowjr7e7roxcpxsqd:18141", + # 0000008034cc6453ffae1d0b80 + "40717ea5146cf6183c07469d188792b12a57b9da2e5af5bc50df270ff789257f::/onion3/qhmrwr2h3fnszwc4udhlgfpealm7mvw64enqghullrarc633fzmd6zqd:18141", + # bbbbbb1746d41d5be9936652fd + "faf52a5c6364e6bb7dc3a02743273115c7e218e1ef78f27d540c87b35715a005::/onion3/g5txoagsodgpkm2onsfn6r2fuzdzxlggaewre3edghdfzlw6szeo4cqd:18141", + # aaaaaac0add43b4b29a983891c + "a0e604c9a504558839a5c38faf034024a38c95fe6b04638b89dbfda756adff54::/onion3/vslf4ro52c4dktz2r5qybpwho3v25ikviwgvxf3ujryn2afock3qowad:18141", ] [igor.p2p.seeds] diff --git a/common/config/presets/c_base_node.toml 
b/common/config/presets/c_base_node.toml index 86cf41961f..8b72c4a989 100644 --- a/common/config/presets/c_base_node.toml +++ b/common/config/presets/c_base_node.toml @@ -75,9 +75,6 @@ identity_file = "config/base_node_id_esmeralda.json" # Liveness meta data auto ping interval between peers (default = 30 s) #metadata_auto_ping_interval = 30 -# Resize the CLI terminal on startup to a pre-defined size, or keep user settings (default = true) -#resize_terminal_on_startup = true - # Obscure GRPC error responses (default = false) #report_grpc_error = false @@ -160,13 +157,10 @@ track_reorgs = true #peer_database_name = "peers" # The maximum number of concurrent Inbound tasks allowed before back-pressure is applied to peers -#max_concurrent_inbound_tasks = 50 +#max_concurrent_inbound_tasks = 4 # The maximum number of concurrent outbound tasks allowed before back-pressure is applied to outbound messaging queue -#max_concurrent_outbound_tasks = 100 - -# The size of the buffer (channel) which holds pending outbound message requests -#outbound_buffer_size = 100 +#max_concurrent_outbound_tasks = 4 # Set to true to allow peers to provide test addresses (loopback, memory etc.). If set to false, memory # addresses, loopback, local-link (i.e addresses used in local tests) will not be accepted from peers. This diff --git a/common/config/presets/d_console_wallet.toml b/common/config/presets/d_console_wallet.toml index 61c18c2c05..a44929a546 100644 --- a/common/config/presets/d_console_wallet.toml +++ b/common/config/presets/d_console_wallet.toml @@ -32,8 +32,8 @@ # DO NOT EVER DELETE THIS FILE unless you (a) have backed up your seed phrase and (b) know what you are doing! 
#db_file = "db/console_wallet.db" -# The main wallet db sqlite database backend connection pool size for concurrent reads (default = 5) -#db_connection_pool_size = 5 +# The main wallet db sqlite database backend connection pool size for concurrent reads (default = 16) +#db_connection_pool_size = 16 # Console wallet password. Should you wish to start your console wallet without typing in your password, the following # options are available: @@ -61,6 +61,13 @@ #command_send_wait_timeout = 300 #command_send_wait_stage = "Broadcast" +# Wallets currently will choose the best outputs as inputs when spending, however since a lurking base node can +# generate a transaction graph of inputs to outputs with relative ease, a wallet may reveal its transaction +# history by including a (non-stealth address) one-sided payment. +# If set to `true`, then outputs received via simple one-sided transactions, won't be automatically selected as +# further transactions, but can still be selected individually as specific outputs. +#autoignore_onesided_utxos = false + # Set to true to enable grpc. (default = false) #grpc_enabled = false # The socket to expose for the gRPC base node server (default = "/ip4/127.0.0.1/tcp/18143") @@ -182,13 +189,10 @@ event_channel_size = 3500 #peer_database_name = "peers" # The maximum number of concurrent Inbound tasks allowed before back-pressure is applied to peers -#max_concurrent_inbound_tasks = 50 +#max_concurrent_inbound_tasks = 4 # The maximum number of concurrent outbound tasks allowed before back-pressure is applied to outbound messaging queue -#max_concurrent_outbound_tasks = 100 - -# The size of the buffer (channel) which holds pending outbound message requests -#outbound_buffer_size = 100 +#max_concurrent_outbound_tasks = 4 # Set to true to allow peers to provide test addresses (loopback, memory etc.). If set to false, memory # addresses, loopback, local-link (i.e addresses used in local tests) will not be accepted from peers. 
This diff --git a/common/config/presets/f_merge_mining_proxy.toml b/common/config/presets/f_merge_mining_proxy.toml index 17935f2129..52bab71161 100644 --- a/common/config/presets/f_merge_mining_proxy.toml +++ b/common/config/presets/f_merge_mining_proxy.toml @@ -42,6 +42,9 @@ monerod_url = [# stagenet # The Tari console wallet's GRPC address. (default = "/ip4/127.0.0.1/tcp/18143") #console_wallet_grpc_address = "/ip4/127.0.0.1/tcp/18143" +# GRPC authentication for the Tari console wallet (default = "none") +#wallet_grpc_authentication = { username: "miner", password: "$argon..." } + # Address of the tari_merge_mining_proxy application. (default = "/ip4/127.0.0.1/tcp/18081") #listener_address = "/ip4/127.0.0.1/tcp/18081" diff --git a/common_sqlite/Cargo.toml b/common_sqlite/Cargo.toml index 664ebc7e5f..df5e3749a0 100644 --- a/common_sqlite/Cargo.toml +++ b/common_sqlite/Cargo.toml @@ -3,7 +3,7 @@ name = "tari_common_sqlite" authors = ["The Tari Development Community"] description = "Tari cryptocurrency wallet library" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/comms/core/Cargo.toml b/comms/core/Cargo.toml index f0560f35b9..82a90960a3 100644 --- a/comms/core/Cargo.toml +++ b/comms/core/Cargo.toml @@ -6,15 +6,15 @@ repository = "https://github.com/tari-project/tari" homepage = "https://tari.com" readme = "README.md" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_common = {path = "../../common"} tari_metrics = { path = "../../infrastructure/metrics" } -tari_storage = { version = "^0.37", path = "../../infrastructure/storage" } -tari_shutdown = { version = "^0.37", path = 
"../../infrastructure/shutdown" } +tari_storage = { version = "^0.38", path = "../../infrastructure/storage" } +tari_shutdown = { version = "^0.38", path = "../../infrastructure/shutdown" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } anyhow = "1.0.53" @@ -42,17 +42,17 @@ prost-types = "0.9.0" rand = "0.8" serde = "1.0.119" serde_derive = "1.0.119" -snow = { version = "=0.8.0", features = ["default-resolver"] } +snow = { version = "=0.9.0", features = ["default-resolver"] } thiserror = "1.0.26" -tokio = { version = "1.14", features = ["rt-multi-thread", "time", "sync", "signal", "net", "macros", "io-util"] } -tokio-stream = { version = "0.1.7", features = ["sync"] } +tokio = { version = "1.20", features = ["rt-multi-thread", "time", "sync", "signal", "net", "macros", "io-util"] } +tokio-stream = { version = "0.1.9", features = ["sync"] } tokio-util = { version = "0.6.7", features = ["codec", "compat"] } tower = {version = "0.4", features = ["util"]} tracing = "0.1.26" -yamux = "=0.9.0" +yamux = "=0.10.2" [dev-dependencies] -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } tari_comms_rpc_macros = { version = "*", path = "../rpc_macros" } env_logger = "0.7.0" @@ -60,7 +60,7 @@ serde_json = "1.0.39" tempfile = "3.1.0" [build-dependencies] -tari_common = { version = "^0.37", path = "../../common", features = ["build"] } +tari_common = { version = "^0.38", path = "../../common", features = ["build"] } [features] c_integration = [] diff --git a/comms/core/examples/stress/node.rs b/comms/core/examples/stress/node.rs index fe595ec23d..37c403c107 100644 --- a/comms/core/examples/stress/node.rs +++ b/comms/core/examples/stress/node.rs @@ -31,7 +31,7 @@ use tari_comms::{ pipeline::SinkService, protocol::{messaging::MessagingProtocolExtension, ProtocolNotification, Protocols}, tor, - tor::{HsFlags, TorIdentity}, 
+ tor::TorIdentity, transports::{predicate::FalsePredicate, SocksConfig, TcpWithTorTransport}, CommsBuilder, CommsNode, @@ -131,7 +131,6 @@ pub async fn create( .unwrap() } else { let mut hs_builder = tor::HiddenServiceBuilder::new() - .with_hs_flags(HsFlags::DETACH) .with_port_mapping(port) .with_control_server_address(TOR_CONTROL_PORT_ADDR.parse().unwrap()); diff --git a/comms/core/src/connectivity/config.rs b/comms/core/src/connectivity/config.rs index 8d995ceeed..2ebc47fe91 100644 --- a/comms/core/src/connectivity/config.rs +++ b/comms/core/src/connectivity/config.rs @@ -33,6 +33,9 @@ pub struct ConnectivityConfig { pub connection_pool_refresh_interval: Duration, /// True if connection reaping is enabled, otherwise false (default: true) pub is_connection_reaping_enabled: bool, + /// The minimum number of connections that must exist before any connections may be reaped + /// Default: 50 + pub reaper_min_connection_threshold: usize, /// The minimum age of the connection before it can be reaped. This prevents a connection that has just been /// established from being reaped due to inactivity. 
Default: 20 minutes pub reaper_min_inactive_age: Duration, @@ -54,6 +57,7 @@ impl Default for ConnectivityConfig { min_connectivity: 1, connection_pool_refresh_interval: Duration::from_secs(60), reaper_min_inactive_age: Duration::from_secs(20 * 60), + reaper_min_connection_threshold: 50, is_connection_reaping_enabled: true, max_failures_mark_offline: 1, connection_tie_break_linger: Duration::from_secs(2), diff --git a/comms/core/src/connectivity/connection_pool.rs b/comms/core/src/connectivity/connection_pool.rs index 68ac1f6ddc..4ef6a53f82 100644 --- a/comms/core/src/connectivity/connection_pool.rs +++ b/comms/core/src/connectivity/connection_pool.rs @@ -161,8 +161,10 @@ impl ConnectionPool { .unwrap_or(ConnectionStatus::NotConnected) } - pub fn get_inactive_connections_mut(&mut self, min_age: Duration) -> Vec<&mut PeerConnection> { - self.filter_connections_mut(|conn| conn.age() > min_age && conn.handle_count() <= 1) + pub fn get_inactive_outbound_connections_mut(&mut self, min_age: Duration) -> Vec<&mut PeerConnection> { + self.filter_connections_mut(|conn| { + conn.age() > min_age && conn.handle_count() <= 1 && conn.substream_count() > 2 + }) } pub(in crate::connectivity) fn filter_drain

(&mut self, mut predicate: P) -> Vec diff --git a/comms/core/src/connectivity/manager.rs b/comms/core/src/connectivity/manager.rs index 0d849c5fb1..1cf1c41b41 100644 --- a/comms/core/src/connectivity/manager.rs +++ b/comms/core/src/connectivity/manager.rs @@ -392,9 +392,18 @@ impl ConnectivityManagerActor { } async fn reap_inactive_connections(&mut self) { - let connections = self + let excess_connections = self .pool - .get_inactive_connections_mut(self.config.reaper_min_inactive_age); + .count_connected() + .saturating_sub(self.config.reaper_min_connection_threshold); + if excess_connections == 0 { + return; + } + + let mut connections = self + .pool + .get_inactive_outbound_connections_mut(self.config.reaper_min_inactive_age); + connections.truncate(excess_connections as usize); for conn in connections { if !conn.is_connected() { continue; @@ -402,8 +411,9 @@ impl ConnectivityManagerActor { debug!( target: LOG_TARGET, - "Disconnecting '{}' because connection was inactive", - conn.peer_node_id().short_str() + "Disconnecting '{}' because connection was inactive ({} handles)", + conn.peer_node_id().short_str(), + conn.handle_count() ); if let Err(err) = conn.disconnect().await { // Already disconnected diff --git a/comms/core/src/multiplexing/yamux.rs b/comms/core/src/multiplexing/yamux.rs index 70b3d25d08..dc847416ed 100644 --- a/comms/core/src/multiplexing/yamux.rs +++ b/comms/core/src/multiplexing/yamux.rs @@ -23,7 +23,6 @@ use std::{future::Future, io, pin::Pin, task::Poll}; use futures::{task::Context, Stream}; -use tari_shutdown::{Shutdown, ShutdownSignal}; use tokio::{ io::{AsyncRead, AsyncWrite, ReadBuf}, sync::mpsc, @@ -91,11 +90,10 @@ impl Yamux { where TSocket: futures::AsyncRead + futures::AsyncWrite + Unpin + Send + 'static, { - let shutdown = Shutdown::new(); let (incoming_tx, incoming_rx) = mpsc::channel(10); - let incoming = IncomingWorker::new(connection, incoming_tx, shutdown.to_signal()); + let incoming = IncomingWorker::new(connection, 
incoming_tx); runtime::task::spawn(incoming.run()); - IncomingSubstreams::new(incoming_rx, counter, shutdown) + IncomingSubstreams::new(incoming_rx, counter) } /// Get the yamux control struct @@ -166,19 +164,13 @@ impl Control { pub struct IncomingSubstreams { inner: mpsc::Receiver, substream_counter: AtomicRefCounter, - shutdown: Shutdown, } impl IncomingSubstreams { - pub(self) fn new( - inner: mpsc::Receiver, - substream_counter: AtomicRefCounter, - shutdown: Shutdown, - ) -> Self { + pub(self) fn new(inner: mpsc::Receiver, substream_counter: AtomicRefCounter) -> Self { Self { inner, substream_counter, - shutdown, } } @@ -201,12 +193,6 @@ impl Stream for IncomingSubstreams { } } -impl Drop for IncomingSubstreams { - fn drop(&mut self) { - self.shutdown.trigger(); - } -} - /// A yamux stream wrapper that can be read from and written to. #[derive(Debug)] pub struct Substream { @@ -258,41 +244,23 @@ impl From for stream_id::Id { struct IncomingWorker { connection: yamux::Connection, sender: mpsc::Sender, - shutdown_signal: ShutdownSignal, } impl IncomingWorker where TSocket: futures::AsyncRead + futures::AsyncWrite + Unpin + Send + 'static /* */ { - pub fn new( - connection: yamux::Connection, - sender: mpsc::Sender, - shutdown_signal: ShutdownSignal, - ) -> Self { - Self { - connection, - sender, - shutdown_signal, - } + pub fn new(connection: yamux::Connection, sender: mpsc::Sender) -> Self { + Self { connection, sender } } #[tracing::instrument(name = "yamux::incoming_worker::run", skip(self), fields(connection = %self.connection))] pub async fn run(mut self) { loop { tokio::select! 
{ - biased; - - _ = self.shutdown_signal.wait() => { - let mut control = self.connection.control(); - if let Err(err) = control.close().await { - error!(target: LOG_TARGET, "Failed to close yamux connection: {}", err); - } - debug!( - target: LOG_TARGET, - "{} Yamux connection has closed", self.connection - ); + _ = self.sender.closed() => { + self.close().await; break - } + }, result = self.connection.next_stream() => { match result { @@ -336,14 +304,51 @@ where TSocket: futures::AsyncRead + futures::AsyncWrite + Unpin + Send + 'static } } } + + async fn close(&mut self) { + let mut control = self.connection.control(); + // Sends the close message once polled, while continuing to poll the connection future + let close_fut = control.close(); + tokio::pin!(close_fut); + loop { + tokio::select! { + biased; + + result = &mut close_fut => { + match result { + Ok(_) => break, + Err(err) => { + error!(target: LOG_TARGET, "Failed to close yamux connection: {}", err); + break; + } + } + }, + + result = self.connection.next_stream() => { + match result { + Ok(Some(_)) => continue, + Ok(None) => break, + Err(err) => { + error!(target: LOG_TARGET, "Error while closing yamux connection: {}", err); + continue; + } + } + } + } + } + debug!(target: LOG_TARGET, "{} Yamux connection has closed", self.connection); + } } #[cfg(test)] mod test { - use std::{io, time::Duration}; + use std::{io, sync::Arc, time::Duration}; use tari_test_utils::collect_stream; - use tokio::io::{AsyncReadExt, AsyncWriteExt}; + use tokio::{ + io::{AsyncReadExt, AsyncWriteExt}, + sync::Barrier, + }; use tokio_stream::StreamExt; use crate::{ @@ -455,6 +460,34 @@ mod test { Ok(()) } + #[runtime::test] + async fn rude_close_does_not_freeze() -> io::Result<()> { + let (dialer, listener) = MemorySocket::new_pair(); + + let barrier = Arc::new(Barrier::new(2)); + let b = barrier.clone(); + + task::spawn(async move { + // Drop immediately + let incoming = Yamux::upgrade_connection(listener, 
ConnectionDirection::Inbound) + .unwrap() + .into_incoming(); + drop(incoming); + b.wait().await; + }); + + let dialer = Yamux::upgrade_connection(dialer, ConnectionDirection::Outbound).unwrap(); + let mut dialer_control = dialer.get_yamux_control(); + let mut substream = dialer_control.open_stream().await.unwrap(); + barrier.wait().await; + + let mut buf = vec![]; + substream.read_to_end(&mut buf).await.unwrap(); + assert!(buf.is_empty()); + + Ok(()) + } + #[runtime::test] async fn send_big_message() -> io::Result<()> { #[allow(non_upper_case_globals)] diff --git a/comms/core/src/noise/crypto_resolver.rs b/comms/core/src/noise/crypto_resolver.rs index 51a5a6d227..0272f014d1 100644 --- a/comms/core/src/noise/crypto_resolver.rs +++ b/comms/core/src/noise/crypto_resolver.rs @@ -112,8 +112,8 @@ impl Dh for CommsDiffieHellman { self.secret_key.as_bytes() } - fn dh(&self, public_key: &[u8], out: &mut [u8]) -> Result<(), ()> { - let pk = CommsPublicKey::from_bytes(&public_key[..self.pub_len()]).map_err(|_| ())?; + fn dh(&self, public_key: &[u8], out: &mut [u8]) -> Result<(), snow::Error> { + let pk = CommsPublicKey::from_bytes(&public_key[..self.pub_len()]).map_err(|_| snow::Error::Dh)?; let shared = CommsPublicKey::shared_secret(&self.secret_key, &pk); let hash = noise_kdf(&shared); copy_slice!(hash, out); diff --git a/comms/core/src/pipeline/builder.rs b/comms/core/src/pipeline/builder.rs index 2aa88da405..9688101e86 100644 --- a/comms/core/src/pipeline/builder.rs +++ b/comms/core/src/pipeline/builder.rs @@ -30,16 +30,14 @@ use crate::{ }; const DEFAULT_MAX_CONCURRENT_TASKS: usize = 50; -const DEFAULT_OUTBOUND_BUFFER_SIZE: usize = 50; -type OutboundMessageSinkService = SinkService>; +type OutboundMessageSinkService = SinkService>; /// Message pipeline builder #[derive(Default)] pub struct Builder { max_concurrent_inbound_tasks: usize, max_concurrent_outbound_tasks: Option, - outbound_buffer_size: usize, inbound: Option, outbound_rx: Option>, outbound_pipeline_factory: 
Option TOutSvc>>, @@ -50,7 +48,6 @@ impl Builder<(), (), ()> { Self { max_concurrent_inbound_tasks: DEFAULT_MAX_CONCURRENT_TASKS, max_concurrent_outbound_tasks: None, - outbound_buffer_size: DEFAULT_OUTBOUND_BUFFER_SIZE, inbound: None, outbound_rx: None, outbound_pipeline_factory: None, @@ -69,11 +66,6 @@ impl Builder { self } - pub fn outbound_buffer_size(mut self, buf_size: usize) -> Self { - self.outbound_buffer_size = buf_size; - self - } - pub fn with_outbound_pipeline(self, receiver: mpsc::Receiver, factory: F) -> Builder where // Factory function takes in a SinkService and returns a new composed service @@ -87,7 +79,6 @@ impl Builder { max_concurrent_inbound_tasks: self.max_concurrent_inbound_tasks, max_concurrent_outbound_tasks: self.max_concurrent_outbound_tasks, inbound: self.inbound, - outbound_buffer_size: self.outbound_buffer_size, } } @@ -100,7 +91,6 @@ impl Builder { max_concurrent_outbound_tasks: self.max_concurrent_outbound_tasks, outbound_rx: self.outbound_rx, outbound_pipeline_factory: self.outbound_pipeline_factory, - outbound_buffer_size: self.outbound_buffer_size, } } } @@ -111,7 +101,7 @@ where TInSvc: Service + Clone + Send + 'static, { fn build_outbound(&mut self) -> Result, PipelineBuilderError> { - let (out_sender, out_receiver) = mpsc::channel(self.outbound_buffer_size); + let (out_sender, out_receiver) = mpsc::unbounded_channel(); let in_receiver = self .outbound_rx @@ -125,7 +115,7 @@ where let pipeline = (factory)(sink_service); Ok(OutboundPipelineConfig { in_receiver, - out_receiver, + out_receiver: Some(out_receiver), pipeline, }) } @@ -157,7 +147,7 @@ pub struct OutboundPipelineConfig { /// Messages read from this stream are passed to the pipeline pub in_receiver: mpsc::Receiver, /// Receiver of `OutboundMessage`s coming from the pipeline - pub out_receiver: mpsc::Receiver, + pub out_receiver: Option>, /// The pipeline (`tower::Service`) to run for each in_stream message pub pipeline: TPipeline, } diff --git 
a/comms/core/src/pipeline/inbound.rs b/comms/core/src/pipeline/inbound.rs index f77d5f66bb..5f91187252 100644 --- a/comms/core/src/pipeline/inbound.rs +++ b/comms/core/src/pipeline/inbound.rs @@ -20,12 +20,15 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -use std::{fmt::Display, time::Instant}; +use std::{ + fmt::Display, + time::{Duration, Instant}, +}; use futures::future::FusedFuture; use log::*; use tari_shutdown::ShutdownSignal; -use tokio::sync::mpsc; +use tokio::{sync::mpsc, time}; use tower::{Service, ServiceExt}; use crate::bounded_executor::BoundedExecutor; @@ -85,15 +88,17 @@ where let num_available = self.executor.num_available(); let max_available = self.executor.max_available(); - // Only emit this message if there is any concurrent usage - if num_available < max_available { - debug!( - target: LOG_TARGET, - "Inbound pipeline usage: {}/{}", - max_available - num_available, - max_available - ); - } + log!( + target: LOG_TARGET, + if num_available < max_available { + Level::Debug + } else { + Level::Trace + }, + "Inbound pipeline usage: {}/{}", + max_available - num_available, + max_available + ); let id = current_id; current_id = (current_id + 1) % u64::MAX; @@ -103,8 +108,19 @@ where .spawn(async move { let timer = Instant::now(); trace!(target: LOG_TARGET, "Start inbound pipeline {}", id); - if let Err(err) = service.oneshot(item).await { - warn!(target: LOG_TARGET, "Inbound pipeline returned an error: '{}'", err); + match time::timeout(Duration::from_secs(10), service.oneshot(item)).await { + Ok(Ok(_)) => {}, + Ok(Err(err)) => { + warn!(target: LOG_TARGET, "Inbound pipeline returned an error: '{}'", err); + }, + Err(_) => { + error!( + target: LOG_TARGET, + "Inbound pipeline {} timed out and was aborted. 
THIS SHOULD NOT HAPPEN: there was a \ + deadlock or excessive delay in processing this pipeline.", + id + ); + }, } trace!( target: LOG_TARGET, diff --git a/comms/core/src/pipeline/outbound.rs b/comms/core/src/pipeline/outbound.rs index 6f2dc115b3..83eed9f208 100644 --- a/comms/core/src/pipeline/outbound.rs +++ b/comms/core/src/pipeline/outbound.rs @@ -20,19 +20,16 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -use std::{fmt::Display, time::Instant}; +use std::{ + fmt::Display, + time::{Duration, Instant}, +}; -use futures::future::Either; use log::*; -use tokio::sync::mpsc; +use tokio::time; use tower::{Service, ServiceExt}; -use crate::{ - bounded_executor::OptionallyBoundedExecutor, - message::OutboundMessage, - pipeline::builder::OutboundPipelineConfig, - protocol::messaging::MessagingRequest, -}; +use crate::{bounded_executor::OptionallyBoundedExecutor, pipeline::builder::OutboundPipelineConfig}; const LOG_TARGET: &str = "comms::pipeline::outbound"; @@ -43,8 +40,6 @@ pub struct Outbound { executor: OptionallyBoundedExecutor, /// Outbound pipeline configuration containing the pipeline and it's in and out streams config: OutboundPipelineConfig, - /// Request sender for Messaging - messaging_request_tx: mpsc::Sender, } impl Outbound @@ -55,101 +50,69 @@ where TPipeline::Future: Send, { /// New outbound pipeline. - pub fn new( - executor: OptionallyBoundedExecutor, - config: OutboundPipelineConfig, - messaging_request_tx: mpsc::Sender, - ) -> Self { - Self { - executor, - config, - messaging_request_tx, - } + pub fn new(executor: OptionallyBoundedExecutor, config: OutboundPipelineConfig) -> Self { + Self { executor, config } } /// Run the outbound pipeline. pub async fn run(mut self) { let mut current_id = 0; - loop { - let either = tokio::select! 
{ - next = self.config.in_receiver.recv() => Either::Left(next), - next = self.config.out_receiver.recv() => Either::Right(next) - }; - match either { - // Pipeline IN received a message. Spawn a new task for the pipeline - Either::Left(Some(msg)) => { - let num_available = self.executor.num_available(); - if let Some(max_available) = self.executor.max_available() { - // Only emit this message if there is any concurrent usage - if num_available < max_available { - debug!( + + while let Some(msg) = self.config.in_receiver.recv().await { + // Pipeline IN received a message. Spawn a new task for the pipeline + let num_available = self.executor.num_available(); + if let Some(max_available) = self.executor.max_available() { + log!( + target: LOG_TARGET, + if num_available < max_available { + Level::Debug + } else { + Level::Trace + }, + "Outbound pipeline usage: {}/{}", + max_available - num_available, + max_available + ); + } + let pipeline = self.config.pipeline.clone(); + let id = current_id; + current_id = (current_id + 1) % u64::MAX; + self.executor + .spawn(async move { + let timer = Instant::now(); + trace!(target: LOG_TARGET, "Start outbound pipeline {}", id); + match time::timeout(Duration::from_secs(10), pipeline.oneshot(msg)).await { + Ok(Ok(_)) => {}, + Ok(Err(err)) => { + error!( target: LOG_TARGET, - "Outbound pipeline usage: {}/{}", - max_available - num_available, - max_available + "Outbound pipeline {} returned an error: '{}'", id, err ); - } - } - let pipeline = self.config.pipeline.clone(); - let id = current_id; - current_id = (current_id + 1) % u64::MAX; - - self.executor - .spawn(async move { - let timer = Instant::now(); - trace!(target: LOG_TARGET, "Start outbound pipeline {}", id); - if let Err(err) = pipeline.oneshot(msg).await { - error!( - target: LOG_TARGET, - "Outbound pipeline {} returned an error: '{}'", id, err - ); - } - - trace!( + }, + Err(_) => { + error!( target: LOG_TARGET, - "Finished outbound pipeline {} in {:.2?}", - id, - 
timer.elapsed() + "Outbound pipeline {} timed out and was aborted. THIS SHOULD NOT HAPPEN: there was a \ + deadlock or excessive delay in processing this pipeline.", + id ); - }) - .await; - }, - // Pipeline IN channel closed - Either::Left(None) => { - info!( - target: LOG_TARGET, - "Outbound pipeline is shutting down because the in channel closed" - ); - break; - }, - // Pipeline OUT received a message - Either::Right(Some(out_msg)) => { - if self.messaging_request_tx.is_closed() { - // MessagingRequest channel closed - break; + }, } - self.send_messaging_request(out_msg).await; - }, - // Pipeline OUT channel closed - Either::Right(None) => { - info!( + + trace!( target: LOG_TARGET, - "Outbound pipeline is shutting down because the out channel closed" + "Finished outbound pipeline {} in {:.2?}", + id, + timer.elapsed() ); - break; - }, - } + }) + .await; } - } - async fn send_messaging_request(&mut self, out_msg: OutboundMessage) { - let msg_req = MessagingRequest::SendMessage(out_msg); - if let Err(err) = self.messaging_request_tx.send(msg_req).await { - error!( - target: LOG_TARGET, - "Failed to send OutboundMessage to Messaging protocol because '{}'", err - ); - } + info!( + target: LOG_TARGET, + "Outbound pipeline is shutting down because the in channel closed" + ); } } @@ -158,43 +121,37 @@ mod test { use std::time::Duration; use bytes::Bytes; - use tari_test_utils::{collect_recv, unpack_enum}; - use tokio::{runtime::Handle, time}; + use tari_test_utils::collect_recv; + use tokio::{runtime::Handle, sync::mpsc, time}; use super::*; - use crate::{pipeline::SinkService, runtime, utils}; + use crate::{message::OutboundMessage, pipeline::SinkService, runtime, utils}; #[runtime::test] async fn run() { const NUM_ITEMS: usize = 10; - let (tx, in_receiver) = mpsc::channel(NUM_ITEMS); + let (tx, mut in_receiver) = mpsc::channel(NUM_ITEMS); utils::mpsc::send_all( &tx, (0..NUM_ITEMS).map(|i| OutboundMessage::new(Default::default(), 
Bytes::copy_from_slice(&i.to_be_bytes()))), ) .await .unwrap(); - let (out_tx, out_rx) = mpsc::channel(NUM_ITEMS); - let (msg_tx, mut msg_rx) = mpsc::channel(NUM_ITEMS); + in_receiver.close(); + + let (out_tx, mut out_rx) = mpsc::unbounded_channel(); let executor = Handle::current(); - let pipeline = Outbound::new( - executor.clone().into(), - OutboundPipelineConfig { - in_receiver, - out_receiver: out_rx, - pipeline: SinkService::new(out_tx), - }, - msg_tx, - ); + let pipeline = Outbound::new(executor.clone().into(), OutboundPipelineConfig { + in_receiver, + out_receiver: None, + pipeline: SinkService::new(out_tx), + }); let spawned_task = executor.spawn(pipeline.run()); - msg_rx.close(); - let requests = collect_recv!(msg_rx, timeout = Duration::from_millis(5)); - for req in requests { - unpack_enum!(MessagingRequest::SendMessage(_o) = req); - } + let requests = collect_recv!(out_rx, timeout = Duration::from_millis(5)); + assert_eq!(requests.len(), NUM_ITEMS); // Check that this task ends because the stream has closed time::timeout(Duration::from_secs(5), spawned_task) diff --git a/comms/core/src/pipeline/sink.rs b/comms/core/src/pipeline/sink.rs index df7fe3cdb5..376792fd12 100644 --- a/comms/core/src/pipeline/sink.rs +++ b/comms/core/src/pipeline/sink.rs @@ -20,7 +20,7 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-use std::task::Poll; +use std::{future, task::Poll}; use futures::{future::BoxFuture, task::Context, FutureExt}; use tower::Service; @@ -59,3 +59,22 @@ where T: Send + 'static .boxed() } } +impl Service for SinkService> +where T: Send + 'static +{ + type Error = PipelineError; + type Future = future::Ready>; + type Response = (); + + fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, item: T) -> Self::Future { + let sink = self.0.clone(); + let result = sink + .send(item) + .map_err(|_| anyhow::anyhow!("sink closed in sink service")); + future::ready(result) + } +} diff --git a/comms/core/src/protocol/messaging/error.rs b/comms/core/src/protocol/messaging/error.rs index 9d38427675..eaf02e1672 100644 --- a/comms/core/src/protocol/messaging/error.rs +++ b/comms/core/src/protocol/messaging/error.rs @@ -68,6 +68,8 @@ impl From for MessagingProtocolError { ErrorKind::ConnectionReset | ErrorKind::ConnectionAborted | ErrorKind::BrokenPipe | + ErrorKind::WriteZero | + ErrorKind::NotConnected | ErrorKind::UnexpectedEof => MessagingProtocolError::ConnectionClosed(err), _ => MessagingProtocolError::Io(err), } diff --git a/comms/core/src/protocol/messaging/extension.rs b/comms/core/src/protocol/messaging/extension.rs index eabbc99800..9e8425e367 100644 --- a/comms/core/src/protocol/messaging/extension.rs +++ b/comms/core/src/protocol/messaging/extension.rs @@ -39,18 +39,14 @@ use crate::{ runtime::task, }; -/// Buffer size for inbound messages from _all_ peers. This should be large enough to buffer quite a few incoming -/// messages before creating backpressure on peers speaking the messaging protocol. -pub const INBOUND_MESSAGE_BUFFER_SIZE: usize = 100; +/// Buffer size for inbound messages from _all_ peers. If the message consumer is slow to get through this queue, +/// sending peers will start to experience backpressure (this is a good thing). 
+pub const INBOUND_MESSAGE_BUFFER_SIZE: usize = 10; /// Buffer size notifications that a peer wants to speak /tari/messaging. This buffer is used for all peers, but a low /// value is ok because this events happen once (or less) per connecting peer. For e.g. a value of 10 would allow 10 /// peers to concurrently request to speak /tari/messaging. pub const MESSAGING_PROTOCOL_EVENTS_BUFFER_SIZE: usize = 30; -/// Buffer size for requests to the messaging protocol. All outbound messages will be sent along this channel. Some -/// buffering may be required if the node needs to send many messages out at the same time. -pub const MESSAGING_REQUEST_BUFFER_SIZE: usize = 50; - /// Installs the messaging protocol pub struct MessagingProtocolExtension { event_tx: MessagingEventSender, @@ -73,17 +69,17 @@ where TInPipe::Future: Send + 'static, TOutReq: Send + 'static, { - fn install(self: Box, context: &mut ProtocolExtensionContext) -> Result<(), ProtocolExtensionError> { + fn install(mut self: Box, context: &mut ProtocolExtensionContext) -> Result<(), ProtocolExtensionError> { let (proto_tx, proto_rx) = mpsc::channel(MESSAGING_PROTOCOL_EVENTS_BUFFER_SIZE); context.add_protocol(&[MESSAGING_PROTOCOL.clone()], &proto_tx); - let (messaging_request_tx, messaging_request_rx) = mpsc::channel(MESSAGING_REQUEST_BUFFER_SIZE); let (inbound_message_tx, inbound_message_rx) = mpsc::channel(INBOUND_MESSAGE_BUFFER_SIZE); + let message_receiver = self.pipeline.outbound.out_receiver.take().unwrap(); let messaging = MessagingProtocol::new( context.connectivity(), proto_rx, - messaging_request_rx, + message_receiver, self.event_tx, inbound_message_tx, context.shutdown_signal(), @@ -106,7 +102,7 @@ where let executor = OptionallyBoundedExecutor::from_current(self.pipeline.max_concurrent_outbound_tasks); // Spawn outbound pipeline - let outbound = pipeline::Outbound::new(executor, self.pipeline.outbound, messaging_request_tx); + let outbound = pipeline::Outbound::new(executor, 
self.pipeline.outbound); task::spawn(outbound.run()); Ok(()) diff --git a/comms/core/src/protocol/messaging/mod.rs b/comms/core/src/protocol/messaging/mod.rs index a55ec3628c..9b45008474 100644 --- a/comms/core/src/protocol/messaging/mod.rs +++ b/comms/core/src/protocol/messaging/mod.rs @@ -37,14 +37,7 @@ mod inbound; mod metrics; mod outbound; mod protocol; -pub use protocol::{ - MessagingEvent, - MessagingEventReceiver, - MessagingEventSender, - MessagingProtocol, - MessagingRequest, - SendFailReason, -}; +pub use protocol::{MessagingEvent, MessagingEventReceiver, MessagingEventSender, MessagingProtocol, SendFailReason}; #[cfg(test)] mod test; diff --git a/comms/core/src/protocol/messaging/outbound.rs b/comms/core/src/protocol/messaging/outbound.rs index f67ab63581..9f8ff2831e 100644 --- a/comms/core/src/protocol/messaging/outbound.rs +++ b/comms/core/src/protocol/messaging/outbound.rs @@ -163,37 +163,28 @@ impl OutboundMessaging { } async fn try_dial_peer(&mut self) -> Result { - let span = span!( - Level::DEBUG, - "dial_peer", - node_id = self.peer_node_id.to_string().as_str() - ); - async move { - loop { - match self.connectivity.dial_peer(self.peer_node_id.clone()).await { - Ok(conn) => break Ok(conn), - Err(ConnectivityError::DialCancelled) => { - debug!( - target: LOG_TARGET, - "Dial was cancelled for peer '{}'. This is probably because of connection tie-breaking. \ - Retrying...", - self.peer_node_id, - ); - continue; - }, - Err(err) => { - debug!( - target: LOG_TARGET, - "MessagingProtocol failed to dial peer '{}' because '{:?}'", self.peer_node_id, err - ); + loop { + match self.connectivity.dial_peer(self.peer_node_id.clone()).await { + Ok(conn) => break Ok(conn), + Err(ConnectivityError::DialCancelled) => { + debug!( + target: LOG_TARGET, + "Dial was cancelled for peer '{}'. This is probably because of connection tie-breaking. 
\ + Retrying...", + self.peer_node_id, + ); + continue; + }, + Err(err) => { + debug!( + target: LOG_TARGET, + "MessagingProtocol failed to dial peer '{}' because '{:?}'", self.peer_node_id, err + ); - break Err(MessagingProtocolError::PeerDialFailed(err)); - }, - } + break Err(MessagingProtocolError::PeerDialFailed(err)); + }, } } - .instrument(span) - .await } async fn try_establish( @@ -232,27 +223,16 @@ impl OutboundMessaging { &mut self, conn: &mut PeerConnection, ) -> Result, MessagingProtocolError> { - let span = span!( - Level::DEBUG, - "open_substream", - node_id = self.peer_node_id.to_string().as_str() - ); - async move { - match conn.open_substream(&MESSAGING_PROTOCOL).await { - Ok(substream) => Ok(substream), - Err(err) => { - debug!( - target: LOG_TARGET, - "MessagingProtocol failed to open a substream to peer '{}' because '{}'", - self.peer_node_id, - err - ); - Err(err.into()) - }, - } + match conn.open_substream(&MESSAGING_PROTOCOL).await { + Ok(substream) => Ok(substream), + Err(err) => { + debug!( + target: LOG_TARGET, + "MessagingProtocol failed to open a substream to peer '{}' because '{}'", self.peer_node_id, err + ); + Err(err.into()) + }, } - .instrument(span) - .await } async fn start_forwarding_messages( @@ -290,10 +270,16 @@ impl OutboundMessaging { outbound_count.inc(); event!( Level::DEBUG, - "Message buffered for sending {} on stream {}", + "Message for peer '{}' sending {} on stream {}", + peer_node_id, out_msg, stream_id ); + debug!( + target: LOG_TARGET, + "Message for peer '{}' sending {} on stream {}", peer_node_id, out_msg, stream_id + ); + out_msg.reply_success(); Result::<_, MessagingProtocolError>::Ok(out_msg.body) }); diff --git a/comms/core/src/protocol/messaging/protocol.rs b/comms/core/src/protocol/messaging/protocol.rs index 3d02b055ff..2098e7e5c5 100644 --- a/comms/core/src/protocol/messaging/protocol.rs +++ b/comms/core/src/protocol/messaging/protocol.rs @@ -54,7 +54,7 @@ use crate::{ const LOG_TARGET: &str = 
"comms::protocol::messaging"; pub(super) static MESSAGING_PROTOCOL: Bytes = Bytes::from_static(b"t/msg/0.1"); -const INTERNAL_MESSAGING_EVENT_CHANNEL_SIZE: usize = 150; +const INTERNAL_MESSAGING_EVENT_CHANNEL_SIZE: usize = 10; /// The maximum amount of inbound messages to accept within the `RATE_LIMIT_RESTOCK_INTERVAL` window const RATE_LIMIT_CAPACITY: usize = 10; @@ -64,12 +64,6 @@ const MAX_FRAME_LENGTH: usize = 8 * 1_024 * 1_024; pub type MessagingEventSender = broadcast::Sender>; pub type MessagingEventReceiver = broadcast::Receiver>; -/// Request types for MessagingProtocol -#[derive(Debug)] -pub enum MessagingRequest { - SendMessage(OutboundMessage), -} - /// The reason for dial failure. This enum should contain simple variants which describe the kind of failure that /// occurred #[derive(Debug, Error, Copy, Clone)] @@ -110,7 +104,7 @@ pub struct MessagingProtocol { connectivity: ConnectivityRequester, proto_notification: mpsc::Receiver>, active_queues: HashMap>, - request_rx: mpsc::Receiver, + outbound_message_rx: mpsc::UnboundedReceiver, messaging_events_tx: MessagingEventSender, inbound_message_tx: mpsc::Sender, internal_messaging_event_tx: mpsc::Sender, @@ -126,7 +120,7 @@ impl MessagingProtocol { pub(super) fn new( connectivity: ConnectivityRequester, proto_notification: mpsc::Receiver>, - request_rx: mpsc::Receiver, + outbound_message_rx: mpsc::UnboundedReceiver, messaging_events_tx: MessagingEventSender, inbound_message_tx: mpsc::Sender, shutdown_signal: ShutdownSignal, @@ -138,7 +132,7 @@ impl MessagingProtocol { Self { connectivity, proto_notification, - request_rx, + outbound_message_rx, active_queues: Default::default(), messaging_events_tx, internal_messaging_event_rx, @@ -163,11 +157,11 @@ impl MessagingProtocol { loop { tokio::select! 
{ Some(event) = self.internal_messaging_event_rx.recv() => { - self.handle_internal_messaging_event(event).await; + self.handle_internal_messaging_event(event); }, Some(msg) = self.retry_queue_rx.recv() => { - if let Err(err) = self.handle_retry_queue_messages(msg).await { + if let Err(err) = self.handle_retry_queue_messages(msg) { error!( target: LOG_TARGET, "Failed to retry outbound message because '{}'", @@ -176,8 +170,8 @@ impl MessagingProtocol { } }, - Some(req) = self.request_rx.recv() => { - if let Err(err) = self.handle_request(req).await { + Some(msg) = self.outbound_message_rx.recv() => { + if let Err(err) = self.send_message(msg) { error!( target: LOG_TARGET, "Failed to handle request because '{}'", @@ -187,7 +181,7 @@ impl MessagingProtocol { }, Some(notification) = self.proto_notification.recv() => { - self.handle_protocol_notification(notification).await; + self.handle_protocol_notification(notification); }, _ = &mut shutdown_signal => { @@ -204,7 +198,7 @@ impl MessagingProtocol { framing::canonical(socket, MAX_FRAME_LENGTH) } - async fn handle_internal_messaging_event(&mut self, event: MessagingEvent) { + fn handle_internal_messaging_event(&mut self, event: MessagingEvent) { use MessagingEvent::OutboundProtocolExited; trace!(target: LOG_TARGET, "Internal messaging event '{}'", event); match event { @@ -231,26 +225,15 @@ impl MessagingProtocol { } } - async fn handle_request(&mut self, req: MessagingRequest) -> Result<(), MessagingProtocolError> { - use MessagingRequest::SendMessage; - match req { - SendMessage(msg) => { - trace!(target: LOG_TARGET, "Received request to send message ({})", msg); - self.send_message(msg).await?; - }, - } - - Ok(()) - } - - async fn handle_retry_queue_messages(&mut self, msg: OutboundMessage) -> Result<(), MessagingProtocolError> { + fn handle_retry_queue_messages(&mut self, msg: OutboundMessage) -> Result<(), MessagingProtocolError> { debug!(target: LOG_TARGET, "Retrying outbound message ({})", msg); - 
self.send_message(msg).await?; + self.send_message(msg)?; Ok(()) } // #[tracing::instrument(skip(self, out_msg), err)] - async fn send_message(&mut self, out_msg: OutboundMessage) -> Result<(), MessagingProtocolError> { + fn send_message(&mut self, out_msg: OutboundMessage) -> Result<(), MessagingProtocolError> { + trace!(target: LOG_TARGET, "Received request to send message ({})", out_msg); let peer_node_id = out_msg.peer_node_id.clone(); let sender = loop { match self.active_queues.entry(peer_node_id.clone()) { @@ -315,7 +298,7 @@ impl MessagingProtocol { task::spawn(inbound_messaging.run(substream)); } - async fn handle_protocol_notification(&mut self, notification: ProtocolNotification) { + fn handle_protocol_notification(&mut self, notification: ProtocolNotification) { match notification.event { // Peer negotiated to speak the messaging protocol with us ProtocolEvent::NewInboundSubstream(node_id, substream) => { diff --git a/comms/core/src/protocol/messaging/test.rs b/comms/core/src/protocol/messaging/test.rs index 1d2c61febd..4344a55ee0 100644 --- a/comms/core/src/protocol/messaging/test.rs +++ b/comms/core/src/protocol/messaging/test.rs @@ -33,13 +33,7 @@ use tokio::{ time, }; -use super::protocol::{ - MessagingEvent, - MessagingEventReceiver, - MessagingProtocol, - MessagingRequest, - MESSAGING_PROTOCOL, -}; +use super::protocol::{MessagingEvent, MessagingEventReceiver, MessagingProtocol, MESSAGING_PROTOCOL}; use crate::{ message::{InboundMessage, MessageTag, MessagingReplyRx, OutboundMessage}, multiplexing::Substream, @@ -64,7 +58,7 @@ async fn spawn_messaging_protocol() -> ( Arc, ConnectivityManagerMockState, mpsc::Sender>, - mpsc::Sender, + mpsc::UnboundedSender, mpsc::Receiver, MessagingEventReceiver, Shutdown, @@ -78,7 +72,7 @@ async fn spawn_messaging_protocol() -> ( let peer_manager = PeerManager::new(CommsDatabase::new(), None).map(Arc::new).unwrap(); let node_identity = build_node_identity(PeerFeatures::COMMUNICATION_CLIENT); let (proto_tx, 
proto_rx) = mpsc::channel(10); - let (request_tx, request_rx) = mpsc::channel(100); + let (request_tx, request_rx) = mpsc::unbounded_channel(); let (inbound_msg_tx, inbound_msg_rx) = mpsc::channel(100); let (events_tx, events_rx) = broadcast::channel(100); @@ -173,7 +167,7 @@ async fn send_message_request() { // Send a message to node let out_msg = OutboundMessage::new(peer_node_identity.node_id().clone(), TEST_MSG1.clone()); - request_tx.send(MessagingRequest::SendMessage(out_msg)).await.unwrap(); + request_tx.send(out_msg).unwrap(); // Check that node got the message let stream = peer_conn_mock2.next_incoming_substream().await.unwrap(); @@ -193,7 +187,7 @@ async fn send_message_dial_failed() { let (reply_tx, reply_rx) = oneshot::channel(); let out_msg = OutboundMessage::with_reply(node_id, TEST_MSG1.clone(), reply_tx.into()); // Send a message to node 2 - request_tx.send(MessagingRequest::SendMessage(out_msg)).await.unwrap(); + request_tx.send(out_msg).unwrap(); let event = event_tx.recv().await.unwrap(); unpack_enum!(MessagingEvent::OutboundProtocolExited(_node_id) = &*event); @@ -221,14 +215,14 @@ async fn send_message_substream_bulk_failure() { conn_manager_mock.add_active_connection(conn1).await; async fn send_msg( - request_tx: &mut mpsc::Sender, + request_tx: &mut mpsc::UnboundedSender, node_id: NodeId, ) -> (MessageTag, MessagingReplyRx) { let (reply_tx, reply_rx) = oneshot::channel(); let out_msg = OutboundMessage::with_reply(node_id, TEST_MSG1.clone(), reply_tx.into()); let msg_tag = out_msg.tag; // Send a message to node 2 - request_tx.send(MessagingRequest::SendMessage(out_msg)).await.unwrap(); + request_tx.send(out_msg).unwrap(); (msg_tag, reply_rx) } @@ -300,7 +294,7 @@ async fn many_concurrent_send_message_requests() { }; msg_tags.push(out_msg.tag); reply_rxs.push(reply_rx); - request_tx.send(MessagingRequest::SendMessage(out_msg)).await.unwrap(); + request_tx.send(out_msg).unwrap(); } // Check that the node got the messages @@ -340,7 +334,7 @@ 
async fn many_concurrent_send_message_requests_that_fail() { }; msg_tags.push(out_msg.tag); reply_rxs.push(reply_rx); - request_tx.send(MessagingRequest::SendMessage(out_msg)).await.unwrap(); + request_tx.send(out_msg).unwrap(); } let unordered = reply_rxs.into_iter().collect::>(); diff --git a/comms/core/src/protocol/rpc/client/mod.rs b/comms/core/src/protocol/rpc/client/mod.rs index 982595f052..257905bf64 100644 --- a/comms/core/src/protocol/rpc/client/mod.rs +++ b/comms/core/src/protocol/rpc/client/mod.rs @@ -39,6 +39,7 @@ use std::{ use bytes::Bytes; use futures::{ + future, future::{BoxFuture, Either}, task::{Context, Poll}, FutureExt, @@ -491,7 +492,10 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + StreamId break; } } - None => break, + None => { + debug!(target: LOG_TARGET, "(stream={}) Request channel closed. Worker is terminating.", self.stream_id()); + break + }, } } } @@ -618,7 +622,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + StreamId ); } - let (response_tx, response_rx) = mpsc::channel(10); + let (response_tx, response_rx) = mpsc::channel(5); if let Err(mut rx) = reply.send(response_rx) { event!(Level::WARN, "Client request was cancelled after request was sent"); warn!( @@ -636,7 +640,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + StreamId if let Err(err) = self.send_request(req).await { warn!(target: LOG_TARGET, "{}", err); metrics::client_errors(&self.node_id, &self.protocol_id).inc(); - let _result = response_tx.send(Err(err.into())); + let _result = response_tx.send(Err(err.into())).await; return Ok(()); } @@ -654,7 +658,27 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + StreamId break; } - let resp = match self.read_response(request_id).await { + // Check if the response receiver has been dropped while receiving messages + let resp_result = { + let resp_fut = self.read_response(request_id); + tokio::pin!(resp_fut); + let closed_fut = response_tx.closed(); + tokio::pin!(closed_fut); + 
match future::select(resp_fut, closed_fut).await { + Either::Left((r, _)) => Some(r), + Either::Right(_) => None, + } + }; + let resp_result = match resp_result { + Some(r) => r, + None => { + self.premature_close(request_id, method).await?; + break; + }, + }; + + // let resp = match self.read_response(request_id).await { + let resp = match resp_result { Ok(resp) => { if let Some(t) = timer.take() { let _ = self.last_request_latency_tx.send(Some(t.elapsed())); @@ -682,14 +706,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + StreamId event!(Level::ERROR, "Response timed out"); metrics::client_timeouts(&self.node_id, &self.protocol_id).inc(); if response_tx.is_closed() { - let req = proto::rpc::RpcRequest { - request_id: u32::try_from(request_id).unwrap(), - method, - flags: RpcMessageFlags::FIN.bits().into(), - ..Default::default() - }; - - self.send_request(req).await?; + self.premature_close(request_id, method).await?; } else { let _result = response_tx.send(Err(RpcStatus::timed_out("Response timed out"))).await; } @@ -721,21 +738,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + StreamId // The consumer may drop the receiver before all responses are received. // We handle this by sending a 'FIN' message to the server. if response_tx.is_closed() { - warn!( - target: LOG_TARGET, - "(stream={}) Response receiver was dropped before the response/stream could complete for \ - protocol {}, interrupting the stream. 
", - self.stream_id(), - self.protocol_name() - ); - let req = proto::rpc::RpcRequest { - request_id: u32::try_from(request_id).unwrap(), - method, - flags: RpcMessageFlags::FIN.bits().into(), - ..Default::default() - }; - - self.send_request(req).await?; + self.premature_close(request_id, method).await?; break; } else { let _result = response_tx.send(Ok(resp)).await; @@ -766,6 +769,29 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + StreamId Ok(()) } + async fn premature_close(&mut self, request_id: u16, method: u32) -> Result<(), RpcError> { + warn!( + target: LOG_TARGET, + "(stream={}) Response receiver was dropped before the response/stream could complete for protocol {}, \ + interrupting the stream. ", + self.stream_id(), + self.protocol_name() + ); + let req = proto::rpc::RpcRequest { + request_id: u32::try_from(request_id).unwrap(), + method, + flags: RpcMessageFlags::FIN.bits().into(), + deadline: self.config.deadline.map(|d| d.as_secs()).unwrap_or(0), + ..Default::default() + }; + + // If we cannot set FIN quickly, just exit + if let Ok(res) = time::timeout(Duration::from_secs(2), self.send_request(req)).await { + res?; + } + Ok(()) + } + async fn send_request(&mut self, req: proto::rpc::RpcRequest) -> Result<(), RpcError> { let payload = req.to_encoded_bytes(); if payload.len() > rpc::max_request_size() { diff --git a/comms/core/src/protocol/rpc/server/early_close.rs b/comms/core/src/protocol/rpc/server/early_close.rs new file mode 100644 index 0000000000..82973bb8ef --- /dev/null +++ b/comms/core/src/protocol/rpc/server/early_close.rs @@ -0,0 +1,119 @@ +// Copyright 2022. The Tari Project +// +// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the +// following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following +// disclaimer. +// +// 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the +// following disclaimer in the documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote +// products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +use std::{ + io, + pin::Pin, + task::{Context, Poll}, +}; + +use futures::Sink; +use tokio_stream::Stream; + +pub struct EarlyClose { + inner: TSock, +} + +impl> + Unpin> EarlyClose { + pub fn new(inner: TSock) -> Self { + Self { inner } + } +} + +impl Stream for EarlyClose { + type Item = TSock::Item; + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + Pin::new(&mut self.inner).poll_next(cx) + } +} + +impl Sink for EarlyClose +where TSock: Sink + Stream> + Unpin +{ + type Error = EarlyCloseError; + + fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + if let Poll::Ready(r) = Pin::new(&mut self.inner).poll_ready(cx) { + return Poll::Ready(r.map_err(Into::into)); + } + check_for_early_close(&mut self.inner, cx) + } + + fn start_send(mut self: Pin<&mut Self>, item: TItem) -> Result<(), Self::Error> { + Pin::new(&mut self.inner).start_send(item)?; + Ok(()) + } + + fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + if let Poll::Ready(r) = Pin::new(&mut self.inner).poll_flush(cx) { + return Poll::Ready(r.map_err(Into::into)); + } + check_for_early_close(&mut self.inner, cx) + } + + fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + if let Poll::Ready(r) = Pin::new(&mut self.inner).poll_close(cx) { + return Poll::Ready(r.map_err(Into::into)); + } + check_for_early_close(&mut self.inner, cx) + } +} + +fn check_for_early_close> + Unpin>( + sock: &mut TSock, + cx: &mut Context<'_>, +) -> Poll>> { + match Pin::new(sock).poll_next(cx) { + Poll::Ready(Some(Ok(msg))) => Poll::Ready(Err(EarlyCloseError::UnexpectedMessage(msg))), + Poll::Ready(Some(Err(err))) if err.kind() == io::ErrorKind::WouldBlock => Poll::Pending, + Poll::Pending => Poll::Pending, + Poll::Ready(Some(Err(err))) => Poll::Ready(Err(err.into())), + Poll::Ready(None) => Poll::Ready(Err( + io::Error::new(io::ErrorKind::BrokenPipe, "Connection closed").into() + )), + } +} + +#[derive(Debug, thiserror::Error)] +pub 
enum EarlyCloseError { + #[error(transparent)] + Io(#[from] io::Error), + #[error("Unexpected message")] + UnexpectedMessage(T), +} + +impl EarlyCloseError { + pub fn io(&self) -> Option<&io::Error> { + match self { + Self::Io(err) => Some(err), + _ => None, + } + } + + pub fn unexpected_message(&self) -> Option<&T> { + match self { + EarlyCloseError::UnexpectedMessage(msg) => Some(msg), + _ => None, + } + } +} diff --git a/comms/core/src/protocol/rpc/server/error.rs b/comms/core/src/protocol/rpc/server/error.rs index 38f257b423..ea3458b4e5 100644 --- a/comms/core/src/protocol/rpc/server/error.rs +++ b/comms/core/src/protocol/rpc/server/error.rs @@ -22,10 +22,15 @@ use std::io; +use bytes::BytesMut; use prost::DecodeError; use tokio::sync::oneshot; -use crate::{peer_manager::NodeId, proto, protocol::rpc::handshake::RpcHandshakeError}; +use crate::{ + peer_manager::NodeId, + proto, + protocol::rpc::{handshake::RpcHandshakeError, server::early_close::EarlyCloseError}, +}; #[derive(Debug, thiserror::Error)] pub enum RpcServerError { @@ -55,6 +60,8 @@ pub enum RpcServerError { ServiceCallExceededDeadline, #[error("Stream read exceeded deadline")] ReadStreamExceededDeadline, + #[error("Early close error: {0}")] + EarlyCloseError(#[from] EarlyCloseError), } impl From for RpcServerError { diff --git a/comms/core/src/protocol/rpc/server/handle.rs b/comms/core/src/protocol/rpc/server/handle.rs index 06c5d1c645..8a82912cb5 100644 --- a/comms/core/src/protocol/rpc/server/handle.rs +++ b/comms/core/src/protocol/rpc/server/handle.rs @@ -23,10 +23,12 @@ use tokio::sync::{mpsc, oneshot}; use super::RpcServerError; +use crate::peer_manager::NodeId; #[derive(Debug)] pub enum RpcServerRequest { GetNumActiveSessions(oneshot::Sender), + GetNumActiveSessionsForPeer(NodeId, oneshot::Sender), } #[derive(Debug, Clone)] @@ -47,4 +49,13 @@ impl RpcServerHandle { .map_err(|_| RpcServerError::RequestCanceled)?; resp.await.map_err(Into::into) } + + pub async fn get_num_active_sessions_for(&mut 
self, peer: NodeId) -> Result { + let (req, resp) = oneshot::channel(); + self.sender + .send(RpcServerRequest::GetNumActiveSessionsForPeer(peer, req)) + .await + .map_err(|_| RpcServerError::RequestCanceled)?; + resp.await.map_err(Into::into) + } } diff --git a/comms/core/src/protocol/rpc/server/mod.rs b/comms/core/src/protocol/rpc/server/mod.rs index edf471f97f..6690e31418 100644 --- a/comms/core/src/protocol/rpc/server/mod.rs +++ b/comms/core/src/protocol/rpc/server/mod.rs @@ -34,6 +34,7 @@ mod metrics; pub mod mock; +mod early_close; mod router; use std::{ @@ -50,6 +51,7 @@ use std::{ }; use futures::{future, stream, stream::FuturesUnordered, SinkExt, StreamExt}; +use log::*; use prost::Message; use router::Router; use tokio::{sync::mpsc, task::JoinHandle, time}; @@ -78,6 +80,7 @@ use crate::{ rpc::{ body::BodyBytes, message::{RpcMethod, RpcResponse}, + server::early_close::EarlyClose, }, ProtocolEvent, ProtocolId, @@ -89,7 +92,7 @@ use crate::{ Substream, }; -const LOG_TARGET: &str = "comms::rpc"; +const LOG_TARGET: &str = "comms::rpc::server"; pub trait NamedProtocolService { const PROTOCOL_NAME: &'static [u8]; @@ -311,7 +314,8 @@ where } async fn handle_request(&self, req: RpcServerRequest) { - use RpcServerRequest::GetNumActiveSessions; + #[allow(clippy::enum_glob_use)] + use RpcServerRequest::*; match req { GetNumActiveSessions(reply) => { let max_sessions = self @@ -321,6 +325,10 @@ where let num_active = max_sessions.saturating_sub(self.executor.num_available()); let _ = reply.send(num_active); }, + GetNumActiveSessionsForPeer(node_id, reply) => { + let num_active = self.sessions.get(&node_id).copied().unwrap_or(0); + let _ = reply.send(num_active); + }, } } @@ -359,23 +367,23 @@ where } fn new_session_for(&mut self, node_id: NodeId) -> Result { + let count = self.sessions.entry(node_id.clone()).or_insert(0); match self.config.maximum_sessions_per_client { Some(max) if max > 0 => { - let count = self.sessions.entry(node_id.clone()).or_insert(0); - 
debug_assert!(*count <= max); if *count >= max { return Err(RpcServerError::MaxSessionsPerClientReached { node_id }); } - *count += 1; - Ok(*count) }, - Some(_) => Ok(0), - None => Ok(0), + Some(_) | None => {}, } + + *count += 1; + Ok(*count) } fn on_session_complete(&mut self, node_id: &NodeId) { + info!(target: LOG_TARGET, "Session complete for {}", node_id); if let Some(v) = self.sessions.get_mut(node_id) { *v -= 1; if *v == 0 { @@ -422,11 +430,20 @@ where }, }; - if let Err(err) = self.new_session_for(node_id.clone()) { - handshake - .reject_with_reason(HandshakeRejectReason::NoSessionsAvailable) - .await?; - return Err(err); + match self.new_session_for(node_id.clone()) { + Ok(num_sessions) => { + info!( + target: LOG_TARGET, + "NEW SESSION for {} ({} active) ", node_id, num_sessions + ); + }, + + Err(err) => { + handshake + .reject_with_reason(HandshakeRejectReason::NoSessionsAvailable) + .await?; + return Err(err); + }, } let version = handshake.perform_server_handshake().await?; @@ -451,7 +468,9 @@ where let num_sessions = metrics::num_sessions(&node_id, &service.protocol); num_sessions.inc(); service.start().await; + info!(target: LOG_TARGET, "END OF SESSION for {} ", node_id,); num_sessions.dec(); + node_id }) .map_err(|_| RpcServerError::MaximumSessionsReached)?; @@ -467,7 +486,7 @@ struct ActivePeerRpcService { protocol: ProtocolId, node_id: NodeId, service: TSvc, - framed: CanonicalFraming, + framed: EarlyClose>, comms_provider: TCommsProvider, logging_context_string: Arc, } @@ -497,7 +516,7 @@ where protocol, node_id, service, - framed, + framed: EarlyClose::new(framed), comms_provider, } } @@ -509,9 +528,17 @@ where ); if let Err(err) = self.run().await { metrics::error_counter(&self.node_id, &self.protocol, &err).inc(); - error!( + let level = match &err { + RpcServerError::Io(e) => err_to_log_level(e), + RpcServerError::EarlyCloseError(e) => e.io().map(err_to_log_level).unwrap_or(log::Level::Error), + _ => log::Level::Error, + }; + log!( target: 
LOG_TARGET, - "({}) Rpc server exited with an error: {}", self.logging_context_string, err + level, + "({}) Rpc server exited with an error: {}", + self.logging_context_string, + err ); } } @@ -525,11 +552,14 @@ where request_bytes.observe(frame.len() as f64); if let Err(err) = self.handle_request(frame.freeze()).await { if let Err(err) = self.framed.close().await { - error!( + let level = err.io().map(err_to_log_level).unwrap_or(log::Level::Error); + + log!( target: LOG_TARGET, + level, "({}) Failed to close substream after socket error: {}", self.logging_context_string, - err + err, ); } error!( @@ -663,7 +693,7 @@ where self.process_body(request_id, deadline, body).await?; }, Err(err) => { - error!( + debug!( target: LOG_TARGET, "{} Service returned an error: {}", self.logging_context_string, err ); @@ -709,44 +739,50 @@ where .map(|resp| Bytes::from(resp.to_encoded_bytes())); loop { - // Check if the client interrupted the outgoing stream - if let Err(err) = self.check_interruptions().await { - match err { - err @ RpcServerError::ClientInterruptedStream => { - debug!(target: LOG_TARGET, "Stream was interrupted: {}", err); - break; - }, - err => { - error!(target: LOG_TARGET, "Stream was interrupted: {}", err); - return Err(err); - }, - } - } - let next_item = log_timing( self.logging_context_string.clone(), request_id, "message read", stream.next(), ); - match time::timeout(deadline, next_item).await { - Ok(Some(msg)) => { - response_bytes.observe(msg.len() as f64); - debug!( - target: LOG_TARGET, - "({}) Sending body len = {}", - self.logging_context_string, - msg.len() - ); + let timeout = time::sleep(deadline); - self.framed.send(msg).await?; + tokio::select! 
{ + // Check if the client interrupted the outgoing stream + Err(err) = self.check_interruptions() => { + match err { + err @ RpcServerError::ClientInterruptedStream => { + debug!(target: LOG_TARGET, "Stream was interrupted by client: {}", err); + break; + }, + err => { + error!(target: LOG_TARGET, "Stream was interrupted: {}", err); + return Err(err); + }, + } }, - Ok(None) => { - debug!(target: LOG_TARGET, "{} Request complete", self.logging_context_string,); - break; + msg = next_item => { + match msg { + Some(msg) => { + response_bytes.observe(msg.len() as f64); + debug!( + target: LOG_TARGET, + "({}) Sending body len = {}", + self.logging_context_string, + msg.len() + ); + + self.framed.send(msg).await?; + }, + None => { + debug!(target: LOG_TARGET, "{} Request complete", self.logging_context_string,); + break; + }, + } }, - Err(_) => { - debug!( + + _ = timeout => { + debug!( target: LOG_TARGET, "({}) Failed to return result within client deadline ({:.0?})", self.logging_context_string, @@ -760,8 +796,8 @@ where ) .inc(); break; - }, - } + } + } // end select! 
} // end loop Ok(()) } @@ -817,11 +853,9 @@ async fn log_timing>(context_str: Arc, request_ ret } -#[allow(clippy::cognitive_complexity)] fn into_response(request_id: u32, result: Result) -> RpcResponse { match result { Ok(msg) => { - trace!(target: LOG_TARGET, "Sending body len = {}", msg.len()); let mut flags = RpcMessageFlags::empty(); if msg.is_finished() { flags |= RpcMessageFlags::FIN; @@ -844,3 +878,10 @@ fn into_response(request_id: u32, result: Result) -> RpcRe }, } } + +fn err_to_log_level(err: &io::Error) -> log::Level { + match err.kind() { + io::ErrorKind::BrokenPipe | io::ErrorKind::WriteZero => log::Level::Debug, + _ => log::Level::Error, + } +} diff --git a/comms/core/src/protocol/rpc/test/smoke.rs b/comms/core/src/protocol/rpc/test/smoke.rs index 515ba4f41c..6ebb3ea466 100644 --- a/comms/core/src/protocol/rpc/test/smoke.rs +++ b/comms/core/src/protocol/rpc/test/smoke.rs @@ -551,7 +551,7 @@ async fn max_per_client_sessions() { let socket = inbound.incoming_mut().next().await.unwrap(); let framed = framing::canonical(socket, 1024); - let mut client = GreetingClient::builder() + let client = GreetingClient::builder() .with_deadline(Duration::from_secs(5)) .connect(framed) .await @@ -568,7 +568,6 @@ async fn max_per_client_sessions() { unpack_enum!(RpcError::HandshakeError(err) = err); unpack_enum!(RpcHandshakeError::Rejected(HandshakeRejectReason::NoSessionsAvailable) = err); - client.close().await; drop(client); let substream = outbound.get_yamux_control().open_stream().await.unwrap(); muxer diff --git a/comms/core/src/test_utils/mocks/connectivity_manager.rs b/comms/core/src/test_utils/mocks/connectivity_manager.rs index 6dda3f9f0e..ae29b9211c 100644 --- a/comms/core/src/test_utils/mocks/connectivity_manager.rs +++ b/comms/core/src/test_utils/mocks/connectivity_manager.rs @@ -63,6 +63,7 @@ struct State { active_conns: HashMap, pending_conns: HashMap>>>, selected_connections: Vec, + banned_peers: Vec<(NodeId, Duration, String)>, connectivity_status: 
ConnectivityStatus, } @@ -175,6 +176,10 @@ impl ConnectivityManagerMockState { self.event_tx.send(event).unwrap(); } + pub async fn take_banned_peers(&self) -> Vec<(NodeId, Duration, String)> { + self.with_state(|state| state.banned_peers.drain(..).collect()).await + } + pub(self) async fn with_state(&self, f: F) -> R where F: FnOnce(&mut State) -> R { let mut lock = self.inner.lock().await; @@ -263,7 +268,13 @@ impl ConnectivityManagerMock { unimplemented!() }, GetAllConnectionStates(_) => unimplemented!(), - BanPeer(_, _, _) => {}, + BanPeer(node_id, duration, reason) => { + self.state + .with_state(|state| { + state.banned_peers.push((node_id, duration, reason)); + }) + .await + }, AddPeerToAllowList(_) => {}, RemovePeerFromAllowList(_) => {}, GetActiveConnections(reply) => { diff --git a/comms/core/src/utils/datetime.rs b/comms/core/src/utils/datetime.rs index e61e2581ff..f71a48b3d1 100644 --- a/comms/core/src/utils/datetime.rs +++ b/comms/core/src/utils/datetime.rs @@ -28,7 +28,7 @@ pub fn safe_future_datetime_from_duration(duration: Duration) -> DateTime { let old_duration = chrono::Duration::from_std(duration).unwrap_or_else(|_| chrono::Duration::max_value()); Utc::now() .checked_add_signed(old_duration) - .unwrap_or(chrono::MAX_DATETIME) + .unwrap_or(DateTime::::MAX_UTC) } pub fn format_duration(duration: Duration) -> String { @@ -52,7 +52,7 @@ pub fn format_local_datetime(datetime: &NaiveDateTime) -> String { } pub fn is_max_datetime(datetime: &NaiveDateTime) -> bool { - chrono::MAX_DATETIME.naive_utc() == *datetime + DateTime::::MAX_UTC.naive_utc() == *datetime } #[cfg(test)] diff --git a/comms/core/tests/greeting_service.rs b/comms/core/tests/greeting_service.rs index f06c738b51..e455e00fde 100644 --- a/comms/core/tests/greeting_service.rs +++ b/comms/core/tests/greeting_service.rs @@ -107,6 +107,7 @@ impl GreetingRpc for GreetingService { id, item_size, num_items, + delay_ms: delay_secs, } = request.into_message(); let (tx, rx) = mpsc::channel(10); let 
t = std::time::Instant::now(); @@ -118,7 +119,20 @@ impl GreetingRpc for GreetingService { .take(usize::try_from(num_items).unwrap()) .enumerate() { - tx.send(item).await.unwrap(); + if delay_secs > 0 { + time::sleep(Duration::from_millis(delay_secs)).await; + } + if tx.send(item).await.is_err() { + log::info!( + "[{}] reqid: {} t={:.2?} STREAM INTERRUPTED {}/{}", + id, + req_id, + t.elapsed(), + i + 1, + num_items + ); + return; + } log::info!( "[{}] reqid: {} t={:.2?} sent {}/{}", id, @@ -160,4 +174,6 @@ pub struct StreamLargeItemsRequest { pub num_items: u64, #[prost(uint64, tag = "3")] pub item_size: u64, + #[prost(uint64, tag = "4")] + pub delay_ms: u64, } diff --git a/comms/core/tests/rpc.rs b/comms/core/tests/rpc.rs new file mode 100644 index 0000000000..90e393012d --- /dev/null +++ b/comms/core/tests/rpc.rs @@ -0,0 +1,125 @@ +// Copyright 2021, The Tari Project +// +// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the +// following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following +// disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the +// following disclaimer in the documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote +// products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#![cfg(feature = "rpc")] + +mod greeting_service; +use greeting_service::{GreetingClient, GreetingServer, GreetingService, StreamLargeItemsRequest}; + +mod helpers; +use std::time::Duration; + +use futures::StreamExt; +use helpers::create_comms; +use tari_comms::{ + protocol::rpc::{RpcServer, RpcServerHandle}, + transports::TcpTransport, + CommsNode, +}; +use tari_shutdown::{Shutdown, ShutdownSignal}; +use tari_test_utils::async_assert_eventually; +use tokio::time; + +async fn spawn_node(signal: ShutdownSignal) -> (CommsNode, RpcServerHandle) { + let rpc_server = RpcServer::builder() + .with_unlimited_simultaneous_sessions() + .finish() + .add_service(GreetingServer::new(GreetingService::default())); + + let rpc_server_hnd = rpc_server.get_handle(); + let comms = create_comms(signal) + .add_rpc_server(rpc_server) + .spawn_with_transport(TcpTransport::new()) + .await + .unwrap(); + + comms + .node_identity() + .set_public_address(comms.listening_address().clone()); + (comms, rpc_server_hnd) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +async fn client_prematurely_ends_session() { + env_logger::init(); + let shutdown = Shutdown::new(); + let (node1, _rpc_server1) = spawn_node(shutdown.to_signal()).await; + let (node2, mut rpc_server2) = spawn_node(shutdown.to_signal()).await; + + node1 + .peer_manager() + .add_peer(node2.node_identity().to_peer()) + .await + .unwrap(); + + let mut conn1_2 = node1 + .connectivity() + 
.dial_peer(node2.node_identity().node_id().clone()) + .await + .unwrap(); + + { + let mut client = conn1_2.connect_rpc::().await.unwrap(); + + let num_sessions = rpc_server2 + .get_num_active_sessions_for(node1.node_identity().node_id().clone()) + .await + .unwrap(); + assert_eq!(num_sessions, 1); + + let mut stream = client + .stream_large_items(StreamLargeItemsRequest { + id: 1, + num_items: 100, + item_size: 2300 * 1024, + delay_ms: 50, + }) + .await + .unwrap(); + + let mut count = 0; + while let Some(r) = stream.next().await { + count += 1; + + let data = r.unwrap(); + assert_eq!(data.len(), 2300 * 1024); + // Prematurely drop the stream + if count == 5 { + log::info!("Ending the stream prematurely"); + drop(stream); + break; + } + } + + // Drop stream and client + } + + time::sleep(Duration::from_secs(1)).await; + async_assert_eventually!( + rpc_server2 + .get_num_active_sessions_for(node1.node_identity().node_id().clone()) + .await + .unwrap(), + expect = 0, + max_attempts = 20, + interval = Duration::from_millis(1000) + ); +} diff --git a/comms/core/tests/rpc_stress.rs b/comms/core/tests/rpc_stress.rs index 3c77537f5f..708121ca3d 100644 --- a/comms/core/tests/rpc_stress.rs +++ b/comms/core/tests/rpc_stress.rs @@ -40,7 +40,7 @@ use tari_comms::{ use tari_shutdown::{Shutdown, ShutdownSignal}; use tokio::{task, time::Instant}; -pub async fn spawn_node(signal: ShutdownSignal) -> CommsNode { +async fn spawn_node(signal: ShutdownSignal) -> CommsNode { let rpc_server = RpcServer::builder() .with_unlimited_simultaneous_sessions() .finish() @@ -132,6 +132,7 @@ async fn run_stress_test(test_params: Params) { id: i as u64, num_items: num_items as u64, item_size: payload_size as u64, + delay_ms: 0, }) .await .unwrap(); diff --git a/comms/dht/Cargo.toml b/comms/dht/Cargo.toml index fb26dd27a9..1f3f972a95 100644 --- a/comms/dht/Cargo.toml +++ b/comms/dht/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "tari_comms_dht" -version = "0.37.0" +version = "0.38.3" authors = ["The 
Tari Development Community"] description = "Tari comms DHT module" repository = "https://github.com/tari-project/tari" @@ -10,13 +10,13 @@ license = "BSD-3-Clause" edition = "2018" [dependencies] -tari_comms = { version = "^0.37", path = "../core", features = ["rpc"] } +tari_comms = { version = "^0.38", path = "../core", features = ["rpc"] } tari_common = { path = "../../common" } -tari_comms_rpc_macros = { version = "^0.37", path = "../rpc_macros" } -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_comms_rpc_macros = { version = "^0.38", path = "../rpc_macros" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } -tari_shutdown = { version = "^0.37", path = "../../infrastructure/shutdown" } -tari_storage = { version = "^0.37", path = "../../infrastructure/storage" } +tari_shutdown = { version = "^0.38", path = "../../infrastructure/shutdown" } +tari_storage = { version = "^0.38", path = "../../infrastructure/storage" } tari_common_sqlite = { path = "../../common_sqlite" } anyhow = "1.0.53" @@ -43,15 +43,15 @@ zeroize = "1.4.0" # Uncomment for tokio tracing via tokio-console (needs "tracing" features) #console-subscriber = "0.1.3" -#tokio = { version = "1.14", features = ["rt", "macros", "tracing"] } +#tokio = { version = "1.20", features = ["rt", "macros", "tracing"] } # Uncomment for normal use (non tokio-console tracing) -tokio = { version = "1.14", features = ["rt", "macros"] } +tokio = { version = "1.20", features = ["rt", "macros"] } # tower-filter dependencies pin-project = "0.4" [dev-dependencies] -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } env_logger = "0.7.0" futures-test = { version = "0.3.5" } @@ -59,13 +59,13 @@ futures-util = "^0.3.1" lazy_static = 
"1.4.0" lmdb-zero = "0.4.4" tempfile = "3.1.0" -tokio-stream = { version = "0.1.7", features = ["sync"] } +tokio-stream = { version = "0.1.9", features = ["sync"] } petgraph = "0.5.1" clap = "2.33.0" [build-dependencies] -tari_common = { version = "^0.37", path = "../../common" } +tari_common = { version = "^0.38", path = "../../common" } [features] test-mocks = [] diff --git a/comms/dht/examples/memory_net/utilities.rs b/comms/dht/examples/memory_net/utilities.rs index b942e74197..35271742dd 100644 --- a/comms/dht/examples/memory_net/utilities.rs +++ b/comms/dht/examples/memory_net/utilities.rs @@ -148,7 +148,7 @@ pub async fn discovery(wallets: &[TestNode], messaging_events_rx: &mut NodeEvent .discovery_service_requester() .discover_peer( wallet2.node_identity().public_key().clone(), - wallet2.node_identity().node_id().clone().into(), + wallet2.node_identity().public_key().clone().into(), ) .await; @@ -275,6 +275,7 @@ pub async fn do_network_wide_propagation(nodes: &mut [TestNode], origin_node_ind OutboundEncryption::ClearText, vec![], OutboundDomainMessage::new(&0i32, PUBLIC_MESSAGE.to_string()), + "Memory net example".to_string(), ) .await .unwrap(); @@ -442,7 +443,7 @@ pub async fn do_store_and_forward_message_propagation( .dht .outbound_requester() .closest_broadcast( - node_identity.node_id().clone(), + node_identity.public_key().clone(), OutboundEncryption::encrypt_for(node_identity.public_key().clone()), vec![], OutboundDomainMessage::new(&123i32, secret_message.clone()), @@ -949,7 +950,6 @@ async fn setup_comms_dht( let dht_outbound_layer = dht.outbound_middleware_layer(); let pipeline = pipeline::Builder::new() - .outbound_buffer_size(10) .with_outbound_pipeline(outbound_rx, |sink| { ServiceBuilder::new().layer(dht_outbound_layer).service(sink) }) diff --git a/comms/dht/examples/propagation/node.rs b/comms/dht/examples/propagation/node.rs index 0fe1f4c342..4c563aaa60 100644 --- a/comms/dht/examples/propagation/node.rs +++ 
b/comms/dht/examples/propagation/node.rs @@ -31,7 +31,7 @@ use tari_comms::{ pipeline::SinkService, protocol::{messaging::MessagingProtocolExtension, NodeNetworkInfo}, tor, - tor::{HsFlags, TorIdentity}, + tor::TorIdentity, CommsBuilder, CommsNode, NodeIdentity, @@ -91,7 +91,6 @@ pub async fn create>( let (event_tx, _) = broadcast::channel(1); let mut hs_builder = tor::HiddenServiceBuilder::new() - .with_hs_flags(HsFlags::DETACH) .with_port_mapping(onion_port) .with_control_server_address(TOR_CONTROL_PORT_ADDR.parse().unwrap()); diff --git a/comms/dht/examples/propagation_stress.rs b/comms/dht/examples/propagation_stress.rs index e15d820315..865a5b45d1 100644 --- a/comms/dht/examples/propagation_stress.rs +++ b/comms/dht/examples/propagation_stress.rs @@ -118,7 +118,7 @@ async fn prompt(node: &CommsNode, dht: &Dht) -> anyhow::Result<()> { let msg = OutboundDomainMessage::new(&999, PropagationMessage::new(u32::try_from(i).unwrap(), opts.msg_size)); let states = match opts.send_method { SendMethod::Direct => outbound - .send_direct_node_id(opts.peer.node_id.clone(), msg) + .send_direct_node_id(opts.peer.node_id.clone(), msg, "Example stress".to_string()) .await .map(MessageSendStates::from)?, SendMethod::Propagated => { diff --git a/comms/dht/src/actor.rs b/comms/dht/src/actor.rs index f4f63308fd..bed2b99e9e 100644 --- a/comms/dht/src/actor.rs +++ b/comms/dht/src/actor.rs @@ -450,8 +450,9 @@ impl DhtActor { .send_message_no_header( SendMessageParams::new() .closest(node_identity.node_id().clone(), vec![]) - .with_destination(node_identity.node_id().clone().into()) + .with_destination(node_identity.public_key().clone().into()) .with_dht_message_type(DhtMessageType::Join) + .with_debug_info("Broadcast join".to_string()) .force_origin() .finish(), message, @@ -549,10 +550,7 @@ impl DhtActor { Ok(candidates) }, Propagate(destination, exclude) => { - let dest_node_id = destination - .node_id() - .cloned() - .or_else(|| 
destination.public_key().map(NodeId::from_public_key)); + let dest_node_id = destination.to_derived_node_id(); let connections = match dest_node_id { Some(node_id) => { @@ -1171,7 +1169,7 @@ mod test { let peers = requester .select_peers(BroadcastStrategy::Propagate( - conn_out.peer_node_id().clone().into(), + node_identity.public_key().clone().into(), Vec::new(), )) .await diff --git a/comms/dht/src/connectivity/mod.rs b/comms/dht/src/connectivity/mod.rs index edb13d3779..6d008e52a3 100644 --- a/comms/dht/src/connectivity/mod.rs +++ b/comms/dht/src/connectivity/mod.rs @@ -406,11 +406,6 @@ impl DhtConnectivity { self.insert_neighbour(peer); }); - // Drop any connection handles that removed from the neighbour pool - difference.iter().for_each(|peer| { - self.remove_connection_handle(peer); - }); - if !new_neighbours.is_empty() { self.connectivity.request_many_dials(new_neighbours).await?; } diff --git a/comms/dht/src/crypt.rs b/comms/dht/src/crypt.rs index a2c6c31214..518cace315 100644 --- a/comms/dht/src/crypt.rs +++ b/comms/dht/src/crypt.rs @@ -33,6 +33,7 @@ use chacha20poly1305::{ aead::{Aead, NewAead}, ChaCha20Poly1305, }; +use digest::Digest; use rand::{rngs::OsRng, RngCore}; use tari_comms::types::{CommsPublicKey, CommsSecretKey}; use tari_crypto::{ @@ -46,7 +47,7 @@ use crate::{ comms_dht_hash_domain_key_message, comms_dht_hash_domain_key_signature, envelope::{DhtMessageFlags, DhtMessageHeader, DhtMessageType, NodeDestination}, - outbound::DhtOutboundError, + error::DhtEncryptError, version::DhtProtocolVersion, }; @@ -55,7 +56,6 @@ use crate::{ pub struct CipherKey(chacha20::Key); pub struct AuthenticatedCipherKey(chacha20poly1305::Key); -const LITTLE_ENDIAN_U32_SIZE_REPRESENTATION: usize = 4; const MESSAGE_BASE_LENGTH: usize = 6000; /// Generates a Diffie-Hellman secret `kx.G` as a `chacha20::Key` given secret scalar `k` and public key `P = x.G`. 
@@ -69,45 +69,68 @@ pub fn generate_ecdh_secret(secret_key: &CommsSecretKey, public_key: &CommsPubli output } -fn pad_message_to_base_length_multiple(message: &[u8]) -> Vec { - let n = message.len(); - // little endian representation of message length, to be appended to padded message, - // assuming our code runs on 64-bits system - let prepend_to_message = (n as u32).to_le_bytes(); - - let k = prepend_to_message.len(); - - let div_n_base_len = (n + k) / MESSAGE_BASE_LENGTH; - let output_size = (div_n_base_len + 1) * MESSAGE_BASE_LENGTH; - - // join prepend_message_len | message | zero_padding - let mut output = Vec::with_capacity(output_size); - output.extend_from_slice(&prepend_to_message); - output.extend_from_slice(message); - output.extend(std::iter::repeat(0u8).take(output_size - n - k)); - - output +fn pad_message_to_base_length_multiple(message: &[u8]) -> Result, DhtEncryptError> { + // We require a 32-bit length representation, and also don't want to overflow after including this encoding + if message.len() > ((u32::max_value() - (size_of::() as u32)) as usize) { + return Err(DhtEncryptError::PaddingError("Message is too long".to_string())); + } + let message_length = message.len(); + let encoded_length = (message_length as u32).to_le_bytes(); + + // Pad the message (if needed) to the next multiple of the base length + let padding_length = if ((message_length + size_of::()) % MESSAGE_BASE_LENGTH) == 0 { + 0 + } else { + MESSAGE_BASE_LENGTH - ((message_length + size_of::()) % MESSAGE_BASE_LENGTH) + }; + + // The padded message is the encoded length, message, and zero padding + let mut padded_message = Vec::with_capacity(size_of::() + message_length + padding_length); + padded_message.extend_from_slice(&encoded_length); + padded_message.extend_from_slice(message); + padded_message.extend(std::iter::repeat(0u8).take(padding_length)); + + Ok(padded_message) } -fn get_original_message_from_padded_text(message: &[u8]) -> Result, DhtOutboundError> { - let mut 
le_bytes = [0u8; 4]; - le_bytes.copy_from_slice(&message[..LITTLE_ENDIAN_U32_SIZE_REPRESENTATION]); +fn get_original_message_from_padded_text(padded_message: &[u8]) -> Result, DhtEncryptError> { + // NOTE: This function can return errors relating to message length + // It is important not to leak error types to an adversary, or to have timing differences - // obtain length of original message, assuming our code runs on 64-bits system - let original_message_len = u32::from_le_bytes(le_bytes) as usize; + // The padded message must be long enough to extract the encoded message length + if padded_message.len() < size_of::() { + return Err(DhtEncryptError::PaddingError( + "Padded message is not long enough for length extraction".to_string(), + )); + } + + // The padded message must be a multiple of the base length + if (padded_message.len() % MESSAGE_BASE_LENGTH) != 0 { + return Err(DhtEncryptError::PaddingError( + "Padded message must be a multiple of the base length".to_string(), + )); + } - if original_message_len > message.len() { - return Err(DhtOutboundError::CipherError( - "Original length message is invalid".to_string(), + // Decode the message length + let mut encoded_length = [0u8; size_of::()]; + encoded_length.copy_from_slice(&padded_message[0..size_of::()]); + let message_length = u32::from_le_bytes(encoded_length) as usize; + + // The padded message is too short for the decoded length + let end = message_length + .checked_add(size_of::()) + .ok_or_else(|| DhtEncryptError::PaddingError("Claimed unpadded message length is too large".to_string()))?; + if end > padded_message.len() { + return Err(DhtEncryptError::CipherError( + "Claimed unpadded message length is too large".to_string(), )); } - // obtain original message - let start = LITTLE_ENDIAN_U32_SIZE_REPRESENTATION; - let end = LITTLE_ENDIAN_U32_SIZE_REPRESENTATION + original_message_len; - let original_message = &message[start..end]; + // Remove the padding (we don't check for valid padding, as this is 
offloaded to authentication) + let start = size_of::(); + let unpadded_message = &padded_message[start..end]; - Ok(original_message.to_vec()) + Ok(unpadded_message.to_vec()) } pub fn generate_key_message(data: &[u8]) -> CipherKey { @@ -127,11 +150,9 @@ pub fn generate_key_signature_for_authenticated_encryption(data: &[u8]) -> Authe } /// Decrypts cipher text using ChaCha20 stream cipher given the cipher key and cipher text with integral nonce. -pub fn decrypt(cipher_key: &CipherKey, cipher_text: &[u8]) -> Result, DhtOutboundError> { +pub fn decrypt(cipher_key: &CipherKey, cipher_text: &[u8]) -> Result, DhtEncryptError> { if cipher_text.len() < size_of::() { - return Err(DhtOutboundError::CipherError( - "Cipher text is not long enough to include nonce".to_string(), - )); + return Err(DhtEncryptError::InvalidDecryptionNonceNotIncluded); } let (nonce, cipher_text) = cipher_text.split_at(size_of::()); @@ -149,7 +170,7 @@ pub fn decrypt(cipher_key: &CipherKey, cipher_text: &[u8]) -> Result, Dh pub fn decrypt_with_chacha20_poly1305( cipher_key: &AuthenticatedCipherKey, cipher_signature: &[u8], -) -> Result, DhtOutboundError> { +) -> Result, DhtEncryptError> { let nonce = [0u8; size_of::()]; let nonce_ga = chacha20poly1305::Nonce::from_slice(&nonce); @@ -157,15 +178,15 @@ pub fn decrypt_with_chacha20_poly1305( let cipher = ChaCha20Poly1305::new(&cipher_key.0); let decrypted_signature = cipher .decrypt(nonce_ga, cipher_signature) - .map_err(|_| DhtOutboundError::CipherError(String::from("Authenticated decryption failed")))?; + .map_err(|_| DhtEncryptError::InvalidAuthenticatedDecryption)?; Ok(decrypted_signature) } /// Encrypt the plain text using the ChaCha20 stream cipher -pub fn encrypt(cipher_key: &CipherKey, plain_text: &[u8]) -> Vec { +pub fn encrypt(cipher_key: &CipherKey, plain_text: &[u8]) -> Result, DhtEncryptError> { // pad plain_text to avoid message length leaks - let plain_text = pad_message_to_base_length_multiple(plain_text); + let plain_text = 
pad_message_to_base_length_multiple(plain_text)?; let mut nonce = [0u8; size_of::()]; OsRng.fill_bytes(&mut nonce); @@ -178,7 +199,7 @@ pub fn encrypt(cipher_key: &CipherKey, plain_text: &[u8]) -> Vec { buf[nonce.len()..].copy_from_slice(plain_text.as_slice()); cipher.apply_keystream(&mut buf[nonce.len()..]); - buf + Ok(buf) } /// Produces authenticated encryption of the signature using the ChaCha20-Poly1305 stream cipher, @@ -189,7 +210,7 @@ pub fn encrypt(cipher_key: &CipherKey, plain_text: &[u8]) -> Vec { pub fn encrypt_with_chacha20_poly1305( cipher_key: &AuthenticatedCipherKey, signature: &[u8], -) -> Result, DhtOutboundError> { +) -> Result, DhtEncryptError> { let nonce = [0u8; size_of::()]; let nonce_ga = chacha20poly1305::Nonce::from_slice(&nonce); @@ -198,7 +219,7 @@ pub fn encrypt_with_chacha20_poly1305( // length of encrypted equals signature.len() + 16 (the latter being the tag size for ChaCha20-poly1305) let encrypted = cipher .encrypt(nonce_ga, signature) - .map_err(|_| DhtOutboundError::CipherError(String::from("Authenticated encryption failed")))?; + .map_err(|_| DhtEncryptError::CipherError(String::from("Authenticated encryption failed")))?; Ok(encrypted) } @@ -241,19 +262,16 @@ pub fn create_message_domain_separated_hash_parts( // we digest the given data into a domain independent hash function to produce a signature // use of the hashing API for domain separation and deal with variable length input - let domain_separated_hash = comms_dht_hash_domain_challenge() - .chain(&protocol_version.as_bytes()) + let hasher = comms_dht_hash_domain_challenge() + .chain(protocol_version.as_bytes()) .chain(destination.to_inner_bytes()) - .chain(&(message_type as i32).to_le_bytes()) - .chain(&flags.bits().to_le_bytes()) - .chain(&expires) - .chain(&e_pk) - .chain(&body) - .finalize(); + .chain((message_type as i32).to_le_bytes()) + .chain(flags.bits().to_le_bytes()) + .chain(expires) + .chain(e_pk) + .chain(body); - let mut output = [0u8; 32]; - 
output.copy_from_slice(domain_separated_hash.as_ref()); - output + Digest::finalize(hasher).into() } #[cfg(test)] @@ -268,7 +286,7 @@ mod test { let pk = CommsPublicKey::default(); let key = CipherKey(*chacha20::Key::from_slice(pk.as_bytes())); let plain_text = "Last enemy position 0830h AJ 9863".as_bytes().to_vec(); - let encrypted = encrypt(&key, &plain_text); + let encrypted = encrypt(&key, &plain_text).unwrap(); let decrypted = decrypt(&key, &encrypted).unwrap(); assert_eq!(decrypted, plain_text); } @@ -306,7 +324,7 @@ mod test { let encrypted = cipher .encrypt(nonce_ga, signature) - .map_err(|_| DhtOutboundError::CipherError(String::from("Authenticated encryption failed"))) + .map_err(|_| DhtEncryptError::CipherError(String::from("Authenticated encryption failed"))) .unwrap(); assert_eq!(encrypted.len(), n + 16); @@ -333,7 +351,7 @@ mod test { assert!(decrypt_with_chacha20_poly1305(&key, encrypted.as_slice()) .unwrap_err() .to_string() - .contains("Authenticated decryption failed")); + .contains("Invalid authenticated decryption")); } #[test] @@ -353,7 +371,7 @@ mod test { assert!(decrypt_with_chacha20_poly1305(&key, encrypted.as_slice()) .unwrap_err() .to_string() - .contains("Authenticated decryption failed")); + .contains("Invalid authenticated decryption")); } #[test] @@ -375,7 +393,7 @@ mod test { assert!(decrypt_with_chacha20_poly1305(&other_key, encrypted.as_slice()) .unwrap_err() .to_string() - .contains("Authenticated decryption failed")); + .contains("Invalid authenticated decryption")); } #[test] @@ -387,7 +405,7 @@ mod test { .take(MESSAGE_BASE_LENGTH - message.len() - prepend_message.len()) .collect::>(); - let pad_message = pad_message_to_base_length_multiple(message); + let pad_message = pad_message_to_base_length_multiple(message).unwrap(); // padded message is of correct length assert_eq!(pad_message.len(), MESSAGE_BASE_LENGTH); @@ -404,7 +422,7 @@ mod test { // test for large message let message = &[100u8; MESSAGE_BASE_LENGTH * 8 - 100]; let 
prepend_message = (message.len() as u32).to_le_bytes(); - let pad_message = pad_message_to_base_length_multiple(message); + let pad_message = pad_message_to_base_length_multiple(message).unwrap(); let pad = std::iter::repeat(0u8) .take((8 * MESSAGE_BASE_LENGTH) - message.len() - prepend_message.len()) .collect::>(); @@ -428,7 +446,7 @@ mod test { .take((9 * MESSAGE_BASE_LENGTH) - message.len() - prepend_message.len()) .collect::>(); - let pad_message = pad_message_to_base_length_multiple(message); + let pad_message = pad_message_to_base_length_multiple(message).unwrap(); // padded message is of correct length assert_eq!(pad_message.len(), 9 * MESSAGE_BASE_LENGTH); @@ -445,7 +463,7 @@ mod test { // test for empty message let message: [u8; 0] = []; let prepend_message = (message.len() as u32).to_le_bytes(); - let pad_message = pad_message_to_base_length_multiple(&message); + let pad_message = pad_message_to_base_length_multiple(&message).unwrap(); let pad = [0u8; MESSAGE_BASE_LENGTH - 4]; // padded message is of correct length @@ -462,32 +480,56 @@ mod test { assert_eq!(pad, pad_message[prepend_message.len() + message.len()..]); } + #[test] + fn unpadding_failure_modes() { + // The padded message is empty + let message: [u8; 0] = []; + assert!(get_original_message_from_padded_text(&message) + .unwrap_err() + .to_string() + .contains("Padded message is not long enough for length extraction")); + + // We cannot extract the message length + let message = [0u8; size_of::() - 1]; + assert!(get_original_message_from_padded_text(&message) + .unwrap_err() + .to_string() + .contains("Padded message is not long enough for length extraction")); + + // The padded message is not a multiple of the base length + let message = [0u8; 2 * MESSAGE_BASE_LENGTH + 1]; + assert!(get_original_message_from_padded_text(&message) + .unwrap_err() + .to_string() + .contains("Padded message must be a multiple of the base length")); + } + #[test] fn 
get_original_message_from_padded_text_successful() { // test for short message let message = vec![0u8, 10, 22, 11, 38, 74, 59, 91, 73, 82, 75, 23, 59]; - let pad_message = pad_message_to_base_length_multiple(message.as_slice()); + let pad_message = pad_message_to_base_length_multiple(message.as_slice()).unwrap(); let output_message = get_original_message_from_padded_text(pad_message.as_slice()).unwrap(); assert_eq!(message, output_message); // test for large message let message = vec![100u8; 1024]; - let pad_message = pad_message_to_base_length_multiple(message.as_slice()); + let pad_message = pad_message_to_base_length_multiple(message.as_slice()).unwrap(); let output_message = get_original_message_from_padded_text(pad_message.as_slice()).unwrap(); assert_eq!(message, output_message); // test for base message of base length let message = vec![100u8; 984]; - let pad_message = pad_message_to_base_length_multiple(message.as_slice()); + let pad_message = pad_message_to_base_length_multiple(message.as_slice()).unwrap(); let output_message = get_original_message_from_padded_text(pad_message.as_slice()).unwrap(); assert_eq!(message, output_message); // test for empty message let message: Vec = vec![]; - let pad_message = pad_message_to_base_length_multiple(message.as_slice()); + let pad_message = pad_message_to_base_length_multiple(message.as_slice()).unwrap(); let output_message = get_original_message_from_padded_text(pad_message.as_slice()).unwrap(); assert_eq!(message, output_message); @@ -496,7 +538,7 @@ mod test { #[test] fn padding_fails_if_pad_message_prepend_length_is_bigger_than_plaintext_length() { let message = "This is my secret message, keep it secret !".as_bytes(); - let mut pad_message = pad_message_to_base_length_multiple(message); + let mut pad_message = pad_message_to_base_length_multiple(message).unwrap(); // we modify the prepend length, in order to assert that the get original message // method will output a different length message @@ -514,7 +556,7 
@@ mod test { assert!(get_original_message_from_padded_text(pad_message.as_slice()) .unwrap_err() .to_string() - .contains("Original length message is invalid")); + .contains("Claimed unpadded message length is too large")); } #[test] @@ -524,7 +566,7 @@ mod test { let pk = CommsPublicKey::default(); let key = CipherKey(*chacha20::Key::from_slice(pk.as_bytes())); let message = "My secret message, keep it secret !".as_bytes().to_vec(); - let mut encrypted = encrypt(&key, &message); + let mut encrypted = encrypt(&key, &message).unwrap(); let n = encrypted.len(); encrypted[n - 1] += 1; @@ -537,9 +579,9 @@ mod test { let pk = CommsPublicKey::default(); let key = CipherKey(*chacha20::Key::from_slice(pk.as_bytes())); let message = "My secret message, keep it secret !".as_bytes().to_vec(); - let mut encrypted = encrypt(&key, &message); + let mut encrypted = encrypt(&key, &message).unwrap(); - encrypted[size_of::() + LITTLE_ENDIAN_U32_SIZE_REPRESENTATION + 1] += 1; + encrypted[size_of::() + size_of::() + 1] += 1; assert!(decrypt(&key, &encrypted).unwrap() != message); } diff --git a/comms/dht/src/dht.rs b/comms/dht/src/dht.rs index 0d0d76b99a..5361665f42 100644 --- a/comms/dht/src/dht.rs +++ b/comms/dht/src/dht.rs @@ -327,7 +327,6 @@ impl Dht { self.store_and_forward_requester(), self.dht_requester(), Arc::clone(&self.node_identity), - Arc::clone(&self.peer_manager), self.outbound_requester(), self.saf_response_signal_sender.clone(), )) @@ -359,6 +358,10 @@ impl Dht { S::Future: Send, { ServiceBuilder::new() + .layer(MessageLoggingLayer::new(format!( + "Pre Broadcast [{}]", + self.node_identity.node_id().short_str() + ))) .layer(outbound::BroadcastLayer::new( Arc::clone(&self.node_identity), self.dht_requester(), @@ -599,7 +602,7 @@ mod test { let node_identity2 = make_node_identity(); let ecdh_key = crypt::generate_ecdh_secret(node_identity2.secret_key(), node_identity2.public_key()); let key_message = crypt::generate_key_message(&ecdh_key); - let encrypted_bytes = 
crypt::encrypt(&key_message, &msg.to_encoded_bytes()); + let encrypted_bytes = crypt::encrypt(&key_message, &msg.to_encoded_bytes()).unwrap(); let dht_envelope = make_dht_envelope( &node_identity2, encrypted_bytes, @@ -616,7 +619,10 @@ mod test { service.call(inbound_message).await.unwrap(); - assert_eq!(oms_mock_state.call_count().await, 1); + oms_mock_state + .wait_call_count(1, Duration::from_secs(10)) + .await + .unwrap(); let (params, _) = oms_mock_state.pop_call().await.unwrap(); // Check that OMS got a request to forward with the original Dht Header diff --git a/comms/dht/src/discovery/service.rs b/comms/dht/src/discovery/service.rs index 63a7009a85..b6aeef7d31 100644 --- a/comms/dht/src/discovery/service.rs +++ b/comms/dht/src/discovery/service.rs @@ -325,7 +325,7 @@ impl DhtDiscoveryService { }; debug!( target: LOG_TARGET, - "Sending Discovery message for peer public key '{}' with destination {}", dest_public_key, destination + "Sending Discovery message for peer public key '{}' with destination {}", &dest_public_key, destination ); self.outbound_requester @@ -333,6 +333,7 @@ impl DhtDiscoveryService { SendMessageParams::new() .broadcast(Vec::new()) .with_destination(destination) + .with_debug_info(format!("discover: {}", &dest_public_key)) .with_encryption(OutboundEncryption::EncryptFor(dest_public_key)) .with_dht_message_type(DhtMessageType::Discovery) .finish(), diff --git a/comms/dht/src/envelope.rs b/comms/dht/src/envelope.rs index 3a471feb8c..27038803af 100644 --- a/comms/dht/src/envelope.rs +++ b/comms/dht/src/envelope.rs @@ -265,8 +265,6 @@ pub enum NodeDestination { Unknown, /// Destined for a particular public key PublicKey(Box), - /// Destined for a particular node id, or network region - NodeId(Box), } impl NodeDestination { @@ -283,40 +281,22 @@ impl NodeDestination { buf[1..].copy_from_slice(pk.as_bytes()); buf }, - NodeDestination::NodeId(node_id) => { - buf[0] = 2; - buf[1..=NodeId::byte_size()].copy_from_slice(node_id.as_bytes()); - buf - 
}, } } /// Returns a reference to the `CommsPublicKey` if the destination is `CommsPublicKey`. pub fn public_key(&self) -> Option<&CommsPublicKey> { - use NodeDestination::{NodeId, PublicKey, Unknown}; + use NodeDestination::{PublicKey, Unknown}; match self { Unknown => None, PublicKey(pk) => Some(pk), - NodeId(_) => None, - } - } - - /// Returns a reference to the `NodeId` if the destination is `NodeId`. - pub fn node_id(&self) -> Option<&NodeId> { - use NodeDestination::{NodeId, PublicKey, Unknown}; - match self { - Unknown => None, - PublicKey(_) => None, - NodeId(node_id) => Some(node_id), } } /// Returns the NodeId for this destination, deriving it from the PublicKey if necessary or returning None if the /// destination is `Unknown`. pub fn to_derived_node_id(&self) -> Option { - self.node_id() - .cloned() - .or_else(|| self.public_key().map(NodeId::from_public_key)) + self.public_key().map(NodeId::from_public_key) } /// Returns true if the destination is `Unknown`, otherwise false. @@ -327,7 +307,7 @@ impl NodeDestination { /// Returns true if the NodeIdentity NodeId or PublicKey is equal to this destination. 
#[inline] pub fn equals_node_identity(&self, other: &NodeIdentity) -> bool { - self == other.node_id() || self == other.public_key() + self == other.public_key() } } @@ -337,29 +317,16 @@ impl PartialEq for NodeDestination { } } -impl PartialEq for NodeDestination { - fn eq(&self, other: &NodeId) -> bool { - self.node_id().map(|node_id| node_id == other).unwrap_or(false) - } -} - impl PartialEq<&CommsPublicKey> for NodeDestination { fn eq(&self, other: &&CommsPublicKey) -> bool { self.public_key().map(|pk| pk == *other).unwrap_or(false) } } -impl PartialEq<&NodeId> for NodeDestination { - fn eq(&self, other: &&NodeId) -> bool { - self.node_id().map(|node_id| node_id == *other).unwrap_or(false) - } -} - impl Display for NodeDestination { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { match self { NodeDestination::Unknown => write!(f, "Unknown"), - NodeDestination::NodeId(node_id) => write!(f, "NodeId({})", node_id), NodeDestination::PublicKey(public_key) => write!(f, "PublicKey({})", public_key), } } @@ -380,9 +347,6 @@ impl TryFrom for NodeDestination { Destination::PublicKey(pk) => { CommsPublicKey::from_bytes(&pk).map(|pk| NodeDestination::PublicKey(Box::new(pk))) }, - Destination::NodeId(node_id) => { - NodeId::from_bytes(&node_id).map(|node_id| NodeDestination::NodeId(Box::new(node_id))) - }, } } } @@ -393,19 +357,12 @@ impl From for NodeDestination { } } -impl From for NodeDestination { - fn from(node_id: NodeId) -> Self { - NodeDestination::NodeId(Box::new(node_id)) - } -} - impl From for Destination { fn from(destination: NodeDestination) -> Self { - use NodeDestination::{NodeId, PublicKey, Unknown}; + use NodeDestination::{PublicKey, Unknown}; match destination { Unknown => Destination::Unknown(true), PublicKey(pk) => Destination::PublicKey(pk.to_vec()), - NodeId(node_id) => Destination::NodeId(node_id.to_vec()), } } } @@ -426,11 +383,6 @@ mod tests { assert!(NodeDestination::Unknown.to_inner_bytes().iter().all(|b| *b == 0)); let (_, 
pk) = CommsPublicKey::random_keypair(&mut OsRng); assert!(to_hex(&NodeDestination::PublicKey(Box::new(pk.clone())).to_inner_bytes()).contains(&pk.to_hex())); - let node_id = NodeId::from_public_key(&pk); - assert!( - to_hex(&NodeDestination::NodeId(Box::new(node_id.clone())).to_inner_bytes()) - .contains(&node_id.to_hex()) - ); } } } diff --git a/comms/dht/src/error.rs b/comms/dht/src/error.rs new file mode 100644 index 0000000000..a0d8718cee --- /dev/null +++ b/comms/dht/src/error.rs @@ -0,0 +1,37 @@ +// Copyright 2019, The Tari Project +// +// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the +// following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following +// disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the +// following disclaimer in the documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote +// products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum DhtEncryptError { + #[error("Message body invalid")] + InvalidMessageBody, + #[error("Invalid decryption, nonce not included")] + InvalidDecryptionNonceNotIncluded, + #[error("Invalid authenticated decryption")] + InvalidAuthenticatedDecryption, + #[error("Cipher error: `{0}`")] + CipherError(String), + #[error("Padding error: `{0}`")] + PaddingError(String), +} diff --git a/comms/dht/src/inbound/decryption.rs b/comms/dht/src/inbound/decryption.rs index 03b805c361..3c9c9e634c 100644 --- a/comms/dht/src/inbound/decryption.rs +++ b/comms/dht/src/inbound/decryption.rs @@ -37,6 +37,7 @@ use tower::{layer::Layer, Service, ServiceExt}; use crate::{ crypt, envelope::DhtMessageHeader, + error::DhtEncryptError, inbound::message::{DecryptedDhtMessage, DhtInboundMessage, ValidatedDhtInboundMessage}, message_signature::{MessageSignature, MessageSignatureError, ProtoMessageSignature}, DhtConfig, @@ -68,10 +69,10 @@ enum DecryptionError { MessageRejectDecryptionFailed, #[error("Failed to decode envelope body")] EnvelopeBodyDecodeFailed, - #[error("Failed to decrypt message body")] - MessageBodyDecryptionFailed, #[error("Encrypted message without a destination is invalid")] EncryptedMessageNoDestination, + #[error("Decryption failed: {0}")] + DecryptionFailedMalformedCipher(#[from] DhtEncryptError), } /// This layer is responsible for attempting to decrypt inbound messages. 
@@ -406,7 +407,7 @@ where S: Service ) -> Result { let key_message = crypt::generate_key_message(shared_secret); let decrypted = - crypt::decrypt(&key_message, message_body).map_err(|_| DecryptionError::MessageBodyDecryptionFailed)?; + crypt::decrypt(&key_message, message_body).map_err(DecryptionError::DecryptionFailedMalformedCipher)?; // Deserialization into an EnvelopeBody is done here to determine if the // decryption produced valid bytes or not. EnvelopeBody::decode(decrypted.as_slice()) @@ -432,7 +433,7 @@ where S: Service } Ok(body) }) - .map_err(|_| DecryptionError::MessageBodyDecryptionFailed) + .map_err(|_| DecryptionError::EnvelopeBodyDecodeFailed) } async fn success_not_encrypted( @@ -650,7 +651,7 @@ mod test { let key_message = crypt::generate_key_message(&shared_secret); let msg_tag = MessageTag::new(); - let message = crypt::encrypt(&key_message, &plain_text_msg); + let message = crypt::encrypt(&key_message, &plain_text_msg).unwrap(); let header = make_dht_header( &node_identity, &e_public_key, @@ -711,7 +712,7 @@ mod test { let key_message = crypt::generate_key_message(&shared_secret); let msg_tag = MessageTag::new(); - let message = crypt::encrypt(&key_message, &plain_text_msg); + let message = crypt::encrypt(&key_message, &plain_text_msg).unwrap(); let header = make_dht_header( &node_identity, &e_public_key, diff --git a/comms/dht/src/inbound/dht_handler/task.rs b/comms/dht/src/inbound/dht_handler/task.rs index 2a44000ac8..1760b47295 100644 --- a/comms/dht/src/inbound/dht_handler/task.rs +++ b/comms/dht/src/inbound/dht_handler/task.rs @@ -214,25 +214,24 @@ where S: Service return Ok(()); } - let origin_node_id = origin_peer.node_id; + let origin_public_key = origin_peer.public_key; // Only propagate a join that was not directly sent to this node - if dht_header.destination != self.node_identity.public_key() && - dht_header.destination != self.node_identity.node_id() - { + if dht_header.destination != self.node_identity.public_key() { debug!( 
target: LOG_TARGET, "Propagating Join message from peer '{}'", - origin_node_id.short_str() + origin_peer.node_id.short_str() ); // Propagate message to closer peers self.outbound_service - .send_raw( + .send_raw_no_wait( SendMessageParams::new() - .propagate(origin_node_id.clone().into(), vec![ - origin_node_id, + .propagate(origin_public_key.clone().into(), vec![ + origin_peer.node_id, source_peer.node_id.clone(), ]) + .with_debug_info("Propagating join message".to_string()) .with_dht_header(dht_header) .finish(), body.to_encoded_bytes(), @@ -351,9 +350,10 @@ where S: Service trace!(target: LOG_TARGET, "Sending discovery response to {}", dest_public_key); self.outbound_service - .send_message_no_header( + .send_message_no_header_no_wait( SendMessageParams::new() .direct_public_key(dest_public_key) + .with_debug_info("Sending discovery response".to_string()) .with_destination(NodeDestination::Unknown) .with_dht_message_type(DhtMessageType::DiscoveryResponse) .finish(), diff --git a/comms/dht/src/inbound/error.rs b/comms/dht/src/inbound/error.rs index 6681bdedc5..aec8ea076c 100644 --- a/comms/dht/src/inbound/error.rs +++ b/comms/dht/src/inbound/error.rs @@ -23,7 +23,12 @@ use tari_comms::{message::MessageError, peer_manager::PeerManagerError}; use thiserror::Error; -use crate::{discovery::DhtDiscoveryError, outbound::DhtOutboundError, peer_validator::PeerValidatorError}; +use crate::{ + discovery::DhtDiscoveryError, + error::DhtEncryptError, + outbound::DhtOutboundError, + peer_validator::PeerValidatorError, +}; #[derive(Debug, Error)] pub enum DhtInboundError { @@ -33,6 +38,8 @@ pub enum DhtInboundError { PeerManagerError(#[from] PeerManagerError), #[error("DhtOutboundError: {0}")] DhtOutboundError(#[from] DhtOutboundError), + #[error("DhtEncryptError: {0}")] + DhtEncryptError(#[from] DhtEncryptError), #[error("Message body invalid")] InvalidMessageBody, #[error("All given addresses were invalid")] diff --git a/comms/dht/src/inbound/forward.rs 
b/comms/dht/src/inbound/forward.rs index 808a8a263e..7ddd9e4fa7 100644 --- a/comms/dht/src/inbound/forward.rs +++ b/comms/dht/src/inbound/forward.rs @@ -212,32 +212,35 @@ where S: Service .expect("previous check that decryption failed"); let excluded_peers = vec![source_peer.node_id.clone()]; - let dest_node_id = dht_header.destination.node_id(); + let dest_node_id = dht_header.destination.to_derived_node_id(); let mut send_params = SendMessageParams::new(); match (dest_node_id, is_saf_stored) { (Some(node_id), Some(true)) => { - debug!( - target: LOG_TARGET, - "Forwarding SAF message directly to node: {}, {}", node_id, dht_header.message_tag + let debug_info = format!( + "Forwarding SAF message directly to node: {}, {}", + node_id, dht_header.message_tag ); - send_params.direct_or_closest_connected(node_id.clone(), excluded_peers); + debug!(target: LOG_TARGET, "{}", &debug_info); + send_params.with_debug_info(debug_info); + send_params.direct_or_closest_connected(node_id, excluded_peers); }, _ => { - debug!( - target: LOG_TARGET, + let debug_info = format!( "Propagating SAF message for {}, propagating it. 
{}", - dht_header.destination, - dht_header.message_tag + dht_header.destination, dht_header.message_tag ); - + debug!(target: LOG_TARGET, "{}", debug_info); + send_params.with_debug_info(debug_info); send_params.propagate(dht_header.destination.clone(), excluded_peers); }, }; if !is_already_forwarded { send_params.with_dht_header(dht_header.clone()); - self.outbound_service.send_raw(send_params.finish(), body).await?; + self.outbound_service + .send_raw_no_wait(send_params.finish(), body) + .await?; } Ok(()) @@ -248,16 +251,14 @@ where S: Service return pk == &source.public_key; } - if let Some(node_id) = destination.node_id() { - return node_id == &source.node_id; - } - false } } #[cfg(test)] mod test { + use std::time::Duration; + use tari_comms::{runtime, runtime::task, wrap_in_envelope_body}; use tokio::sync::mpsc; @@ -310,7 +311,10 @@ mod test { service.call(msg).await.unwrap(); assert!(spy.is_called()); - assert_eq!(oms_mock_state.call_count().await, 1); + oms_mock_state + .wait_call_count(1, Duration::from_secs(10)) + .await + .unwrap(); let (params, body) = oms_mock_state.pop_call().await.unwrap(); // Header and body are preserved when forwarding diff --git a/comms/dht/src/inbound/mod.rs b/comms/dht/src/inbound/mod.rs index 460efaeab5..776a54c4f3 100644 --- a/comms/dht/src/inbound/mod.rs +++ b/comms/dht/src/inbound/mod.rs @@ -38,6 +38,7 @@ mod metrics; pub use metrics::MetricsLayer; mod error; +pub use error::DhtInboundError; mod message; diff --git a/comms/dht/src/lib.rs b/comms/dht/src/lib.rs index 10d630e6e5..e84e202fb7 100644 --- a/comms/dht/src/lib.rs +++ b/comms/dht/src/lib.rs @@ -91,6 +91,9 @@ pub use dht::{Dht, DhtInitializationError}; mod discovery; pub use discovery::{DhtDiscoveryError, DhtDiscoveryRequester}; +mod error; +pub use error::DhtEncryptError; + mod network_discovery; pub use network_discovery::NetworkDiscoveryConfig; diff --git a/comms/dht/src/logging_middleware.rs b/comms/dht/src/logging_middleware.rs index 5789457095..9cfe176692 
100644 --- a/comms/dht/src/logging_middleware.rs +++ b/comms/dht/src/logging_middleware.rs @@ -20,7 +20,12 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -use std::{borrow::Cow, fmt::Display, marker::PhantomData, task::Poll}; +use std::{ + borrow::Cow, + fmt::{Debug, Display}, + marker::PhantomData, + task::Poll, +}; use futures::task::Context; use log::*; @@ -75,7 +80,7 @@ impl<'a, S> MessageLoggingService<'a, S> { impl Service for MessageLoggingService<'_, S> where S: Service, - R: Display, + R: Display + Debug, { type Error = S::Error; type Future = S::Future; @@ -86,7 +91,7 @@ where } fn call(&mut self, msg: R) -> Self::Future { - trace!(target: LOG_TARGET, "{}{}", self.prefix_msg, msg); + debug!(target: LOG_TARGET, "{}{:?}", self.prefix_msg, msg); self.inner.call(msg) } } diff --git a/comms/dht/src/network_discovery/discovering.rs b/comms/dht/src/network_discovery/discovering.rs index 43a7ff2ee6..a5ae679a37 100644 --- a/comms/dht/src/network_discovery/discovering.rs +++ b/comms/dht/src/network_discovery/discovering.rs @@ -35,7 +35,12 @@ use super::{ state_machine::{DhtNetworkDiscoveryRoundInfo, DiscoveryParams, NetworkDiscoveryContext, StateEvent}, NetworkDiscoveryError, }; -use crate::{peer_validator::PeerValidator, proto::rpc::GetPeersRequest, rpc, DhtConfig}; +use crate::{ + peer_validator::{PeerValidator, PeerValidatorError}, + proto::rpc::GetPeersRequest, + rpc, + DhtConfig, +}; const LOG_TARGET: &str = "comms::dht::network_discovery"; @@ -209,6 +214,7 @@ impl Discovering { self.stats.num_duplicate_peers += 1; Ok(()) }, + Err(err @ PeerValidatorError::PeerManagerError(_)) => Err(err.into()), Err(err) => { warn!( target: LOG_TARGET, diff --git a/comms/dht/src/network_discovery/test.rs b/comms/dht/src/network_discovery/test.rs index 31897a87a6..1d791d459a 100644 --- a/comms/dht/src/network_discovery/test.rs +++ 
b/comms/dht/src/network_discovery/test.rs @@ -151,6 +151,57 @@ mod state_machine { assert_eq!(info.sync_peers, vec![peer_node_identity.node_id().clone()]); } + #[runtime::test] + #[allow(clippy::redundant_closure)] + async fn dht_banning_peers() { + const NUM_PEERS: usize = 3; + let config = DhtConfig { + num_neighbouring_nodes: 4, + network_discovery: NetworkDiscoveryConfig { + min_desired_peers: NUM_PEERS, + ..Default::default() + }, + ..DhtConfig::default_local_test() + }; + let (discovery_actor, connectivity_mock, peer_manager, node_identity, _event_rx, _shutdown) = + setup(config, make_node_identity(), vec![]).await; + + let mock = DhtRpcServiceMock::new(); + let service = rpc::DhtService::new(mock.clone()); + let protocol_name = service.as_protocol_name(); + + let mut mock_server = MockRpcServer::new(service, node_identity.clone()); + let peer_node_identity = build_node_identity(PeerFeatures::COMMUNICATION_NODE); + // Add the peer that we'll sync from + peer_manager.add_peer(peer_node_identity.to_peer()).await.unwrap(); + mock_server.serve(); + + // Create a connection to the RPC mock and then make it available to the connectivity manager mock + let connection = mock_server + .create_connection(peer_node_identity.to_peer(), protocol_name.into()) + .await; + + connectivity_mock + .set_connectivity_status(ConnectivityStatus::Online(NUM_PEERS)) + .await; + connectivity_mock.add_active_connection(connection).await; + + // Checking banning logic + let mut invalid_peer = make_node_identity().to_peer(); + invalid_peer.set_valid_identity_signature(make_node_identity().identity_signature_read().clone().unwrap()); + let resp = GetPeersResponse { + peer: Some(invalid_peer.clone().into()), + }; + mock.get_peers.set_response(Ok(vec![resp])).await; + + discovery_actor.spawn(); + + connectivity_mock.await_call_count(1).await; + let banned = connectivity_mock.take_banned_peers().await; + let (peer, _, _) = &banned[0]; + assert_eq!(peer, peer_node_identity.node_id()); + } + 
#[runtime::test] async fn it_shuts_down() { let (discovery, _, _, _, _, mut shutdown) = setup(Default::default(), make_node_identity(), vec![]).await; diff --git a/comms/dht/src/outbound/broadcast.rs b/comms/dht/src/outbound/broadcast.rs index 4632894a3d..8999d2fd41 100644 --- a/comms/dht/src/outbound/broadcast.rs +++ b/comms/dht/src/outbound/broadcast.rs @@ -262,15 +262,12 @@ where S: Service is_discovery_enabled, force_origin, dht_header, + debug_info: _, tag, } = params; match self.select_peers(broadcast_strategy.clone()).await { Ok(mut peers) => { - if reply_tx.is_closed() { - return Err(DhtOutboundError::ReplyChannelCanceled); - } - let mut reply_tx = Some(reply_tx); trace!( @@ -500,7 +497,7 @@ where S: Service // Generate key message for encryption of message let key_message = crypt::generate_key_message(&shared_ephemeral_secret); // Encrypt the message with the body with key message above - let encrypted_body = crypt::encrypt(&key_message, &body); + let encrypted_body = crypt::encrypt(&key_message, &body)?; // Produce domain separated signature signature let mac_signature = crypt::create_message_domain_separated_hash_parts( @@ -586,7 +583,7 @@ mod test { }; #[runtime::test] - async fn send_message_flood() { + async fn test_send_message_flood() { let pk = CommsPublicKey::default(); let example_peer = Peer::new( pk.clone(), @@ -651,7 +648,7 @@ mod test { } #[runtime::test] - async fn send_message_direct_not_found() { + async fn test_send_message_direct_not_found() { // Test for issue https://github.com/tari-project/tari/issues/959 let pk = CommsPublicKey::default(); @@ -696,7 +693,7 @@ mod test { } #[runtime::test] - async fn send_message_direct_dht_discovery() { + async fn test_send_message_direct_dht_discovery() { let node_identity = NodeIdentity::random( &mut OsRng, "/ip4/127.0.0.1/tcp/9000".parse().unwrap(), diff --git a/comms/dht/src/outbound/error.rs b/comms/dht/src/outbound/error.rs index 4b702e778b..e8ee3fcc34 100644 --- 
a/comms/dht/src/outbound/error.rs +++ b/comms/dht/src/outbound/error.rs @@ -26,10 +26,15 @@ use tari_utilities::message_format::MessageFormatError; use thiserror::Error; use tokio::sync::mpsc::error::SendError; -use crate::outbound::{message::SendFailure, DhtOutboundRequest}; +use crate::{ + error::DhtEncryptError, + outbound::{message::SendFailure, DhtOutboundRequest}, +}; #[derive(Debug, Error)] pub enum DhtOutboundError { + #[error("DhtEncryptError: {0}")] + DhtEncryptError(#[from] DhtEncryptError), #[error("`Failed to send: {0}")] SendError(#[from] SendError), #[error("MessageSerializationError: {0}")] @@ -42,8 +47,6 @@ pub enum DhtOutboundError { RequesterReplyChannelClosed, #[error("Peer selection failed")] PeerSelectionFailed, - #[error("Reply channel cancelled")] - ReplyChannelCanceled, #[error("Attempted to send a message to ourselves")] SendToOurselves, #[error("Discovery process failed")] @@ -56,6 +59,8 @@ pub enum DhtOutboundError { NoMessagesQueued, #[error("Cipher error: `{0}`")] CipherError(String), + #[error("Padding error: `{0}`")] + PaddingError(String), // TODO: clean up these errors } impl From for DhtOutboundError { diff --git a/comms/dht/src/outbound/message.rs b/comms/dht/src/outbound/message.rs index 237aa08afc..544287e090 100644 --- a/comms/dht/src/outbound/message.rs +++ b/comms/dht/src/outbound/message.rs @@ -191,12 +191,13 @@ impl fmt::Display for DhtOutboundMessage { }); write!( f, - "\n---- Outgoing message ---- \nSize: {} byte(s)\nType: {}\nPeer: {}\nHeader: {}\n{}\n----", + "\n---- Outgoing message ---- \nSize: {} byte(s)\nType: {}\nPeer: {}\nHeader: {}\n{}\n----\n{:?}\n", self.body.len(), self.dht_message_type, - self.destination_node_id, + self.destination, header_str, self.tag, + self.body ) } } diff --git a/comms/dht/src/outbound/message_params.rs b/comms/dht/src/outbound/message_params.rs index 1bd28ad766..2fb1aabf0e 100644 --- a/comms/dht/src/outbound/message_params.rs +++ b/comms/dht/src/outbound/message_params.rs @@ -68,6 
+68,7 @@ pub struct FinalSendMessageParams { pub dht_message_type: DhtMessageType, pub dht_message_flags: DhtMessageFlags, pub dht_header: Option, + pub debug_info: Option, pub tag: Option, } @@ -82,6 +83,7 @@ impl Default for FinalSendMessageParams { force_origin: false, is_discovery_enabled: false, dht_header: None, + debug_info: None, tag: None, } } @@ -102,6 +104,11 @@ impl SendMessageParams { Default::default() } + pub fn with_debug_info(&mut self, debug_info: String) -> &mut Self { + self.params_mut().debug_info = Some(debug_info); + self + } + /// Set broadcast_strategy to DirectPublicKey pub fn direct_public_key(&mut self, public_key: CommsPublicKey) -> &mut Self { self.params_mut().broadcast_strategy = BroadcastStrategy::DirectPublicKey(Box::new(public_key)); diff --git a/comms/dht/src/outbound/mock.rs b/comms/dht/src/outbound/mock.rs index d56d26de24..7d7b58d926 100644 --- a/comms/dht/src/outbound/mock.rs +++ b/comms/dht/src/outbound/mock.rs @@ -184,34 +184,31 @@ impl OutboundServiceMock { match behaviour.direct { ResponseType::Queued => { let (response, mut inner_reply_tx) = self.add_call((*params).clone(), body).await; - reply_tx.send(response).expect("Reply channel cancelled"); + let _ignore = reply_tx.send(response); inner_reply_tx.reply_success(); }, ResponseType::QueuedFail => { let (response, mut inner_reply_tx) = self.add_call((*params).clone(), body).await; - reply_tx.send(response).expect("Reply channel cancelled"); + let _ignore = reply_tx.send(response); inner_reply_tx.reply_fail(SendFailReason::PeerDialFailed); }, ResponseType::QueuedSuccessDelay(delay) => { let (response, mut inner_reply_tx) = self.add_call((*params).clone(), body).await; - reply_tx.send(response).expect("Reply channel cancelled"); + let _ignore = reply_tx.send(response); sleep(delay).await; inner_reply_tx.reply_success(); }, resp => { - reply_tx - .send(SendMessageResponse::Failed(SendFailure::General(format!( - "Unexpected mock response {:?}", - resp - )))) - 
.expect("Reply channel cancelled"); + let _ignore = reply_tx.send(SendMessageResponse::Failed(SendFailure::General( + format!("Unexpected mock response {:?}", resp), + ))); }, }; }, BroadcastStrategy::ClosestNodes(_) => { if behaviour.broadcast == ResponseType::Queued { let (response, mut inner_reply_tx) = self.add_call((*params).clone(), body).await; - reply_tx.send(response).expect("Reply channel cancelled"); + let _ignore = reply_tx.send(response); inner_reply_tx.reply_success(); } else { reply_tx @@ -223,7 +220,7 @@ impl OutboundServiceMock { }, _ => { let (response, mut inner_reply_tx) = self.add_call((*params).clone(), body).await; - reply_tx.send(response).expect("Reply channel cancelled"); + let _ignore = reply_tx.send(response); inner_reply_tx.reply_success(); }, } diff --git a/comms/dht/src/outbound/requester.rs b/comms/dht/src/outbound/requester.rs index d4546f81a8..0b1e38e9ee 100644 --- a/comms/dht/src/outbound/requester.rs +++ b/comms/dht/src/outbound/requester.rs @@ -54,12 +54,14 @@ impl OutboundMessageRequester { &mut self, dest_public_key: CommsPublicKey, message: OutboundDomainMessage, + source_info: String, ) -> Result where T: prost::Message, { self.send_message( SendMessageParams::new() + .with_debug_info(format!("Send direct to {} from {}", &dest_public_key, source_info)) .direct_public_key(dest_public_key) .with_discovery(true) .finish(), @@ -73,13 +75,17 @@ impl OutboundMessageRequester { &mut self, dest_node_id: NodeId, message: OutboundDomainMessage, + source_info: String, ) -> Result where T: prost::Message, { let resp = self .send_message( - SendMessageParams::new().direct_node_id(dest_node_id.clone()).finish(), + SendMessageParams::new() + .direct_node_id(dest_node_id.clone()) + .with_debug_info(format!("Send direct to {}. 
Source: {}", dest_node_id, source_info)) + .finish(), message, ) .await?; @@ -132,6 +138,7 @@ impl OutboundMessageRequester { encryption: OutboundEncryption, exclude_peers: Vec, message: OutboundDomainMessage, + source_info: String, ) -> Result where T: prost::Message, @@ -139,6 +146,7 @@ impl OutboundMessageRequester { self.send_message( SendMessageParams::new() .broadcast(exclude_peers) + .with_debug_info(format!("broadcast requested from {}", source_info)) .with_encryption(encryption) .with_destination(destination) .finish(), @@ -155,7 +163,7 @@ impl OutboundMessageRequester { /// Use this strategy to broadcast a message destined for a particular peer. pub async fn closest_broadcast( &mut self, - destination_node_id: NodeId, + destination_public_key: CommsPublicKey, encryption: OutboundEncryption, exclude_peers: Vec, message: OutboundDomainMessage, @@ -165,9 +173,9 @@ impl OutboundMessageRequester { { self.send_message( SendMessageParams::new() - .closest(destination_node_id.clone(), exclude_peers) + .closest(NodeId::from_public_key(&destination_public_key), exclude_peers) .with_encryption(encryption) - .with_destination(destination_node_id.into()) + .with_destination(destination_public_key.into()) .finish(), message, ) @@ -184,12 +192,14 @@ impl OutboundMessageRequester { encryption: OutboundEncryption, exclude_peers: Vec, message: OutboundDomainMessage, + source_info: String, ) -> Result where T: prost::Message, { self.send_message( SendMessageParams::new() + .with_debug_info(source_info) .flood(exclude_peers) .with_destination(destination) .with_encryption(encryption) @@ -269,6 +279,22 @@ impl OutboundMessageRequester { self.send_raw(params, body).await } + /// Send a message without a domain header part + pub async fn send_message_no_header_no_wait( + &mut self, + params: FinalSendMessageParams, + message: T, + ) -> Result<(), DhtOutboundError> + where + T: prost::Message, + { + if cfg!(debug_assertions) { + trace!(target: LOG_TARGET, "Send Message: {} 
{:?}", params, message); + } + let body = wrap_in_envelope_body!(message).to_encoded_bytes(); + self.send_raw_no_wait(params, body).await + } + /// Send a raw message pub async fn send_raw( &mut self, @@ -285,6 +311,19 @@ impl OutboundMessageRequester { .map_err(|_| DhtOutboundError::RequesterReplyChannelClosed) } + /// Send a raw message + pub async fn send_raw_no_wait( + &mut self, + params: FinalSendMessageParams, + body: Vec, + ) -> Result<(), DhtOutboundError> { + let (reply_tx, _) = oneshot::channel(); + self.sender + .send(DhtOutboundRequest::SendMessage(Box::new(params), body.into(), reply_tx)) + .await?; + Ok(()) + } + #[cfg(test)] pub fn get_mpsc_sender(&self) -> mpsc::Sender { self.sender.clone() diff --git a/comms/dht/src/proto/envelope.proto b/comms/dht/src/proto/envelope.proto index a729866b02..38336a0b5b 100644 --- a/comms/dht/src/proto/envelope.proto +++ b/comms/dht/src/proto/envelope.proto @@ -30,8 +30,6 @@ message DhtHeader { bool unknown = 3; // Destined for a particular public key bytes public_key = 4; - // Destined for a particular node id, or network region - bytes node_id = 5; } // Origin public key of the message. 
This can be the same peer that sent the message diff --git a/comms/dht/src/store_forward/database/stored_message.rs b/comms/dht/src/store_forward/database/stored_message.rs index eca9cced45..b8d095d901 100644 --- a/comms/dht/src/store_forward/database/stored_message.rs +++ b/comms/dht/src/store_forward/database/stored_message.rs @@ -69,7 +69,10 @@ impl NewStoredMessage { origin_pubkey: authenticated_origin.as_ref().map(|pk| pk.to_hex()), message_type: dht_header.message_type as i32, destination_pubkey: dht_header.destination.public_key().map(|pk| pk.to_hex()), - destination_node_id: dht_header.destination.node_id().map(|node_id| node_id.to_hex()), + destination_node_id: dht_header + .destination + .to_derived_node_id() + .map(|node_id| node_id.to_hex()), is_encrypted: dht_header.flags.is_encrypted(), priority: priority as i32, header: { diff --git a/comms/dht/src/store_forward/error.rs b/comms/dht/src/store_forward/error.rs index 92a897b509..e3cc4b1e8b 100644 --- a/comms/dht/src/store_forward/error.rs +++ b/comms/dht/src/store_forward/error.rs @@ -33,6 +33,8 @@ use thiserror::Error; use crate::{ actor::DhtActorError, envelope::DhtMessageError, + error::DhtEncryptError, + inbound::DhtInboundError, message_signature::MessageSignatureError, outbound::DhtOutboundError, storage::StorageError, @@ -49,8 +51,12 @@ pub enum StoreAndForwardError { PeerManagerError(#[from] PeerManagerError), #[error("DhtOutboundError: {0}")] DhtOutboundError(#[from] DhtOutboundError), + #[error("DhtEncryptError: {0}")] + DhtEncryptError(#[from] DhtEncryptError), #[error("Received stored message has an invalid destination")] InvalidDestination, + #[error("DhtInboundError: {0}")] + DhtInboundError(#[from] DhtInboundError), #[error("Received stored message has an invalid origin signature: {0}")] InvalidMessageSignature(#[from] MessageSignatureError), #[error("Invalid envelope body")] diff --git a/comms/dht/src/store_forward/saf_handler/layer.rs b/comms/dht/src/store_forward/saf_handler/layer.rs 
index 618c9fe3d2..e79c5d8c18 100644 --- a/comms/dht/src/store_forward/saf_handler/layer.rs +++ b/comms/dht/src/store_forward/saf_handler/layer.rs @@ -22,7 +22,7 @@ use std::sync::Arc; -use tari_comms::peer_manager::{NodeIdentity, PeerManager}; +use tari_comms::peer_manager::NodeIdentity; use tokio::sync::mpsc; use tower::layer::Layer; @@ -38,7 +38,6 @@ pub struct MessageHandlerLayer { config: SafConfig, saf_requester: StoreAndForwardRequester, dht_requester: DhtRequester, - peer_manager: Arc, node_identity: Arc, outbound_service: OutboundMessageRequester, saf_response_signal_sender: mpsc::Sender<()>, @@ -50,7 +49,6 @@ impl MessageHandlerLayer { saf_requester: StoreAndForwardRequester, dht_requester: DhtRequester, node_identity: Arc, - peer_manager: Arc, outbound_service: OutboundMessageRequester, saf_response_signal_sender: mpsc::Sender<()>, ) -> Self { @@ -58,7 +56,6 @@ impl MessageHandlerLayer { config, saf_requester, dht_requester, - peer_manager, node_identity, outbound_service, @@ -77,7 +74,6 @@ impl Layer for MessageHandlerLayer { self.saf_requester.clone(), self.dht_requester.clone(), Arc::clone(&self.node_identity), - Arc::clone(&self.peer_manager), self.outbound_service.clone(), self.saf_response_signal_sender.clone(), ) diff --git a/comms/dht/src/store_forward/saf_handler/middleware.rs b/comms/dht/src/store_forward/saf_handler/middleware.rs index cd87e53610..3f689278f2 100644 --- a/comms/dht/src/store_forward/saf_handler/middleware.rs +++ b/comms/dht/src/store_forward/saf_handler/middleware.rs @@ -23,10 +23,7 @@ use std::{sync::Arc, task::Poll}; use futures::{future::BoxFuture, task::Context}; -use tari_comms::{ - peer_manager::{NodeIdentity, PeerManager}, - pipeline::PipelineError, -}; +use tari_comms::{peer_manager::NodeIdentity, pipeline::PipelineError}; use tokio::sync::mpsc; use tower::Service; @@ -44,7 +41,6 @@ pub struct MessageHandlerMiddleware { next_service: S, saf_requester: StoreAndForwardRequester, dht_requester: DhtRequester, - peer_manager: 
Arc, node_identity: Arc, outbound_service: OutboundMessageRequester, saf_response_signal_sender: mpsc::Sender<()>, @@ -57,7 +53,6 @@ impl MessageHandlerMiddleware { saf_requester: StoreAndForwardRequester, dht_requester: DhtRequester, node_identity: Arc, - peer_manager: Arc, outbound_service: OutboundMessageRequester, saf_response_signal_sender: mpsc::Sender<()>, ) -> Self { @@ -66,7 +61,6 @@ impl MessageHandlerMiddleware { next_service, saf_requester, dht_requester, - peer_manager, node_identity, outbound_service, @@ -95,7 +89,6 @@ where self.next_service.clone(), self.saf_requester.clone(), self.dht_requester.clone(), - Arc::clone(&self.peer_manager), self.outbound_service.clone(), Arc::clone(&self.node_identity), message, diff --git a/comms/dht/src/store_forward/saf_handler/task.rs b/comms/dht/src/store_forward/saf_handler/task.rs index 8b7281f65a..0aada15e4e 100644 --- a/comms/dht/src/store_forward/saf_handler/task.rs +++ b/comms/dht/src/store_forward/saf_handler/task.rs @@ -31,7 +31,7 @@ use log::*; use prost::Message; use tari_comms::{ message::{EnvelopeBody, MessageTag}, - peer_manager::{NodeId, NodeIdentity, Peer, PeerFeatures, PeerManager, PeerManagerError}, + peer_manager::{NodeId, NodeIdentity, Peer, PeerFeatures, PeerManagerError}, pipeline::PipelineError, types::CommsPublicKey, }; @@ -71,7 +71,6 @@ pub struct MessageHandlerTask { config: SafConfig, next_service: S, dht_requester: DhtRequester, - peer_manager: Arc, outbound_service: OutboundMessageRequester, node_identity: Arc, message: Option, @@ -87,7 +86,6 @@ where S: Service next_service: S, saf_requester: StoreAndForwardRequester, dht_requester: DhtRequester, - peer_manager: Arc, outbound_service: OutboundMessageRequester, node_identity: Arc, message: DecryptedDhtMessage, @@ -98,7 +96,6 @@ where S: Service saf_requester, dht_requester, next_service, - peer_manager, outbound_service, node_identity, message: Some(message), @@ -232,15 +229,13 @@ where S: Service match self .outbound_service - 
.send_message_no_header( + .send_message_no_header_no_wait( SendMessageParams::new() .direct_public_key(message.source_peer.public_key.clone()) .with_dht_message_type(DhtMessageType::SafStoredMessages) .finish(), stored_messages, ) - .await? - .resolve() .await { Ok(_) => { @@ -426,8 +421,6 @@ where S: Service message: ProtoStoredMessage, ) -> Result<(DecryptedDhtMessage, DateTime), StoreAndForwardError> { let node_identity = &self.node_identity; - let peer_manager = &self.peer_manager; - let config = &self.config; if message.dht_header.is_none() { return Err(StoreAndForwardError::DhtHeaderNotProvided); } @@ -442,7 +435,7 @@ where S: Service )) }) .transpose()? - .unwrap_or(chrono::MIN_DATETIME); + .unwrap_or(DateTime::::MIN_UTC); if stored_at > Utc::now() { return Err(StoreAndForwardError::StoredAtWasInFuture); @@ -489,7 +482,7 @@ where S: Service } // Check that the destination is either undisclosed, for us or for our network region - Self::check_destination(config, peer_manager, node_identity, &dht_header).await?; + Self::check_destination(node_identity, &dht_header).await?; // Attempt to decrypt the message (if applicable), and deserialize it let (authenticated_pk, decrypted_body) = @@ -527,20 +520,12 @@ where S: Service } async fn check_destination( - config: &SafConfig, - peer_manager: &PeerManager, node_identity: &NodeIdentity, dht_header: &DhtMessageHeader, ) -> Result<(), StoreAndForwardError> { let is_valid_destination = match &dht_header.destination { NodeDestination::Unknown => true, NodeDestination::PublicKey(pk) => node_identity.public_key() == &**pk, - // Pass this check if the node id equals ours or is in this node's region - NodeDestination::NodeId(node_id) if node_identity.node_id() == &**node_id => true, - NodeDestination::NodeId(node_id) => peer_manager - .in_network_region(node_identity.node_id(), node_id, config.num_neighbouring_nodes) - .await - .unwrap_or(false), }; if is_valid_destination { @@ -691,7 +676,6 @@ mod test { let spy = 
service_spy(); let (requester, mock_state) = create_store_and_forward_mock(); - let peer_manager = build_peer_manager(); let (outbound_requester, outbound_mock) = create_outbound_service_mock(10); let oms_mock_state = outbound_mock.get_state(); task::spawn(outbound_mock.run()); @@ -737,7 +721,6 @@ mod test { spy.to_service::(), requester.clone(), dht_requester.clone(), - peer_manager.clone(), outbound_requester.clone(), node_identity.clone(), message.clone(), @@ -795,7 +778,6 @@ mod test { spy.to_service::(), requester, dht_requester, - peer_manager, outbound_requester.clone(), node_identity.clone(), message, @@ -924,7 +906,6 @@ mod test { spy.to_service::(), saf_requester, dht_requester.clone(), - peer_manager, OutboundMessageRequester::new(oms_tx), node_identity, message, @@ -1011,7 +992,6 @@ mod test { spy.to_service::(), requester, dht_requester.clone(), - peer_manager, OutboundMessageRequester::new(oms_tx), node_identity, message, @@ -1085,7 +1065,6 @@ mod test { spy.to_service::(), saf_requester.clone(), dht_requester.clone(), - peer_manager.clone(), OutboundMessageRequester::new(oms_tx.clone()), node_identity.clone(), message.clone(), @@ -1106,7 +1085,6 @@ mod test { spy.to_service::(), saf_requester, dht_requester, - peer_manager, OutboundMessageRequester::new(oms_tx), node_identity, message, diff --git a/comms/dht/src/store_forward/store.rs b/comms/dht/src/store_forward/store.rs index 03b54c976e..61519cd8ca 100644 --- a/comms/dht/src/store_forward/store.rs +++ b/comms/dht/src/store_forward/store.rs @@ -372,9 +372,7 @@ where S: Service + Se let peer_manager = &self.peer_manager; let node_identity = &self.node_identity; - if message.dht_header.destination == node_identity.public_key() || - message.dht_header.destination == node_identity.node_id() - { + if message.dht_header.destination == node_identity.public_key() { log_not_eligible("the message is destined for this node"); return Ok(None); } diff --git a/comms/dht/src/test_utils/makers.rs 
b/comms/dht/src/test_utils/makers.rs index cffb0eb34e..7646346b3a 100644 --- a/comms/dht/src/test_utils/makers.rs +++ b/comms/dht/src/test_utils/makers.rs @@ -164,7 +164,7 @@ pub fn make_dht_envelope( if flags.is_encrypted() { let shared_secret = crypt::generate_ecdh_secret(&e_secret_key, node_identity.public_key()); let key_message = crypt::generate_key_message(&shared_secret); - message = crypt::encrypt(&key_message, &message); + message = crypt::encrypt(&key_message, &message).unwrap(); } let header = make_dht_header( node_identity, diff --git a/comms/dht/tests/dht.rs b/comms/dht/tests/dht.rs index 95e4b250ea..9928c1df79 100644 --- a/comms/dht/tests/dht.rs +++ b/comms/dht/tests/dht.rs @@ -201,7 +201,6 @@ async fn setup_comms_dht( let dht_outbound_layer = dht.outbound_middleware_layer(); let pipeline = pipeline::Builder::new() - .outbound_buffer_size(10) .with_outbound_pipeline(outbound_rx, |sink| { ServiceBuilder::new().layer(dht_outbound_layer).service(sink) }) @@ -358,7 +357,7 @@ async fn dht_discover_propagation() { .discovery_service_requester() .discover_peer( node_D.node_identity().public_key().clone(), - node_D.node_identity().node_id().clone().into(), + node_D.node_identity().public_key().clone().into(), ) .await .unwrap(); @@ -409,7 +408,7 @@ async fn dht_store_forward() { .with_encryption(OutboundEncryption::encrypt_for( node_C_node_identity.public_key().clone(), )) - .with_destination(node_C_node_identity.node_id().clone().into()) + .with_destination(node_C_node_identity.public_key().clone().into()) .finish(); let secret_msg1 = b"NCZW VUSX PNYM INHZ XMQX SFWX WLKJ AHSH"; @@ -573,7 +572,7 @@ async fn dht_propagate_dedup() { .dht .outbound_requester() .propagate( - node_D.node_identity().node_id().clone().into(), + node_D.node_identity().public_key().clone().into(), OutboundEncryption::encrypt_for(node_D.node_identity().public_key().clone()), vec![], out_msg, @@ -962,7 +961,7 @@ async fn dht_propagate_message_contents_not_malleable_ban() { 
.send_message_no_header( SendMessageParams::new() .direct_node_id(node_B.node_identity().node_id().clone()) - .with_destination(node_A.node_identity().node_id().clone().into()) + .with_destination(node_A.node_identity().public_key().clone().into()) .with_encryption(OutboundEncryption::ClearText) .force_origin() .finish(), @@ -985,7 +984,7 @@ async fn dht_propagate_message_contents_not_malleable_ban() { .outbound_requester() .send_raw( SendMessageParams::new() - .propagate(node_B.node_identity().node_id().clone().into(), vec![msg + .propagate(node_B.node_identity().public_key().clone().into(), vec![msg .source_peer .node_id .clone()]) @@ -1068,7 +1067,7 @@ async fn dht_header_not_malleable() { .send_message_no_header( SendMessageParams::new() .direct_node_id(node_B.node_identity().node_id().clone()) - .with_destination(node_A.node_identity().node_id().clone().into()) + .with_destination(node_A.node_identity().public_key().clone().into()) .with_encryption(OutboundEncryption::ClearText) .force_origin() .finish(), @@ -1091,7 +1090,7 @@ async fn dht_header_not_malleable() { .outbound_requester() .send_raw( SendMessageParams::new() - .propagate(node_B.node_identity().node_id().clone().into(), vec![msg + .propagate(node_B.node_identity().public_key().clone().into(), vec![msg .source_peer .node_id .clone()]) diff --git a/comms/rpc_macros/Cargo.toml b/comms/rpc_macros/Cargo.toml index a236c74b8f..4ca02086e6 100644 --- a/comms/rpc_macros/Cargo.toml +++ b/comms/rpc_macros/Cargo.toml @@ -6,7 +6,7 @@ repository = "https://github.com/tari-project/tari" homepage = "https://tari.com" readme = "README.md" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [lib] @@ -19,8 +19,8 @@ quote = "1.0.7" syn = { version = "1.0.38", features = ["fold"] } [dev-dependencies] -tari_comms = { version = "^0.37", path = "../core", features = ["rpc"] } -tari_test_utils = { version = "^0.37", path = "../../infrastructure/test_utils" } +tari_comms = { version = "^0.38", 
path = "../core", features = ["rpc"] } +tari_test_utils = { version = "^0.38", path = "../../infrastructure/test_utils" } futures = "0.3.5" prost = "0.9.0" diff --git a/infrastructure/derive/Cargo.toml b/infrastructure/derive/Cargo.toml index 628b4fc64a..01c27296b3 100644 --- a/infrastructure/derive/Cargo.toml +++ b/infrastructure/derive/Cargo.toml @@ -6,7 +6,7 @@ repository = "https://github.com/tari-project/tari" homepage = "https://tari.com" readme = "README.md" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [lib] diff --git a/infrastructure/libtor/Cargo.toml b/infrastructure/libtor/Cargo.toml index f5d91c8202..a46d9283f4 100644 --- a/infrastructure/libtor/Cargo.toml +++ b/infrastructure/libtor/Cargo.toml @@ -7,16 +7,15 @@ license = "BSD-3-Clause" [dependencies] tari_common = { path = "../../common" } tari_p2p = { path = "../../base_layer/p2p" } +tari_shutdown = { version = "^0.38", path = "../shutdown"} derivative = "2.2.0" log = "0.4.8" log4rs = { version = "1.0.0", default_features = false, features = ["config_parsing", "threshold_filter", "yaml_format"] } multiaddr = { version = "0.14.0" } - -# NB: make sure this crate is not included in any other crate used by wallet_ffi -[target.'cfg(unix)'.dependencies] -tari_shutdown = { version = "^0.37", path = "../shutdown"} -libtor = { version = "46.9.0", optional = true } rand = "0.8" tempfile = "3.1.0" tor-hash-passwd = "1.0.1" + +[target.'cfg(unix)'.dependencies] +libtor = { version="46.9.0"} diff --git a/infrastructure/libtor/src/lib.rs b/infrastructure/libtor/src/lib.rs index 3292bf2971..e632da607f 100644 --- a/infrastructure/libtor/src/lib.rs +++ b/infrastructure/libtor/src/lib.rs @@ -20,5 +20,10 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-#[cfg(all(unix, feature = "libtor"))] +// For some inexplicable reason if we don't include extern crate then we get libtor not defined errors in this crate on +// matrix builds +#[cfg(unix)] +#[allow(unused_extern_crates)] +extern crate libtor; +#[cfg(unix)] pub mod tor; diff --git a/infrastructure/metrics/Cargo.toml b/infrastructure/metrics/Cargo.toml index c704f6b880..2ccbf6959f 100644 --- a/infrastructure/metrics/Cargo.toml +++ b/infrastructure/metrics/Cargo.toml @@ -16,7 +16,7 @@ prometheus = "0.13.0" futures = { version = "0.3.15", default-features = false, optional = true } reqwest = { version = "0.11.4", default-features = false, optional = true } -tokio = { version = "1.7.1", optional = true, features = ["time", "rt-multi-thread"] } +tokio = { version = "1", optional = true, features = ["time", "rt-multi-thread"] } warp = { version = "0.3.1", optional = true, default-features = false } thiserror = "1.0.25" anyhow = { version = "1.0.53", optional = true } diff --git a/infrastructure/shutdown/Cargo.toml b/infrastructure/shutdown/Cargo.toml index abc044bbf9..be78a955a9 100644 --- a/infrastructure/shutdown/Cargo.toml +++ b/infrastructure/shutdown/Cargo.toml @@ -6,7 +6,7 @@ repository = "https://github.com/tari-project/tari" homepage = "https://tari.com" readme = "README.md" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/infrastructure/storage/Cargo.toml b/infrastructure/storage/Cargo.toml index cc4ed64a9f..0b426d0114 100644 --- a/infrastructure/storage/Cargo.toml +++ b/infrastructure/storage/Cargo.toml @@ -6,7 +6,7 @@ repository = "https://github.com/tari-project/tari" homepage = "https://tari.com" readme = "README.md" license = "BSD-3-Clause" -version = "0.37.0" +version = "0.38.3" edition = "2018" [dependencies] diff --git a/infrastructure/tari_script/Cargo.toml b/infrastructure/tari_script/Cargo.toml index 
6498287469..51b0cac28f 100644 --- a/infrastructure/tari_script/Cargo.toml +++ b/infrastructure/tari_script/Cargo.toml @@ -11,9 +11,7 @@ readme = "README.md" license = "BSD-3-Clause" [dependencies] -tari_common_types = { path = "../../base_layer/common_types" } -tari_common = {path = "../../common"} -tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.4" } +tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.15.5" } tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", tag = "v0.4.5" } blake2 = "0.9" diff --git a/infrastructure/tari_script/src/script_commitment.rs b/infrastructure/tari_script/src/script_commitment.rs index 9a868d7a4b..cfe3ed5e84 100644 --- a/infrastructure/tari_script/src/script_commitment.rs +++ b/infrastructure/tari_script/src/script_commitment.rs @@ -16,10 +16,12 @@ // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. use digest::Digest; -use tari_common_types::types::CommitmentFactory; use tari_crypto::{ commitment::HomomorphicCommitmentFactory, - ristretto::{pedersen::PedersenCommitment, RistrettoSecretKey}, + ristretto::{ + pedersen::{extended_commitment_factory::ExtendedPedersenCommitmentFactory, PedersenCommitment}, + RistrettoSecretKey, + }, }; use tari_utilities::{ByteArray, ByteArrayError}; use thiserror::Error; @@ -115,7 +117,7 @@ impl ScriptCommitment { #[derive(Default)] pub struct ScriptCommitmentFactory { - factory: CommitmentFactory, + factory: ExtendedPedersenCommitmentFactory, } impl ScriptCommitmentFactory { diff --git a/infrastructure/test_utils/Cargo.toml b/infrastructure/test_utils/Cargo.toml index a849f666de..8e3d595c9e 100644 --- a/infrastructure/test_utils/Cargo.toml +++ b/infrastructure/test_utils/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "tari_test_utils" description = "Utility functions used in Tari test functions" -version = "0.37.0" +version = "0.38.3" authors = ["The Tari Development Community"] edition = 
"2018" license = "BSD-3-Clause" @@ -13,7 +13,7 @@ tari_shutdown = { version = "*", path = "../shutdown" } futures = { version = "^0.3.1" } rand = "0.8" -tokio = { version = "1.11", features = ["rt-multi-thread", "time", "sync"] } +tokio = { version = "1.20", features = ["rt-multi-thread", "time", "sync"] } tempfile = "3.1.0" [dev-dependencies] diff --git a/infrastructure/test_utils/src/futures/async_assert_eventually.rs b/infrastructure/test_utils/src/futures/async_assert_eventually.rs index 0449ab101d..cd7ef71eb2 100644 --- a/infrastructure/test_utils/src/futures/async_assert_eventually.rs +++ b/infrastructure/test_utils/src/futures/async_assert_eventually.rs @@ -43,7 +43,7 @@ macro_rules! async_assert_eventually { assert!( attempts <= $max_attempts, "assert_eventually assertion failed. Expression did not equal value after {} attempts.", - attempts + attempts - 1 ); tokio::time::sleep($interval).await; value = $check_expr; diff --git a/integration_tests/features/WalletCli.feature b/integration_tests/features/WalletCli.feature index b0784d007a..6bfd6c9306 100644 --- a/integration_tests/features/WalletCli.feature +++ b/integration_tests/features/WalletCli.feature @@ -53,6 +53,21 @@ Feature: Wallet CLI And mining node MINE mines 5 blocks Then I wait for wallet RECEIVER to have at least 1000000 uT + + Scenario: As a user I want to burn tari via command line + Given I have a seed node SEED + And I have a base node BASE connected to seed SEED + And I have wallet WALLET_A connected to base node BASE + And I have wallet WALLET_B connected to base node BASE + And I have mining node MINER_A connected to base node BASE and wallet WALLET_A + And I have mining node MINER_B connected to base node BASE and wallet WALLET_B + And mining node MINER_A mines 15 blocks + Then all nodes are at height 15 + When I wait for wallet WALLET_A to have at least 55000000000 uT + When I create a burn transaction of 45000000000 uT from WALLET_A via command line + Then I have mining node MINER_B 
mines 10 blocks + Then I get balance of wallet WALLET_A is at least 10000000000 uT via command line + @long-running Scenario: As a user I want to send one-sided via command line Given I have a seed node SEED diff --git a/integration_tests/features/WalletFFI.feature b/integration_tests/features/WalletFFI.feature index 36aaa97173..771a39c9ba 100644 --- a/integration_tests/features/WalletFFI.feature +++ b/integration_tests/features/WalletFFI.feature @@ -85,7 +85,8 @@ Feature: Wallet FFI Then I don't have contact with alias ALIAS in ffi wallet FFI_WALLET And I stop ffi wallet FFI_WALLET - @critical + # TODO: Was broken due to #4525 - fix underway + @critical @broken Scenario: As a client I want to receive contact liveness events Given I have a seed node SEED # Contact liveness is based on P2P messaging; ensure connectivity by forcing 'DirectOnly' diff --git a/integration_tests/features/support/node_steps.js b/integration_tests/features/support/node_steps.js index c5022cca00..500a51cfba 100644 --- a/integration_tests/features/support/node_steps.js +++ b/integration_tests/features/support/node_steps.js @@ -500,12 +500,14 @@ Then( ); Then(/node (.*) is at tip (.*)/, async function (node, name) { + // console.log("\nheaders:", this.headers, "\n"); const client = this.getClient(node); const header = await client.getTipHeader(); - // console.log("headers:", this.headers); + // console.log("\nheader:", header, "\n"); const existingHeader = this.headers[name]; + // console.log("\nexistingHeader:", existingHeader, "\n"); expect(existingHeader).to.not.be.null; - expect(existingHeader.header.hash.toString("hex")).to.equal( + expect(existingHeader.hash.toString("hex")).to.equal( header.hash.toString("hex") ); }); diff --git a/integration_tests/features/support/wallet_cli_steps.js b/integration_tests/features/support/wallet_cli_steps.js index 3d071f759a..6e244004b7 100644 --- a/integration_tests/features/support/wallet_cli_steps.js +++ 
b/integration_tests/features/support/wallet_cli_steps.js @@ -118,6 +118,15 @@ When( } ); +When( + "I create a burn transaction of {int} uT from {word} via command line", + { timeout: 180 * 1000 }, + async function (amount, name) { + let wallet = this.getWallet(name); + await wallet_run_command(wallet, `burn-tari ${amount}`, 180); + } +); + When( "I send one-sided {int} uT from {word} to {word} via command line", { timeout: 180 * 1000 }, diff --git a/integration_tests/helpers/baseNodeClient.js b/integration_tests/helpers/baseNodeClient.js index e7f55ad5aa..6a80a8e780 100644 --- a/integration_tests/helpers/baseNodeClient.js +++ b/integration_tests/helpers/baseNodeClient.js @@ -44,7 +44,7 @@ class BaseNodeClient { getHeaderAt(height) { return this.getHeaders(height, 1).then((header) => - header && header.length ? header[0].header : null + header && header.length ? header[0] : null ); } @@ -66,7 +66,7 @@ class BaseNodeClient { getTipHeader() { return this.getHeaders(0, 1).then((headers) => { - const header = headers[0].header; + const header = headers[0]; return Object.assign(header, { height: +header.height, }); diff --git a/integration_tests/helpers/baseNodeProcess.js b/integration_tests/helpers/baseNodeProcess.js index 059dd81b14..9980a561d2 100644 --- a/integration_tests/helpers/baseNodeProcess.js +++ b/integration_tests/helpers/baseNodeProcess.js @@ -70,6 +70,7 @@ class BaseNodeProcess { await this.runCommand("cargo", [ "build", "--release", + "--locked", "--bin", "tari_base_node", "-Z", @@ -157,7 +158,7 @@ class BaseNodeProcess { // Create convenience script - this is NOT used to start the base node in cucumber fs.writeFileSync( `${this.baseDir}/start_node.sh`, - "bash -c \"RUST_BACKTRACE=1 cargo run --release --bin tari_base_node -- -n --watch status -b . --network localnet $(grep -v '^#' .overrides)\"", + "bash -c \"RUST_BACKTRACE=1 cargo run --release --locked --bin tari_base_node -- -n --watch status -b . 
--network localnet $(grep -v '^#' .overrides)\"", { mode: 0o777 } ); diff --git a/integration_tests/helpers/ffi/ffiInterface.js b/integration_tests/helpers/ffi/ffiInterface.js index 51b75cb814..bd01ec34c5 100644 --- a/integration_tests/helpers/ffi/ffiInterface.js +++ b/integration_tests/helpers/ffi/ffiInterface.js @@ -34,6 +34,7 @@ class InterfaceFFI { const args = [ "build", "--release", + "--locked", "--package", "tari_wallet_ffi", "-Z", @@ -49,13 +50,7 @@ class InterfaceFFI { fs.mkdirSync(baseDir, { recursive: true }); fs.mkdirSync(baseDir + "/log", { recursive: true }); } - const ps = spawn(cmd, args, { - cwd: baseDir, - env: { - ...process.env, - CARGO_TARGET_DIR: process.cwd() + "/temp/ffi-target", - }, - }); + const ps = spawn(cmd, args); ps.on("close", (_code) => { resolve(ps); }); diff --git a/integration_tests/helpers/mergeMiningProxyProcess.js b/integration_tests/helpers/mergeMiningProxyProcess.js index 387083e9b2..0d38ad73d4 100644 --- a/integration_tests/helpers/mergeMiningProxyProcess.js +++ b/integration_tests/helpers/mergeMiningProxyProcess.js @@ -111,6 +111,7 @@ class MergeMiningProxyProcess { await this.runCommand("cargo", [ "build", "--release", + "--locked", "--bin", "tari_merge_mining_proxy", "-Z", diff --git a/integration_tests/helpers/miningNodeProcess.js b/integration_tests/helpers/miningNodeProcess.js index 48fda75e82..cea3aafea4 100644 --- a/integration_tests/helpers/miningNodeProcess.js +++ b/integration_tests/helpers/miningNodeProcess.js @@ -165,6 +165,7 @@ class MiningNodeProcess { await this.runCommand("cargo", [ "build", "--release", + "--locked", "--bin", "tari_miner", "-Z", diff --git a/integration_tests/helpers/transactionOutputHashing.js b/integration_tests/helpers/transactionOutputHashing.js index 2a2759f51c..d51178d4c9 100644 --- a/integration_tests/helpers/transactionOutputHashing.js +++ b/integration_tests/helpers/transactionOutputHashing.js @@ -41,6 +41,7 @@ const getTransactionOutputHash = function (output) { 
assertBufferType(output.script); assertBufferType(output.covenant); assertBufferType(output.encrypted_value, 24); + assertBufferType(output.sender_offset_public_key, 32); const hash = consensusHashers .transactionHasher("transaction_output") // version @@ -55,6 +56,8 @@ const getTransactionOutputHash = function (output) { .chain(toLengthEncoded(output.covenant)) // encrypted_value .chain(output.encrypted_value) + // sender offset public key + .chain(output.sender_offset_public_key) // minimum_value_promise .chain(toLittleEndian(output.minimum_value_promise, 64)) .finalize(); diff --git a/integration_tests/helpers/walletProcess.js b/integration_tests/helpers/walletProcess.js index 75b95d297f..661ea1f22f 100644 --- a/integration_tests/helpers/walletProcess.js +++ b/integration_tests/helpers/walletProcess.js @@ -189,6 +189,7 @@ class WalletProcess { let args = [ "build", "--release", + "--locked", "--bin", "tari_console_wallet", "-Z", diff --git a/package-lock.json b/package-lock.json index 391abd78d6..9fbc5d8ed6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "tari", - "version": "0.37.0", + "version": "0.38.3", "lockfileVersion": 2, "requires": true, "packages": {} diff --git a/scripts/install_ubuntu_dependencies.sh b/scripts/install_ubuntu_dependencies.sh new file mode 100755 index 0000000000..ea5ca5c54f --- /dev/null +++ b/scripts/install_ubuntu_dependencies.sh @@ -0,0 +1,16 @@ +apt-get -y install \ + openssl \ + libssl-dev \ + pkg-config \ + libsqlite3-dev \ + clang-10 \ + git \ + cmake \ + dh-autoreconf \ + libc++-dev \ + libc++abi-dev \ + libprotobuf-dev \ + protobuf-compiler \ + libncurses5-dev \ + libncursesw5-dev \ + zip