diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c9a5b4a65..c1e22e7f0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -3,40 +3,62 @@ name: CI on: push: branches: - - develop - main - tags-ignore: - - "**" + - develop paths-ignore: - - "**/CHANGELOG.md" + - '**/CHANGELOG.md' pull_request: workflow_dispatch: +concurrency: + group: ${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }} + cancel-in-progress: true + jobs: - build-publish: + test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: - token: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }} - fetch-depth: 0 persist-credentials: false - name: Install redis run: sudo apt-get install -y redis-server + - name: Cache cargo + uses: actions/cache@v3 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + - name: Cargo test run: | rustup update RUST_BACKTRACE=1 cargo test --all --features redis_tests -- --test-threads=1 + build-publish: + runs-on: ubuntu-latest + needs: test + outputs: + docker_image_digest: ${{ steps.docker_push.outputs.digest }} + new_release_published: ${{ steps.semantic.outputs.new_release_published }} + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Semantic Release - uses: cycjimmy/semantic-release-action@v3 + uses: cycjimmy/semantic-release-action@v4 id: semantic # Only run on non-PR events or only PRs that aren't from forks if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository env: - GITHUB_TOKEN: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SEMANTIC_RELEASE_PACKAGE: ${{ github.event.repository.name }} with: semantic_version: 19 @@ -45,15 +67,21 @@ jobs: @semantic-release/git@10.0.1 
conventional-changelog-conventionalcommits@6.1.0 + - name: Checkout tag + if: steps.semantic.outputs.new_release_version != '' + uses: actions/checkout@v4 + with: + persist-credentials: false + ref: v${{ steps.semantic.outputs.new_release_version }} + - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v3 - name: Docker Meta id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | - blockstack/${{ github.event.repository.name }} hirosystems/${{ github.event.repository.name }} tags: | type=ref,event=branch @@ -62,18 +90,134 @@ jobs: type=semver,pattern={{major}}.{{minor}},value=${{ steps.semantic.outputs.new_release_version }},enable=${{ steps.semantic.outputs.new_release_version != '' }} type=raw,value=latest,enable={{is_default_branch}} - - name: Login to DockerHub - uses: docker/login-action@v2 + - name: Log in to DockerHub + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_PASSWORD }} - - name: Build/Tag/Push Image - uses: docker/build-push-action@v2 + - name: Build/Push Image + uses: docker/build-push-action@v5 + id: docker_push with: context: . 
tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} file: ./dockerfiles/components/chainhook-node.dockerfile + cache-from: type=gha + cache-to: type=gha,mode=max # Only push if (there's a new release on main branch, or if building a non-main branch) and (Only run on non-PR events or only PRs that aren't from forks) - push: ${{ (github.ref != 'refs/heads/master' || steps.semantic.outputs.new_release_version != '') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }} + push: ${{ (github.ref != 'refs/heads/main' || steps.semantic.outputs.new_release_version != '') && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) }} + + deploy-dev: + runs-on: ubuntu-latest + strategy: + matrix: + k8s-env: [mainnet,testnet] + needs: build-publish + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository + env: + DEPLOY_ENV: dev + environment: + name: Development-${{ matrix.k8s-env }} + url: https://platform.dev.hiro.so/ + steps: + - name: Checkout actions repo + uses: actions/checkout@v4 + with: + ref: main + token: ${{ secrets.GH_TOKEN }} + repository: ${{ secrets.DEVOPS_ACTIONS_REPO }} + + - name: Deploy Chainhook build to Dev ${{ matrix.k8s-env }} + uses: ./actions/deploy + with: + docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }} + file_pattern: manifests/chainhooks/${{ matrix.k8s-env }}/chainhook-node/${{ env.DEPLOY_ENV }}/base/kustomization.yaml + gh_token: ${{ secrets.GH_TOKEN }} + + auto-approve-dev: + runs-on: ubuntu-latest + if: needs.build-publish.outputs.new_release_published == 'true' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) + needs: build-publish + steps: + - name: Approve pending deployments + run: | + sleep 5 + ENV_IDS=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H 
"Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/chainhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '[.[].environment.id // empty]') + if [[ "${ENV_IDS}" != "[]" ]]; then + curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/chainhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":${ENV_IDS},\"state\":\"approved\",\"comment\":\"auto approve\"}" + fi + + deploy-staging: + runs-on: ubuntu-latest + strategy: + matrix: + k8s-env: [mainnet,testnet] + needs: + - build-publish + - deploy-dev + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository + env: + DEPLOY_ENV: stg + environment: + name: Staging-${{ matrix.k8s-env }} + url: https://platform.stg.hiro.so/ + steps: + - name: Checkout actions repo + uses: actions/checkout@v4 + with: + ref: main + token: ${{ secrets.GH_TOKEN }} + repository: ${{ secrets.DEVOPS_ACTIONS_REPO }} + + - name: Deploy Chainhook build to Stg ${{ matrix.k8s-env }} + uses: ./actions/deploy + with: + docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }} + file_pattern: manifests/chainhooks/${{ matrix.k8s-env }}/chainhook-node/${{ env.DEPLOY_ENV }}/base/kustomization.yaml + gh_token: ${{ secrets.GH_TOKEN }} + + auto-approve-stg: + runs-on: ubuntu-latest + if: needs.build-publish.outputs.new_release_published == 'true' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) + needs: + - build-publish + - deploy-dev + steps: + - name: Approve pending deployments + run: | + sleep 5 + ENV_IDS=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/chainhook/actions/runs/${{ github.run_id }}/pending_deployments" | jq -r '[.[].environment.id // 
empty]') + if [[ "${ENV_IDS}" != "[]" ]]; then + curl -s -X POST -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/hirosystems/chainhook/actions/runs/${{ github.run_id }}/pending_deployments" -d "{\"environment_ids\":${ENV_IDS},\"state\":\"approved\",\"comment\":\"auto approve\"}" + fi + + deploy-prod: + runs-on: ubuntu-latest + strategy: + matrix: + k8s-env: [mainnet,testnet] + needs: + - build-publish + - deploy-staging + if: needs.build-publish.outputs.new_release_published == 'true' && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository) + env: + DEPLOY_ENV: prd + environment: + name: Production-${{ matrix.k8s-env }} + url: https://platform.hiro.so/ + steps: + - name: Checkout actions repo + uses: actions/checkout@v4 + with: + ref: main + token: ${{ secrets.GH_TOKEN }} + repository: ${{ secrets.DEVOPS_ACTIONS_REPO }} + + - name: Deploy Chainhook build to Prd ${{ matrix.k8s-env }} + uses: ./actions/deploy + with: + docker_tag: ${{ needs.build-publish.outputs.docker_image_digest }} + file_pattern: manifests/chainhooks/${{ matrix.k8s-env }}/chainhook-node/${{ env.DEPLOY_ENV }}/base/kustomization.yaml + gh_token: ${{ secrets.GH_TOKEN }} diff --git a/.gitignore b/.gitignore index 9f6f049ca..d63cbe1d0 100644 --- a/.gitignore +++ b/.gitignore @@ -19,4 +19,5 @@ components/chainhook-types-js/dist *.redb cache/ -components/chainhook-cli/src/service/tests/fixtures/tmp \ No newline at end of file +components/chainhook-cli/src/service/tests/fixtures/tmp +components/chainhook-cli/src/archive/tests/fixtures/tmp \ No newline at end of file diff --git a/README.md b/README.md index bd0da3cb7..d6c7169b5 100644 --- a/README.md +++ b/README.md @@ -536,6 +536,9 @@ Additional configuration knobs available: // Include decoded clarity values in payload "decode_clarity_values": true + +// Include the contract ABI for transactions that deploy contracts: 
+"include_contract_abi": true ``` Putting all the pieces together: diff --git a/components/chainhook-cli/benches/codec/clarity.rs b/components/chainhook-cli/benches/codec/clarity.rs deleted file mode 100644 index 8f2397379..000000000 --- a/components/chainhook-cli/benches/codec/clarity.rs +++ /dev/null @@ -1,27 +0,0 @@ -use clarity_repl::clarity::codec::StacksString; -use clarity_repl::clarity::ClarityName; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use hex::{decode, encode}; - -#[inline] -fn canonical_is_clarity_variable() { - let function_name = ClarityName::try_from("my-method-name").unwrap(); - StacksString::from(function_name.clone()).is_clarity_variable(); -} - -#[inline] -fn proposed_is_clarity_variable() { - let function_name = ClarityName::try_from("my-method-name").unwrap(); -} - -pub fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("canonical_is_clarity_variable ", |b| { - b.iter(|| canonical_is_clarity_variable()) - }); - c.bench_function("proposed_is_clarity_variable ", |b| { - b.iter(|| proposed_is_clarity_variable()) - }); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/components/chainhook-cli/benches/codec/hex.rs b/components/chainhook-cli/benches/codec/hex.rs deleted file mode 100644 index 8b5fb30c3..000000000 --- a/components/chainhook-cli/benches/codec/hex.rs +++ /dev/null @@ -1,397 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use hex::{decode, encode}; - -#[inline] -fn decode_hex_str_stacks(input: &str) -> Vec { - hex_bytes(input).unwrap() -} - -#[inline] -fn encode_hex_str_stacks() -> String { - hex::encode(&[ - 0x00, - 0x00, - 0x00, - 0x00, - 0x01, - 0x04, - 0x00, - 0xa4, - 0x68, - 0xe7, - 0x2d, - 0xf6, - 0x75, - 0x43, - 0x87, - 0x20, - 0x98, - 0x27, - 0x32, - 0x7b, - 0x98, - 0x9f, - 0x4a, - 0x99, - 0x54, - 0x2a, - 0x69, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x77, - 0x00, - 0x00, - 0x00, - 0x00, - 
0x00, - 0x00, - 0x00, - 0xb4, - 0x00, - 0x01, - 0x23, - 0x1c, - 0x21, - 0x39, - 0x49, - 0x45, - 0x53, - 0x17, - 0xd1, - 0x5f, - 0xf2, - 0x5d, - 0xfd, - 0x7a, - 0xd9, - 0xbc, - 0x04, - 0x26, - 0xcf, - 0xfa, - 0x5c, - 0x6f, - 0x5b, - 0xe4, - 0x6e, - 0x79, - 0xfa, - 0x1e, - 0x18, - 0x2b, - 0x61, - 0x45, - 0x18, - 0x0c, - 0x1c, - 0x13, - 0x5d, - 0x6c, - 0xcc, - 0xfc, - 0xc4, - 0x9e, - 0x85, - 0xcb, - 0x82, - 0x5e, - 0x61, - 0x52, - 0x28, - 0xda, - 0xf2, - 0xd1, - 0x66, - 0x3e, - 0x49, - 0xd7, - 0x0c, - 0xd5, - 0xd0, - 0xe1, - 0x96, - 0xd1, - 0x26, - 0xc0, - 0x03, - 0x02, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x05, - 0x16, - 0x7b, - 0x2d, - 0xd1, - 0xf0, - 0xd4, - 0x7f, - 0x59, - 0x67, - 0x21, - 0xc2, - 0x33, - 0xfd, - 0x9b, - 0x2e, - 0x99, - 0x19, - 0xac, - 0x91, - 0x28, - 0x8a, - 0x00, - 0x00, - 0x00, - 0x00, - 0x0c, - 0x1b, - 0x5e, - 0x48, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00 - ]) -} - - -#[inline] -fn decode_hex_str_crate(input: &str) -> Vec { - decode(input).unwrap() -} - -#[inline] -fn encode_hex_str_crate() -> String { - encode(&[ - 0x00, - 0x00, - 0x00, - 0x00, - 0x01, - 0x04, - 0x00, - 0xa4, - 0x68, - 0xe7, - 0x2d, - 0xf6, - 0x75, - 0x43, - 0x87, - 0x20, - 0x98, - 0x27, - 0x32, - 0x7b, - 0x98, - 0x9f, - 0x4a, - 0x99, - 0x54, - 0x2a, - 0x69, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x77, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0xb4, - 0x00, - 0x01, - 0x23, - 0x1c, - 0x21, - 0x39, - 0x49, - 0x45, - 0x53, - 0x17, - 0xd1, - 0x5f, - 0xf2, - 0x5d, - 0xfd, - 0x7a, - 0xd9, - 0xbc, - 0x04, - 0x26, - 0xcf, - 0xfa, - 0x5c, - 0x6f, - 0x5b, - 0xe4, - 0x6e, - 0x79, - 0xfa, - 0x1e, - 0x18, - 0x2b, - 0x61, - 0x45, - 0x18, - 0x0c, - 0x1c, - 0x13, - 0x5d, - 0x6c, - 0xcc, - 0xfc, - 0xc4, - 0x9e, - 0x85, - 0xcb, 
- 0x82, - 0x5e, - 0x61, - 0x52, - 0x28, - 0xda, - 0xf2, - 0xd1, - 0x66, - 0x3e, - 0x49, - 0xd7, - 0x0c, - 0xd5, - 0xd0, - 0xe1, - 0x96, - 0xd1, - 0x26, - 0xc0, - 0x03, - 0x02, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x05, - 0x16, - 0x7b, - 0x2d, - 0xd1, - 0xf0, - 0xd4, - 0x7f, - 0x59, - 0x67, - 0x21, - 0xc2, - 0x33, - 0xfd, - 0x9b, - 0x2e, - 0x99, - 0x19, - 0xac, - 0x91, - 0x28, - 0x8a, - 0x00, - 0x00, - 0x00, - 0x00, - 0x0c, - 0x1b, - 0x5e, - 0x48, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00, - 0x00 - ]) -} - - -pub fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("decode_hex_str ('hex_bytes' from stacks) <360>", |b| b.iter(|| decode_hex_str_stacks(black_box("00000000010400a468e72df6754387209827327b989f4a99542a69000000000000007700000000000000b40001231c213949455317d15ff25dfd7ad9bc0426cffa5c6f5be46e79fa1e182b6145180c1c135d6cccfcc49e85cb825e615228daf2d1663e49d70cd5d0e196d126c00302000000000005167b2dd1f0d47f596721c233fd9b2e9919ac91288a000000000c1b5e4800000000000000000000000000000000000000000000000000000000000000000000")))); - c.bench_function("decode_hex_str (function from crate 'hex') <360>", |b| b.iter(|| decode_hex_str_crate(black_box("00000000010400a468e72df6754387209827327b989f4a99542a69000000000000007700000000000000b40001231c213949455317d15ff25dfd7ad9bc0426cffa5c6f5be46e79fa1e182b6145180c1c135d6cccfcc49e85cb825e615228daf2d1663e49d70cd5d0e196d126c00302000000000005167b2dd1f0d47f596721c233fd9b2e9919ac91288a000000000c1b5e4800000000000000000000000000000000000000000000000000000000000000000000")))); - - c.bench_function("encode_to_hex_str ('to_hex' from stacks) <360>", |b| b.iter(|| encode_hex_str_stacks())); - c.bench_function("encode_to_hex_str (function from crate 'hex') <360>", |b| b.iter(|| encode_hex_str_crate())); -} - 
-criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/components/chainhook-cli/benches/storage/redis.rs b/components/chainhook-cli/benches/storage/redis.rs deleted file mode 100644 index fc6d6c8fa..000000000 --- a/components/chainhook-cli/benches/storage/redis.rs +++ /dev/null @@ -1,17 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use chainhook_event_indexer::ingestion::start_ingesting; -use chainhook_sdk::indexer::IndexerConfig; - - -fn criterion_benchmark(c: &mut Criterion) { - let config = IndexerConfig { - stacks_node_rpc_url: "http://0.0.0.0:20443".into(), - bitcoind_rpc_url: "http://0.0.0.0:18443".into(), - bitcoind_rpc_username: "devnet".into(), - bitcoind_rpc_password: "devnet".into(), - }; - c.bench_function("redis", |b| b.iter(|| start_ingesting("/Users/ludovic/Downloads/stacks-blockchain-api.tsv".into(), config.clone()).unwrap())); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/components/chainhook-cli/benches/storage/rocksdb.rs b/components/chainhook-cli/benches/storage/rocksdb.rs deleted file mode 100644 index 4bec91251..000000000 --- a/components/chainhook-cli/benches/storage/rocksdb.rs +++ /dev/null @@ -1,16 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; - -fn ingest_tsv(n: u64) -> u64 { - match n { - 0 => 1, - 1 => 1, - n => fibonacci(n-1) + fibonacci(n-2), - } -} - -fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("fib 20", |b| b.iter(|| fibonacci(black_box(20)))); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/components/chainhook-cli/benches/storage/sqlite.rs b/components/chainhook-cli/benches/storage/sqlite.rs deleted file mode 100644 index 4bec91251..000000000 --- a/components/chainhook-cli/benches/storage/sqlite.rs +++ /dev/null @@ -1,16 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; - -fn 
ingest_tsv(n: u64) -> u64 { - match n { - 0 => 1, - 1 => 1, - n => fibonacci(n-1) + fibonacci(n-2), - } -} - -fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("fib 20", |b| b.iter(|| fibonacci(black_box(20)))); -} - -criterion_group!(benches, criterion_benchmark); -criterion_main!(benches); diff --git a/components/chainhook-cli/src/archive/mod.rs b/components/chainhook-cli/src/archive/mod.rs index 1b87002eb..759e97071 100644 --- a/components/chainhook-cli/src/archive/mod.rs +++ b/components/chainhook-cli/src/archive/mod.rs @@ -167,3 +167,6 @@ pub async fn download_stacks_dataset_if_required(config: &mut Config, ctx: &Cont false } } + +#[cfg(test)] +pub mod tests; diff --git a/components/chainhook-cli/src/archive/tests/fixtures/stacks_blocks.tsv.gz b/components/chainhook-cli/src/archive/tests/fixtures/stacks_blocks.tsv.gz new file mode 100644 index 000000000..91813fb7c Binary files /dev/null and b/components/chainhook-cli/src/archive/tests/fixtures/stacks_blocks.tsv.gz differ diff --git a/components/chainhook-cli/src/archive/tests/mod.rs b/components/chainhook-cli/src/archive/tests/mod.rs new file mode 100644 index 000000000..eb97bfff9 --- /dev/null +++ b/components/chainhook-cli/src/archive/tests/mod.rs @@ -0,0 +1,84 @@ +use std::{ + fs::{self, File}, + io::Read, + net::{IpAddr, Ipv4Addr}, + thread::sleep, + time::Duration, +}; + +use chainhook_sdk::utils::Context; + +use crate::{ + archive::{ + default_tsv_file_path, default_tsv_sha_file_path, download_stacks_dataset_if_required, + }, + config::{Config, EventSourceConfig, UrlConfig}, + service::tests::helpers::get_free_port, +}; +use rocket::Config as RocketConfig; + +const GZ_DIR: &str = "src/archive/tests/fixtures/stacks_blocks.tsv.gz"; +const TMP_DIR: &str = "src/archive/tests/fixtures/tmp"; +const SHA256_HASH: &str = "49ca5f80b2a1303e7f7e98a4f9d39efeb35fd9f3696c4cd9615e0b5cd1f3dcfb"; + +#[get("/stacks_blocks.tsv.sha256")] +fn get_sha256() -> String { + format!("{SHA256_HASH}") +} + 
+#[get("/stacks_blocks.tsv.gz")] +fn get_gz() -> Vec { + let dir = format!("{}/{GZ_DIR}", env!("CARGO_MANIFEST_DIR")); + let mut f = File::open(dir).unwrap(); + let mut buffer: Vec = Vec::new(); + f.read_to_end(&mut buffer).unwrap(); + buffer +} + +async fn start_service(port: u16) { + let config = RocketConfig::figment() + .merge(("port", port)) + .merge(("address", IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)))) + .merge(("log_level", "off")); + let _rocket = rocket::build() + .configure(config) + .mount("/", routes![get_sha256, get_gz]) + .launch() + .await + .unwrap(); +} + +#[tokio::test] +async fn it_downloads_stacks_dataset_if_required() { + let port = get_free_port().unwrap(); + let mut config = Config::default(false, true, false, &None).unwrap(); + + config.storage.working_dir = format!("{}/{}", env!("CARGO_MANIFEST_DIR"), TMP_DIR); + config.event_sources = vec![EventSourceConfig::StacksTsvUrl(UrlConfig { + file_url: format!("http://0.0.0.0:{port}/stacks_blocks.tsv"), + })]; + let _ = hiro_system_kit::thread_named("Start tsv service") + .spawn(move || { + let future = start_service(port); + let _ = hiro_system_kit::nestable_block_on(future); + }) + .expect("unable to spawn thread"); + + sleep(Duration::new(1, 0)); + let logger = hiro_system_kit::log::setup_logger(); + let _guard = hiro_system_kit::log::setup_global_logger(logger.clone()); + let ctx = Context { + logger: Some(logger), + tracer: false, + }; + let mut config_clone = config.clone(); + assert!(download_stacks_dataset_if_required(&mut config, &ctx).await); + assert!(!download_stacks_dataset_if_required(&mut config_clone, &ctx).await); + + let mut tsv_file_path = config.expected_cache_path(); + tsv_file_path.push(default_tsv_file_path(&config.network.stacks_network)); + fs::remove_file(tsv_file_path).unwrap(); + let mut tsv_sha_file_path = config.expected_cache_path(); + tsv_sha_file_path.push(default_tsv_sha_file_path(&config.network.stacks_network)); + fs::remove_file(tsv_sha_file_path).unwrap(); +} 
diff --git a/components/chainhook-cli/src/cli/mod.rs b/components/chainhook-cli/src/cli/mod.rs index a07a7aebc..8ee706a45 100644 --- a/components/chainhook-cli/src/cli/mod.rs +++ b/components/chainhook-cli/src/cli/mod.rs @@ -342,6 +342,7 @@ async fn handle_command(opts: Opts, ctx: Context) -> Result<(), String> { expire_after_occurrence: None, capture_all_events: None, decode_clarity_values: None, + include_contract_abi: None, action: HookAction::FileAppend(FileHook { path: "arkadiko.txt".into() }) @@ -358,6 +359,7 @@ async fn handle_command(opts: Opts, ctx: Context) -> Result<(), String> { expire_after_occurrence: None, capture_all_events: None, decode_clarity_values: None, + include_contract_abi: None, action: HookAction::FileAppend(FileHook { path: "arkadiko.txt".into() }) diff --git a/components/chainhook-cli/src/config/file.rs b/components/chainhook-cli/src/config/file.rs index b735baadb..ad76a7a91 100644 --- a/components/chainhook-cli/src/config/file.rs +++ b/components/chainhook-cli/src/config/file.rs @@ -1,3 +1,5 @@ +use chainhook_sdk::types::BitcoinNetwork; + #[derive(Deserialize, Debug, Clone)] pub struct ConfigFile { pub storage: StorageConfigFile, @@ -43,7 +45,7 @@ pub struct LimitsConfigFile { #[derive(Deserialize, Debug, Clone)] pub struct NetworkConfigFile { - pub mode: String, + pub mode: NetworkConfigMode, pub bitcoind_rpc_url: String, pub bitcoind_rpc_username: String, pub bitcoind_rpc_password: String, @@ -51,3 +53,28 @@ pub struct NetworkConfigFile { pub stacks_node_rpc_url: Option, pub stacks_events_ingestion_port: Option, } + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +pub enum NetworkConfigMode { + Devnet, + Testnet, + Mainnet, +} + +impl NetworkConfigMode { + pub fn from_bitcoin_network(network: &BitcoinNetwork) -> Self { + match network { + BitcoinNetwork::Regtest => NetworkConfigMode::Devnet, + BitcoinNetwork::Testnet => NetworkConfigMode::Testnet, + BitcoinNetwork::Mainnet => NetworkConfigMode::Mainnet, + } 
+ } + pub fn as_str(&self) -> &str { + match self { + NetworkConfigMode::Devnet => "devnet", + NetworkConfigMode::Testnet => "testnet", + NetworkConfigMode::Mainnet => "mainnet", + } + } +} diff --git a/components/chainhook-cli/src/config/generator.rs b/components/chainhook-cli/src/config/generator.rs index 074bc328f..0e6aff195 100644 --- a/components/chainhook-cli/src/config/generator.rs +++ b/components/chainhook-cli/src/config/generator.rs @@ -1,6 +1,8 @@ +use super::file::NetworkConfigMode; use chainhook_sdk::types::BitcoinNetwork; pub fn generate_config(network: &BitcoinNetwork) -> String { + let mode = NetworkConfigMode::from_bitcoin_network(network); let network = format!("{:?}", network); let conf = format!( r#"[storage] @@ -15,7 +17,7 @@ working_dir = "cache" # database_uri = "redis://localhost:6379/" [network] -mode = "{network}" +mode = "{mode}" bitcoind_rpc_url = "http://localhost:8332" bitcoind_rpc_username = "devnet" bitcoind_rpc_password = "devnet" @@ -39,6 +41,7 @@ max_caching_memory_size_mb = 32000 [[event_source]] tsv_file_url = "https://archive.hiro.so/{network}/stacks-blockchain-api/{network}-stacks-blockchain-api-latest" "#, + mode = mode.as_str(), network = network.to_lowercase(), ); return conf; diff --git a/components/chainhook-cli/src/config/mod.rs b/components/chainhook-cli/src/config/mod.rs index 28eecfa3d..b690e623f 100644 --- a/components/chainhook-cli/src/config/mod.rs +++ b/components/chainhook-cli/src/config/mod.rs @@ -24,7 +24,7 @@ pub const BITCOIN_SCAN_THREAD_POOL_SIZE: usize = 10; pub const STACKS_MAX_PREDICATE_REGISTRATION: usize = 50; pub const BITCOIN_MAX_PREDICATE_REGISTRATION: usize = 50; -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct Config { pub storage: StorageConfig, pub http_api: PredicatesApi, @@ -33,25 +33,25 @@ pub struct Config { pub network: IndexerConfig, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct StorageConfig { pub working_dir: String, } -#[derive(Clone, 
Debug)] +#[derive(Clone, Debug, PartialEq)] pub enum PredicatesApi { Off, On(PredicatesApiConfig), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct PredicatesApiConfig { pub http_port: u16, pub database_uri: String, pub display_logs: bool, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub enum EventSourceConfig { StacksTsvPath(PathConfig), StacksTsvUrl(UrlConfig), @@ -59,17 +59,17 @@ pub enum EventSourceConfig { OrdinalsSqliteUrl(UrlConfig), } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct PathConfig { pub file_path: PathBuf, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct UrlConfig { pub file_url: String, } -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] pub struct LimitsConfig { pub max_number_of_bitcoin_predicates: usize, pub max_number_of_concurrent_bitcoin_scans: usize, @@ -232,20 +232,6 @@ impl Config { })); } - pub fn add_ordinals_sqlite_remote_source_url(&mut self, file_url: &str) { - self.event_sources - .push(EventSourceConfig::OrdinalsSqliteUrl(UrlConfig { - file_url: file_url.to_string(), - })); - } - - pub fn add_local_ordinals_sqlite_source(&mut self, file_path: &PathBuf) { - self.event_sources - .push(EventSourceConfig::OrdinalsSqlitePath(PathConfig { - file_path: file_path.clone(), - })); - } - pub fn expected_api_database_uri(&self) -> &str { &self.expected_api_config().database_uri } @@ -272,15 +258,6 @@ impl Config { destination_path } - fn expected_remote_ordinals_sqlite_base_url(&self) -> &String { - for source in self.event_sources.iter() { - if let EventSourceConfig::OrdinalsSqliteUrl(config) = source { - return &config.file_url; - } - } - panic!("expected remote-tsv source") - } - fn expected_remote_stacks_tsv_base_url(&self) -> &String { for source in self.event_sources.iter() { if let EventSourceConfig::StacksTsvUrl(config) = source { @@ -298,14 +275,6 @@ impl Config { format!("{}.gz", self.expected_remote_stacks_tsv_base_url()) } - 
pub fn expected_remote_ordinals_sqlite_sha256(&self) -> String { - format!("{}.sha256", self.expected_remote_ordinals_sqlite_base_url()) - } - - pub fn expected_remote_ordinals_sqlite_url(&self) -> String { - format!("{}.gz", self.expected_remote_ordinals_sqlite_base_url()) - } - pub fn rely_on_remote_stacks_tsv(&self) -> bool { for source in self.event_sources.iter() { if let EventSourceConfig::StacksTsvUrl(_config) = source { @@ -315,15 +284,6 @@ impl Config { false } - pub fn rely_on_remote_ordinals_sqlite(&self) -> bool { - for source in self.event_sources.iter() { - if let EventSourceConfig::OrdinalsSqliteUrl(_config) = source { - return true; - } - } - false - } - pub fn should_download_remote_stacks_tsv(&self) -> bool { let mut rely_on_remote_tsv = false; let mut remote_tsv_present_locally = false; @@ -338,20 +298,6 @@ impl Config { rely_on_remote_tsv == true && remote_tsv_present_locally == false } - pub fn should_download_remote_ordinals_sqlite(&self) -> bool { - let mut rely_on_remote_tsv = false; - let mut remote_tsv_present_locally = false; - for source in self.event_sources.iter() { - if let EventSourceConfig::OrdinalsSqliteUrl(_config) = source { - rely_on_remote_tsv = true; - } - if let EventSourceConfig::OrdinalsSqlitePath(_config) = source { - remote_tsv_present_locally = true; - } - } - rely_on_remote_tsv == true && remote_tsv_present_locally == false - } - pub fn default( devnet: bool, testnet: bool, @@ -465,3 +411,6 @@ pub fn default_cache_path() -> String { cache_path.push("cache"); format!("{}", cache_path.display()) } + +#[cfg(test)] +pub mod tests; diff --git a/components/chainhook-cli/src/config/tests/fixtures/devnet_chainhook.toml b/components/chainhook-cli/src/config/tests/fixtures/devnet_chainhook.toml new file mode 100644 index 000000000..dbdb4a589 --- /dev/null +++ b/components/chainhook-cli/src/config/tests/fixtures/devnet_chainhook.toml @@ -0,0 +1,35 @@ +[storage] +working_dir = "cache" + +# The Http Api allows you to register / 
deregister +# dynamically predicates. +# Disable by default. +# +# [http_api] +# http_port = 20456 +# database_uri = "redis://localhost:6379/" + +[network] +mode = "devnet" +bitcoind_rpc_url = "http://localhost:8332" +bitcoind_rpc_username = "devnet" +bitcoind_rpc_password = "devnet" +# Bitcoin block events can be received by Chainhook +# either through a Bitcoin node's ZeroMQ interface, +# or through the Stacks node. The Stacks node is +# used by default: +stacks_node_rpc_url = "http://localhost:20443" +# but zmq can be used instead: +# bitcoind_zmq_url = "tcp://0.0.0.0:18543" + +[limits] +max_number_of_bitcoin_predicates = 100 +max_number_of_concurrent_bitcoin_scans = 100 +max_number_of_stacks_predicates = 10 +max_number_of_concurrent_stacks_scans = 10 +max_number_of_processing_threads = 16 +max_number_of_networking_threads = 16 +max_caching_memory_size_mb = 32000 + +[[event_source]] +tsv_file_url = "https://archive.hiro.so/regtest/stacks-blockchain-api/regtest-stacks-blockchain-api-latest" diff --git a/components/chainhook-cli/src/config/tests/fixtures/local_tsv_chainhook.toml b/components/chainhook-cli/src/config/tests/fixtures/local_tsv_chainhook.toml new file mode 100644 index 000000000..e70a9ad34 --- /dev/null +++ b/components/chainhook-cli/src/config/tests/fixtures/local_tsv_chainhook.toml @@ -0,0 +1,35 @@ +[storage] +working_dir = "cache" + +# The Http Api allows you to register / deregister +# dynamically predicates. +# Disable by default. +# +# [http_api] +# http_port = 20456 +# database_uri = "redis://localhost:6379/" + +[network] +mode = "devnet" +bitcoind_rpc_url = "http://localhost:8332" +bitcoind_rpc_username = "devnet" +bitcoind_rpc_password = "devnet" +# Bitcoin block events can be received by Chainhook +# either through a Bitcoin node's ZeroMQ interface, +# or through the Stacks node. 
The Stacks node is +# used by default: +stacks_node_rpc_url = "http://localhost:20443" +# but zmq can be used instead: +# bitcoind_zmq_url = "tcp://0.0.0.0:18543" + +[limits] +max_number_of_bitcoin_predicates = 100 +max_number_of_concurrent_bitcoin_scans = 100 +max_number_of_stacks_predicates = 10 +max_number_of_concurrent_stacks_scans = 10 +max_number_of_processing_threads = 16 +max_number_of_networking_threads = 16 +max_caching_memory_size_mb = 32000 + +[[event_source]] +tsv_file_path = "./file.tsv" diff --git a/components/chainhook-cli/src/config/tests/fixtures/mainnet_chainhook.toml b/components/chainhook-cli/src/config/tests/fixtures/mainnet_chainhook.toml new file mode 100644 index 000000000..d403113ca --- /dev/null +++ b/components/chainhook-cli/src/config/tests/fixtures/mainnet_chainhook.toml @@ -0,0 +1,35 @@ +[storage] +working_dir = "cache" + +# The Http Api allows you to register / deregister +# dynamically predicates. +# Disable by default. +# +# [http_api] +# http_port = 20456 +# database_uri = "redis://localhost:6379/" + +[network] +mode = "mainnet" +bitcoind_rpc_url = "http://localhost:8332" +bitcoind_rpc_username = "devnet" +bitcoind_rpc_password = "devnet" +# Bitcoin block events can be received by Chainhook +# either through a Bitcoin node's ZeroMQ interface, +# or through the Stacks node. 
The Stacks node is +# used by default: +stacks_node_rpc_url = "http://localhost:20443" +# but zmq can be used instead: +# bitcoind_zmq_url = "tcp://0.0.0.0:18543" + +[limits] +max_number_of_bitcoin_predicates = 100 +max_number_of_concurrent_bitcoin_scans = 100 +max_number_of_stacks_predicates = 10 +max_number_of_concurrent_stacks_scans = 10 +max_number_of_processing_threads = 16 +max_number_of_networking_threads = 16 +max_caching_memory_size_mb = 32000 + +[[event_source]] +tsv_file_url = "https://archive.hiro.so/mainnet/stacks-blockchain-api/mainnet-stacks-blockchain-api-latest" diff --git a/components/chainhook-cli/src/config/tests/fixtures/testnet_chainhook.toml b/components/chainhook-cli/src/config/tests/fixtures/testnet_chainhook.toml new file mode 100644 index 000000000..dc5c4b0ef --- /dev/null +++ b/components/chainhook-cli/src/config/tests/fixtures/testnet_chainhook.toml @@ -0,0 +1,35 @@ +[storage] +working_dir = "cache" + +# The Http Api allows you to register / deregister +# dynamically predicates. +# Disable by default. +# +# [http_api] +# http_port = 20456 +# database_uri = "redis://localhost:6379/" + +[network] +mode = "testnet" +bitcoind_rpc_url = "http://localhost:8332" +bitcoind_rpc_username = "devnet" +bitcoind_rpc_password = "devnet" +# Bitcoin block events can be received by Chainhook +# either through a Bitcoin node's ZeroMQ interface, +# or through the Stacks node. 
The Stacks node is +# used by default: +stacks_node_rpc_url = "http://localhost:20443" +# but zmq can be used instead: +# bitcoind_zmq_url = "tcp://0.0.0.0:18543" + +[limits] +max_number_of_bitcoin_predicates = 100 +max_number_of_concurrent_bitcoin_scans = 100 +max_number_of_stacks_predicates = 10 +max_number_of_concurrent_stacks_scans = 10 +max_number_of_processing_threads = 16 +max_number_of_networking_threads = 16 +max_caching_memory_size_mb = 32000 + +[[event_source]] +tsv_file_url = "https://archive.hiro.so/testnet/stacks-blockchain-api/testnet-stacks-blockchain-api-latest" diff --git a/components/chainhook-cli/src/config/tests/fixtures/unsupported_chainhook.toml b/components/chainhook-cli/src/config/tests/fixtures/unsupported_chainhook.toml new file mode 100644 index 000000000..835cac1e4 --- /dev/null +++ b/components/chainhook-cli/src/config/tests/fixtures/unsupported_chainhook.toml @@ -0,0 +1,35 @@ +[storage] +working_dir = "cache" + +# The Http Api allows you to register / deregister +# dynamically predicates. +# Disable by default. +# +# [http_api] +# http_port = 20456 +# database_uri = "redis://localhost:6379/" + +[network] +mode = "unsupported" +bitcoind_rpc_url = "http://localhost:8332" +bitcoind_rpc_username = "devnet" +bitcoind_rpc_password = "devnet" +# Bitcoin block events can be received by Chainhook +# either through a Bitcoin node's ZeroMQ interface, +# or through the Stacks node. 
The Stacks node is +# used by default: +stacks_node_rpc_url = "http://localhost:20443" +# but zmq can be used instead: +# bitcoind_zmq_url = "tcp://0.0.0.0:18543" + +[limits] +max_number_of_bitcoin_predicates = 100 +max_number_of_concurrent_bitcoin_scans = 100 +max_number_of_stacks_predicates = 10 +max_number_of_concurrent_stacks_scans = 10 +max_number_of_processing_threads = 16 +max_number_of_networking_threads = 16 +max_caching_memory_size_mb = 32000 + +[[event_source]] +tsv_file_url = "https://archive.hiro.so/regtest/stacks-blockchain-api/regtest-stacks-blockchain-api-latest" diff --git a/components/chainhook-cli/src/config/tests/mod.rs b/components/chainhook-cli/src/config/tests/mod.rs new file mode 100644 index 000000000..ec0701e4b --- /dev/null +++ b/components/chainhook-cli/src/config/tests/mod.rs @@ -0,0 +1,140 @@ +use std::path::PathBuf; + +use crate::config::{file::NetworkConfigMode, PredicatesApi, PredicatesApiConfig}; + +use super::{generator::generate_config, Config, ConfigFile, EventSourceConfig, PathConfig}; +use chainhook_sdk::types::{BitcoinNetwork, StacksNetwork}; +use test_case::test_case; + +const LOCAL_DIR: &str = env!("CARGO_MANIFEST_DIR"); +#[test_case(BitcoinNetwork::Regtest)] +#[test_case(BitcoinNetwork::Testnet)] +#[test_case(BitcoinNetwork::Mainnet)] +fn config_from_file_matches_generator_for_all_networks(network: BitcoinNetwork) { + let mode = NetworkConfigMode::from_bitcoin_network(&network); + let path = format!( + "{}/src/config/tests/fixtures/{}_chainhook.toml", + LOCAL_DIR, + mode.as_str() + ); + let from_path_config = Config::from_file_path(&path).unwrap(); + let generated_config_str = generate_config(&network); + let generated_config_file: ConfigFile = toml::from_str(&generated_config_str).unwrap(); + let generated_config = Config::from_config_file(generated_config_file).unwrap(); + assert_eq!(generated_config, from_path_config); +} + +#[test] +fn config_from_file_allows_local_tsv_file() { + let path = format!( + 
"{}/src/config/tests/fixtures/local_tsv_chainhook.toml", + LOCAL_DIR, + ); + + Config::from_file_path(&path).expect("failed to generate config with local tsv path"); +} + +#[test] +fn parse_config_from_file_rejects_config_with_unsupported_mode() { + let path = format!( + "{}/src/config/tests/fixtures/unsupported_chainhook.toml", + LOCAL_DIR + ); + Config::from_file_path(&path) + .expect_err("Did not reject unsupported network mode as expected."); +} + +#[test] +fn is_http_api_enabled_handles_both_modes() { + let mut config = Config::default(true, false, false, &None).unwrap(); + assert!(!config.is_http_api_enabled()); + config.http_api = PredicatesApi::On(PredicatesApiConfig { + http_port: 0, + database_uri: format!(""), + display_logs: false, + }); + assert!(config.is_http_api_enabled()); +} + +#[test] +fn should_download_remote_stacks_tsv_handles_both_modes() { + let url_src = EventSourceConfig::StacksTsvUrl(super::UrlConfig { + file_url: String::new(), + }); + let path_src = EventSourceConfig::StacksTsvPath(PathConfig { + file_path: PathBuf::new(), + }); + let mut config = Config::default(true, false, false, &None).unwrap(); + + config.event_sources = vec![url_src.clone(), path_src.clone()]; + assert_eq!(config.should_download_remote_stacks_tsv(), false); + + config.event_sources = vec![path_src.clone()]; + assert_eq!(config.should_download_remote_stacks_tsv(), false); + + config.event_sources = vec![]; + assert_eq!(config.should_download_remote_stacks_tsv(), false); + + config.event_sources = vec![url_src.clone()]; + assert_eq!(config.should_download_remote_stacks_tsv(), true); +} + +#[test] +#[should_panic(expected = "expected remote-tsv source")] +fn expected_remote_stacks_tsv_base_url_panics_if_missing() { + let url_src = EventSourceConfig::StacksTsvUrl(super::UrlConfig { + file_url: format!("test"), + }); + let mut config = Config::default(true, false, false, &None).unwrap(); + + config.event_sources = vec![url_src.clone()]; + 
assert_eq!(config.expected_remote_stacks_tsv_base_url(), "test"); + + config.event_sources = vec![]; + config.expected_remote_stacks_tsv_base_url(); +} + +#[test] +#[should_panic(expected = "expected local-tsv source")] +fn expected_local_stacks_tsv_base_url_panics_if_missing() { + let path = PathBuf::from("test"); + let path_src = EventSourceConfig::StacksTsvPath(PathConfig { + file_path: path.clone(), + }); + let mut config = Config::default(true, false, false, &None).unwrap(); + + config.event_sources = vec![path_src.clone()]; + assert_eq!(config.expected_local_stacks_tsv_file(), &path); + + config.event_sources = vec![]; + config.expected_local_stacks_tsv_file(); +} + +#[test] +fn add_local_stacks_tsv_source_allows_adding_src() { + let mut config = Config::default(true, false, false, &None).unwrap(); + assert_eq!(config.event_sources.len(), 0); + let path = PathBuf::from("test"); + config.add_local_stacks_tsv_source(&path); + assert_eq!(config.event_sources.len(), 1); +} +#[test] +fn it_has_default_config_for_each_network() { + let config = Config::default(true, false, false, &None).unwrap(); + assert_eq!(config.network.bitcoin_network, BitcoinNetwork::Regtest); + assert_eq!(config.network.stacks_network, StacksNetwork::Devnet); + let config = Config::default(false, true, false, &None).unwrap(); + assert_eq!(config.network.bitcoin_network, BitcoinNetwork::Testnet); + assert_eq!(config.network.stacks_network, StacksNetwork::Testnet); + let config = Config::default(false, false, true, &None).unwrap(); + assert_eq!(config.network.bitcoin_network, BitcoinNetwork::Mainnet); + assert_eq!(config.network.stacks_network, StacksNetwork::Mainnet); + let path = format!( + "{}/src/config/tests/fixtures/devnet_chainhook.toml", + LOCAL_DIR + ); + let config = Config::default(false, false, false, &Some(path)).unwrap(); + assert_eq!(config.network.bitcoin_network, BitcoinNetwork::Regtest); + assert_eq!(config.network.stacks_network, StacksNetwork::Devnet); + 
Config::default(true, true, false, &None).expect_err("expected invalid combination error"); +} diff --git a/components/chainhook-cli/src/scan/bitcoin.rs b/components/chainhook-cli/src/scan/bitcoin.rs index 4224104b3..3ca2f4201 100644 --- a/components/chainhook-cli/src/scan/bitcoin.rs +++ b/components/chainhook-cli/src/scan/bitcoin.rs @@ -137,7 +137,7 @@ pub async fn scan_bitcoin_chainstate_via_rpc_using_predicate( Err((e, _)) => { warn!( ctx.expect_logger(), - "Unable to standardize block#{} {}: {}", current_block_height, block_hash, e + "Unable to standardize block #{} {}: {}", current_block_height, block_hash, e ); continue; } @@ -219,6 +219,15 @@ pub async fn scan_bitcoin_chainstate_via_rpc_using_predicate( ); if let Some(ref mut predicates_db_conn) = predicates_db_conn { + set_predicate_scanning_status( + &predicate_spec.key(), + number_of_blocks_to_scan, + number_of_blocks_scanned, + number_of_times_triggered, + last_block_scanned.index, + predicates_db_conn, + ctx, + ); if let Some(predicate_end_block) = predicate_spec.end_block { if predicate_end_block == last_block_scanned.index { // todo: we need to find a way to check if this block is confirmed @@ -237,15 +246,6 @@ pub async fn scan_bitcoin_chainstate_via_rpc_using_predicate( return Ok(true); } } - set_predicate_scanning_status( - &predicate_spec.key(), - number_of_blocks_to_scan, - number_of_blocks_scanned, - number_of_times_triggered, - last_block_scanned.index, - predicates_db_conn, - ctx, - ); } return Ok(false); @@ -293,7 +293,7 @@ pub async fn execute_predicates_action<'a>( BitcoinChainhookOccurrence::File(path, bytes) => { file_append(path, bytes, &ctx)? 
} - BitcoinChainhookOccurrence::Data(_payload) => unreachable!(), + BitcoinChainhookOccurrence::Data(_payload) => {} }; } } diff --git a/components/chainhook-cli/src/scan/stacks.rs b/components/chainhook-cli/src/scan/stacks.rs index 79dc6cff4..1341f73c4 100644 --- a/components/chainhook-cli/src/scan/stacks.rs +++ b/components/chainhook-cli/src/scan/stacks.rs @@ -377,6 +377,15 @@ pub async fn scan_stacks_chainstate_via_rocksdb_using_predicate( ); if let Some(ref mut predicates_db_conn) = predicates_db_conn { + set_predicate_scanning_status( + &predicate_spec.key(), + number_of_blocks_to_scan, + number_of_blocks_scanned, + number_of_times_triggered, + last_block_scanned.index, + predicates_db_conn, + ctx, + ); if let Some(predicate_end_block) = predicate_spec.end_block { if predicate_end_block == last_block_scanned.index { let is_confirmed = match get_stacks_block_at_block_height( @@ -412,15 +421,6 @@ pub async fn scan_stacks_chainstate_via_rocksdb_using_predicate( return Ok((last_block_scanned, true)); } } - set_predicate_scanning_status( - &predicate_spec.key(), - number_of_blocks_to_scan, - number_of_blocks_scanned, - number_of_times_triggered, - last_block_scanned.index, - predicates_db_conn, - ctx, - ); } Ok((last_block_scanned, false)) } diff --git a/components/chainhook-cli/src/service/mod.rs b/components/chainhook-cli/src/service/mod.rs index cc641b951..aaf204679 100644 --- a/components/chainhook-cli/src/service/mod.rs +++ b/components/chainhook-cli/src/service/mod.rs @@ -78,7 +78,11 @@ impl Service { }; leftover_scans.push((predicate.clone(), Some(scanning_data))); } - _ => {} + PredicateStatus::UnconfirmedExpiration(_) => {} + PredicateStatus::ConfirmedExpiration(_) | PredicateStatus::Interrupted(_) => { + // Confirmed and Interrupted predicates don't need to be reregistered. 
+ continue; + } } match chainhook_config.register_specification(predicate) { Ok(_) => { @@ -535,6 +539,7 @@ impl Service { #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "snake_case")] +#[serde(tag = "type", content = "info")] /// A high-level view of how `PredicateStatus` is used/updated can be seen here: docs/images/predicate-status-flowchart/PredicateStatusFlowchart.png. pub enum PredicateStatus { Scanning(ScanningData), @@ -550,14 +555,14 @@ pub struct ScanningData { pub number_of_blocks_to_scan: u64, pub number_of_blocks_evaluated: u64, pub number_of_times_triggered: u64, - pub last_occurrence: u128, + pub last_occurrence: Option, pub last_evaluated_block_height: u64, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct StreamingData { - pub last_occurrence: u128, - pub last_evaluation: u128, + pub last_occurrence: Option, + pub last_evaluation: u64, pub number_of_times_triggered: u64, pub number_of_blocks_evaluated: u64, pub last_evaluated_block_height: u64, @@ -567,7 +572,7 @@ pub struct StreamingData { pub struct ExpiredData { pub number_of_blocks_evaluated: u64, pub number_of_times_triggered: u64, - pub last_occurrence: u128, + pub last_occurrence: Option, pub last_evaluated_block_height: u64, pub expired_at_block_height: u64, } @@ -660,10 +665,10 @@ fn set_predicate_streaming_status( predicates_db_conn: &mut Connection, ctx: &Context, ) { - let now_ms = SystemTime::now() + let now_secs = SystemTime::now() .duration_since(UNIX_EPOCH) .expect("Could not get current time in ms") - .as_millis(); + .as_secs(); let ( last_occurrence, number_of_blocks_evaluated, @@ -712,10 +717,11 @@ fn set_predicate_streaming_status( PredicateStatus::New | PredicateStatus::Interrupted(_) | PredicateStatus::ConfirmedExpiration(_) => { - unreachable!("unreachable predicate status: {:?}", status) + warn!(ctx.expect_logger(), "Attempting to set Streaming status when previous status was {:?} for predicate {}", status, 
predicate_key); + return; } }, - None => (0, 0, 0, 0), + None => (None, 0, 0, 0), } }; let ( @@ -728,7 +734,7 @@ fn set_predicate_streaming_status( last_triggered_height, triggered_count, } => ( - now_ms, + Some(now_secs.clone()), number_of_times_triggered + triggered_count, number_of_blocks_evaluated + triggered_count, last_triggered_height, @@ -754,7 +760,7 @@ fn set_predicate_streaming_status( predicate_key, PredicateStatus::Streaming(StreamingData { last_occurrence, - last_evaluation: now_ms, + last_evaluation: now_secs, number_of_times_triggered, last_evaluated_block_height, number_of_blocks_evaluated, @@ -776,46 +782,47 @@ pub fn set_predicate_scanning_status( predicates_db_conn: &mut Connection, ctx: &Context, ) { - let now_ms = SystemTime::now() + let now_secs = SystemTime::now() .duration_since(UNIX_EPOCH) .expect("Could not get current time in ms") - .as_millis(); + .as_secs(); let current_status = retrieve_predicate_status(&predicate_key, predicates_db_conn); let last_occurrence = match current_status { Some(status) => match status { PredicateStatus::Scanning(scanning_data) => { if number_of_times_triggered > scanning_data.number_of_times_triggered { - now_ms + Some(now_secs) } else { scanning_data.last_occurrence } } PredicateStatus::Streaming(streaming_data) => { if number_of_times_triggered > streaming_data.number_of_times_triggered { - now_ms + Some(now_secs) } else { streaming_data.last_occurrence } } PredicateStatus::UnconfirmedExpiration(expired_data) => { if number_of_times_triggered > expired_data.number_of_times_triggered { - now_ms + Some(now_secs) } else { expired_data.last_occurrence } } PredicateStatus::New => { if number_of_times_triggered > 0 { - now_ms + Some(now_secs) } else { - 0 + None } } - PredicateStatus::Interrupted(_) | PredicateStatus::ConfirmedExpiration(_) => { - unreachable!("unreachable predicate status: {:?}", status) + PredicateStatus::ConfirmedExpiration(_) | PredicateStatus::Interrupted(_) => { + 
warn!(ctx.expect_logger(), "Attempting to set Scanning status when previous status was {:?} for predicate {}", status, predicate_key); + return; } }, - None => 0, + None => None, }; update_predicate_status( @@ -832,9 +839,7 @@ pub fn set_predicate_scanning_status( ); } -/// Updates a predicate's status to `InitialScanCompleted`. -/// -/// Preserves the scanning metrics from the predicate's previous status +/// Updates a predicate's status to `UnconfirmedExpiration`. pub fn set_unconfirmed_expiration_status( chain: &Chain, number_of_new_blocks_evaluated: u64, @@ -854,17 +859,17 @@ pub fn set_unconfirmed_expiration_status( Some(status) => match status { PredicateStatus::Scanning(ScanningData { number_of_blocks_to_scan: _, - number_of_blocks_evaluated, + number_of_blocks_evaluated: _, number_of_times_triggered, last_occurrence, last_evaluated_block_height, }) => ( - number_of_blocks_evaluated + number_of_new_blocks_evaluated, + number_of_new_blocks_evaluated, number_of_times_triggered, last_occurrence, last_evaluated_block_height, ), - PredicateStatus::New => (0, 0, 0, 0), + PredicateStatus::New => (0, 0, None, 0), PredicateStatus::Streaming(StreamingData { last_occurrence, last_evaluation: _, @@ -892,15 +897,12 @@ pub fn set_unconfirmed_expiration_status( expired_at_block_height, ) } - PredicateStatus::Interrupted(_) => { - unreachable!("unreachable predicate status: {:?}", status) - } - PredicateStatus::ConfirmedExpiration(_) => { - warn!(ctx.expect_logger(), "Attempting to set UnconfirmedExpiration status when ConfirmedExpiration status has already been set for predicate {}", predicate_key); + PredicateStatus::ConfirmedExpiration(_) | PredicateStatus::Interrupted(_) => { + warn!(ctx.expect_logger(), "Attempting to set UnconfirmedExpiration status when previous status was {:?} for predicate {}", status, predicate_key); return; } }, - None => (0, 0, 0, 0), + None => (0, 0, None, 0), }; update_predicate_status( predicate_key, @@ -935,9 +937,16 @@ pub fn 
set_confirmed_expiration_status( let expired_data = match current_status { Some(status) => match status { PredicateStatus::UnconfirmedExpiration(expired_data) => expired_data, - _ => unreachable!("unreachable predicate status: {:?}", status), + PredicateStatus::ConfirmedExpiration(_) + | PredicateStatus::Interrupted(_) + | PredicateStatus::New + | PredicateStatus::Scanning(_) + | PredicateStatus::Streaming(_) => { + warn!(ctx.expect_logger(), "Attempting to set ConfirmedExpiration status when previous status was {:?} for predicate {}", status, predicate_key); + return; + } }, - None => unreachable!(), + None => unreachable!("found no status for predicate: {}", predicate_key), }; update_predicate_status( predicate_key, diff --git a/components/chainhook-cli/src/service/tests/helpers/mock_bitcoin_rpc.rs b/components/chainhook-cli/src/service/tests/helpers/mock_bitcoin_rpc.rs index 7760e6390..75675070a 100644 --- a/components/chainhook-cli/src/service/tests/helpers/mock_bitcoin_rpc.rs +++ b/components/chainhook-cli/src/service/tests/helpers/mock_bitcoin_rpc.rs @@ -1,4 +1,10 @@ -use chainhook_sdk::bitcoincore_rpc_json::bitcoin::TxMerkleNode; +use chainhook_sdk::bitcoincore_rpc_json::GetRawTransactionResultVoutScriptPubKey; +use chainhook_sdk::indexer::bitcoin::BitcoinBlockFullBreakdown; +use chainhook_sdk::indexer::bitcoin::BitcoinTransactionFullBreakdown; +use chainhook_sdk::indexer::bitcoin::BitcoinTransactionInputFullBreakdown; +use chainhook_sdk::indexer::bitcoin::BitcoinTransactionInputPrevoutFullBreakdown; +use chainhook_sdk::indexer::bitcoin::BitcoinTransactionOutputFullBreakdown; +use chainhook_sdk::indexer::bitcoin::GetRawTransactionResultVinScriptSig; use rocket::serde::json::Value; use std::cmp::max; use std::collections::HashMap; @@ -11,14 +17,13 @@ use std::sync::RwLock; use chainhook_sdk::bitcoincore_rpc_json::bitcoin::hashes::sha256d::Hash; use chainhook_sdk::bitcoincore_rpc_json::bitcoin::Amount; use 
chainhook_sdk::bitcoincore_rpc_json::bitcoin::BlockHash; -use chainhook_sdk::bitcoincore_rpc_json::GetBlockResult; use chainhook_sdk::bitcoincore_rpc_json::GetBlockchainInfoResult; use chainhook_sdk::bitcoincore_rpc_json::GetNetworkInfoResult; use rocket::serde::json::Json; use rocket::Config; use rocket::State; -use super::height_to_hash_str; +use super::branch_and_height_to_hash_str; #[derive(Clone, Serialize, Deserialize, Debug)] #[serde(crate = "rocket::serde")] @@ -29,71 +34,148 @@ struct Rpc { params: Vec, } -fn height_to_hash(height: u64) -> BlockHash { - let hash = Hash::from_str(&height_to_hash_str(height)).unwrap(); +fn branch_and_height_to_hash(branch: Option, height: u64) -> BlockHash { + let hash = Hash::from_str(&branch_and_height_to_hash_str(branch, height)).unwrap(); BlockHash::from_hash(hash) } -fn height_to_merkle_node(height: u64) -> TxMerkleNode { - let hash = Hash::from_str(&height_to_hash_str(height)).unwrap(); - TxMerkleNode::from_hash(hash) +#[derive(Clone, Serialize, Deserialize, Debug)] +#[serde(crate = "rocket::serde")] +pub struct TipData { + pub branch: BranchKey, + pub parent_branch_key: Option, + pub parent_height_at_fork: Option, } -#[post("/increment-chain-tip")] -fn handle_increment_chain_tip(chain_tip: &State>>) -> Value { - let mut chain_tip = chain_tip.inner().write().unwrap(); +#[post( + "/increment-chain-tip", + format = "application/json", + data = "" +)] +fn handle_increment_chain_tip( + tip_data: Json, + fork_tracker_rw_lock: &State>>>, +) -> Value { + let tip_data = tip_data.into_inner(); + let branch = tip_data.branch; + let mut fork_tracker = fork_tracker_rw_lock.inner().write().unwrap(); + let (chain_tip, _parent_info) = match fork_tracker.get_mut(&branch) { + None => { + let parent_branch = tip_data.parent_branch_key.unwrap(); + let parent_height_at_fork = tip_data.parent_height_at_fork.unwrap(); + let branch_chain_tip = parent_height_at_fork + 1; + fork_tracker.insert( + branch, + ( + branch_chain_tip, + 
Some((parent_branch, parent_height_at_fork)), + ), + ); + return json!(branch_chain_tip); + } + Some(tip) => tip, + }; *chain_tip += 1; json!(chain_tip.to_owned()) } #[post("/", format = "application/json", data = "")] -fn handle_rpc(rpc: Json, chain_tip: &State>>) -> Value { +fn handle_rpc( + rpc: Json, + fork_tracker_rw_lock: &State>>>, +) -> Value { let rpc = rpc.into_inner(); - let chain_tip = *chain_tip.inner().read().unwrap(); + let fork_tracker = fork_tracker_rw_lock.inner().read().unwrap(); match rpc.method.as_str() { "getblock" => { let hash = rpc.params[0].as_str().unwrap(); - let prefix = hash.chars().take_while(|&ch| ch == '0').collect::(); + let mut chars = hash.chars(); + let branch = chars.next().unwrap(); + let prefix = chars.take_while(|&ch| ch == '0').collect::(); let height = hash.split(&prefix).collect::>()[1]; let height = height.parse::().unwrap_or(0); - if height > chain_tip { + let (chain_tip, parent_data) = fork_tracker.get(&branch).unwrap_or(&(0, None)); + if &height > chain_tip { return json!({ "id": rpc.id, "jsonrpc": rpc.jsonrpc, "error": format!("invalid request: requested block is above chain tip: height {}, chain tip: {}", height, chain_tip) }); } - let next_block_hash = if height == chain_tip { - None - } else { - Some(height_to_hash(height + 1)) - }; + let confirmations = max(0, chain_tip - height) as i32; + let previousblockhash = if height == 0 { None } else { - Some(height_to_hash(height - 1)) + let parent_height = height - 1; + let mut parent_branch = branch; + if let Some((parent_branch_key, parent_height_at_fork)) = parent_data { + if &parent_height == parent_height_at_fork { + parent_branch = *parent_branch_key; + } + } + Some(branch_and_height_to_hash_str( + Some(parent_branch), + parent_height, + )) }; - let block = GetBlockResult { - hash: BlockHash::from_hash(Hash::from_str(hash).unwrap()), + + let coinbase = BitcoinTransactionFullBreakdown { + txid: branch_and_height_to_hash_str(Some(branch), height), + vin: 
vec![BitcoinTransactionInputFullBreakdown { + sequence: 0, + txid: None, + vout: None, + script_sig: None, + txinwitness: None, + prevout: None, + }], + vout: vec![BitcoinTransactionOutputFullBreakdown { + value: Amount::ZERO, + n: 0, + script_pub_key: GetRawTransactionResultVoutScriptPubKey { + asm: format!(""), + hex: vec![], + req_sigs: None, + type_: None, + addresses: None, + }, + }], + }; + let tx = BitcoinTransactionFullBreakdown { + txid: branch_and_height_to_hash_str(Some(branch), height + 1), + vin: vec![BitcoinTransactionInputFullBreakdown { + sequence: 0, + txid: Some(branch_and_height_to_hash_str(Some(branch), height + 1)), + vout: Some(1), + script_sig: Some(GetRawTransactionResultVinScriptSig { hex: format!("") }), + txinwitness: Some(vec![format!("")]), + prevout: Some(BitcoinTransactionInputPrevoutFullBreakdown { + height: height, + value: Amount::ZERO, + }), + }], + vout: vec![BitcoinTransactionOutputFullBreakdown { + value: Amount::ZERO, + n: 0, + script_pub_key: GetRawTransactionResultVoutScriptPubKey { + asm: format!(""), + hex: vec![], + req_sigs: None, + type_: None, + addresses: None, + }, + }], + }; + let block = BitcoinBlockFullBreakdown { + hash: hash.into(), confirmations, - size: 0, - strippedsize: None, - weight: 0, height: height as usize, - version: 19000, - version_hex: None, - merkleroot: height_to_merkle_node(height), - tx: vec![], + tx: vec![coinbase, tx], time: 0, - mediantime: None, nonce: 0, - bits: "".to_string(), - difficulty: 0.0, - chainwork: vec![], - n_tx: 0, previousblockhash, - nextblockhash: next_block_hash, }; json!({ "id": rpc.id, @@ -102,13 +184,18 @@ fn handle_rpc(rpc: Json, chain_tip: &State>>) -> Value { }) } "getblockchaininfo" => { - let hash = format!("{:0>64}", chain_tip.to_string()); - let hash = Hash::from_str(&hash).unwrap(); + let (branch, (chain_tip, _)) = fork_tracker + .iter() + .max_by(|a, b| a.1.cmp(&b.1)) + .map(|kv| kv) + .unwrap(); + + let hash = branch_and_height_to_hash(Some(*branch), 
*chain_tip); let blockchain_info = GetBlockchainInfoResult { chain: "regtest".into(), blocks: chain_tip.to_owned(), headers: 0, - best_block_hash: BlockHash::from_hash(hash), + best_block_hash: hash, difficulty: 0.0, median_time: 0, verification_progress: 0.0, @@ -154,29 +241,62 @@ fn handle_rpc(rpc: Json, chain_tip: &State>>) -> Value { }) } "getblockhash" => { + let (branch, _) = fork_tracker + .iter() + .max_by(|a, b| a.1.cmp(&b.1)) + .map(|kv| kv) + .unwrap(); + let height = rpc.params[0].as_u64().unwrap(); - let hash = format!("{:0>64}", height.to_string()); - let hash = Hash::from_str(&hash).unwrap(); - let hash = BlockHash::from_hash(hash); + let hash = branch_and_height_to_hash(Some(*branch), height); json!({ "id": serde_json::to_value(rpc.id).unwrap(), "jsonrpc": rpc.jsonrpc, "result": serde_json::to_value(hash).unwrap(), }) } - _ => unimplemented!("unsupported rpc endpoint"), + "gettxoutproof" => { + json!({ + "id": serde_json::to_value(rpc.id).unwrap(), + "jsonrpc": rpc.jsonrpc, + "result": "00", + }) + } + "getaddressinfo" => { + json!({ + "id": serde_json::to_value(rpc.id).unwrap(), + "jsonrpc": rpc.jsonrpc, + "result": { + "address": rpc.params[0] + }, + }) + } + "sendrawtransaction" => { + json!({ + "id": serde_json::to_value(rpc.id).unwrap(), + "jsonrpc": rpc.jsonrpc, + "result": "success", + }) + } + _ => unimplemented!("unsupported rpc endpoint: {}", rpc.method.as_str()), } } +type BranchKey = char; +type Height = u64; +type ForkPoint = (BranchKey, Height); +type ForkData = (Height, Option); pub async fn mock_bitcoin_rpc(port: u16, starting_chain_tip: u64) { let config = Config::figment() .merge(("port", port)) .merge(("address", IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)))) .merge(("log_level", "off")); - let chain_tip_rw_lock = Arc::new(RwLock::new(starting_chain_tip)); + let fork_tracker: HashMap = + HashMap::from([('0', (starting_chain_tip, None))]); + let fork_tracker_rw_lock = Arc::new(RwLock::new(fork_tracker)); let _rocket = rocket::build() 
.configure(config) - .manage(chain_tip_rw_lock) + .manage(fork_tracker_rw_lock) .mount("/", routes![handle_rpc, handle_increment_chain_tip]) .launch() .await diff --git a/components/chainhook-cli/src/service/tests/helpers/mock_service.rs b/components/chainhook-cli/src/service/tests/helpers/mock_service.rs index 608df7a90..20518105a 100644 --- a/components/chainhook-cli/src/service/tests/helpers/mock_service.rs +++ b/components/chainhook-cli/src/service/tests/helpers/mock_service.rs @@ -9,8 +9,10 @@ use crate::config::DEFAULT_REDIS_URI; use crate::service::http_api::start_predicate_api_server; use crate::service::PredicateStatus; use crate::service::Service; +use chainhook_sdk::chainhooks::types::ChainhookFullSpecification; use chainhook_sdk::indexer::IndexerConfig; use chainhook_sdk::observer::ObserverCommand; +use chainhook_sdk::observer::ObserverMetrics; use chainhook_sdk::types::BitcoinBlockSignaling; use chainhook_sdk::types::BitcoinNetwork; use chainhook_sdk::types::Chain; @@ -18,6 +20,7 @@ use chainhook_sdk::types::StacksNetwork; use chainhook_sdk::types::StacksNodeConfig; use chainhook_sdk::utils::Context; use redis::Commands; +use reqwest::Method; use rocket::serde::json::Value as JsonValue; use rocket::Shutdown; use std::path::PathBuf; @@ -41,7 +44,6 @@ pub async fn get_predicate_status(uuid: &str, port: u16) -> Result { attempts += 1; - println!("reattempting get predicate status"); if attempts == 10 { return Err(format!("no result field on get predicate response")); } else { @@ -101,7 +103,6 @@ pub async fn filter_predicate_status_from_all_predicates( }, None => { attempts += 1; - println!("reattempting get predicates"); if attempts == 10 { return Err(format!("no result field on get predicates response")); } else { @@ -118,28 +119,8 @@ pub async fn call_register_predicate( predicate: &JsonValue, port: u16, ) -> Result { - let client = reqwest::Client::new(); - let res =client - .post(format!("http://localhost:{port}/v1/chainhooks")) - 
.header("Content-Type", "application/json") - .json(predicate) - .send() - .await - .map_err(|e| { - format!( - "Failed to make POST request to localhost:{port}/v1/chainhooks: {}", - e - ) - })? - .json::() - .await - .map_err(|e| { - format!( - "Failed to deserialize response of POST request to localhost:{port}/v1/chainhooks: {}", - e - ) - })?; - Ok(res) + let url = format!("http://localhost:{port}/v1/chainhooks"); + call_observer_svc(&url, Method::POST, Some(predicate)).await } pub async fn call_deregister_predicate( @@ -147,73 +128,58 @@ pub async fn call_deregister_predicate( predicate_uuid: &str, port: u16, ) -> Result { - let client = reqwest::Client::new(); let chain = match chain { Chain::Bitcoin => "bitcoin", Chain::Stacks => "stacks", }; let url = format!("http://localhost:{port}/v1/chainhooks/{chain}/{predicate_uuid}"); - let res = client - .delete(&url) - .header("Content-Type", "application/json") - .send() - .await - .map_err(|e| format!("Failed to make DELETE request to {url}: {}", e))? - .json::() - .await - .map_err(|e| { - format!( - "Failed to deserialize response of DELETE request to {url}: {}", - e - ) - })?; - Ok(res) + call_observer_svc(&url, Method::DELETE, None).await } pub async fn call_get_predicate(predicate_uuid: &str, port: u16) -> Result { - let client = reqwest::Client::new(); - let res =client - .get(format!("http://localhost:{port}/v1/chainhooks/{predicate_uuid}")) - .send() - .await - .map_err(|e| { - format!( - "Failed to make GET request to localhost:8765/v1/chainhooks/<{predicate_uuid}>: {}", - e - ) - })? 
- .json::() - .await - .map_err(|e| { - format!( - "Failed to deserialize response of GET request to localhost:{port}/v1/chainhooks/{predicate_uuid}: {}", - e - ) - })?; - Ok(res) + let url = format!("http://localhost:{port}/v1/chainhooks/{predicate_uuid}"); + call_observer_svc(&url, Method::GET, None).await } pub async fn call_get_predicates(port: u16) -> Result { + let url = format!("http://localhost:{port}/v1/chainhooks"); + call_observer_svc(&url, Method::GET, None).await +} + +pub async fn call_observer_svc( + url: &str, + method: Method, + json: Option<&JsonValue>, +) -> Result { let client = reqwest::Client::new(); - let res =client - .get(format!("http://localhost:{port}/v1/chainhooks")) - .send() - .await - .map_err(|e| { - format!( - "Failed to make GET request to localhost:8765/v1/chainhooks: {}", - e - ) - })? - .json::() - .await - .map_err(|e| { - format!( - "Failed to deserialize response of GET request to localhost:{port}/v1/chainhooks: {}", - e - ) - })?; - Ok(res) + let req = match (&method, json) { + (&Method::GET, None) => client.get(url), + (&Method::POST, None) => client.post(url).header("Content-Type", "application/json"), + (&Method::POST, Some(json)) => client + .post(url) + .header("Content-Type", "application/json") + .json(json), + (&Method::DELETE, None) => client + .delete(url) + .header("Content-Type", "application/json"), + _ => unimplemented!(), + }; + req.send() + .await + .map_err(|e| format!("Failed to make {method} request to {url}: {e}",))? 
+ .json::() + .await + .map_err(|e| format!("Failed to deserialize response of {method} request to {url}: {e}",)) +} + +pub async fn call_ping(port: u16) -> Result { + let url = format!("http://localhost:{port}/ping"); + let res = call_observer_svc(&url, Method::GET, None).await?; + match res.get("result") { + Some(result) => serde_json::from_value(result.clone()) + .map_err(|e| format!("failed to parse observer metrics {}", e.to_string())), + None => Err(format!("Failed parse result of observer ping")), + } } pub async fn build_predicate_api_server(port: u16) -> (Receiver, Shutdown) { @@ -288,19 +254,13 @@ pub fn flush_redis(port: u16) { let client = redis::Client::open(format!("redis://localhost:{port}/")) .expect("unable to connect to redis"); let mut predicate_db_conn = client.get_connection().expect("unable to connect to redis"); - let predicate_keys: Vec = predicate_db_conn - .scan_match("predicate:*") + let db_keys: Vec = predicate_db_conn + .scan_match("*") .unwrap() .into_iter() .collect(); - for k in predicate_keys { - predicate_db_conn - .hdel::<_, _, ()>(&k, "predicates") - .unwrap(); - predicate_db_conn.hdel::<_, _, ()>(&k, "status").unwrap(); - predicate_db_conn - .hdel::<_, _, ()>(&k, "specification") - .unwrap(); + for k in db_keys { + predicate_db_conn.del::<_, ()>(&k).unwrap(); } } @@ -348,16 +308,17 @@ pub fn get_chainhook_config( }, } } + pub async fn start_chainhook_service( config: Config, chainhook_port: u16, + startup_predicates: Option>, ctx: &Context, ) -> Result<(), String> { let mut service = Service::new(config, ctx.clone()); - let startup_predicates = vec![]; - let _ = hiro_system_kit::thread_named("Stacks service") + let _ = hiro_system_kit::thread_named("Chainhook service") .spawn(move || { - let future = service.run(startup_predicates); + let future = service.run(startup_predicates.unwrap_or(vec![])); let _ = hiro_system_kit::nestable_block_on(future); }) .map_err(|e| { diff --git 
a/components/chainhook-cli/src/service/tests/helpers/mock_stacks_node.rs b/components/chainhook-cli/src/service/tests/helpers/mock_stacks_node.rs index f591522e6..e3f46b3d7 100644 --- a/components/chainhook-cli/src/service/tests/helpers/mock_stacks_node.rs +++ b/components/chainhook-cli/src/service/tests/helpers/mock_stacks_node.rs @@ -1,8 +1,14 @@ use crate::scan::stacks::{Record, RecordKind}; +use crate::service::tests::helpers::mock_bitcoin_rpc::TipData; use chainhook_sdk::indexer::bitcoin::NewBitcoinBlock; -use chainhook_sdk::indexer::stacks::{NewBlock, NewTransaction}; +use chainhook_sdk::indexer::stacks::{NewBlock, NewEvent, NewTransaction}; +use chainhook_sdk::types::{ + FTBurnEventData, FTMintEventData, FTTransferEventData, NFTBurnEventData, NFTMintEventData, + NFTTransferEventData, STXBurnEventData, STXLockEventData, STXMintEventData, + STXTransferEventData, SmartContractEventData, StacksTransactionEvent, +}; -use super::height_to_prefixed_hash; +use super::{branch_and_height_to_prefixed_hash, height_to_prefixed_hash}; pub const TEST_WORKING_DIR: &str = "src/service/tests/fixtures/tmp"; @@ -15,6 +21,96 @@ pub fn create_tmp_working_dir() -> Result<(String, String), String> { .map_err(|e| format!("failed to create temp working dir: {}", e.to_string()))?; Ok((working_dir, tsv_dir)) } +fn create_stacks_new_event(tx_index: u64, index: u32, event: StacksTransactionEvent) -> NewEvent { + let mut event_type = String::new(); + let stx_transfer_event = if let StacksTransactionEvent::STXTransferEvent(data) = &event { + event_type = format!("stx_transfer"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let stx_mint_event = if let StacksTransactionEvent::STXMintEvent(data) = &event { + event_type = format!("stx_mint"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let stx_burn_event = if let StacksTransactionEvent::STXBurnEvent(data) = &event { + event_type = format!("stx_burn"); + Some(serde_json::to_value(data).unwrap()) 
+ } else { + None + }; + let stx_lock_event = if let StacksTransactionEvent::STXLockEvent(data) = &event { + event_type = format!("stx_lock"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let nft_transfer_event = if let StacksTransactionEvent::NFTTransferEvent(data) = &event { + event_type = format!("nft_transfer"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let nft_mint_event = if let StacksTransactionEvent::NFTMintEvent(data) = &event { + event_type = format!("nft_mint"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let nft_burn_event = if let StacksTransactionEvent::NFTBurnEvent(data) = &event { + event_type = format!("nft_burn"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let ft_transfer_event = if let StacksTransactionEvent::FTTransferEvent(data) = &event { + event_type = format!("ft_transfer"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let ft_mint_event = if let StacksTransactionEvent::FTMintEvent(data) = &event { + event_type = format!("ft_mint"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let ft_burn_event = if let StacksTransactionEvent::FTBurnEvent(data) = &event { + event_type = format!("ft_burn"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let contract_event = if let StacksTransactionEvent::SmartContractEvent(data) = &event { + event_type = format!("smart_contract_print_event"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + NewEvent { + txid: format!("transaction_id_{tx_index}"), + committed: false, + event_index: index, + event_type, + stx_transfer_event, + stx_mint_event, + stx_burn_event, + stx_lock_event, + nft_transfer_event, + nft_mint_event, + nft_burn_event, + ft_transfer_event, + ft_mint_event, + ft_burn_event, + data_var_set_event: None, + data_map_insert_event: None, + data_map_update_event: None, + data_map_delete_event: None, + 
contract_event, + } +} fn create_stacks_new_transaction(index: u64) -> NewTransaction { NewTransaction { @@ -24,6 +120,7 @@ fn create_stacks_new_transaction(index: u64) -> NewTransaction { raw_result: format!("0x0703"), raw_tx: format!("0x00000000010400e2cd0871da5bdd38c4d5569493dc3b14aac4e0a10000000000000019000000000000000000008373b16e4a6f9d87864c314dd77bbd8b27a2b1805e96ec5a6509e7e4f833cd6a7bdb2462c95f6968a867ab6b0e8f0a6498e600dbc46cfe9f84c79709da7b9637010200000000040000000000000000000000000000000000000000000000000000000000000000"), execution_cost: None, + contract_abi: None } } @@ -35,6 +132,106 @@ pub fn create_stacks_new_block(height: u64, burn_block_height: u64) -> NewBlock burn_block_height - 1 }; + let mut events = vec![]; + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::STXTransferEvent(STXTransferEventData { + sender: format!(""), + recipient: format!(""), + amount: format!("1"), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::STXMintEvent(STXMintEventData { + recipient: format!(""), + amount: format!("1"), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::STXBurnEvent(STXBurnEventData { + sender: format!(""), + amount: format!("1"), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::STXLockEvent(STXLockEventData { + locked_amount: format!("1"), + unlock_height: format!(""), + locked_address: format!(""), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::NFTTransferEvent(NFTTransferEventData { + asset_class_identifier: format!(""), + hex_asset_identifier: format!(""), + sender: format!(""), + recipient: format!(""), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::NFTMintEvent(NFTMintEventData { + asset_class_identifier: format!(""), + 
hex_asset_identifier: format!(""), + recipient: format!(""), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::NFTBurnEvent(NFTBurnEventData { + asset_class_identifier: format!(""), + hex_asset_identifier: format!(""), + sender: format!(""), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::FTTransferEvent(FTTransferEventData { + asset_class_identifier: format!(""), + sender: format!(""), + recipient: format!(""), + amount: format!("1"), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::FTMintEvent(FTMintEventData { + asset_class_identifier: format!(""), + recipient: format!(""), + amount: format!("1"), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::FTBurnEvent(FTBurnEventData { + asset_class_identifier: format!(""), + sender: format!(""), + amount: format!("1"), + }), + )); + events.push(create_stacks_new_event( + 0, + events.len() as u32, + StacksTransactionEvent::SmartContractEvent(SmartContractEventData { + contract_identifier: format!(""), + topic: format!("print"), + hex_value: format!(""), + }), + )); NewBlock { block_height: height, block_hash: height_to_prefixed_hash(height), @@ -50,7 +247,7 @@ pub fn create_stacks_new_block(height: u64, burn_block_height: u64) -> NewBlock parent_burn_block_height: burn_block_height, parent_burn_block_timestamp: 0, transactions: (0..4).map(|i| create_stacks_new_transaction(i)).collect(), - events: vec![], + events, matured_miner_rewards: vec![], } } @@ -112,9 +309,9 @@ pub async fn mine_stacks_block( Ok(()) } -fn create_new_burn_block(burn_block_height: u64) -> NewBitcoinBlock { +fn create_new_burn_block(branch: Option, burn_block_height: u64) -> NewBitcoinBlock { NewBitcoinBlock { - burn_block_hash: height_to_prefixed_hash(burn_block_height), + burn_block_hash: branch_and_height_to_prefixed_hash(branch, 
burn_block_height), burn_block_height, reward_recipients: vec![], reward_slot_holders: vec![], @@ -122,19 +319,25 @@ fn create_new_burn_block(burn_block_height: u64) -> NewBitcoinBlock { } } -pub async fn mine_burn_block( - stacks_ingestion_port: u16, +async fn call_increment_chain_tip( bitcoin_rpc_port: u16, + branch: Option, burn_block_height: u64, + parent_branch_key: Option, + parent_height_at_fork: Option, ) -> Result<(), String> { - let block = create_new_burn_block(burn_block_height); - let serialized_block = serde_json::to_string(&block) - .map_err(|e| format!("failed to serialize burn block: {}", e.to_string()))?; let client = reqwest::Client::new(); + let tip_data = TipData { + branch: branch.unwrap_or('0'), + parent_branch_key, + parent_height_at_fork, + }; let res = client .post(format!( "http://localhost:{bitcoin_rpc_port}/increment-chain-tip" )) + .header("Content-Type", "application/json") + .json(&serde_json::to_value(tip_data).unwrap()) .send() .await .map_err(|e| { @@ -152,6 +355,18 @@ pub async fn mine_burn_block( ) })?; assert_eq!(burn_block_height.to_string(), res); + Ok(()) +} + +async fn call_new_burn_block( + stacks_ingestion_port: u16, + branch: Option, + burn_block_height: u64, +) -> Result<(), String> { + let block = create_new_burn_block(branch, burn_block_height); + let serialized_block = serde_json::to_string(&block) + .map_err(|e| format!("failed to serialize burn block: {}", e.to_string()))?; + let client = reqwest::Client::new(); let _res = client .post(format!( "http://localhost:{stacks_ingestion_port}/new_burn_block" @@ -171,3 +386,36 @@ pub async fn mine_burn_block( })?; Ok(()) } + +pub async fn mine_burn_block( + stacks_ingestion_port: u16, + bitcoin_rpc_port: u16, + branch: Option, + burn_block_height: u64, +) -> Result<(), String> { + call_increment_chain_tip(bitcoin_rpc_port, branch, burn_block_height, None, None).await?; + + call_new_burn_block(stacks_ingestion_port, branch, burn_block_height).await?; + Ok(()) +} + +pub 
async fn create_burn_fork_at( + stacks_ingestion_port: u16, + bitcoin_rpc_port: u16, + branch: Option, + burn_block_height: u64, + fork_branch: char, + fork_at_height: u64, +) -> Result<(), String> { + call_increment_chain_tip( + bitcoin_rpc_port, + branch, + burn_block_height, + Some(fork_branch), + Some(fork_at_height), + ) + .await?; + + call_new_burn_block(stacks_ingestion_port, branch, burn_block_height).await?; + Ok(()) +} diff --git a/components/chainhook-cli/src/service/tests/helpers/mod.rs b/components/chainhook-cli/src/service/tests/helpers/mod.rs index 8f78b5084..76e186edc 100644 --- a/components/chainhook-cli/src/service/tests/helpers/mod.rs +++ b/components/chainhook-cli/src/service/tests/helpers/mod.rs @@ -12,6 +12,14 @@ fn height_to_hash_str(height: u64) -> String { format!("{:0>64}", height.to_string()) } +pub fn branch_and_height_to_prefixed_hash(branch: Option, height: u64) -> String { + format!("0x{}", branch_and_height_to_hash_str(branch, height)) +} +fn branch_and_height_to_hash_str(branch: Option, height: u64) -> String { + let branch = branch.unwrap_or('0'); + format!("{branch}{:0>63}", height.to_string()) +} + pub fn get_free_port() -> Result { let listener = TcpListener::bind("127.0.0.1:0") .map_err(|e| format!("Failed to bind to port 0: {}", e.to_string()))?; diff --git a/components/chainhook-cli/src/service/tests/mod.rs b/components/chainhook-cli/src/service/tests/mod.rs index 747c99210..be18e9520 100644 --- a/components/chainhook-cli/src/service/tests/mod.rs +++ b/components/chainhook-cli/src/service/tests/mod.rs @@ -1,8 +1,13 @@ -use chainhook_sdk::types::Chain; +use chainhook_sdk::chainhooks::types::{ + ChainhookFullSpecification, ChainhookSpecification, StacksChainhookFullSpecification, +}; +use chainhook_sdk::types::{Chain, StacksNetwork}; use chainhook_sdk::utils::Context; use rocket::serde::json::Value as JsonValue; use rocket::Shutdown; +use std::fs::{self}; use std::net::TcpListener; +use std::path::PathBuf; use 
std::process::Child; use std::thread::sleep; use std::time::Duration; @@ -26,9 +31,14 @@ use crate::service::tests::helpers::mock_service::{ build_predicate_api_server, call_get_predicate, call_register_predicate, get_chainhook_config, get_predicate_status, }; -use crate::service::PredicateStatus; +use crate::service::tests::helpers::mock_stacks_node::create_burn_fork_at; +use crate::service::{PredicateStatus, PredicateStatus::*, ScanningData, StreamingData}; + +use super::http_api::document_predicate_api_server; +use super::{update_predicate_spec, update_predicate_status}; -mod helpers; +pub mod helpers; +mod observer_tests; async fn test_register_predicate(predicate: JsonValue) -> Result<(), (String, Shutdown)> { // perhaps a little janky, we bind to the port 0 to find an open one, then @@ -251,27 +261,84 @@ async fn it_handles_stacks_predicates_with_filters(filters: JsonValue) { } } -fn assert_confirmed_expiration_status(status: PredicateStatus) { +fn assert_confirmed_expiration_status( + (status, expected_evaluations, expected_occurrences): ( + PredicateStatus, + Option, + Option, + ), +) { match status { - PredicateStatus::ConfirmedExpiration(_) => {} + PredicateStatus::ConfirmedExpiration(data) => { + if let Some(expected) = expected_evaluations { + assert_eq!( + data.number_of_blocks_evaluated, expected, + "incorrect number of blocks evaluated" + ); + } + if let Some(expected) = expected_occurrences { + assert_eq!( + data.number_of_times_triggered, expected, + "incorrect number of predicates triggered" + ); + } + } _ => panic!("expected ConfirmedExpiration status, found {:?}", status), } } -fn assert_unconfirmed_expiration_status(status: PredicateStatus) { +fn assert_unconfirmed_expiration_status( + (status, expected_evaluations, expected_occurrences): ( + PredicateStatus, + Option, + Option, + ), +) { match status { - PredicateStatus::UnconfirmedExpiration(_) => {} + PredicateStatus::UnconfirmedExpiration(data) => { + if let Some(expected) = 
expected_evaluations { + assert_eq!( + data.number_of_blocks_evaluated, expected, + "incorrect number of blocks evaluated" + ); + } + if let Some(expected) = expected_occurrences { + assert_eq!( + data.number_of_times_triggered, expected, + "incorrect number of predicates triggered" + ); + } + } _ => panic!("expected UnconfirmedExpiration status, found {:?}", status), } } -fn assert_streaming_status(status: PredicateStatus) { +fn assert_streaming_status( + (status, expected_evaluations, expected_occurrences): ( + PredicateStatus, + Option, + Option, + ), +) { match status { - PredicateStatus::Streaming(_) => {} + PredicateStatus::Streaming(data) => { + if let Some(expected) = expected_evaluations { + assert_eq!( + data.number_of_blocks_evaluated, expected, + "incorrect number of blocks evaluated" + ); + } + if let Some(expected) = expected_occurrences { + assert_eq!( + data.number_of_times_triggered, expected, + "incorrect number of predicates triggered" + ); + } + } _ => panic!("expected Streaming status, found {:?}", status), } } -fn assert_interrupted_status(status: PredicateStatus) { +fn assert_interrupted_status((status, _, _): (PredicateStatus, Option, Option)) { match status { PredicateStatus::Interrupted(_) => {} _ => panic!("expected Interrupted status, found {:?}", status), @@ -314,6 +381,8 @@ async fn await_new_scanning_status_complete( async fn setup_stacks_chainhook_test( starting_chain_tip: u64, + redis_seed: Option<(StacksChainhookFullSpecification, PredicateStatus)>, + startup_predicates: Option>, ) -> (Child, String, u16, u16, u16, u16) { let ( redis_port, @@ -326,12 +395,7 @@ async fn setup_stacks_chainhook_test( let mut redis_process = start_redis(redis_port) .await .unwrap_or_else(|e| panic!("test failed with error: {e}")); - - let (working_dir, tsv_dir) = create_tmp_working_dir().unwrap_or_else(|e| { - flush_redis(redis_port); - redis_process.kill().unwrap(); - panic!("test failed with error: {e}"); - }); + flush_redis(redis_port); let logger = 
hiro_system_kit::log::setup_logger(); let _guard = hiro_system_kit::log::setup_global_logger(logger.clone()); @@ -340,6 +404,37 @@ async fn setup_stacks_chainhook_test( tracer: false, }; + if let Some((predicate, status)) = redis_seed { + let client = redis::Client::open(format!("redis://localhost:{redis_port}/")) + .unwrap_or_else(|e| { + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + let mut connection = client.get_connection().unwrap_or_else(|e| { + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + let stacks_spec = predicate + .into_selected_network_specification(&StacksNetwork::Devnet) + .unwrap_or_else(|e| { + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + let spec = ChainhookSpecification::Stacks(stacks_spec); + update_predicate_spec(&spec.key(), &spec, &mut connection, &ctx); + update_predicate_status(&spec.key(), status, &mut connection, &ctx); + } + + let (working_dir, tsv_dir) = create_tmp_working_dir().unwrap_or_else(|e| { + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + write_stacks_blocks_to_tsv(starting_chain_tip, &tsv_dir).unwrap_or_else(|e| { std::fs::remove_dir_all(&working_dir).unwrap(); flush_redis(redis_port); @@ -366,7 +461,7 @@ async fn setup_stacks_chainhook_test( panic!("test failed with error: {e}"); }); - start_chainhook_service(config, chainhook_service_port, &ctx) + start_chainhook_service(config, chainhook_service_port, startup_predicates, &ctx) .await .unwrap_or_else(|e| { std::fs::remove_dir_all(&working_dir).unwrap(); @@ -384,11 +479,13 @@ async fn setup_stacks_chainhook_test( ) } -#[test_case(5, 0, Some(1), Some(3) => using assert_confirmed_expiration_status; "predicate_end_block lower than starting_chain_tip ends with ConfirmedExpiration status")] -#[test_case(5, 0, Some(1), None => using 
assert_streaming_status; "no predicate_end_block ends with Streaming status")] -#[test_case(3, 0, Some(1), Some(5) => using assert_streaming_status; "predicate_end_block greater than chain_tip ends with Streaming status")] -#[test_case(5, 3, Some(1), Some(7) => using assert_unconfirmed_expiration_status; "predicate_end_block greater than starting_chain_tip and mining until end_block ends with UnconfirmedExpiration status")] -#[test_case(0, 0, None, None => using assert_interrupted_status; "ommitting start_block ends with Interrupted status")] +#[test_case(5, 0, Some(1), Some(3), Some(3), Some(3) => using assert_confirmed_expiration_status; "predicate_end_block lower than starting_chain_tip ends with ConfirmedExpiration status")] +#[test_case(5, 0, Some(1), None, Some(5), Some(5) => using assert_streaming_status; "no predicate_end_block ends with Streaming status")] +#[test_case(3, 0, Some(1), Some(5), Some(3), Some(3) => using assert_streaming_status; "predicate_end_block greater than chain_tip ends with Streaming status")] +#[test_case(5, 4, Some(1), Some(7), Some(9), Some(7) => using assert_unconfirmed_expiration_status; "predicate_end_block greater than starting_chain_tip and mining until end_block ends with UnconfirmedExpiration status")] +#[test_case(1, 3, Some(1), Some(3), Some(4), Some(3) => using assert_unconfirmed_expiration_status; "predicate_end_block greater than starting_chain_tip and mining blocks so that predicate_end_block confirmations < CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with UnconfirmedExpiration status")] +#[test_case(3, 7, Some(1), Some(4), Some(9), Some(4) => using assert_confirmed_expiration_status; "predicate_end_block greater than starting_chain_tip and mining blocks so that predicate_end_block confirmations >= CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with ConfirmedExpiration status")] +#[test_case(0, 0, None, None, None, None => using assert_interrupted_status; "ommitting start_block ends with Interrupted status")] #[tokio::test] 
#[cfg_attr(not(feature = "redis_tests"), ignore)] async fn test_stacks_predicate_status_is_updated( @@ -396,7 +493,9 @@ async fn test_stacks_predicate_status_is_updated( blocks_to_mine: u64, predicate_start_block: Option, predicate_end_block: Option, -) -> PredicateStatus { + expected_evaluations: Option, + expected_occurrences: Option, +) -> (PredicateStatus, Option, Option) { let ( mut redis_process, working_dir, @@ -404,12 +503,12 @@ async fn test_stacks_predicate_status_is_updated( redis_port, stacks_ingestion_port, _, - ) = setup_stacks_chainhook_test(starting_chain_tip).await; + ) = setup_stacks_chainhook_test(starting_chain_tip, None, None).await; let uuid = &get_random_uuid(); let predicate = build_stacks_payload( Some("devnet"), - Some(json!({"scope":"block_height", "lower_than": 100})), + Some(json!({"scope":"block_height", "lower_than": 600})), None, Some(json!({"start_block": predicate_start_block, "end_block": predicate_end_block})), Some(uuid), @@ -470,7 +569,7 @@ async fn test_stacks_predicate_status_is_updated( std::fs::remove_dir_all(&working_dir).unwrap(); flush_redis(redis_port); redis_process.kill().unwrap(); - result + (result, expected_evaluations, expected_occurrences) } async fn setup_bitcoin_chainhook_test( @@ -488,6 +587,7 @@ async fn setup_bitcoin_chainhook_test( .await .unwrap_or_else(|e| panic!("test failed with error: {e}")); + flush_redis(redis_port); let (working_dir, tsv_dir) = create_tmp_working_dir().unwrap_or_else(|e| { flush_redis(redis_port); redis_process.kill().unwrap(); @@ -518,7 +618,7 @@ async fn setup_bitcoin_chainhook_test( &tsv_dir, ); - start_chainhook_service(config, chainhook_service_port, &ctx) + start_chainhook_service(config, chainhook_service_port, None, &ctx) .await .unwrap_or_else(|e| { std::fs::remove_dir_all(&working_dir).unwrap(); @@ -536,11 +636,11 @@ async fn setup_bitcoin_chainhook_test( ) } -#[test_case(5, 1, Some(1), Some(3) => using assert_unconfirmed_expiration_status; "predicate_end_block lower than 
starting_chain_tip with predicate_end_block confirmations < CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with UnconfirmedExpiration status")] -#[test_case(10, 1, Some(1), Some(3) => using assert_confirmed_expiration_status; "predicate_end_block lower than starting_chain_tip with predicate_end_block confirmations >= CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with ConfirmedExpiration status")] -#[test_case(1, 3, Some(1), Some(3) => using assert_unconfirmed_expiration_status; "predicate_end_block greater than starting_chain_tip and mining blocks so that predicate_end_block confirmations < CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with UnconfirmedExpiration status")] -#[test_case(3, 7, Some(1), Some(4) => using assert_confirmed_expiration_status; "predicate_end_block greater than starting_chain_tip and mining blocks so that predicate_end_block confirmations >= CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with ConfirmedExpiration status")] -#[test_case(0, 0, None, None => using assert_interrupted_status; "ommitting start_block ends with Interrupted status")] +#[test_case(5, 1, Some(1), Some(3), Some(3), Some(3) => using assert_unconfirmed_expiration_status; "predicate_end_block lower than starting_chain_tip with predicate_end_block confirmations < CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with UnconfirmedExpiration status")] +#[test_case(10, 1, Some(1), Some(3), Some(3), Some(3) => using assert_confirmed_expiration_status; "predicate_end_block lower than starting_chain_tip with predicate_end_block confirmations >= CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with ConfirmedExpiration status")] +#[test_case(1, 3, Some(1), Some(3), Some(4), Some(3) => using assert_unconfirmed_expiration_status; "predicate_end_block greater than starting_chain_tip and mining blocks so that predicate_end_block confirmations < CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with UnconfirmedExpiration status")] +#[test_case(3, 7, Some(1), Some(4), Some(9), Some(4) => using assert_confirmed_expiration_status; "predicate_end_block greater 
than starting_chain_tip and mining blocks so that predicate_end_block confirmations >= CONFIRMED_SEGMENT_MINIMUM_LENGTH ends with ConfirmedExpiration status")] +#[test_case(0, 0, None, None, None, None => using assert_interrupted_status; "ommitting start_block ends with Interrupted status")] #[tokio::test] #[cfg_attr(not(feature = "redis_tests"), ignore)] async fn test_bitcoin_predicate_status_is_updated( @@ -548,7 +648,9 @@ async fn test_bitcoin_predicate_status_is_updated( blocks_to_mine: u64, predicate_start_block: Option, predicate_end_block: Option, -) -> PredicateStatus { + expected_evaluations: Option, + expected_occurrences: Option, +) -> (PredicateStatus, Option, Option) { let ( mut redis_process, working_dir, @@ -563,7 +665,9 @@ async fn test_bitcoin_predicate_status_is_updated( Some("regtest"), Some(json!({"scope":"block"})), None, - Some(json!({"start_block": predicate_start_block, "end_block": predicate_end_block})), + Some( + json!({"start_block": predicate_start_block, "end_block": predicate_end_block, "include_proof": true}), + ), Some(uuid), ); @@ -589,6 +693,7 @@ async fn test_bitcoin_predicate_status_is_updated( mine_burn_block( stacks_ingestion_port, bitcoin_rpc_port, + None, i + starting_chain_tip, ) .await @@ -622,7 +727,152 @@ async fn test_bitcoin_predicate_status_is_updated( std::fs::remove_dir_all(&working_dir).unwrap(); flush_redis(redis_port); redis_process.kill().unwrap(); - result + (result, expected_evaluations, expected_occurrences) +} + +/// +/// ┌─> predicate start block +/// │ ┌─> reorg, predicate scans from A(3) to B(6) +/// │ │ ┌─> predicate end block (unconfirmed set) +/// A(1) -> A(2) -> A(3) -> A(4) -> A(5) │ │ ┌─> predicate status confirmed +/// \ -> B(4) -> B(5) -> B(6) -> B(7) -> B(8) -> B(9) -> B(10) -> B(11) -> B(12) +/// +/// +#[test_case(5, 3, 9, Some(2), Some(7); "ommitting start_block ends with Interrupted status")] +#[tokio::test] +#[cfg_attr(not(feature = "redis_tests"), ignore)] +async fn 
test_bitcoin_predicate_status_is_updated_with_reorg( + genesis_chain_blocks_to_mine: u64, + fork_point: u64, + fork_blocks_to_mine: u64, + predicate_start_block: Option, + predicate_end_block: Option, +) { + let starting_chain_tip = 0; + let ( + mut redis_process, + working_dir, + chainhook_service_port, + redis_port, + stacks_ingestion_port, + bitcoin_rpc_port, + ) = setup_bitcoin_chainhook_test(starting_chain_tip).await; + + let uuid = &get_random_uuid(); + let predicate = build_bitcoin_payload( + Some("regtest"), + Some(json!({"scope":"block"})), + None, + Some( + json!({"start_block": predicate_start_block, "end_block": predicate_end_block, "include_proof": true}), + ), + Some(uuid), + ); + + let _ = call_register_predicate(&predicate, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + let genesis_branch_key = '0'; + let first_block_mined_height = starting_chain_tip + 1; + let last_block_mined_height = genesis_chain_blocks_to_mine + first_block_mined_height; + for block_height in first_block_mined_height..last_block_mined_height { + mine_burn_block( + stacks_ingestion_port, + bitcoin_rpc_port, + Some(genesis_branch_key), + block_height, + ) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + } + + sleep(Duration::new(2, 0)); + let status = get_predicate_status(uuid, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + assert_streaming_status((status, None, None)); + + let branch_key = '1'; + let first_fork_block_mined_height = fork_point + 1; + create_burn_fork_at( + stacks_ingestion_port, + 
bitcoin_rpc_port, + Some(branch_key), + first_fork_block_mined_height, + genesis_branch_key, + fork_point, + ) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + let reorg_point = last_block_mined_height + 1; + let first_fork_block_mined_height = first_fork_block_mined_height + 1; + let last_fork_block_mined_height = first_fork_block_mined_height + fork_blocks_to_mine; + + for block_height in first_fork_block_mined_height..last_fork_block_mined_height { + mine_burn_block( + stacks_ingestion_port, + bitcoin_rpc_port, + Some(branch_key), + block_height, + ) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + if block_height == reorg_point { + sleep(Duration::new(2, 0)); + let status = get_predicate_status(uuid, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + assert_streaming_status((status, None, None)); + } + } + + sleep(Duration::new(2, 0)); + let status = get_predicate_status(uuid, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + assert_confirmed_expiration_status((status, None, None)); + + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); } #[test_case(Chain::Stacks; "for stacks chain")] @@ -631,7 +881,7 @@ async fn test_bitcoin_predicate_status_is_updated( #[cfg_attr(not(feature = "redis_tests"), ignore)] async fn test_deregister_predicate(chain: Chain) { let (mut redis_process, working_dir, 
chainhook_service_port, redis_port, _, _) = match &chain { - Chain::Stacks => setup_stacks_chainhook_test(0).await, + Chain::Stacks => setup_stacks_chainhook_test(0, None, None).await, Chain::Bitcoin => setup_bitcoin_chainhook_test(0).await, }; @@ -649,7 +899,7 @@ async fn test_deregister_predicate(chain: Chain) { Some("regtest"), Some(json!({"scope":"block"})), None, - Some(json!({"start_block": 1, "end_block": 2})), + Some(json!({"start_block": 1, "end_block": 2, "include_proof": true})), Some(uuid), ), }; @@ -710,3 +960,160 @@ async fn test_deregister_predicate(chain: Chain) { flush_redis(redis_port); redis_process.kill().unwrap(); } + +#[test_case(New, 6 => using assert_confirmed_expiration_status; "preloaded predicate with new status should get scanned until completion")] +#[test_case(Scanning(ScanningData { + number_of_blocks_evaluated: 4, + number_of_blocks_to_scan: 1, + number_of_times_triggered: 0, + last_occurrence: None, + last_evaluated_block_height: 4 +}), 6 => using assert_confirmed_expiration_status; "preloaded predicate with scanning status should get scanned until completion")] +#[test_case(Streaming(StreamingData { + number_of_blocks_evaluated: 4, + number_of_times_triggered: 0, + last_occurrence: None, + last_evaluation: 0, + last_evaluated_block_height: 4 +}), 6 => using assert_confirmed_expiration_status; "preloaded predicate with streaming status and last evaluated height below tip should get scanned until completion")] +#[test_case(Streaming(StreamingData { + number_of_blocks_evaluated: 5, + number_of_times_triggered: 0, + last_occurrence: None, + last_evaluation: 0, + last_evaluated_block_height: 5 +}), 5 => using assert_streaming_status; "preloaded predicate with streaming status and last evaluated height at tip should be streamed")] +#[tokio::test] +#[cfg_attr(not(feature = "redis_tests"), ignore)] +async fn test_restarting_with_saved_predicates( + starting_status: PredicateStatus, + starting_chain_tip: u64, +) -> (PredicateStatus, Option, 
Option) { + let uuid = &get_random_uuid(); + let predicate = build_stacks_payload( + Some("devnet"), + Some(json!({"scope":"block_height", "lower_than": 100})), + None, + Some(json!({"start_block": 1, "end_block": 6})), + Some(uuid), + ); + let predicate = + serde_json::from_value(predicate).expect("failed to set up stacks chanhook spec for test"); + + let (mut redis_process, working_dir, chainhook_service_port, redis_port, _, _) = + setup_stacks_chainhook_test(starting_chain_tip, Some((predicate, starting_status)), None) + .await; + + await_new_scanning_status_complete(uuid, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + sleep(Duration::new(2, 0)); + let result = get_predicate_status(uuid, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + (result, None, None) +} + +#[tokio::test] +#[cfg_attr(not(feature = "redis_tests"), ignore)] +async fn it_allows_specifying_startup_predicate() { + let uuid = &get_random_uuid(); + let predicate = build_stacks_payload( + Some("devnet"), + Some(json!({"scope":"block_height", "lower_than": 100})), + None, + Some(json!({"start_block": 1, "end_block": 2})), + Some(uuid), + ); + let predicate = + serde_json::from_value(predicate).expect("failed to set up stacks chanhook spec for test"); + let startup_predicate = ChainhookFullSpecification::Stacks(predicate); + let (mut redis_process, working_dir, chainhook_service_port, redis_port, _, _) = + setup_stacks_chainhook_test(3, None, Some(vec![startup_predicate])).await; + + await_new_scanning_status_complete(uuid, chainhook_service_port) + .await + 
.unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + sleep(Duration::new(2, 0)); + let result = get_predicate_status(uuid, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + assert_confirmed_expiration_status((result, None, None)); +} + +#[tokio::test] +#[cfg_attr(not(feature = "redis_tests"), ignore)] +async fn register_predicate_responds_409_if_uuid_in_use() { + let uuid = &get_random_uuid(); + let predicate = build_stacks_payload( + Some("devnet"), + Some(json!({"scope":"block_height", "lower_than": 100})), + None, + Some(json!({"start_block": 1, "end_block": 2})), + Some(uuid), + ); + let stacks_spec = serde_json::from_value(predicate.clone()) + .expect("failed to set up stacks chanhook spec for test"); + let startup_predicate = ChainhookFullSpecification::Stacks(stacks_spec); + + let (mut redis_process, working_dir, chainhook_service_port, redis_port, _, _) = + setup_stacks_chainhook_test(3, None, Some(vec![startup_predicate])).await; + + let result = call_register_predicate(&predicate, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + assert_eq!(result.get("status"), Some(&json!(409))); + + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); +} + +#[test] +fn it_generates_open_api_spec() { + let new_spec = document_predicate_api_server().unwrap(); + + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + 
path.push("../../docs/chainhook-openapi.json"); + let current_spec = fs::read_to_string(path).unwrap(); + + assert_eq!( + current_spec, new_spec, + "breaking change detected: open api spec has been updated" + ) +} diff --git a/components/chainhook-cli/src/service/tests/observer_tests.rs b/components/chainhook-cli/src/service/tests/observer_tests.rs new file mode 100644 index 000000000..40df05173 --- /dev/null +++ b/components/chainhook-cli/src/service/tests/observer_tests.rs @@ -0,0 +1,164 @@ +use std::{sync::mpsc::channel, thread::sleep, time::Duration}; + +use chainhook_sdk::{ + observer::{start_event_observer, EventObserverConfig}, + types::{BitcoinNetwork, StacksNodeConfig}, + utils::Context, +}; +use reqwest::Method; +use serde_json::Value; +use test_case::test_case; + +use crate::service::tests::{ + helpers::{ + build_predicates::build_stacks_payload, + mock_service::{call_observer_svc, call_ping, call_register_predicate, flush_redis}, + }, + setup_bitcoin_chainhook_test, setup_stacks_chainhook_test, +}; + +use super::helpers::{ + build_predicates::get_random_uuid, get_free_port, mock_stacks_node::create_tmp_working_dir, +}; + +#[tokio::test] +#[cfg_attr(not(feature = "redis_tests"), ignore)] +async fn ping_endpoint_returns_metrics() { + let ( + mut redis_process, + working_dir, + chainhook_service_port, + redis_port, + stacks_ingestion_port, + _, + ) = setup_stacks_chainhook_test(1, None, None).await; + + let uuid = &get_random_uuid(); + let predicate = build_stacks_payload(Some("devnet"), None, None, None, Some(uuid)); + let _ = call_register_predicate(&predicate, chainhook_service_port) + .await + .unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + let metrics = call_ping(stacks_ingestion_port).await.unwrap_or_else(|e| { + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + 
redis_process.kill().unwrap(); + panic!("test failed with error: {e}"); + }); + + assert_eq!(metrics.stacks.registered_predicates, 1); + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); +} + +async fn await_observer_started(port: u16) { + let mut attempts = 0; + loop { + let url = format!("http://localhost:{port}/ping"); + match call_observer_svc(&url, Method::GET, None).await { + Ok(_) => break, + Err(e) => { + if attempts > 3 { + panic!("failed to start event observer, {}", e); + } else { + attempts += 1; + sleep(Duration::new(0, 500_000_000)); + } + } + } + } +} +#[test_case("/wallet", json!({ + "method": "getaddressinfo", + "params": vec!["bc1qxy2kgdygjrsqtzq2n0yrf2493p83kkfjhx0wlh"], + "id": "my-id", + "jsonrpc": "2.0" +}))] +#[test_case("/", json!({ + "method": "sendrawtransaction", + "params": vec!["0x0000"], + "id": "my-id", + "jsonrpc": "2.0" +}))] +#[tokio::test] +#[cfg_attr(not(feature = "redis_tests"), ignore)] +async fn bitcoin_rpc_requests_are_forwarded(endpoint: &str, body: Value) { + let (mut redis_process, working_dir, _, redis_port, stacks_ingestion_port, _) = + setup_bitcoin_chainhook_test(1).await; + + await_observer_started(stacks_ingestion_port).await; + + let url = format!("http://localhost:{stacks_ingestion_port}{endpoint}"); + let response = call_observer_svc(&url, Method::POST, Some(&body)) + .await + .unwrap(); + assert!(response.get("result").is_some()); + assert!(response.get("error").is_none()); + std::fs::remove_dir_all(&working_dir).unwrap(); + flush_redis(redis_port); + redis_process.kill().unwrap(); +} + +async fn start_and_ping_event_observer(config: EventObserverConfig, ingestion_port: u16) { + let (observer_commands_tx, observer_commands_rx) = channel(); + let logger = hiro_system_kit::log::setup_logger(); + let _guard = hiro_system_kit::log::setup_global_logger(logger.clone()); + let ctx = Context { + logger: Some(logger), + tracer: false, + }; + start_event_observer( + 
config, + observer_commands_tx, + observer_commands_rx, + None, + None, + ctx, + ) + .unwrap(); + await_observer_started(ingestion_port).await; +} +#[test_case("/drop_mempool_tx", Method::POST, None)] +#[test_case("/attachments/new", Method::POST, None)] +#[test_case("/mined_block", Method::POST, Some(&json!({})))] +#[test_case("/mined_microblock", Method::POST, Some(&json!({})))] +#[tokio::test] +async fn it_responds_200_for_unimplemented_endpoints( + endpoint: &str, + method: Method, + body: Option<&Value>, +) { + let ingestion_port = get_free_port().unwrap(); + let (working_dir, _tsv_dir) = create_tmp_working_dir().unwrap_or_else(|e| { + panic!("test failed with error: {e}"); + }); + let config = EventObserverConfig { + chainhook_config: None, + bitcoin_rpc_proxy_enabled: false, + ingestion_port: ingestion_port, + bitcoind_rpc_username: format!(""), + bitcoind_rpc_password: format!(""), + bitcoind_rpc_url: format!(""), + bitcoin_block_signaling: chainhook_sdk::types::BitcoinBlockSignaling::Stacks( + StacksNodeConfig { + rpc_url: format!(""), + ingestion_port: ingestion_port, + }, + ), + display_logs: false, + cache_path: working_dir, + bitcoin_network: BitcoinNetwork::Regtest, + stacks_network: chainhook_sdk::types::StacksNetwork::Devnet, + data_handler_tx: None, + }; + start_and_ping_event_observer(config, ingestion_port).await; + let url = format!("http://localhost:{ingestion_port}{endpoint}"); + let response = call_observer_svc(&url, method, body).await.unwrap(); + assert_eq!(response.get("status").unwrap(), &json!(200)); +} diff --git a/components/chainhook-cli/src/storage/mod.rs b/components/chainhook-cli/src/storage/mod.rs index 2304a722f..3c72fdb3f 100644 --- a/components/chainhook-cli/src/storage/mod.rs +++ b/components/chainhook-cli/src/storage/mod.rs @@ -61,28 +61,6 @@ pub fn open_readwrite_stacks_db_conn(base_dir: &PathBuf, _ctx: &Context) -> Resu Ok(db) } -fn get_default_bitcoin_db_file_path(base_dir: &PathBuf) -> PathBuf { - let mut destination_path 
= base_dir.clone(); - destination_path.push("bitcoin.rocksdb"); - destination_path -} - -pub fn open_readonly_bitcoin_db_conn(base_dir: &PathBuf, _ctx: &Context) -> Result { - let path = get_default_bitcoin_db_file_path(&base_dir); - let opts = get_db_default_options(); - let db = DB::open_for_read_only(&opts, path, false) - .map_err(|e| format!("unable to open bitcoin.rocksdb: {}", e.to_string()))?; - Ok(db) -} - -pub fn open_readwrite_bitcoin_db_conn(base_dir: &PathBuf, _ctx: &Context) -> Result { - let path = get_default_bitcoin_db_file_path(&base_dir); - let opts = get_db_default_options(); - let db = DB::open(&opts, path) - .map_err(|e| format!("unable to open bitcoin.rocksdb: {}", e.to_string()))?; - Ok(db) -} - fn get_block_key(block_identifier: &BlockIdentifier) -> [u8; 12] { let mut key = [0u8; 12]; key[..2].copy_from_slice(b"b:"); diff --git a/components/chainhook-sdk/src/chainhooks/bitcoin/mod.rs b/components/chainhook-sdk/src/chainhooks/bitcoin/mod.rs index d4b411e17..dd17c6b07 100644 --- a/components/chainhook-sdk/src/chainhooks/bitcoin/mod.rs +++ b/components/chainhook-sdk/src/chainhooks/bitcoin/mod.rs @@ -18,6 +18,8 @@ use std::str::FromStr; use reqwest::RequestBuilder; +use hex::FromHex; + pub struct BitcoinTriggerChainhook<'a> { pub chainhook: &'a BitcoinChainhookSpecification, pub apply: Vec<(Vec<&'a BitcoinTransactionData>, &'a BitcoinBlockData)>, @@ -301,6 +303,25 @@ pub fn handle_bitcoin_hook_action<'a>( } } +struct OpReturn(String); +impl OpReturn { + fn from_string(hex: &String) -> Result { + // Remove the `0x` prefix if present so that we can call from_hex without errors. + let hex = hex.strip_prefix("0x").unwrap_or(hex); + + // Parse the hex bytes. + let bytes = Vec::::from_hex(hex).unwrap(); + match bytes.as_slice() { + // An OpReturn is composed by: + // - OP_RETURN 0x6a + // - Data length (ignored) + // - The data + [0x6a, _, rest @ ..] 
=> Ok(hex::encode(rest)), + _ => Err(String::from("not an OP_RETURN")), + } + } +} + impl BitcoinPredicateType { pub fn evaluate_transaction_predicate( &self, @@ -313,32 +334,48 @@ impl BitcoinPredicateType { BitcoinPredicateType::Txid(ExactMatchingRule::Equals(txid)) => { tx.transaction_identifier.hash.eq(txid) } - BitcoinPredicateType::Outputs(OutputPredicate::OpReturn(MatchingRule::Equals( - hex_bytes, - ))) => { - for output in tx.metadata.outputs.iter() { - if output.script_pubkey.eq(hex_bytes) { - return true; - } - } - false - } - BitcoinPredicateType::Outputs(OutputPredicate::OpReturn(MatchingRule::StartsWith( - hex_bytes, - ))) => { + BitcoinPredicateType::Outputs(OutputPredicate::OpReturn(rule)) => { for output in tx.metadata.outputs.iter() { - if output.script_pubkey.starts_with(hex_bytes) { - return true; + // opret contains the op_return data section prefixed with `0x`. + let opret = match OpReturn::from_string(&output.script_pubkey) { + Ok(op) => op, + Err(_) => continue, + }; + + // encoded_pattern takes a predicate pattern and return its lowercase hex + // representation. + fn encoded_pattern(pattern: &str) -> String { + // If the pattern starts with 0x, return it in lowercase and without the 0x + // prefix. + if pattern.starts_with("0x") { + return pattern + .strip_prefix("0x") + .unwrap() + .to_lowercase() + .to_string(); + } + + // In this case it should be trated as ASCII so let's return its hex + // representation. 
+ hex::encode(pattern) } - } - false - } - BitcoinPredicateType::Outputs(OutputPredicate::OpReturn(MatchingRule::EndsWith( - hex_bytes, - ))) => { - for output in tx.metadata.outputs.iter() { - if output.script_pubkey.ends_with(hex_bytes) { - return true; + + match rule { + MatchingRule::StartsWith(pattern) => { + if opret.starts_with(&encoded_pattern(pattern)) { + return true; + } + } + MatchingRule::EndsWith(pattern) => { + if opret.ends_with(&encoded_pattern(pattern)) { + return true; + } + } + MatchingRule::Equals(pattern) => { + if opret.eq(&encoded_pattern(pattern)) { + return true; + } + } } } false @@ -452,3 +489,6 @@ impl BitcoinPredicateType { } } } + +#[cfg(test)] +pub mod tests; diff --git a/components/chainhook-sdk/src/chainhooks/bitcoin/tests.rs b/components/chainhook-sdk/src/chainhooks/bitcoin/tests.rs new file mode 100644 index 000000000..27698a6de --- /dev/null +++ b/components/chainhook-sdk/src/chainhooks/bitcoin/tests.rs @@ -0,0 +1,87 @@ +use super::super::types::MatchingRule; +use super::*; +use crate::types::BitcoinTransactionMetadata; +use chainhook_types::bitcoin::TxOut; + +use test_case::test_case; + +#[test_case( + "0x6affAAAA", + MatchingRule::Equals(String::from("0xAAAA")), + true; + "OpReturn: Equals matches Hex value" +)] +#[test_case( + "0x60ff0000", + MatchingRule::Equals(String::from("0x0000")), + false; + "OpReturn: Invalid OP_RETURN opcode" +)] +#[test_case( + "0x6aff012345", + MatchingRule::Equals(String::from("0x0000")), + false; + "OpReturn: Equals does not match Hex value" +)] +#[test_case( + "0x6aff68656C6C6F", + MatchingRule::Equals(String::from("hello")), + true; + "OpReturn: Equals matches ASCII value" +)] +#[test_case( + "0x6affAA0000", + MatchingRule::StartsWith(String::from("0xAA")), + true; + "OpReturn: StartsWith matches Hex value" +)] +#[test_case( + "0x6aff585858", // 0x585858 => XXX + MatchingRule::StartsWith(String::from("X")), + true; + "OpReturn: StartsWith matches ASCII value" +)] +#[test_case( + "0x6aff0000AA", 
+ MatchingRule::EndsWith(String::from("0xAA")), + true; + "OpReturn: EndsWith matches Hex value" +)] +#[test_case( + "0x6aff000058", + MatchingRule::EndsWith(String::from("X")), + true; + "OpReturn: EndsWith matches ASCII value" +)] + +fn test_script_pubkey_evaluation(script_pubkey: &str, rule: MatchingRule, matches: bool) { + let predicate = BitcoinPredicateType::Outputs(OutputPredicate::OpReturn(rule)); + + let outputs = vec![TxOut { + value: 0, + script_pubkey: String::from(script_pubkey), + }]; + + let tx = BitcoinTransactionData { + transaction_identifier: TransactionIdentifier { + hash: String::from(""), + }, + operations: vec![], + metadata: BitcoinTransactionMetadata { + fee: 0, + proof: None, + inputs: vec![], + stacks_operations: vec![], + ordinal_operations: vec![], + + outputs, + }, + }; + + let ctx = Context { + logger: None, + tracer: false, + }; + + assert_eq!(matches, predicate.evaluate_transaction_predicate(&tx, &ctx),); +} diff --git a/components/chainhook-sdk/src/chainhooks/stacks/mod.rs b/components/chainhook-sdk/src/chainhooks/stacks/mod.rs index 766c4cf4f..345c80369 100644 --- a/components/chainhook-sdk/src/chainhooks/stacks/mod.rs +++ b/components/chainhook-sdk/src/chainhooks/stacks/mod.rs @@ -508,18 +508,43 @@ pub fn evaluate_stacks_predicate_on_transaction<'a>( } } -fn encode_transaction_including_with_clarity_decoding( - transaction: &StacksTransactionData, +fn serialize_stacks_block( + block: &dyn AbstractStacksBlock, + transactions: Vec<&StacksTransactionData>, + decode_clarity_values: bool, + include_contract_abi: bool, ctx: &Context, ) -> serde_json::Value { json!({ + "block_identifier": block.get_identifier(), + "parent_block_identifier": block.get_parent_identifier(), + "timestamp": block.get_timestamp(), + "transactions": transactions.into_iter().map(|transaction| { + serialize_stacks_transaction(&transaction, decode_clarity_values, include_contract_abi, ctx) + }).collect::>(), + "metadata": block.get_serialized_metadata(), + }) +} 
+ +fn serialize_stacks_transaction( + transaction: &StacksTransactionData, + decode_clarity_values: bool, + include_contract_abi: bool, + ctx: &Context, +) -> serde_json::Value { + let mut json = json!({ "transaction_identifier": transaction.transaction_identifier, "operations": transaction.operations, "metadata": { "success": transaction.metadata.success, "raw_tx": transaction.metadata.raw_tx, - "result": serialized_decoded_clarity_value(&transaction.metadata.result, ctx), + "result": if decode_clarity_values { + serialized_decoded_clarity_value(&transaction.metadata.result, ctx) + } else { + json!(transaction.metadata.result) + }, "sender": transaction.metadata.sender, + "nonce": transaction.metadata.nonce, "fee": transaction.metadata.fee, "kind": transaction.metadata.kind, "receipt": { @@ -527,15 +552,21 @@ fn encode_transaction_including_with_clarity_decoding( "mutated_assets_radius": transaction.metadata.receipt.mutated_assets_radius, "contract_calls_stack": transaction.metadata.receipt.contract_calls_stack, "events": transaction.metadata.receipt.events.iter().map(|event| { - serialized_event_with_decoded_clarity_value(event, ctx) + if decode_clarity_values { serialized_event_with_decoded_clarity_value(event, ctx) } else { json!(event) } }).collect::>(), }, "description": transaction.metadata.description, "sponsor": transaction.metadata.sponsor, "execution_cost": transaction.metadata.execution_cost, - "position": transaction.metadata.position, + "position": transaction.metadata.position }, - }) + }); + if include_contract_abi { + if let Some(abi) = &transaction.metadata.contract_abi { + json["metadata"]["contract_abi"] = json!(abi); + } + } + json } pub fn serialized_event_with_decoded_clarity_value( @@ -764,37 +795,13 @@ pub fn serialize_stacks_payload_to_json<'a>( ctx: &Context, ) -> JsonValue { let decode_clarity_values = trigger.should_decode_clarity_value(); + let include_contract_abi = trigger.chainhook.include_contract_abi.unwrap_or(false); json!({ 
"apply": trigger.apply.into_iter().map(|(transactions, block)| { - json!({ - "block_identifier": block.get_identifier(), - "parent_block_identifier": block.get_parent_identifier(), - "timestamp": block.get_timestamp(), - "transactions": transactions.iter().map(|transaction| { - if decode_clarity_values { - encode_transaction_including_with_clarity_decoding(transaction, ctx) - } else { - json!(transaction) - } - }).collect::>(), - "metadata": block.get_serialized_metadata(), - }) + serialize_stacks_block(block, transactions, decode_clarity_values, include_contract_abi, ctx) }).collect::>(), "rollback": trigger.rollback.into_iter().map(|(transactions, block)| { - json!({ - "block_identifier": block.get_identifier(), - "parent_block_identifier": block.get_parent_identifier(), - "timestamp": block.get_timestamp(), - "transactions": transactions.iter().map(|transaction| { - if decode_clarity_values { - encode_transaction_including_with_clarity_decoding(transaction, ctx) - } else { - json!(transaction) - } - }).collect::>(), - "metadata": block.get_serialized_metadata(), - // "proof": proofs.get(&transaction.transaction_identifier), - }) + serialize_stacks_block(block, transactions, decode_clarity_values, include_contract_abi, ctx) }).collect::>(), "chainhook": { "uuid": trigger.chainhook.uuid, diff --git a/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/base/transaction_contract_deploy.json b/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/base/transaction_contract_deploy.json index 12d266544..6502f6428 100644 --- a/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/base/transaction_contract_deploy.json +++ b/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/base/transaction_contract_deploy.json @@ -1,41 +1,80 @@ { - "metadata": { - "description": "", - "execution_cost": { - "read_count": 8, - "read_length": 6, - "runtime": 84581, - "write_count": 13, - "write_length": 1612 - }, - "fee": 
750000, - "kind": { - "data": { - "code": ";; The .subnet contract\n\n(define-constant CONTRACT_ADDRESS (as-contract tx-sender))\n\n;; Error codes\n(define-constant ERR_BLOCK_ALREADY_COMMITTED 1)\n(define-constant ERR_INVALID_MINER 2)\n(define-constant ERR_CONTRACT_CALL_FAILED 3)\n(define-constant ERR_TRANSFER_FAILED 4)\n(define-constant ERR_DISALLOWED_ASSET 5)\n(define-constant ERR_ASSET_ALREADY_ALLOWED 6)\n(define-constant ERR_MERKLE_ROOT_DOES_NOT_MATCH 7)\n(define-constant ERR_INVALID_MERKLE_ROOT 8)\n(define-constant ERR_WITHDRAWAL_ALREADY_PROCESSED 9)\n(define-constant ERR_VALIDATION_FAILED 10)\n;;; The value supplied for `target-chain-tip` does not match the current chain tip.\n(define-constant ERR_INVALID_CHAIN_TIP 11)\n;;; The contract was called before reaching this-chain height reaches 1.\n(define-constant ERR_CALLED_TOO_EARLY 12)\n(define-constant ERR_MINT_FAILED 13)\n(define-constant ERR_ATTEMPT_TO_TRANSFER_ZERO_AMOUNT 14)\n(define-constant ERR_IN_COMPUTATION 15)\n;; The contract does not own this NFT to withdraw it.\n(define-constant ERR_NFT_NOT_OWNED_BY_CONTRACT 16)\n(define-constant ERR_VALIDATION_LEAF_FAILED 30)\n\n;; Map from Stacks block height to block commit\n(define-map block-commits uint (buff 32))\n;; Map recording withdrawal roots\n(define-map withdrawal-roots-map (buff 32) bool)\n;; Map recording processed withdrawal leaves\n(define-map processed-withdrawal-leaves-map { withdrawal-leaf-hash: (buff 32), withdrawal-root-hash: (buff 32) } bool)\n\n;; principal that can commit blocks\n(define-data-var miner principal tx-sender)\n;; principal that can register contracts\n(define-data-var admin principal 'ST167FDXCJGS54J1T0J42VTX46G0QQQFRJGBK28RN)\n\n;; Map of allowed contracts for asset transfers - maps L1 contract principal to L2 contract principal\n(define-map allowed-contracts principal principal)\n\n;; Use trait declarations\n(use-trait nft-trait 'ST1NXBK3K5YYMD6FD41MVNP3JS1GABZ8TRVX023PT.nft-trait.nft-trait)\n(use-trait ft-trait 
'ST1NXBK3K5YYMD6FD41MVNP3JS1GABZ8TRVX023PT.sip-010-trait-ft-standard.sip-010-trait)\n(use-trait mint-from-subnet-trait .subnet-traits-v1.mint-from-subnet-trait)\n\n;; Update the miner for this contract.\n(define-public (update-miner (new-miner principal))\n (begin\n (asserts! (is-eq tx-sender (var-get miner)) (err ERR_INVALID_MINER))\n (ok (var-set miner new-miner))\n )\n)\n\n;; Register a new FT contract to be supported by this subnet.\n(define-public (register-new-ft-contract (ft-contract ) (l2-contract principal))\n (begin\n ;; Verify that tx-sender is an authorized admin\n (asserts! (is-admin tx-sender) (err ERR_INVALID_MINER))\n\n ;; Set up the assets that the contract is allowed to transfer\n (asserts! (map-insert allowed-contracts (contract-of ft-contract) l2-contract)\n (err ERR_ASSET_ALREADY_ALLOWED))\n\n (print {\n event: \"register-contract\",\n asset-type: \"ft\",\n l1-contract: (contract-of ft-contract),\n l2-contract: l2-contract\n })\n\n (ok true)\n )\n)\n\n;; Register a new NFT contract to be supported by this subnet.\n(define-public (register-new-nft-contract (nft-contract ) (l2-contract principal))\n (begin\n ;; Verify that tx-sender is an authorized admin\n (asserts! (is-admin tx-sender) (err ERR_INVALID_MINER))\n\n ;; Set up the assets that the contract is allowed to transfer\n (asserts! 
(map-insert allowed-contracts (contract-of nft-contract) l2-contract)\n (err ERR_ASSET_ALREADY_ALLOWED))\n\n (print {\n event: \"register-contract\",\n asset-type: \"nft\",\n l1-contract: (contract-of nft-contract),\n l2-contract: l2-contract\n })\n\n (ok true)\n )\n)\n\n;; Helper function: returns a boolean indicating whether the given principal is a miner\n;; Returns bool\n(define-private (is-miner (miner-to-check principal))\n (is-eq miner-to-check (var-get miner))\n)\n\n;; Helper function: returns a boolean indicating whether the given principal is an admin\n;; Returns bool\n(define-private (is-admin (addr-to-check principal))\n (is-eq addr-to-check (var-get admin))\n)\n\n;; Helper function: determines whether the commit-block operation satisfies pre-conditions\n;; listed in `commit-block`.\n;; Returns response\n(define-private (can-commit-block? (commit-block-height uint) (target-chain-tip (buff 32)))\n (begin\n ;; check no block has been committed at this height\n (asserts! (is-none (map-get? block-commits commit-block-height)) (err ERR_BLOCK_ALREADY_COMMITTED))\n\n ;; check that `target-chain-tip` matches the burn chain tip\n (asserts! (is-eq\n target-chain-tip\n (unwrap! (get-block-info? id-header-hash (- block-height u1)) (err ERR_CALLED_TOO_EARLY)) )\n (err ERR_INVALID_CHAIN_TIP))\n\n ;; check that the tx sender is one of the miners\n (asserts! (is-miner tx-sender) (err ERR_INVALID_MINER))\n\n ;; check that the miner called this contract directly\n (asserts! 
(is-miner contract-caller) (err ERR_INVALID_MINER))\n\n (ok true)\n )\n)\n\n;; Helper function: modifies the block-commits map with a new commit and prints related info\n;; Returns response<(buff 32), ?>\n(define-private (inner-commit-block (block (buff 32)) (commit-block-height uint) (withdrawal-root (buff 32)))\n (begin\n (map-set block-commits commit-block-height block)\n (map-set withdrawal-roots-map withdrawal-root true)\n (print {\n event: \"block-commit\",\n block-commit: block,\n withdrawal-root: withdrawal-root,\n block-height: commit-block-height\n })\n (ok block)\n )\n)\n\n;; The subnet miner calls this function to commit a block at a particular height.\n;; `block` is the hash of the block being submitted.\n;; `target-chain-tip` is the `id-header-hash` of the burn block (i.e., block on\n;; this chain) that the miner intends to build off.\n;;\n;; Fails if:\n;; 1) we have already committed at this block height\n;; 2) `target-chain-tip` is not the burn chain tip (i.e., on this chain)\n;; 3) the sender is not a miner\n(define-public (commit-block (block (buff 32)) (target-chain-tip (buff 32)) (withdrawal-root (buff 32)))\n (let ((commit-block-height block-height))\n (try! (can-commit-block? commit-block-height target-chain-tip))\n (inner-commit-block block commit-block-height withdrawal-root)\n )\n)\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; FOR NFT ASSET TRANSFERS\n\n;; Helper function that transfers the specified NFT from the given sender to the given recipient.\n;; Returns response\n(define-private (inner-transfer-nft-asset\n (nft-contract )\n (id uint)\n (sender principal)\n (recipient principal)\n )\n (let (\n (call-result (contract-call? nft-contract transfer id sender recipient))\n (transfer-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! 
transfer-result (err ERR_TRANSFER_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-mint-nft-asset\n (nft-mint-contract )\n (id uint)\n (sender principal)\n (recipient principal)\n )\n (let (\n (call-result (as-contract (contract-call? nft-mint-contract mint-from-subnet id sender recipient)))\n (mint-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! mint-result (err ERR_MINT_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-transfer-or-mint-nft-asset\n (nft-contract )\n (nft-mint-contract )\n (id uint)\n (recipient principal)\n )\n (let (\n (call-result (contract-call? nft-contract get-owner id))\n (nft-owner (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n (contract-owns-nft (is-eq nft-owner (some CONTRACT_ADDRESS)))\n (no-owner (is-eq nft-owner none))\n )\n\n (if contract-owns-nft\n (inner-transfer-nft-asset nft-contract id CONTRACT_ADDRESS recipient)\n (if no-owner\n ;; Try minting the asset if there is no existing owner of this NFT\n (inner-mint-nft-asset nft-mint-contract id CONTRACT_ADDRESS recipient)\n ;; In this case, a principal other than this contract owns this NFT, so minting is not possible\n (err ERR_MINT_FAILED)\n )\n )\n )\n)\n\n;; A user calls this function to deposit an NFT into the contract.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (deposit-nft-asset\n (nft-contract )\n (id uint)\n (sender principal)\n )\n (let (\n ;; Check that the asset belongs to the allowed-contracts map\n (subnet-contract-id (unwrap! (map-get? allowed-contracts (contract-of nft-contract)) (err ERR_DISALLOWED_ASSET)))\n )\n\n ;; Try to transfer the NFT to this contract\n (asserts! (try! 
(inner-transfer-nft-asset nft-contract id sender CONTRACT_ADDRESS)) (err ERR_TRANSFER_FAILED))\n\n ;; Emit a print event - the node consumes this\n (print {\n event: \"deposit-nft\",\n l1-contract-id: (as-contract nft-contract),\n nft-id: id,\n sender: sender,\n subnet-contract-id: subnet-contract-id,\n })\n\n (ok true)\n )\n)\n\n\n;; Helper function for `withdraw-nft-asset`\n;; Returns response\n(define-public (inner-withdraw-nft-asset\n (nft-contract )\n (l2-contract principal)\n (id uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (nft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let ((hashes-are-valid (check-withdrawal-hashes withdrawal-root withdrawal-leaf-hash sibling-hashes)))\n\n (asserts! (try! hashes-are-valid) (err ERR_VALIDATION_FAILED))\n\n ;; check that the withdrawal request data matches the supplied leaf hash\n (asserts! (is-eq withdrawal-leaf-hash\n (leaf-hash-withdraw-nft l2-contract id recipient withdrawal-id height))\n (err ERR_VALIDATION_LEAF_FAILED))\n\n (asserts!\n (try!\n (match nft-mint-contract\n mint-contract (as-contract (inner-transfer-or-mint-nft-asset nft-contract mint-contract id recipient))\n (as-contract (inner-transfer-without-mint-nft-asset nft-contract id recipient))\n )\n )\n (err ERR_TRANSFER_FAILED)\n )\n\n (asserts!\n (finish-withdraw { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root })\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED)\n )\n\n (ok true)\n )\n)\n\n;; A user calls this function to withdraw the specified NFT from this contract.\n;; In order for this withdrawal to go through, the given withdrawal must have been included\n;; in a withdrawal Merkle tree a subnet miner submitted. The user must provide the leaf\n;; hash of their withdrawal and the root hash of the specific Merkle tree their withdrawal\n;; is included in. 
They must also provide a list of sibling hashes. The withdraw function\n;; uses the provided hashes to ensure the requested withdrawal is valid.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (withdraw-nft-asset\n (nft-contract )\n (id uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (nft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let (\n ;; Check that the asset belongs to the allowed-contracts map\n (l2-contract (unwrap! (map-get? allowed-contracts (contract-of nft-contract)) (err ERR_DISALLOWED_ASSET)))\n )\n (asserts!\n (try! (inner-withdraw-nft-asset\n nft-contract\n l2-contract\n id\n recipient\n withdrawal-id\n height\n nft-mint-contract\n withdrawal-root\n withdrawal-leaf-hash\n sibling-hashes\n ))\n (err ERR_TRANSFER_FAILED)\n )\n\n ;; Emit a print event\n (print {\n event: \"withdraw-nft\",\n l1-contract-id: (as-contract nft-contract),\n nft-id: id,\n recipient: recipient\n })\n\n (ok true)\n )\n)\n\n\n;; Like `inner-transfer-or-mint-nft-asset but without allowing or requiring a mint function. In order to withdraw, the user must\n;; have the appropriate balance.\n(define-private (inner-transfer-without-mint-nft-asset\n (nft-contract )\n (id uint)\n (recipient principal)\n )\n (let (\n (call-result (contract-call? nft-contract get-owner id))\n (nft-owner (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n (contract-owns-nft (is-eq nft-owner (some CONTRACT_ADDRESS)))\n )\n\n (asserts! 
contract-owns-nft (err ERR_NFT_NOT_OWNED_BY_CONTRACT))\n (inner-transfer-nft-asset nft-contract id CONTRACT_ADDRESS recipient)\n )\n)\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; FOR FUNGIBLE TOKEN ASSET TRANSFERS\n\n;; Helper function that transfers a specified amount of the fungible token from the given sender to the given recipient.\n;; Returns response\n(define-private (inner-transfer-ft-asset\n (ft-contract )\n (amount uint)\n (sender principal)\n (recipient principal)\n (memo (optional (buff 34)))\n )\n (let (\n (call-result (contract-call? ft-contract transfer amount sender recipient memo))\n (transfer-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; FIXME: SIP-010 doesn't require that transfer returns (ok true) on success, so is this check necessary?\n ;; Check that the transfer succeeded\n (asserts! transfer-result (err ERR_TRANSFER_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-mint-ft-asset\n (ft-mint-contract )\n (amount uint)\n (sender principal)\n (recipient principal)\n )\n (let (\n (call-result (as-contract (contract-call? ft-mint-contract mint-from-subnet amount sender recipient)))\n (mint-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! mint-result (err ERR_MINT_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-transfer-or-mint-ft-asset\n (ft-contract )\n (ft-mint-contract )\n (amount uint)\n (recipient principal)\n (memo (optional (buff 34)))\n )\n (let (\n (call-result (contract-call? ft-contract get-balance CONTRACT_ADDRESS))\n (contract-ft-balance (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n (contract-owns-enough (>= contract-ft-balance amount))\n (amount-to-transfer (if contract-owns-enough amount contract-ft-balance))\n (amount-to-mint (- amount amount-to-transfer))\n )\n\n ;; Check that the total balance between the transfer and mint is equal to the original balance\n (asserts! 
(is-eq amount (+ amount-to-transfer amount-to-mint)) (err ERR_IN_COMPUTATION))\n\n (and\n (> amount-to-transfer u0)\n (try! (inner-transfer-ft-asset ft-contract amount-to-transfer CONTRACT_ADDRESS recipient memo))\n )\n (and\n (> amount-to-mint u0)\n (try! (inner-mint-ft-asset ft-mint-contract amount-to-mint CONTRACT_ADDRESS recipient))\n )\n\n (ok true)\n )\n)\n\n;; A user calls this function to deposit a fungible token into the contract.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (deposit-ft-asset\n (ft-contract )\n (amount uint)\n (sender principal)\n (memo (optional (buff 34)))\n )\n (let (\n ;; Check that the asset belongs to the allowed-contracts map\n (subnet-contract-id (unwrap! (map-get? allowed-contracts (contract-of ft-contract)) (err ERR_DISALLOWED_ASSET)))\n )\n ;; Try to transfer the FT to this contract\n (asserts! (try! (inner-transfer-ft-asset ft-contract amount sender CONTRACT_ADDRESS memo)) (err ERR_TRANSFER_FAILED))\n\n (let (\n (ft-name (unwrap! (contract-call? ft-contract get-name) (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Emit a print event - the node consumes this\n (print {\n event: \"deposit-ft\",\n l1-contract-id: (as-contract ft-contract),\n ft-name: ft-name,\n ft-amount: amount,\n sender: sender,\n subnet-contract-id: subnet-contract-id,\n })\n )\n\n (ok true)\n )\n)\n\n;; This function performs validity checks related to the withdrawal and performs the withdrawal as well.\n;; Returns response\n(define-private (inner-withdraw-ft-asset\n (ft-contract )\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (memo (optional (buff 34)))\n (ft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let ((hashes-are-valid (check-withdrawal-hashes withdrawal-root withdrawal-leaf-hash sibling-hashes)))\n (asserts! (try! 
hashes-are-valid) (err ERR_VALIDATION_FAILED))\n\n ;; check that the withdrawal request data matches the supplied leaf hash\n (asserts! (is-eq withdrawal-leaf-hash\n (leaf-hash-withdraw-ft (contract-of ft-contract) amount recipient withdrawal-id height))\n (err ERR_VALIDATION_LEAF_FAILED))\n\n (asserts!\n (try!\n (match ft-mint-contract\n mint-contract (as-contract (inner-transfer-or-mint-ft-asset ft-contract mint-contract amount recipient memo))\n (as-contract (inner-transfer-ft-asset ft-contract amount CONTRACT_ADDRESS recipient memo))\n )\n )\n (err ERR_TRANSFER_FAILED)\n )\n\n (asserts!\n (finish-withdraw { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root })\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED))\n\n (ok true)\n )\n)\n\n;; A user can call this function to withdraw some amount of a fungible token asset from the\n;; contract and send it to a recipient.\n;; In order for this withdrawal to go through, the given withdrawal must have been included\n;; in a withdrawal Merkle tree a subnet miner submitted. The user must provide the leaf\n;; hash of their withdrawal and the root hash of the specific Merkle tree their withdrawal\n;; is included in. They must also provide a list of sibling hashes. The withdraw function\n;; uses the provided hashes to ensure the requested withdrawal is valid.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (withdraw-ft-asset\n (ft-contract )\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (memo (optional (buff 34)))\n (ft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (begin\n ;; Check that the withdraw amount is positive\n (asserts! (> amount u0) (err ERR_ATTEMPT_TO_TRANSFER_ZERO_AMOUNT))\n\n ;; Check that the asset belongs to the allowed-contracts map\n (unwrap! (map-get? 
allowed-contracts (contract-of ft-contract)) (err ERR_DISALLOWED_ASSET))\n\n (asserts!\n (try! (inner-withdraw-ft-asset\n ft-contract\n amount\n recipient\n withdrawal-id\n height\n memo\n ft-mint-contract\n withdrawal-root\n withdrawal-leaf-hash\n sibling-hashes))\n (err ERR_TRANSFER_FAILED)\n )\n\n (let (\n (ft-name (unwrap! (contract-call? ft-contract get-name) (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Emit a print event\n (print {\n event: \"withdraw-ft\",\n l1-contract-id: (as-contract ft-contract),\n ft-name: ft-name,\n ft-amount: amount,\n recipient: recipient,\n })\n )\n\n (ok true)\n )\n)\n\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; FOR STX TRANSFERS\n\n\n;; Helper function that transfers the given amount from the specified fungible token from the given sender to the given recipient.\n;; Returns response\n(define-private (inner-transfer-stx (amount uint) (sender principal) (recipient principal))\n (let (\n (call-result (stx-transfer? amount sender recipient))\n (transfer-result (unwrap! call-result (err ERR_TRANSFER_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! transfer-result (err ERR_TRANSFER_FAILED))\n\n (ok true)\n )\n)\n\n;; A user calls this function to deposit STX into the contract.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (deposit-stx (amount uint) (sender principal))\n (begin\n ;; Try to transfer the STX to this contract\n (asserts! (try! 
(inner-transfer-stx amount sender CONTRACT_ADDRESS)) (err ERR_TRANSFER_FAILED))\n\n ;; Emit a print event - the node consumes this\n (print { event: \"deposit-stx\", sender: sender, amount: amount })\n\n (ok true)\n )\n)\n\n(define-read-only (leaf-hash-withdraw-stx\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n )\n (sha512/256 (concat 0x00 (unwrap-panic (to-consensus-buff?\n {\n type: \"stx\",\n amount: amount,\n recipient: recipient,\n withdrawal-id: withdrawal-id,\n height: height\n })))\n )\n)\n\n(define-read-only (leaf-hash-withdraw-nft\n (asset-contract principal)\n (nft-id uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n )\n (sha512/256 (concat 0x00 (unwrap-panic (to-consensus-buff?\n {\n type: \"nft\",\n nft-id: nft-id,\n asset-contract: asset-contract,\n recipient: recipient,\n withdrawal-id: withdrawal-id,\n height: height\n })))\n )\n)\n\n(define-read-only (leaf-hash-withdraw-ft\n (asset-contract principal)\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n )\n (sha512/256 (concat 0x00 (unwrap-panic (to-consensus-buff?\n {\n type: \"ft\",\n amount: amount,\n asset-contract: asset-contract,\n recipient: recipient,\n withdrawal-id: withdrawal-id,\n height: height\n })))\n )\n)\n\n;; A user calls this function to withdraw STX from this contract.\n;; In order for this withdrawal to go through, the given withdrawal must have been included\n;; in a withdrawal Merkle tree a subnet miner submitted. The user must provide the leaf\n;; hash of their withdrawal and the root hash of the specific Merkle tree their withdrawal\n;; is included in. They must also provide a list of sibling hashes. 
The withdraw function\n;; uses the provided hashes to ensure the requested withdrawal is valid.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (withdraw-stx\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let ((hashes-are-valid (check-withdrawal-hashes withdrawal-root withdrawal-leaf-hash sibling-hashes)))\n\n (asserts! (try! hashes-are-valid) (err ERR_VALIDATION_FAILED))\n ;; check that the withdrawal request data matches the supplied leaf hash\n (asserts! (is-eq withdrawal-leaf-hash\n (leaf-hash-withdraw-stx amount recipient withdrawal-id height))\n (err ERR_VALIDATION_LEAF_FAILED))\n\n (asserts! (try! (as-contract (inner-transfer-stx amount tx-sender recipient))) (err ERR_TRANSFER_FAILED))\n\n (asserts!\n (finish-withdraw { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root })\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED))\n\n ;; Emit a print event\n (print { event: \"withdraw-stx\", recipient: recipient, amount: amount })\n\n (ok true)\n )\n)\n\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; GENERAL WITHDRAWAL FUNCTIONS\n\n;; This function concats the two given hashes in the correct order. 
It also prepends the buff `0x01`, which is\n;; a tag denoting a node (versus a leaf).\n;; Returns a buff\n(define-private (create-node-hash\n (curr-hash (buff 32))\n (sibling-hash (buff 32))\n (is-sibling-left-side bool)\n )\n (let (\n (concatted-hash (if is-sibling-left-side\n (concat sibling-hash curr-hash)\n (concat curr-hash sibling-hash)\n ))\n )\n\n (concat 0x01 concatted-hash)\n )\n)\n\n;; This function hashes the curr hash with its sibling hash.\n;; Returns (buff 32)\n(define-private (hash-help\n (sibling {\n hash: (buff 32),\n is-left-side: bool,\n })\n (curr-node-hash (buff 32))\n )\n (let (\n (sibling-hash (get hash sibling))\n (is-sibling-left-side (get is-left-side sibling))\n (new-buff (create-node-hash curr-node-hash sibling-hash is-sibling-left-side))\n )\n (sha512/256 new-buff)\n )\n)\n\n;; This function checks:\n;; - That the provided withdrawal root matches a previously submitted one (passed to the function `commit-block`)\n;; - That the computed withdrawal root matches a previous valid withdrawal root\n;; - That the given withdrawal leaf hash has not been previously processed\n;; Returns response\n(define-private (check-withdrawal-hashes\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (begin\n ;; Check that the user submitted a valid withdrawal root\n (asserts! (is-some (map-get? withdrawal-roots-map withdrawal-root)) (err ERR_INVALID_MERKLE_ROOT))\n\n ;; Check that this withdrawal leaf has not been processed before\n (asserts!\n (is-none\n (map-get? 
processed-withdrawal-leaves-map\n { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root }))\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED))\n\n (let ((calculated-withdrawal-root (fold hash-help sibling-hashes withdrawal-leaf-hash))\n (roots-match (is-eq calculated-withdrawal-root withdrawal-root)))\n (if roots-match\n (ok true)\n (err ERR_MERKLE_ROOT_DOES_NOT_MATCH))\n )\n )\n)\n\n;; This function should be called after the asset in question has been transferred.\n;; It adds the withdrawal leaf hash to a map of processed leaves. This ensures that\n;; this withdrawal leaf can't be used again to withdraw additional funds.\n;; Returns bool\n(define-private (finish-withdraw\n (withdraw-info {\n withdrawal-leaf-hash: (buff 32),\n withdrawal-root-hash: (buff 32)\n })\n )\n (map-insert processed-withdrawal-leaves-map withdraw-info true)\n)\n", - "contract_identifier": "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9.subnet-v1" - }, - "type": "ContractDeployment" - }, - "nonce": 33, - "position": { - "index": 1 - }, - "proof": null, - "raw_tx": 
"0x808000000004003e44ad50f99edc5e6cc5543b636284186894a008000000000000002100000000000b71b00000a84691e27fd2d46475230707a657ef7ddf6de7a4b06a1ac2049384c6474e73f85ee4ce205e0904160adaa160498df02db1782f898b5905db4d249b7025f6604c030100000000060218666169722d616d6172616e74682d7268696e6f6365726f73000005103b3b2068656c6c6f2d776f726c6420636f6e74726163740a0a28646566696e652d636f6e7374616e742073656e6465722027535a324a365a593438475631455a35563256355242394d5036365357383650594b4b51394836445052290a28646566696e652d636f6e7374616e7420726563697069656e742027534d324a365a593438475631455a35563256355242394d5036365357383650594b4b51565838583047290a0a28646566696e652d66756e6769626c652d746f6b656e206e6f76656c2d746f6b656e2d3139290a28626567696e202866742d6d696e743f206e6f76656c2d746f6b656e2d3139207531322073656e64657229290a28626567696e202866742d7472616e736665723f206e6f76656c2d746f6b656e2d31392075322073656e64657220726563697069656e7429290a0a28646566696e652d6e6f6e2d66756e6769626c652d746f6b656e2068656c6c6f2d6e66742075696e74290a28626567696e20286e66742d6d696e743f2068656c6c6f2d6e66742075312073656e64657229290a28626567696e20286e66742d6d696e743f2068656c6c6f2d6e66742075322073656e64657229290a28626567696e20286e66742d7472616e736665723f2068656c6c6f2d6e66742075312073656e64657220726563697069656e7429290a0a28646566696e652d7075626c69632028746573742d656d69742d6576656e74290a2020202028626567696e0a2020202020202020287072696e7420224576656e74212048656c6c6f20776f726c6422290a2020202020202020286f6b2075312929290a28626567696e2028746573742d656d69742d6576656e7429290a0a28646566696e652d7075626c69632028746573742d6576656e742d7479706573290a2020202028626567696e0a202020202020202028756e777261702d70616e6963202866742d6d696e743f206e6f76656c2d746f6b656e2d313920753320726563697069656e7429290a202020202020202028756e777261702d70616e696320286e66742d6d696e743f2068656c6c6f2d6e667420753220726563697069656e7429290a202020202020202028756e777261702d70616e696320287374782d7472616e736665723f207536302074782d73656e6465722027535a324a365a593438475631455a35563256355242394d5
036365357383650594b4b5139483644505229290a202020202020202028756e777261702d70616e696320287374782d6275726e3f207532302074782d73656e64657229290a2020202020202020286f6b2075312929290a0a28646566696e652d6d61702073746f7265207b6b65793a202862756666203332297d207b76616c75653a202862756666203332297d290a28646566696e652d7075626c696320286765742d76616c756520286b65792028627566662033322929290a2020202028626567696e0a2020202020202020286d6174636820286d61702d6765743f2073746f7265207b6b65793a206b65797d290a202020202020202020202020656e74727920286f6b20286765742076616c756520656e74727929290a202020202020202020202020286572722030292929290a28646566696e652d7075626c696320287365742d76616c756520286b65792028627566662033322929202876616c75652028627566662033322929290a2020202028626567696e0a2020202020202020286d61702d7365742073746f7265207b6b65793a206b65797d207b76616c75653a2076616c75657d290a2020202020202020286f6b207531292929", - "receipt": { - "contract_calls_stack": [], - "events": [], - "mutated_assets_radius": [], - "mutated_contracts_radius": [ - "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9.subnet-v1" + "metadata": { + "description": "", + "execution_cost": { + "read_count": 8, + "read_length": 6, + "runtime": 84581, + "write_count": 13, + "write_length": 1612 + }, + "fee": 750000, + "kind": { + "data": { + "code": ";; The .subnet contract\n\n(define-constant CONTRACT_ADDRESS (as-contract tx-sender))\n\n;; Error codes\n(define-constant ERR_BLOCK_ALREADY_COMMITTED 1)\n(define-constant ERR_INVALID_MINER 2)\n(define-constant ERR_CONTRACT_CALL_FAILED 3)\n(define-constant ERR_TRANSFER_FAILED 4)\n(define-constant ERR_DISALLOWED_ASSET 5)\n(define-constant ERR_ASSET_ALREADY_ALLOWED 6)\n(define-constant ERR_MERKLE_ROOT_DOES_NOT_MATCH 7)\n(define-constant ERR_INVALID_MERKLE_ROOT 8)\n(define-constant ERR_WITHDRAWAL_ALREADY_PROCESSED 9)\n(define-constant ERR_VALIDATION_FAILED 10)\n;;; The value supplied for `target-chain-tip` does not match the current chain tip.\n(define-constant ERR_INVALID_CHAIN_TIP 11)\n;;; The contract 
was called before reaching this-chain height reaches 1.\n(define-constant ERR_CALLED_TOO_EARLY 12)\n(define-constant ERR_MINT_FAILED 13)\n(define-constant ERR_ATTEMPT_TO_TRANSFER_ZERO_AMOUNT 14)\n(define-constant ERR_IN_COMPUTATION 15)\n;; The contract does not own this NFT to withdraw it.\n(define-constant ERR_NFT_NOT_OWNED_BY_CONTRACT 16)\n(define-constant ERR_VALIDATION_LEAF_FAILED 30)\n\n;; Map from Stacks block height to block commit\n(define-map block-commits uint (buff 32))\n;; Map recording withdrawal roots\n(define-map withdrawal-roots-map (buff 32) bool)\n;; Map recording processed withdrawal leaves\n(define-map processed-withdrawal-leaves-map { withdrawal-leaf-hash: (buff 32), withdrawal-root-hash: (buff 32) } bool)\n\n;; principal that can commit blocks\n(define-data-var miner principal tx-sender)\n;; principal that can register contracts\n(define-data-var admin principal 'ST167FDXCJGS54J1T0J42VTX46G0QQQFRJGBK28RN)\n\n;; Map of allowed contracts for asset transfers - maps L1 contract principal to L2 contract principal\n(define-map allowed-contracts principal principal)\n\n;; Use trait declarations\n(use-trait nft-trait 'ST1NXBK3K5YYMD6FD41MVNP3JS1GABZ8TRVX023PT.nft-trait.nft-trait)\n(use-trait ft-trait 'ST1NXBK3K5YYMD6FD41MVNP3JS1GABZ8TRVX023PT.sip-010-trait-ft-standard.sip-010-trait)\n(use-trait mint-from-subnet-trait .subnet-traits-v1.mint-from-subnet-trait)\n\n;; Update the miner for this contract.\n(define-public (update-miner (new-miner principal))\n (begin\n (asserts! (is-eq tx-sender (var-get miner)) (err ERR_INVALID_MINER))\n (ok (var-set miner new-miner))\n )\n)\n\n;; Register a new FT contract to be supported by this subnet.\n(define-public (register-new-ft-contract (ft-contract ) (l2-contract principal))\n (begin\n ;; Verify that tx-sender is an authorized admin\n (asserts! (is-admin tx-sender) (err ERR_INVALID_MINER))\n\n ;; Set up the assets that the contract is allowed to transfer\n (asserts! 
(map-insert allowed-contracts (contract-of ft-contract) l2-contract)\n (err ERR_ASSET_ALREADY_ALLOWED))\n\n (print {\n event: \"register-contract\",\n asset-type: \"ft\",\n l1-contract: (contract-of ft-contract),\n l2-contract: l2-contract\n })\n\n (ok true)\n )\n)\n\n;; Register a new NFT contract to be supported by this subnet.\n(define-public (register-new-nft-contract (nft-contract ) (l2-contract principal))\n (begin\n ;; Verify that tx-sender is an authorized admin\n (asserts! (is-admin tx-sender) (err ERR_INVALID_MINER))\n\n ;; Set up the assets that the contract is allowed to transfer\n (asserts! (map-insert allowed-contracts (contract-of nft-contract) l2-contract)\n (err ERR_ASSET_ALREADY_ALLOWED))\n\n (print {\n event: \"register-contract\",\n asset-type: \"nft\",\n l1-contract: (contract-of nft-contract),\n l2-contract: l2-contract\n })\n\n (ok true)\n )\n)\n\n;; Helper function: returns a boolean indicating whether the given principal is a miner\n;; Returns bool\n(define-private (is-miner (miner-to-check principal))\n (is-eq miner-to-check (var-get miner))\n)\n\n;; Helper function: returns a boolean indicating whether the given principal is an admin\n;; Returns bool\n(define-private (is-admin (addr-to-check principal))\n (is-eq addr-to-check (var-get admin))\n)\n\n;; Helper function: determines whether the commit-block operation satisfies pre-conditions\n;; listed in `commit-block`.\n;; Returns response\n(define-private (can-commit-block? (commit-block-height uint) (target-chain-tip (buff 32)))\n (begin\n ;; check no block has been committed at this height\n (asserts! (is-none (map-get? block-commits commit-block-height)) (err ERR_BLOCK_ALREADY_COMMITTED))\n\n ;; check that `target-chain-tip` matches the burn chain tip\n (asserts! (is-eq\n target-chain-tip\n (unwrap! (get-block-info? id-header-hash (- block-height u1)) (err ERR_CALLED_TOO_EARLY)) )\n (err ERR_INVALID_CHAIN_TIP))\n\n ;; check that the tx sender is one of the miners\n (asserts! 
(is-miner tx-sender) (err ERR_INVALID_MINER))\n\n ;; check that the miner called this contract directly\n (asserts! (is-miner contract-caller) (err ERR_INVALID_MINER))\n\n (ok true)\n )\n)\n\n;; Helper function: modifies the block-commits map with a new commit and prints related info\n;; Returns response<(buff 32), ?>\n(define-private (inner-commit-block (block (buff 32)) (commit-block-height uint) (withdrawal-root (buff 32)))\n (begin\n (map-set block-commits commit-block-height block)\n (map-set withdrawal-roots-map withdrawal-root true)\n (print {\n event: \"block-commit\",\n block-commit: block,\n withdrawal-root: withdrawal-root,\n block-height: commit-block-height\n })\n (ok block)\n )\n)\n\n;; The subnet miner calls this function to commit a block at a particular height.\n;; `block` is the hash of the block being submitted.\n;; `target-chain-tip` is the `id-header-hash` of the burn block (i.e., block on\n;; this chain) that the miner intends to build off.\n;;\n;; Fails if:\n;; 1) we have already committed at this block height\n;; 2) `target-chain-tip` is not the burn chain tip (i.e., on this chain)\n;; 3) the sender is not a miner\n(define-public (commit-block (block (buff 32)) (target-chain-tip (buff 32)) (withdrawal-root (buff 32)))\n (let ((commit-block-height block-height))\n (try! (can-commit-block? commit-block-height target-chain-tip))\n (inner-commit-block block commit-block-height withdrawal-root)\n )\n)\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; FOR NFT ASSET TRANSFERS\n\n;; Helper function that transfers the specified NFT from the given sender to the given recipient.\n;; Returns response\n(define-private (inner-transfer-nft-asset\n (nft-contract )\n (id uint)\n (sender principal)\n (recipient principal)\n )\n (let (\n (call-result (contract-call? nft-contract transfer id sender recipient))\n (transfer-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! 
transfer-result (err ERR_TRANSFER_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-mint-nft-asset\n (nft-mint-contract )\n (id uint)\n (sender principal)\n (recipient principal)\n )\n (let (\n (call-result (as-contract (contract-call? nft-mint-contract mint-from-subnet id sender recipient)))\n (mint-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! mint-result (err ERR_MINT_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-transfer-or-mint-nft-asset\n (nft-contract )\n (nft-mint-contract )\n (id uint)\n (recipient principal)\n )\n (let (\n (call-result (contract-call? nft-contract get-owner id))\n (nft-owner (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n (contract-owns-nft (is-eq nft-owner (some CONTRACT_ADDRESS)))\n (no-owner (is-eq nft-owner none))\n )\n\n (if contract-owns-nft\n (inner-transfer-nft-asset nft-contract id CONTRACT_ADDRESS recipient)\n (if no-owner\n ;; Try minting the asset if there is no existing owner of this NFT\n (inner-mint-nft-asset nft-mint-contract id CONTRACT_ADDRESS recipient)\n ;; In this case, a principal other than this contract owns this NFT, so minting is not possible\n (err ERR_MINT_FAILED)\n )\n )\n )\n)\n\n;; A user calls this function to deposit an NFT into the contract.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (deposit-nft-asset\n (nft-contract )\n (id uint)\n (sender principal)\n )\n (let (\n ;; Check that the asset belongs to the allowed-contracts map\n (subnet-contract-id (unwrap! (map-get? allowed-contracts (contract-of nft-contract)) (err ERR_DISALLOWED_ASSET)))\n )\n\n ;; Try to transfer the NFT to this contract\n (asserts! (try! 
(inner-transfer-nft-asset nft-contract id sender CONTRACT_ADDRESS)) (err ERR_TRANSFER_FAILED))\n\n ;; Emit a print event - the node consumes this\n (print {\n event: \"deposit-nft\",\n l1-contract-id: (as-contract nft-contract),\n nft-id: id,\n sender: sender,\n subnet-contract-id: subnet-contract-id,\n })\n\n (ok true)\n )\n)\n\n\n;; Helper function for `withdraw-nft-asset`\n;; Returns response\n(define-public (inner-withdraw-nft-asset\n (nft-contract )\n (l2-contract principal)\n (id uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (nft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let ((hashes-are-valid (check-withdrawal-hashes withdrawal-root withdrawal-leaf-hash sibling-hashes)))\n\n (asserts! (try! hashes-are-valid) (err ERR_VALIDATION_FAILED))\n\n ;; check that the withdrawal request data matches the supplied leaf hash\n (asserts! (is-eq withdrawal-leaf-hash\n (leaf-hash-withdraw-nft l2-contract id recipient withdrawal-id height))\n (err ERR_VALIDATION_LEAF_FAILED))\n\n (asserts!\n (try!\n (match nft-mint-contract\n mint-contract (as-contract (inner-transfer-or-mint-nft-asset nft-contract mint-contract id recipient))\n (as-contract (inner-transfer-without-mint-nft-asset nft-contract id recipient))\n )\n )\n (err ERR_TRANSFER_FAILED)\n )\n\n (asserts!\n (finish-withdraw { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root })\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED)\n )\n\n (ok true)\n )\n)\n\n;; A user calls this function to withdraw the specified NFT from this contract.\n;; In order for this withdrawal to go through, the given withdrawal must have been included\n;; in a withdrawal Merkle tree a subnet miner submitted. The user must provide the leaf\n;; hash of their withdrawal and the root hash of the specific Merkle tree their withdrawal\n;; is included in. 
They must also provide a list of sibling hashes. The withdraw function\n;; uses the provided hashes to ensure the requested withdrawal is valid.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (withdraw-nft-asset\n (nft-contract )\n (id uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (nft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let (\n ;; Check that the asset belongs to the allowed-contracts map\n (l2-contract (unwrap! (map-get? allowed-contracts (contract-of nft-contract)) (err ERR_DISALLOWED_ASSET)))\n )\n (asserts!\n (try! (inner-withdraw-nft-asset\n nft-contract\n l2-contract\n id\n recipient\n withdrawal-id\n height\n nft-mint-contract\n withdrawal-root\n withdrawal-leaf-hash\n sibling-hashes\n ))\n (err ERR_TRANSFER_FAILED)\n )\n\n ;; Emit a print event\n (print {\n event: \"withdraw-nft\",\n l1-contract-id: (as-contract nft-contract),\n nft-id: id,\n recipient: recipient\n })\n\n (ok true)\n )\n)\n\n\n;; Like `inner-transfer-or-mint-nft-asset but without allowing or requiring a mint function. In order to withdraw, the user must\n;; have the appropriate balance.\n(define-private (inner-transfer-without-mint-nft-asset\n (nft-contract )\n (id uint)\n (recipient principal)\n )\n (let (\n (call-result (contract-call? nft-contract get-owner id))\n (nft-owner (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n (contract-owns-nft (is-eq nft-owner (some CONTRACT_ADDRESS)))\n )\n\n (asserts! 
contract-owns-nft (err ERR_NFT_NOT_OWNED_BY_CONTRACT))\n (inner-transfer-nft-asset nft-contract id CONTRACT_ADDRESS recipient)\n )\n)\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; FOR FUNGIBLE TOKEN ASSET TRANSFERS\n\n;; Helper function that transfers a specified amount of the fungible token from the given sender to the given recipient.\n;; Returns response\n(define-private (inner-transfer-ft-asset\n (ft-contract )\n (amount uint)\n (sender principal)\n (recipient principal)\n (memo (optional (buff 34)))\n )\n (let (\n (call-result (contract-call? ft-contract transfer amount sender recipient memo))\n (transfer-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; FIXME: SIP-010 doesn't require that transfer returns (ok true) on success, so is this check necessary?\n ;; Check that the transfer succeeded\n (asserts! transfer-result (err ERR_TRANSFER_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-mint-ft-asset\n (ft-mint-contract )\n (amount uint)\n (sender principal)\n (recipient principal)\n )\n (let (\n (call-result (as-contract (contract-call? ft-mint-contract mint-from-subnet amount sender recipient)))\n (mint-result (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! mint-result (err ERR_MINT_FAILED))\n\n (ok true)\n )\n)\n\n(define-private (inner-transfer-or-mint-ft-asset\n (ft-contract )\n (ft-mint-contract )\n (amount uint)\n (recipient principal)\n (memo (optional (buff 34)))\n )\n (let (\n (call-result (contract-call? ft-contract get-balance CONTRACT_ADDRESS))\n (contract-ft-balance (unwrap! call-result (err ERR_CONTRACT_CALL_FAILED)))\n (contract-owns-enough (>= contract-ft-balance amount))\n (amount-to-transfer (if contract-owns-enough amount contract-ft-balance))\n (amount-to-mint (- amount amount-to-transfer))\n )\n\n ;; Check that the total balance between the transfer and mint is equal to the original balance\n (asserts! 
(is-eq amount (+ amount-to-transfer amount-to-mint)) (err ERR_IN_COMPUTATION))\n\n (and\n (> amount-to-transfer u0)\n (try! (inner-transfer-ft-asset ft-contract amount-to-transfer CONTRACT_ADDRESS recipient memo))\n )\n (and\n (> amount-to-mint u0)\n (try! (inner-mint-ft-asset ft-mint-contract amount-to-mint CONTRACT_ADDRESS recipient))\n )\n\n (ok true)\n )\n)\n\n;; A user calls this function to deposit a fungible token into the contract.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (deposit-ft-asset\n (ft-contract )\n (amount uint)\n (sender principal)\n (memo (optional (buff 34)))\n )\n (let (\n ;; Check that the asset belongs to the allowed-contracts map\n (subnet-contract-id (unwrap! (map-get? allowed-contracts (contract-of ft-contract)) (err ERR_DISALLOWED_ASSET)))\n )\n ;; Try to transfer the FT to this contract\n (asserts! (try! (inner-transfer-ft-asset ft-contract amount sender CONTRACT_ADDRESS memo)) (err ERR_TRANSFER_FAILED))\n\n (let (\n (ft-name (unwrap! (contract-call? ft-contract get-name) (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Emit a print event - the node consumes this\n (print {\n event: \"deposit-ft\",\n l1-contract-id: (as-contract ft-contract),\n ft-name: ft-name,\n ft-amount: amount,\n sender: sender,\n subnet-contract-id: subnet-contract-id,\n })\n )\n\n (ok true)\n )\n)\n\n;; This function performs validity checks related to the withdrawal and performs the withdrawal as well.\n;; Returns response\n(define-private (inner-withdraw-ft-asset\n (ft-contract )\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (memo (optional (buff 34)))\n (ft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let ((hashes-are-valid (check-withdrawal-hashes withdrawal-root withdrawal-leaf-hash sibling-hashes)))\n (asserts! (try! 
hashes-are-valid) (err ERR_VALIDATION_FAILED))\n\n ;; check that the withdrawal request data matches the supplied leaf hash\n (asserts! (is-eq withdrawal-leaf-hash\n (leaf-hash-withdraw-ft (contract-of ft-contract) amount recipient withdrawal-id height))\n (err ERR_VALIDATION_LEAF_FAILED))\n\n (asserts!\n (try!\n (match ft-mint-contract\n mint-contract (as-contract (inner-transfer-or-mint-ft-asset ft-contract mint-contract amount recipient memo))\n (as-contract (inner-transfer-ft-asset ft-contract amount CONTRACT_ADDRESS recipient memo))\n )\n )\n (err ERR_TRANSFER_FAILED)\n )\n\n (asserts!\n (finish-withdraw { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root })\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED))\n\n (ok true)\n )\n)\n\n;; A user can call this function to withdraw some amount of a fungible token asset from the\n;; contract and send it to a recipient.\n;; In order for this withdrawal to go through, the given withdrawal must have been included\n;; in a withdrawal Merkle tree a subnet miner submitted. The user must provide the leaf\n;; hash of their withdrawal and the root hash of the specific Merkle tree their withdrawal\n;; is included in. They must also provide a list of sibling hashes. The withdraw function\n;; uses the provided hashes to ensure the requested withdrawal is valid.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (withdraw-ft-asset\n (ft-contract )\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (memo (optional (buff 34)))\n (ft-mint-contract (optional ))\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (begin\n ;; Check that the withdraw amount is positive\n (asserts! (> amount u0) (err ERR_ATTEMPT_TO_TRANSFER_ZERO_AMOUNT))\n\n ;; Check that the asset belongs to the allowed-contracts map\n (unwrap! (map-get? 
allowed-contracts (contract-of ft-contract)) (err ERR_DISALLOWED_ASSET))\n\n (asserts!\n (try! (inner-withdraw-ft-asset\n ft-contract\n amount\n recipient\n withdrawal-id\n height\n memo\n ft-mint-contract\n withdrawal-root\n withdrawal-leaf-hash\n sibling-hashes))\n (err ERR_TRANSFER_FAILED)\n )\n\n (let (\n (ft-name (unwrap! (contract-call? ft-contract get-name) (err ERR_CONTRACT_CALL_FAILED)))\n )\n ;; Emit a print event\n (print {\n event: \"withdraw-ft\",\n l1-contract-id: (as-contract ft-contract),\n ft-name: ft-name,\n ft-amount: amount,\n recipient: recipient,\n })\n )\n\n (ok true)\n )\n)\n\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; FOR STX TRANSFERS\n\n\n;; Helper function that transfers the given amount from the specified fungible token from the given sender to the given recipient.\n;; Returns response\n(define-private (inner-transfer-stx (amount uint) (sender principal) (recipient principal))\n (let (\n (call-result (stx-transfer? amount sender recipient))\n (transfer-result (unwrap! call-result (err ERR_TRANSFER_FAILED)))\n )\n ;; Check that the transfer succeeded\n (asserts! transfer-result (err ERR_TRANSFER_FAILED))\n\n (ok true)\n )\n)\n\n;; A user calls this function to deposit STX into the contract.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (deposit-stx (amount uint) (sender principal))\n (begin\n ;; Try to transfer the STX to this contract\n (asserts! (try! 
(inner-transfer-stx amount sender CONTRACT_ADDRESS)) (err ERR_TRANSFER_FAILED))\n\n ;; Emit a print event - the node consumes this\n (print { event: \"deposit-stx\", sender: sender, amount: amount })\n\n (ok true)\n )\n)\n\n(define-read-only (leaf-hash-withdraw-stx\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n )\n (sha512/256 (concat 0x00 (unwrap-panic (to-consensus-buff?\n {\n type: \"stx\",\n amount: amount,\n recipient: recipient,\n withdrawal-id: withdrawal-id,\n height: height\n })))\n )\n)\n\n(define-read-only (leaf-hash-withdraw-nft\n (asset-contract principal)\n (nft-id uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n )\n (sha512/256 (concat 0x00 (unwrap-panic (to-consensus-buff?\n {\n type: \"nft\",\n nft-id: nft-id,\n asset-contract: asset-contract,\n recipient: recipient,\n withdrawal-id: withdrawal-id,\n height: height\n })))\n )\n)\n\n(define-read-only (leaf-hash-withdraw-ft\n (asset-contract principal)\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n )\n (sha512/256 (concat 0x00 (unwrap-panic (to-consensus-buff?\n {\n type: \"ft\",\n amount: amount,\n asset-contract: asset-contract,\n recipient: recipient,\n withdrawal-id: withdrawal-id,\n height: height\n })))\n )\n)\n\n;; A user calls this function to withdraw STX from this contract.\n;; In order for this withdrawal to go through, the given withdrawal must have been included\n;; in a withdrawal Merkle tree a subnet miner submitted. The user must provide the leaf\n;; hash of their withdrawal and the root hash of the specific Merkle tree their withdrawal\n;; is included in. They must also provide a list of sibling hashes. 
The withdraw function\n;; uses the provided hashes to ensure the requested withdrawal is valid.\n;; The function emits a print with details of this event.\n;; Returns response\n(define-public (withdraw-stx\n (amount uint)\n (recipient principal)\n (withdrawal-id uint)\n (height uint)\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (let ((hashes-are-valid (check-withdrawal-hashes withdrawal-root withdrawal-leaf-hash sibling-hashes)))\n\n (asserts! (try! hashes-are-valid) (err ERR_VALIDATION_FAILED))\n ;; check that the withdrawal request data matches the supplied leaf hash\n (asserts! (is-eq withdrawal-leaf-hash\n (leaf-hash-withdraw-stx amount recipient withdrawal-id height))\n (err ERR_VALIDATION_LEAF_FAILED))\n\n (asserts! (try! (as-contract (inner-transfer-stx amount tx-sender recipient))) (err ERR_TRANSFER_FAILED))\n\n (asserts!\n (finish-withdraw { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root })\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED))\n\n ;; Emit a print event\n (print { event: \"withdraw-stx\", recipient: recipient, amount: amount })\n\n (ok true)\n )\n)\n\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; GENERAL WITHDRAWAL FUNCTIONS\n\n;; This function concats the two given hashes in the correct order. 
It also prepends the buff `0x01`, which is\n;; a tag denoting a node (versus a leaf).\n;; Returns a buff\n(define-private (create-node-hash\n (curr-hash (buff 32))\n (sibling-hash (buff 32))\n (is-sibling-left-side bool)\n )\n (let (\n (concatted-hash (if is-sibling-left-side\n (concat sibling-hash curr-hash)\n (concat curr-hash sibling-hash)\n ))\n )\n\n (concat 0x01 concatted-hash)\n )\n)\n\n;; This function hashes the curr hash with its sibling hash.\n;; Returns (buff 32)\n(define-private (hash-help\n (sibling {\n hash: (buff 32),\n is-left-side: bool,\n })\n (curr-node-hash (buff 32))\n )\n (let (\n (sibling-hash (get hash sibling))\n (is-sibling-left-side (get is-left-side sibling))\n (new-buff (create-node-hash curr-node-hash sibling-hash is-sibling-left-side))\n )\n (sha512/256 new-buff)\n )\n)\n\n;; This function checks:\n;; - That the provided withdrawal root matches a previously submitted one (passed to the function `commit-block`)\n;; - That the computed withdrawal root matches a previous valid withdrawal root\n;; - That the given withdrawal leaf hash has not been previously processed\n;; Returns response\n(define-private (check-withdrawal-hashes\n (withdrawal-root (buff 32))\n (withdrawal-leaf-hash (buff 32))\n (sibling-hashes (list 50 {\n hash: (buff 32),\n is-left-side: bool,\n }))\n )\n (begin\n ;; Check that the user submitted a valid withdrawal root\n (asserts! (is-some (map-get? withdrawal-roots-map withdrawal-root)) (err ERR_INVALID_MERKLE_ROOT))\n\n ;; Check that this withdrawal leaf has not been processed before\n (asserts!\n (is-none\n (map-get? 
processed-withdrawal-leaves-map\n { withdrawal-leaf-hash: withdrawal-leaf-hash, withdrawal-root-hash: withdrawal-root }))\n (err ERR_WITHDRAWAL_ALREADY_PROCESSED))\n\n (let ((calculated-withdrawal-root (fold hash-help sibling-hashes withdrawal-leaf-hash))\n (roots-match (is-eq calculated-withdrawal-root withdrawal-root)))\n (if roots-match\n (ok true)\n (err ERR_MERKLE_ROOT_DOES_NOT_MATCH))\n )\n )\n)\n\n;; This function should be called after the asset in question has been transferred.\n;; It adds the withdrawal leaf hash to a map of processed leaves. This ensures that\n;; this withdrawal leaf can't be used again to withdraw additional funds.\n;; Returns bool\n(define-private (finish-withdraw\n (withdraw-info {\n withdrawal-leaf-hash: (buff 32),\n withdrawal-root-hash: (buff 32)\n })\n )\n (map-insert processed-withdrawal-leaves-map withdraw-info true)\n)\n", + "contract_identifier": "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9.subnet-v1" + }, + "type": "ContractDeployment" + }, + "nonce": 33, + "position": { + "index": 1 + }, + "contract_abi": { + "clarity_version": "Clarity2", + "epoch": "Epoch24", + "functions": [ + { + "access": "private", + "args": [{ "name": "tid", "type": "uint128" }], + "name": "airdrop", + "outputs": { "type": "bool" } + } + ], + "fungible_tokens": [{ "name": "MEME" }], + "maps": [ + { + "key": { + "tuple": [ + { + "name": "name", + "type": { "buffer": { "length": 48 } } + }, + { + "name": "namespace", + "type": { "buffer": { "length": 20 } } + } ] - }, - "result": "(ok true)", - "sender": "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9", - "success": true + }, + "name": "map_claimed_bns_note", + "value": "bool" + } + ], + "non_fungible_tokens": [], + "variables": [ + { + "access": "constant", + "name": "AIRDROP_COUNT_PER_MEMBER", + "type": "uint128" + } + ] + }, + "proof": null, + "raw_tx": 
"0x808000000004003e44ad50f99edc5e6cc5543b636284186894a008000000000000002100000000000b71b00000a84691e27fd2d46475230707a657ef7ddf6de7a4b06a1ac2049384c6474e73f85ee4ce205e0904160adaa160498df02db1782f898b5905db4d249b7025f6604c030100000000060218666169722d616d6172616e74682d7268696e6f6365726f73000005103b3b2068656c6c6f2d776f726c6420636f6e74726163740a0a28646566696e652d636f6e7374616e742073656e6465722027535a324a365a593438475631455a35563256355242394d5036365357383650594b4b51394836445052290a28646566696e652d636f6e7374616e7420726563697069656e742027534d324a365a593438475631455a35563256355242394d5036365357383650594b4b51565838583047290a0a28646566696e652d66756e6769626c652d746f6b656e206e6f76656c2d746f6b656e2d3139290a28626567696e202866742d6d696e743f206e6f76656c2d746f6b656e2d3139207531322073656e64657229290a28626567696e202866742d7472616e736665723f206e6f76656c2d746f6b656e2d31392075322073656e64657220726563697069656e7429290a0a28646566696e652d6e6f6e2d66756e6769626c652d746f6b656e2068656c6c6f2d6e66742075696e74290a28626567696e20286e66742d6d696e743f2068656c6c6f2d6e66742075312073656e64657229290a28626567696e20286e66742d6d696e743f2068656c6c6f2d6e66742075322073656e64657229290a28626567696e20286e66742d7472616e736665723f2068656c6c6f2d6e66742075312073656e64657220726563697069656e7429290a0a28646566696e652d7075626c69632028746573742d656d69742d6576656e74290a2020202028626567696e0a2020202020202020287072696e7420224576656e74212048656c6c6f20776f726c6422290a2020202020202020286f6b2075312929290a28626567696e2028746573742d656d69742d6576656e7429290a0a28646566696e652d7075626c69632028746573742d6576656e742d7479706573290a2020202028626567696e0a202020202020202028756e777261702d70616e6963202866742d6d696e743f206e6f76656c2d746f6b656e2d313920753320726563697069656e7429290a202020202020202028756e777261702d70616e696320286e66742d6d696e743f2068656c6c6f2d6e667420753220726563697069656e7429290a202020202020202028756e777261702d70616e696320287374782d7472616e736665723f207536302074782d73656e6465722027535a324a365a593438475631455a35563256355242394d5
036365357383650594b4b5139483644505229290a202020202020202028756e777261702d70616e696320287374782d6275726e3f207532302074782d73656e64657229290a2020202020202020286f6b2075312929290a0a28646566696e652d6d61702073746f7265207b6b65793a202862756666203332297d207b76616c75653a202862756666203332297d290a28646566696e652d7075626c696320286765742d76616c756520286b65792028627566662033322929290a2020202028626567696e0a2020202020202020286d6174636820286d61702d6765743f2073746f7265207b6b65793a206b65797d290a202020202020202020202020656e74727920286f6b20286765742076616c756520656e74727929290a202020202020202020202020286572722030292929290a28646566696e652d7075626c696320287365742d76616c756520286b65792028627566662033322929202876616c75652028627566662033322929290a2020202028626567696e0a2020202020202020286d61702d7365742073746f7265207b6b65793a206b65797d207b76616c75653a2076616c75657d290a2020202020202020286f6b207531292929", + "receipt": { + "contract_calls_stack": [], + "events": [], + "mutated_assets_radius": [], + "mutated_contracts_radius": [ + "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9.subnet-v1" + ] }, - "operations": [], - "transaction_identifier": { - "hash": "0x93c89ffdac77ed2ba52611563bd491f56f5d558e23d311a105663ae32bdf18e5" - } -} \ No newline at end of file + "result": "(ok true)", + "sender": "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9", + "success": true + }, + "operations": [], + "transaction_identifier": { + "hash": "0x93c89ffdac77ed2ba52611563bd491f56f5d558e23d311a105663ae32bdf18e5" + } +} diff --git a/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/occurrence.json b/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/occurrence.json index 53ba07e47..76b8b30d2 100644 --- a/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/occurrence.json +++ b/components/chainhook-sdk/src/chainhooks/tests/fixtures/stacks/testnet/occurrence.json @@ -45,6 +45,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -123,6 +124,7 @@ 
}, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -200,6 +202,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -278,6 +281,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -355,6 +359,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -434,6 +439,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -512,6 +518,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -590,6 +597,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -669,6 +677,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -747,6 +756,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -825,6 +835,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -903,6 +914,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -981,6 +993,7 @@ }, "type": "ContractCall" }, + "nonce": 4064, "position": { "index": 1 }, @@ -1064,6 +1077,7 @@ }, "type": "ContractDeployment" }, + "nonce": 33, "position": { "index": 1 }, diff --git a/components/chainhook-sdk/src/chainhooks/tests/mod.rs b/components/chainhook-sdk/src/chainhooks/tests/mod.rs index 021c4ae70..cf564b067 100644 --- a/components/chainhook-sdk/src/chainhooks/tests/mod.rs +++ b/components/chainhook-sdk/src/chainhooks/tests/mod.rs @@ -14,7 +14,7 @@ use super::{ StacksTrait, }, }; -use crate::utils::Context; +use crate::{chainhooks::stacks::serialize_stacks_payload_to_json, utils::Context}; use crate::{ chainhooks::{ tests::fixtures::{get_expected_occurrence, get_test_event_by_type}, @@ -348,6 +348,7 @@ fn test_stacks_predicates( expire_after_occurrence: None, capture_all_events: None, decode_clarity_values: None, + include_contract_abi: None, predicate: predicate, action: HookAction::Noop, enabled: true, @@ -427,6 +428,7 @@ fn 
test_stacks_predicate_contract_deploy(predicate: StacksPredicate, expected_ap expire_after_occurrence: None, capture_all_events: None, decode_clarity_values: None, + include_contract_abi: None, predicate: predicate, action: HookAction::Noop, enabled: true, @@ -447,6 +449,114 @@ fn test_stacks_predicate_contract_deploy(predicate: StacksPredicate, expected_ap } } +#[test] +fn verify_optional_addition_of_contract_abi() { + // "mine" two blocks + // - one contract deploy (which should have a contract abi) and + // - one contract call (which should not) + let new_blocks = vec![ + StacksBlockUpdate { + block: fixtures::build_stacks_testnet_block_with_contract_deployment(), + parent_microblocks_to_apply: vec![], + parent_microblocks_to_rollback: vec![], + }, + StacksBlockUpdate { + block: fixtures::build_stacks_testnet_block_with_contract_call(), + parent_microblocks_to_apply: vec![], + parent_microblocks_to_rollback: vec![], + }, + ]; + let event: StacksChainEvent = + StacksChainEvent::ChainUpdatedWithBlocks(StacksChainUpdatedWithBlocksData { + new_blocks, + confirmed_blocks: vec![], + }); + let mut contract_deploy_chainhook = StacksChainhookSpecification { + uuid: "contract-deploy".to_string(), + owner_uuid: None, + name: "".to_string(), + network: StacksNetwork::Testnet, + version: 1, + blocks: None, + start_block: None, + end_block: None, + expire_after_occurrence: None, + capture_all_events: None, + decode_clarity_values: None, + include_contract_abi: Some(true), + predicate: StacksPredicate::ContractDeployment( + StacksContractDeploymentPredicate::Deployer("*".to_string()), + ), + action: HookAction::Noop, + enabled: true, + expired_at: None, + }; + let contract_call_chainhook = StacksChainhookSpecification { + uuid: "contract-call".to_string(), + owner_uuid: None, + name: "".to_string(), + network: StacksNetwork::Testnet, + version: 1, + blocks: None, + start_block: None, + end_block: None, + expire_after_occurrence: None, + capture_all_events: None, + 
decode_clarity_values: None, + include_contract_abi: Some(true), + predicate: StacksPredicate::ContractCall(StacksContractCallBasedPredicate { + contract_identifier: "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9.subnet-v1".to_string(), + method: "commit-block".to_string(), + }), + action: HookAction::Noop, + enabled: true, + expired_at: None, + }; + + let predicates = vec![&contract_deploy_chainhook, &contract_call_chainhook]; + let (triggered, _blocks, _) = + evaluate_stacks_chainhooks_on_chain_event(&event, predicates, &Context::empty()); + assert_eq!(triggered.len(), 2); + + for t in triggered.into_iter() { + let result = serialize_stacks_payload_to_json(t, &HashMap::new(), &Context::empty()); + let result = result.as_object().unwrap(); + let uuid = result.get("chainhook").unwrap().get("uuid").unwrap(); + let apply_blocks = result.get("apply").unwrap(); + for block in apply_blocks.as_array().unwrap() { + let transactions = block.get("transactions").unwrap(); + for transaction in transactions.as_array().unwrap() { + let contract_abi = transaction.get("metadata").unwrap().get("contract_abi"); + if uuid == "contract-call" { + assert_eq!(contract_abi, None); + } else if uuid == "contract-deploy" { + assert!(contract_abi.is_some()) + } else { + unreachable!() + } + } + } + } + contract_deploy_chainhook.include_contract_abi = Some(false); + let predicates = vec![&contract_deploy_chainhook, &contract_call_chainhook]; + let (triggered, _blocks, _) = + evaluate_stacks_chainhooks_on_chain_event(&event, predicates, &Context::empty()); + assert_eq!(triggered.len(), 2); + + for t in triggered.into_iter() { + let result = serialize_stacks_payload_to_json(t, &HashMap::new(), &Context::empty()); + let result = result.as_object().unwrap(); + let apply_blocks = result.get("apply").unwrap(); + for block in apply_blocks.as_array().unwrap() { + let transactions = block.get("transactions").unwrap(); + for transaction in transactions.as_array().unwrap() { + let contract_abi = 
transaction.get("metadata").unwrap().get("contract_abi"); + assert_eq!(contract_abi, None); + } + } + } +} + #[test_case( StacksPredicate::ContractCall(StacksContractCallBasedPredicate { contract_identifier: "ST13F481SBR0R7Z6NMMH8YV2FJJYXA5JPA0AD3HP9.subnet-v1".to_string(), @@ -512,6 +622,7 @@ fn test_stacks_predicate_contract_call(predicate: StacksPredicate, expected_appl expire_after_occurrence: None, capture_all_events: None, decode_clarity_values: None, + include_contract_abi: None, predicate: predicate, action: HookAction::Noop, enabled: true, @@ -546,6 +657,7 @@ fn test_stacks_hook_action_noop() { expire_after_occurrence: None, capture_all_events: None, decode_clarity_values: None, + include_contract_abi: None, predicate: StacksPredicate::Txid(ExactMatchingRule::Equals( "0xb92c2ade84a8b85f4c72170680ae42e65438aea4db72ba4b2d6a6960f4141ce8".to_string(), )), @@ -603,6 +715,7 @@ fn test_stacks_hook_action_file_append() { expire_after_occurrence: None, capture_all_events: None, decode_clarity_values: Some(true), + include_contract_abi: None, predicate: StacksPredicate::Txid(ExactMatchingRule::Equals( "0xb92c2ade84a8b85f4c72170680ae42e65438aea4db72ba4b2d6a6960f4141ce8".to_string(), )), diff --git a/components/chainhook-sdk/src/chainhooks/types.rs b/components/chainhook-sdk/src/chainhooks/types.rs index 463615520..c26566d98 100644 --- a/components/chainhook-sdk/src/chainhooks/types.rs +++ b/components/chainhook-sdk/src/chainhooks/types.rs @@ -21,27 +21,6 @@ impl ChainhookConfig { } } - pub fn get_spec_with_uuid(&self, uuid: &str) -> Option { - let res = self - .stacks_chainhooks - .iter() - .filter(|spec| spec.uuid.eq(&uuid)) - .collect::>(); - if let Some(spec) = res.first() { - return Some(ChainhookSpecification::Stacks((*spec).clone())); - } - - let res = self - .bitcoin_chainhooks - .iter() - .filter(|spec| spec.uuid.eq(&uuid)) - .collect::>(); - if let Some(spec) = res.first() { - return Some(ChainhookSpecification::Bitcoin((*spec).clone())); - } - None - } - 
pub fn register_full_specification( &mut self, networks: (&BitcoinNetwork, &StacksNetwork), @@ -182,13 +161,6 @@ pub enum ChainhookSpecification { } impl ChainhookSpecification { - pub fn name(&self) -> &str { - match &self { - Self::Bitcoin(data) => &data.name, - Self::Stacks(data) => &data.name, - } - } - pub fn either_stx_or_btc_key(uuid: &str) -> String { format!("predicate:{}", uuid) } @@ -220,25 +192,6 @@ impl ChainhookSpecification { Self::Stacks(data) => &data.uuid, } } - - pub fn validate(&self) -> Result<(), String> { - match &self { - Self::Bitcoin(data) => { - let _ = data.action.validate()?; - } - Self::Stacks(data) => { - let _ = data.action.validate()?; - } - } - Ok(()) - } - - pub fn start_block(&self) -> Option { - match &self { - Self::Bitcoin(data) => data.start_block, - Self::Stacks(data) => data.start_block, - } - } } #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] @@ -410,6 +363,7 @@ impl StacksChainhookFullSpecification { capture_all_events: spec.capture_all_events, decode_clarity_values: spec.decode_clarity_values, expire_after_occurrence: spec.expire_after_occurrence, + include_contract_abi: spec.include_contract_abi, predicate: spec.predicate, action: spec.action, enabled: false, @@ -432,6 +386,8 @@ pub struct StacksChainhookNetworkSpecification { pub capture_all_events: Option, #[serde(skip_serializing_if = "Option::is_none")] pub decode_clarity_values: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub include_contract_abi: Option, #[serde(rename = "if_this")] pub predicate: StacksPredicate, #[serde(rename = "then_that")] @@ -732,6 +688,7 @@ pub struct StacksChainhookSpecification { pub capture_all_events: Option, #[serde(skip_serializing_if = "Option::is_none")] pub decode_clarity_values: Option, + pub include_contract_abi: Option, #[serde(rename = "predicate")] pub predicate: StacksPredicate, pub action: HookAction, diff --git a/components/chainhook-sdk/src/indexer/bitcoin/mod.rs 
b/components/chainhook-sdk/src/indexer/bitcoin/mod.rs index 573babf1b..f2f6b0884 100644 --- a/components/chainhook-sdk/src/indexer/bitcoin/mod.rs +++ b/components/chainhook-sdk/src/indexer/bitcoin/mod.rs @@ -17,7 +17,6 @@ use chainhook_types::{ StacksBlockCommitmentData, TransactionIdentifier, TransferSTXData, }; use hiro_system_kit::slog; -use rand::{thread_rng, Rng}; use reqwest::Client as HttpClient; use serde::Deserialize; @@ -230,124 +229,6 @@ pub async fn retrieve_block_hash( Ok(block_hash) } -pub async fn try_fetch_block_bytes_with_retry( - http_client: HttpClient, - block_height: u64, - bitcoin_config: BitcoinConfig, - ctx: Context, -) -> Result, String> { - let block_hash = - retrieve_block_hash_with_retry(&http_client, &block_height, &bitcoin_config, &ctx) - .await - .unwrap(); - - let mut errors_count = 0; - - let response = loop { - match fetch_block(&http_client, &block_hash, &bitcoin_config, &ctx).await { - Ok(result) => break result, - Err(_e) => { - errors_count += 1; - if errors_count > 1 { - ctx.try_log(|logger| { - slog::warn!( - logger, - "unable to fetch block #{block_hash}: will retry in a few seconds (attempt #{errors_count}).", - ) - }); - } - std::thread::sleep(std::time::Duration::from_millis(1500)); - continue; - } - } - }; - Ok(response) -} - -pub async fn try_download_block_bytes_with_retry( - http_client: HttpClient, - block_height: u64, - bitcoin_config: BitcoinConfig, - ctx: Context, -) -> Result, String> { - let block_hash = - retrieve_block_hash_with_retry(&http_client, &block_height, &bitcoin_config, &ctx) - .await - .unwrap(); - - let mut errors_count = 0; - - let response = loop { - match download_block(&http_client, &block_hash, &bitcoin_config, &ctx).await { - Ok(result) => break result, - Err(_e) => { - errors_count += 1; - if errors_count > 1 { - ctx.try_log(|logger| { - slog::warn!( - logger, - "unable to fetch block #{block_hash}: will retry in a few seconds (attempt #{errors_count}).", - ) - }); - } - 
std::thread::sleep(std::time::Duration::from_millis(1500)); - continue; - } - } - }; - Ok(response) -} - -pub async fn download_block_with_retry( - http_client: &HttpClient, - block_hash: &str, - bitcoin_config: &BitcoinConfig, - ctx: &Context, -) -> Result { - let mut errors_count = 0; - let mut backoff: f64 = 1.0; - let mut rng = thread_rng(); - - let block = loop { - let response = { - match download_block(http_client, block_hash, bitcoin_config, ctx).await { - Ok(result) => result, - Err(_e) => { - errors_count += 1; - backoff = 2.0 * backoff + (backoff * rng.gen_range(0.0..1.0)); - let duration = std::time::Duration::from_millis((backoff * 1_000.0) as u64); - if errors_count > 1 { - ctx.try_log(|logger| { - slog::warn!( - logger, - "unable to fetch block #{block_hash}: will retry in a few seconds (attempt #{errors_count}).", - ) - }); - } - std::thread::sleep(duration); - continue; - } - } - }; - - match parse_downloaded_block(response) { - Ok(result) => break result, - Err(e) => { - errors_count += 1; - ctx.try_log(|logger| { - slog::warn!( - logger, - "unable to parse fetched block #{block_hash}: will retry in a few seconds (attempt #{errors_count}) ({e})", - ) - }); - std::thread::sleep(std::time::Duration::from_millis(500)); - continue; - } - }; - }; - Ok(block) -} - pub async fn download_block( http_client: &HttpClient, block_hash: &str, @@ -386,35 +267,6 @@ pub fn parse_downloaded_block( Ok(block) } -pub async fn fetch_block( - http_client: &HttpClient, - block_hash: &str, - bitcoin_config: &BitcoinConfig, - _ctx: &Context, -) -> Result, String> { - let block = http_client - .get(format!( - "{}/rest/block/{}.json", - bitcoin_config.rpc_url, block_hash - )) - .header("Content-Type", "application/json") - .header("Host", &bitcoin_config.rpc_url[7..]) - .send() - .await - .map_err(|e| format!("unable to send request ({})", e))? - .bytes() - .await - .map_err(|e| format!("unable to get bytes ({})", e))? 
- .to_vec(); - Ok(block) -} - -pub fn parse_fetched_block(downloaded_block: Vec) -> Result { - let block = serde_json::from_slice::(&downloaded_block[..]) - .map_err(|e: serde_json::Error| format!("unable to parse block ({})", e))?; - Ok(block) -} - pub async fn download_and_parse_block( http_client: &HttpClient, block_hash: &str, diff --git a/components/chainhook-sdk/src/indexer/mod.rs b/components/chainhook-sdk/src/indexer/mod.rs index 899992972..fbf580b20 100644 --- a/components/chainhook-sdk/src/indexer/mod.rs +++ b/components/chainhook-sdk/src/indexer/mod.rs @@ -49,7 +49,7 @@ impl BitcoinChainContext { } } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq)] pub struct IndexerConfig { pub bitcoin_network: BitcoinNetwork, pub stacks_network: StacksNetwork, diff --git a/components/chainhook-sdk/src/indexer/stacks/mod.rs b/components/chainhook-sdk/src/indexer/stacks/mod.rs index 01f3318ae..49025e621 100644 --- a/components/chainhook-sdk/src/indexer/stacks/mod.rs +++ b/components/chainhook-sdk/src/indexer/stacks/mod.rs @@ -76,6 +76,7 @@ pub struct NewTransaction { pub raw_result: String, pub raw_tx: String, pub execution_cost: Option, + pub contract_abi: Option, } #[derive(Deserialize, Debug)] @@ -89,6 +90,7 @@ pub struct NewMicroblockTransaction { pub microblock_sequence: usize, pub microblock_hash: String, pub microblock_parent_hash: String, + pub contract_abi: Option, } #[derive(Debug, Deserialize, Serialize)] @@ -311,6 +313,7 @@ pub fn standardize_stacks_block( description, position: StacksTransactionPosition::anchor_block(tx.tx_index), proof: None, + contract_abi: tx.contract_abi.clone(), }, }); } @@ -452,6 +455,7 @@ pub fn standardize_stacks_microblock_trail( tx.tx_index, ), proof: None, + contract_abi: tx.contract_abi.clone(), }, }; diff --git a/components/chainhook-sdk/src/indexer/stacks/tests.rs b/components/chainhook-sdk/src/indexer/stacks/tests.rs index 84665a1be..0793b8602 100644 --- a/components/chainhook-sdk/src/indexer/stacks/tests.rs +++ 
b/components/chainhook-sdk/src/indexer/stacks/tests.rs @@ -1,4 +1,17 @@ -use super::super::tests::{helpers, process_stacks_blocks_and_check_expectations}; +use chainhook_types::{ + DataMapDeleteEventData, DataMapInsertEventData, DataMapUpdateEventData, DataVarSetEventData, + FTBurnEventData, FTMintEventData, FTTransferEventData, NFTBurnEventData, NFTMintEventData, + NFTTransferEventData, STXBurnEventData, STXLockEventData, STXMintEventData, + STXTransferEventData, SmartContractEventData, StacksTransactionEvent, +}; + +use crate::indexer::tests::helpers::stacks_events::create_new_event_from_stacks_event; + +use super::{ + super::tests::{helpers, process_stacks_blocks_and_check_expectations}, + NewEvent, +}; +use test_case::test_case; #[test] fn test_stacks_vector_001() { @@ -259,3 +272,116 @@ fn test_stacks_vector_051() { fn test_stacks_vector_052() { process_stacks_blocks_and_check_expectations(helpers::stacks_shapes::get_vector_052()); } + +#[test_case(StacksTransactionEvent::STXTransferEvent(STXTransferEventData { + sender: format!(""), + recipient: format!(""), + amount: format!("1"), +}); "stx_transfer")] +#[test_case(StacksTransactionEvent::STXMintEvent(STXMintEventData { + recipient: format!(""), + amount: format!("1"), +}); "stx_mint")] +#[test_case(StacksTransactionEvent::STXBurnEvent(STXBurnEventData { + sender: format!(""), + amount: format!("1"), +}); "stx_burn")] +#[test_case(StacksTransactionEvent::STXLockEvent(STXLockEventData { + locked_amount: format!("1"), + unlock_height: format!(""), + locked_address: format!(""), +}); "stx_lock")] +#[test_case(StacksTransactionEvent::NFTTransferEvent(NFTTransferEventData { + asset_class_identifier: format!(""), + hex_asset_identifier: format!(""), + sender: format!(""), + recipient: format!(""), +}); "nft_transfer")] +#[test_case(StacksTransactionEvent::NFTMintEvent(NFTMintEventData { + asset_class_identifier: format!(""), + hex_asset_identifier: format!(""), + recipient: format!(""), +}); "nft_mint")] 
+#[test_case(StacksTransactionEvent::NFTBurnEvent(NFTBurnEventData { + asset_class_identifier: format!(""), + hex_asset_identifier: format!(""), + sender: format!(""), +}); "nft_burn")] +#[test_case(StacksTransactionEvent::FTTransferEvent(FTTransferEventData { + asset_class_identifier: format!(""), + sender: format!(""), + recipient: format!(""), + amount: format!("1"), +}); "ft_transfer")] +#[test_case(StacksTransactionEvent::FTMintEvent(FTMintEventData { + asset_class_identifier: format!(""), + recipient: format!(""), + amount: format!("1"), +}); "ft_mint")] +#[test_case(StacksTransactionEvent::FTBurnEvent(FTBurnEventData { + asset_class_identifier: format!(""), + sender: format!(""), + amount: format!("1"), +}); "ft_burn")] +#[test_case(StacksTransactionEvent::DataVarSetEvent(DataVarSetEventData { + contract_identifier: format!(""), + var: format!(""), + hex_new_value: format!(""), +}); "data_var_set")] +#[test_case(StacksTransactionEvent::DataMapInsertEvent(DataMapInsertEventData { + contract_identifier: format!(""), + hex_inserted_key: format!(""), + hex_inserted_value: format!(""), + map: format!("") +}); "data_map_insert")] +#[test_case(StacksTransactionEvent::DataMapUpdateEvent(DataMapUpdateEventData { + contract_identifier: format!(""), + hex_new_value: format!(""), + hex_key: format!(""), + map: format!("") +}); "data_map_update")] +#[test_case(StacksTransactionEvent::DataMapDeleteEvent(DataMapDeleteEventData { + contract_identifier: format!(""), + hex_deleted_key: format!(""), + map: format!("") +}); "data_map_delete")] +#[test_case(StacksTransactionEvent::SmartContractEvent(SmartContractEventData { + contract_identifier: format!(""), + topic: format!("print"), + hex_value: format!(""), +}); "smart_contract_print_event")] +fn new_events_can_be_converted_into_chainhook_event(original_event: StacksTransactionEvent) { + let new_event = create_new_event_from_stacks_event(original_event.clone()); + let event = new_event.into_chainhook_event().unwrap(); + let 
original_event_serialized = serde_json::to_string(&original_event).unwrap(); + let event_serialized = serde_json::to_string(&event).unwrap(); + assert_eq!(original_event_serialized, event_serialized); +} + +#[test] +fn into_chainhook_event_rejects_invalid_missing_event() { + let new_event = NewEvent { + txid: format!(""), + committed: false, + event_index: 0, + event_type: format!(""), + stx_transfer_event: None, + stx_mint_event: None, + stx_burn_event: None, + stx_lock_event: None, + nft_transfer_event: None, + nft_mint_event: None, + nft_burn_event: None, + ft_transfer_event: None, + ft_mint_event: None, + ft_burn_event: None, + data_var_set_event: None, + data_map_insert_event: None, + data_map_update_event: None, + data_map_delete_event: None, + contract_event: None, + }; + new_event + .into_chainhook_event() + .expect_err("expected error on missing event"); +} diff --git a/components/chainhook-sdk/src/indexer/tests/helpers/mod.rs b/components/chainhook-sdk/src/indexer/tests/helpers/mod.rs index bb49fc4ed..f6e8ce4d4 100644 --- a/components/chainhook-sdk/src/indexer/tests/helpers/mod.rs +++ b/components/chainhook-sdk/src/indexer/tests/helpers/mod.rs @@ -7,6 +7,7 @@ pub mod bitcoin_shapes; pub mod microblocks; #[allow(non_snake_case, unreachable_code)] pub mod stacks_blocks; +pub mod stacks_events; pub mod stacks_shapes; pub mod transactions; diff --git a/components/chainhook-sdk/src/indexer/tests/helpers/stacks_events.rs b/components/chainhook-sdk/src/indexer/tests/helpers/stacks_events.rs new file mode 100644 index 000000000..2e1650248 --- /dev/null +++ b/components/chainhook-sdk/src/indexer/tests/helpers/stacks_events.rs @@ -0,0 +1,118 @@ +use chainhook_types::StacksTransactionEvent; + +use crate::indexer::stacks::NewEvent; + +pub fn create_new_event_from_stacks_event(event: StacksTransactionEvent) -> NewEvent { + let mut event_type = String::new(); + let stx_transfer_event = if let StacksTransactionEvent::STXTransferEvent(data) = &event { + event_type = 
format!("stx_transfer"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let stx_mint_event = if let StacksTransactionEvent::STXMintEvent(data) = &event { + event_type = format!("stx_mint"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let stx_burn_event = if let StacksTransactionEvent::STXBurnEvent(data) = &event { + event_type = format!("stx_burn"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let stx_lock_event = if let StacksTransactionEvent::STXLockEvent(data) = &event { + event_type = format!("stx_lock"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let nft_transfer_event = if let StacksTransactionEvent::NFTTransferEvent(data) = &event { + event_type = format!("nft_transfer"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let nft_mint_event = if let StacksTransactionEvent::NFTMintEvent(data) = &event { + event_type = format!("nft_mint"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let nft_burn_event = if let StacksTransactionEvent::NFTBurnEvent(data) = &event { + event_type = format!("nft_burn"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let ft_transfer_event = if let StacksTransactionEvent::FTTransferEvent(data) = &event { + event_type = format!("ft_transfer"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let ft_mint_event = if let StacksTransactionEvent::FTMintEvent(data) = &event { + event_type = format!("ft_mint"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let ft_burn_event = if let StacksTransactionEvent::FTBurnEvent(data) = &event { + event_type = format!("ft_burn"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let data_var_set_event = if let StacksTransactionEvent::DataVarSetEvent(data) = &event { + event_type = format!("data_var_set"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let 
data_map_insert_event = if let StacksTransactionEvent::DataMapInsertEvent(data) = &event { + event_type = format!("data_map_insert"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let data_map_update_event = if let StacksTransactionEvent::DataMapUpdateEvent(data) = &event { + event_type = format!("data_map_update"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let data_map_delete_event = if let StacksTransactionEvent::DataMapDeleteEvent(data) = &event { + event_type = format!("data_map_delete"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + let contract_event = if let StacksTransactionEvent::SmartContractEvent(data) = &event { + event_type = format!("smart_contract_print_event"); + Some(serde_json::to_value(data).unwrap()) + } else { + None + }; + NewEvent { + txid: format!(""), + committed: false, + event_index: 0, + event_type, + stx_transfer_event, + stx_mint_event, + stx_burn_event, + stx_lock_event, + nft_transfer_event, + nft_mint_event, + nft_burn_event, + ft_transfer_event, + ft_mint_event, + ft_burn_event, + data_var_set_event, + data_map_insert_event, + data_map_update_event, + data_map_delete_event, + contract_event, + } +} diff --git a/components/chainhook-sdk/src/indexer/tests/helpers/transactions.rs b/components/chainhook-sdk/src/indexer/tests/helpers/transactions.rs index 24b7fe9e8..505c9b9ad 100644 --- a/components/chainhook-sdk/src/indexer/tests/helpers/transactions.rs +++ b/components/chainhook-sdk/src/indexer/tests/helpers/transactions.rs @@ -64,6 +64,7 @@ pub fn generate_test_tx_stacks_contract_call( sponsor: None, position: chainhook_types::StacksTransactionPosition::anchor_block(0), proof: None, + contract_abi: None, }, } } diff --git a/components/chainhook-sdk/src/observer/mod.rs b/components/chainhook-sdk/src/observer/mod.rs index 0c602ddac..10516a2e6 100644 --- a/components/chainhook-sdk/src/observer/mod.rs +++ b/components/chainhook-sdk/src/observer/mod.rs @@ -136,67 
+136,6 @@ impl EventObserverConfig { _ => unreachable!(), } } - - pub fn new_using_overrides( - overrides: Option<&EventObserverConfigOverrides>, - ) -> Result { - let bitcoin_network = - if let Some(network) = overrides.and_then(|c| c.bitcoin_network.as_ref()) { - BitcoinNetwork::from_str(network)? - } else { - BitcoinNetwork::Regtest - }; - - let stacks_network = - if let Some(network) = overrides.and_then(|c| c.stacks_network.as_ref()) { - StacksNetwork::from_str(network)? - } else { - StacksNetwork::Devnet - }; - - let config = EventObserverConfig { - bitcoin_rpc_proxy_enabled: false, - chainhook_config: None, - ingestion_port: overrides - .and_then(|c| c.ingestion_port) - .unwrap_or(DEFAULT_INGESTION_PORT), - bitcoind_rpc_username: overrides - .and_then(|c| c.bitcoind_rpc_username.clone()) - .unwrap_or("devnet".to_string()), - bitcoind_rpc_password: overrides - .and_then(|c| c.bitcoind_rpc_password.clone()) - .unwrap_or("devnet".to_string()), - bitcoind_rpc_url: overrides - .and_then(|c| c.bitcoind_rpc_url.clone()) - .unwrap_or("http://localhost:18443".to_string()), - bitcoin_block_signaling: overrides - .and_then(|c| match c.bitcoind_zmq_url.as_ref() { - Some(url) => Some(BitcoinBlockSignaling::ZeroMQ(url.clone())), - None => Some(BitcoinBlockSignaling::Stacks( - StacksNodeConfig::default_localhost( - overrides - .and_then(|c| c.ingestion_port) - .unwrap_or(DEFAULT_INGESTION_PORT), - ), - )), - }) - .unwrap_or(BitcoinBlockSignaling::Stacks( - StacksNodeConfig::default_localhost( - overrides - .and_then(|c| c.ingestion_port) - .unwrap_or(DEFAULT_INGESTION_PORT), - ), - )), - display_logs: overrides.and_then(|c| c.display_logs).unwrap_or(false), - cache_path: overrides - .and_then(|c| c.cache_path.clone()) - .unwrap_or("cache".to_string()), - bitcoin_network, - stacks_network, - data_handler_tx: None, - }; - Ok(config) - } } #[derive(Deserialize, Debug)] @@ -354,14 +293,14 @@ impl ChainhookStore { } } -#[derive(Debug, Default, Serialize, Clone)] 
+#[derive(Debug, Default, Serialize, Deserialize, Clone)] pub struct ReorgMetrics { timestamp: i64, applied_blocks: usize, rolled_back_blocks: usize, } -#[derive(Debug, Default, Serialize, Clone)] +#[derive(Debug, Default, Serialize, Deserialize, Clone)] pub struct ChainMetrics { pub tip_height: u64, pub last_reorg: Option, @@ -377,7 +316,7 @@ impl ChainMetrics { } } -#[derive(Debug, Default, Serialize, Clone)] +#[derive(Debug, Default, Serialize, Deserialize, Clone)] pub struct ObserverMetrics { pub bitcoin: ChainMetrics, pub stacks: ChainMetrics, diff --git a/components/chainhook-sdk/src/observer/tests/mod.rs b/components/chainhook-sdk/src/observer/tests/mod.rs index 1efea079d..2b5910887 100644 --- a/components/chainhook-sdk/src/observer/tests/mod.rs +++ b/components/chainhook-sdk/src/observer/tests/mod.rs @@ -67,6 +67,7 @@ fn stacks_chainhook_contract_call( expire_after_occurrence, capture_all_events: None, decode_clarity_values: Some(true), + include_contract_abi: None, predicate: StacksPredicate::ContractCall(StacksContractCallBasedPredicate { contract_identifier: contract_identifier.to_string(), method: method.to_string(), diff --git a/components/chainhook-types-rs/src/contract_interface.rs b/components/chainhook-types-rs/src/contract_interface.rs new file mode 100644 index 000000000..07aeb827b --- /dev/null +++ b/components/chainhook-types-rs/src/contract_interface.rs @@ -0,0 +1,171 @@ +// NOTE: This module is a very slightly simplified version of the +// `clarity-vm` repository's [ContractInterface](https://github.com/stacks-network/stacks-blockchain/blob/eca1cfe81f0c0989ebd3e53c32e3e5d70ed83757/clarity/src/vm/analysis/contract_interface_builder/mod.rs#L368) type. +// We've copied it here rather than using `clarity-vm` as a dependency to avoid circular dependencies. 
+ +use std::{fmt, str::FromStr}; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterface { + pub functions: Vec, + pub variables: Vec, + pub maps: Vec, + pub fungible_tokens: Vec, + pub non_fungible_tokens: Vec, + pub epoch: String, + pub clarity_version: String, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceFunction { + pub name: String, + pub access: ContractInterfaceFunctionAccess, + pub args: Vec, + pub outputs: ContractInterfaceFunctionOutput, +} +#[allow(non_camel_case_types)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum ContractInterfaceFunctionAccess { + private, + public, + read_only, +} +#[allow(non_camel_case_types)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum ContractInterfaceVariableAccess { + constant, + variable, +} +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceFunctionArg { + pub name: String, + #[serde(rename = "type")] + pub type_f: ContractInterfaceAtomType, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceFunctionOutput { + #[serde(rename = "type")] + pub type_f: ContractInterfaceAtomType, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceVariable { + pub name: String, + #[serde(rename = "type")] + pub type_f: ContractInterfaceAtomType, + pub access: ContractInterfaceVariableAccess, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceMap { + pub name: String, + pub key: ContractInterfaceAtomType, + pub value: ContractInterfaceAtomType, +} + +#[allow(non_camel_case_types)] +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum ContractInterfaceAtomType { + none, + int128, + uint128, + bool, + principal, + buffer { + length: u32, + }, + #[serde(rename = "string-utf8")] + string_utf8 { + length: u32, + }, + #[serde(rename = 
"string-ascii")] + string_ascii { + length: u32, + }, + tuple(Vec), + optional(Box), + response { + ok: Box, + error: Box, + }, + list { + #[serde(rename = "type")] + type_f: Box, + length: u32, + }, + trait_reference, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceTupleEntryType { + pub name: String, + #[serde(rename = "type")] + pub type_f: ContractInterfaceAtomType, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceFungibleTokens { + pub name: String, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ContractInterfaceNonFungibleTokens { + pub name: String, + #[serde(rename = "type")] + pub type_f: ContractInterfaceAtomType, +} +#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq, PartialOrd)] +pub enum ClarityVersion { + Clarity1, + Clarity2, +} + +impl fmt::Display for ClarityVersion { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ClarityVersion::Clarity1 => write!(f, "Clarity 1"), + ClarityVersion::Clarity2 => write!(f, "Clarity 2"), + } + } +} + +impl FromStr for ClarityVersion { + type Err = String; + fn from_str(version: &str) -> Result { + let s = version.to_string().to_lowercase(); + if s == "clarity1" { + Ok(ClarityVersion::Clarity1) + } else if s == "clarity2" { + Ok(ClarityVersion::Clarity2) + } else { + Err(format!( + "Invalid clarity version. Valid versions are: Clarity1, Clarity2." 
+ )) + } + } +} +#[repr(u32)] +#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord, Hash, Copy, Serialize, Deserialize)] +pub enum StacksEpochId { + Epoch10 = 0x01000, + Epoch20 = 0x02000, + Epoch2_05 = 0x02005, + Epoch21 = 0x0200a, + Epoch22 = 0x0200f, + Epoch23 = 0x02014, + Epoch24 = 0x02019, +} + +impl std::fmt::Display for StacksEpochId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + StacksEpochId::Epoch10 => write!(f, "1.0"), + StacksEpochId::Epoch20 => write!(f, "2.0"), + StacksEpochId::Epoch2_05 => write!(f, "2.05"), + StacksEpochId::Epoch21 => write!(f, "2.1"), + StacksEpochId::Epoch22 => write!(f, "2.2"), + StacksEpochId::Epoch23 => write!(f, "2.3"), + StacksEpochId::Epoch24 => write!(f, "2.4"), + } + } +} diff --git a/components/chainhook-types-rs/src/lib.rs b/components/chainhook-types-rs/src/lib.rs index b4baf2a3d..cd8ef7b02 100644 --- a/components/chainhook-types-rs/src/lib.rs +++ b/components/chainhook-types-rs/src/lib.rs @@ -4,10 +4,12 @@ extern crate serde; extern crate serde_derive; pub mod bitcoin; +mod contract_interface; mod events; mod processors; mod rosetta; +pub use contract_interface::*; pub use events::*; pub use processors::*; pub use rosetta::*; diff --git a/components/chainhook-types-rs/src/rosetta.rs b/components/chainhook-types-rs/src/rosetta.rs index 39ba9c005..8e2f2b2fb 100644 --- a/components/chainhook-types-rs/src/rosetta.rs +++ b/components/chainhook-types-rs/src/rosetta.rs @@ -1,4 +1,5 @@ use super::bitcoin::{TxIn, TxOut}; +use crate::contract_interface::ContractInterface; use crate::events::*; use schemars::JsonSchema; use std::cmp::Ordering; @@ -219,6 +220,8 @@ pub struct StacksTransactionMetadata { pub execution_cost: Option, pub position: StacksTransactionPosition, pub proof: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub contract_abi: Option, } /// TODO @@ -894,13 +897,13 @@ impl BitcoinNetwork { } } -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, 
Debug, Clone, PartialEq)] pub enum BitcoinBlockSignaling { Stacks(StacksNodeConfig), ZeroMQ(String), } -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, Debug, Clone, PartialEq)] pub struct StacksNodeConfig { pub rpc_url: String, pub ingestion_port: u16, diff --git a/docs/chainhook-openapi.json b/docs/chainhook-openapi.json index 8f0c72e9d..f70b80179 100644 --- a/docs/chainhook-openapi.json +++ b/docs/chainhook-openapi.json @@ -837,6 +837,10 @@ "type": "boolean", "nullable": true }, + "include_contract_abi": { + "type": "boolean", + "nullable": true + }, "if_this": { "$ref": "#/components/schemas/StacksPredicate" }, diff --git a/docs/faq.md b/docs/faq.md index f97dfe3a0..778b4203f 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -1,8 +1,8 @@ --- -title: FAQ's +title: FAQs --- -# FAQ's +# FAQs #### **Can Chainhook target both Bitcoin and Stacks?** diff --git a/docs/getting-started.md b/docs/getting-started.md index 94ca44690..0f49ae187 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -9,7 +9,7 @@ Chainhook is a transaction indexing engine for Stacks and Bitcoin. It can extrac Chainhook can extract data from the Bitcoin and the Stacks blockchains using predicates (sometimes called `chainhooks`). A predicate specifies a rule applied as a filtering function on every block transaction. - **Chainhook as a development tool** has a few convenient features designed to make developers as productive as possible by allowing them to iterate quickly in their local environments. -- **Chainhook as a service** can be used to evaluate new Bitcoin and/or Stacks blocks against your predicates. You can also dynamically register new predicates by [enabling predicates registration API](./overview.md#then-that-predicate-design) +- **Chainhook as a service** can be used to evaluate new Bitcoin and/or Stacks blocks against your predicates. 
You can also dynamically register new predicates by [enabling predicates registration API](./overview.md#then-that-predicate-design). ## Install Chainhook from the Source @@ -21,13 +21,13 @@ Chainhook can be installed from the source by following the steps below: git clone https://github.com/hirosystems/chainhook.git ``` -2. Navigate to the root directory of the cloned repo +2. Navigate to the root directory of the cloned repo: ```bash cd chainhook ``` -3. Run cargo target to install chainhook +3. Run cargo target to install chainhook: ```bash cargo chainhook-install diff --git a/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-bitcoind.md b/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-bitcoind.md index 7eda6c03a..47b0cbc09 100644 --- a/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-bitcoind.md +++ b/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-bitcoind.md @@ -1,8 +1,8 @@ --- -title: Run Chainhook as a Service using Bitcoind +title: Run Chainhook as a Service Using Bitcoind --- -You can run Chainhook as a service to evaluate your `if_this / then_that` predicates against the Bitcoin blockchain, delivering results—either file appendations or HTTP POST requests to a server you designate—for your application's use case. You can also dynamically register new predicates as the service is running by enabling the predicates registration API. +You can run Chainhook as a service to evaluate your `if_this / then_that` predicates against the Bitcoin blockchain, delivering data—either file appendations or HTTP POST requests to a server you designate—for your application's use case. You can also dynamically register new predicates as the service is running by enabling the predicates registration API. 
## Prerequisites @@ -14,11 +14,11 @@ This guide is written to work with the latest Bitcoin Core software containing b > **_NOTE:_** > -> While bitcoind can and will start syncing a Bitcoin node, customizing this node to your use cases beyond supporting a Chainhook is out of scope for this guide. See the Bitcoin wiki for ["Running Bitcoin"](https://en.bitcoin.it/wiki/Running_Bitcoin) or bitcoin.org [Running A Full Node guide](https://bitcoin.org/en/full-node). +> While bitcoind can and will start syncing a Bitcoin node, customizing this node to your use cases beyond supporting a Chainhook is out of scope for this guide. See the Bitcoin wiki for ["Running Bitcoin"](https://en.bitcoin.it/wiki/Running_Bitcoin) or bitcoin.org's [Running A Full Node guide](https://bitcoin.org/en/full-node). -- Make note of the path of your `bitcoind` executable (located within the `bin` directory of the `bitcoin-25.0` folder you downloaded above appropriate to your operating system) +- Make note of the path of your `bitcoind` executable (located within the `bin` directory of the `bitcoin-25.0` folder you downloaded above appropriate to your operating system). - Navigate to your project folder where your Chainhook node will reside, create a new file, and rename it to `bitcoin.conf`. Copy the configuration below to this `bitcoin.conf` file. -- Find and copy your Bitcoin data directory and paste to the `datadir` field in the `bitcoin.conf` file below. Either copy the default path (see [list of default directories by operating system](https://en.bitcoin.it/wiki/Data_directory)) or copy the custom path you set for your Bitcoin data +- Find and copy your Bitcoin data directory and paste to the `datadir` field in the `bitcoin.conf` file below. Either copy the default path (see [list of default directories by operating system](https://en.bitcoin.it/wiki/Data_directory)) or copy the custom path you set for your Bitcoin data. 
- Set a username of your choice for bitcoind and use it in the `rpcuser` configuration below (`devnet` is a default). - Set a password of your choice for bitcoind and use it in the `rpcpassword` configuration below (`devnet` is a default). @@ -128,7 +128,7 @@ Here is a table of the relevant parameters this guide changes in our configurati ## Scan blockchain based on predicates -Now that your bitcoind and Chainhook configurations are complete, you can define the Chainhook [predicates](../overview.md#if-this-predicate-design) you would like to scan against bitcoin blocks. These predicates are where the user specifies the kinds of blockchain events that trigger Chainhook to deliver a result (either a file appendation or an HTTP POST request). This section helps you with an example JSON file to scan a range of blocks in the blockchain to trigger results. To understand the supported predicates for Bitcoin, refer to [how to use chainhooks with bitcoin](how-to-use-chainhooks-with-bitcoin.md). +Now that your bitcoind and Chainhook configurations are complete, you can define the Chainhook [predicates](../overview.md#if-this-predicate-design) you would like to scan against bitcoin blocks. These predicates are where you specify the kind of blockchain events that trigger Chainhook to deliver a result (either a file appendation or an HTTP POST request). This section helps you with an example JSON file to scan a range of blocks in the blockchain to trigger results. To understand the supported predicates for Bitcoin, refer to [how to use chainhooks with bitcoin](how-to-use-chainhooks-with-bitcoin.md). The following is an example to walk you through an `if_this / then_that` predicate design that appends event payloads to the configured file destination. 
@@ -232,11 +232,11 @@ The above command posts events to the URL, http://localhost:3000/events mentione ## Initiate Chainhook Service -In the examples above, our Chainhook scanned historical blockchain data against the user's predicates and delivered results. In this next section, let's learn how to set up a Chainhook that acts as an ongoing observer and event-streaming service. +In the examples above, our Chainhook scanned historical blockchain data against predicates and delivered results. In this next section, let's learn how to set up a Chainhook that acts as an ongoing observer and event-streaming service. -We can start a Chainhook service with an existing predicate. We will also see how we can dynamically register new predicates by making an API call to our Chainhook. In both of these instances, our predicates will be delivering their results to a server set up to recieve results. +We can start a Chainhook service with an existing predicate. We can also dynamically register new predicates by making an API call to our Chainhook. In both of these instances, our predicates will be delivering their results to a server set up to receive results. -- Initiate the chainhook service by passing the predicate path to the command as shown below. +- Initiate the chainhook service by passing the predicate path to the command as shown below: ```console chainhook service start --predicate-path=stacking-pool-api.json --config-path=Chainhook.toml @@ -248,7 +248,7 @@ The above command registers the predicate based on the predicate definition in t You can also dynamically register new predicates with your Chainhook service. -First, we need to uncomment the following lines of code in the `Chainhook.toml` file to enable the predicate registration server. +First, we need to uncomment the following lines of code in the `Chainhook.toml` file to enable the predicate registration server: ```toml # ... 
diff --git a/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-stacks.md b/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-stacks.md index c05a45cd3..2f1849060 100644 --- a/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-stacks.md +++ b/docs/how-to-guides/how-to-run-chainhook-as-a-service-using-stacks.md @@ -1,5 +1,5 @@ --- -title: Run Chainhook as a Service using Stacks +title: Run Chainhook as a Service Using Stacks --- You can run Chainhook as a service to evaluate Stacks blocks against your predicates. You can also dynamically register new predicates by enabling predicates registration API. @@ -8,13 +8,13 @@ Start with the prerequisite section and configure your files to start the chainh ## Prerequisite -### Configure Stacks Node +### Configure Your Stacks Node -- Configure your stacks node using the [Stacks node configuration](https://docs.stacks.co/docs/nodes-and-miners/stacks-node-configuration) documentation. -- Recommend the latest version of Stacks. You can check the latest version by following [this](https://github.com/stacks-network/stacks-blockchain/releases) link. -- Set up the bitcoin node by following [this](how-to-run-chainhook-as-a-service-using-bitcoind.md#setting-up-a-bitcoin-node) article, then get the `rpcuser`, `rpcpassword`, and `rpc_port` values defined in the `bitcoin.conf` file. +- Configure your Stacks node using the [Stacks node configuration](https://docs.stacks.co/docs/nodes-and-miners/stacks-node-configuration) documentation. +- We recommend using the latest version of Stacks. You can check the latest version by following [this](https://github.com/stacks-network/stacks-blockchain/releases) link. +- Set up your Bitcoin node by following [this](how-to-run-chainhook-as-a-service-using-bitcoind.md#setting-up-a-bitcoin-node) article, then get the `rpcuser`, `rpcpassword`, and `rpc_port` values defined in the `bitcoin.conf` file. -A `Stacks.toml` file is generated when configuring the stacks node.
Below is the sample `Stacks.toml` file. +A `Stacks.toml` file is generated when configuring your Stacks node. Below is the sample `Stacks.toml` file. ```toml [node] @@ -99,7 +99,7 @@ max_caching_memory_size_mb = 32000 tsv_file_url = "https://archive.hiro.so/mainnet/stacks-blockchain-api/mainnet-stacks-blockchain-api-latest" ``` -Ensure the following configurations are matched to allow chainhook to communicate with the Stacks and Bitcoin layers. +Ensure the following configurations are matched to allow chainhook to communicate with both Stacks and Bitcoin. | bitcoin.conf | Stacks.toml | Chainhook.toml | | --------------- | ----------- | ---------------------------- | @@ -112,11 +112,11 @@ Ensure the following configurations are matched to allow chainhook to communicat > **_NOTE:_** > -> The `bitcoind_zmq_url` is optional when running chainhook as a service using stacks because stacks will pull the blocks from Stacks and the Bitcoin chain. +> The `bitcoind_zmq_url` is optional when running chainhook as a service using Stacks because Stacks will pull the blocks from Stacks and the Bitcoin chain. -## Scan blockchain based on predicates +## Scan the blockchain based on predicates -Now that the stacks and chainhook configurations are done, you can scan your blocks by defining your [predicates](../overview.md#if-this-predicate-design). This section helps you with sample JSON files to scan the blocks in the blockchain and render the results. To understand the supported predicates for Stacks, refer to [how to use chainhook with stacks](how-to-use-chainhooks-with-stacks.md). +Now that the Stacks and Chainhook configurations are done, you can scan your blocks by defining your [predicates](../overview.md#if-this-predicate-design). This section helps you with sample JSON files to scan blockchain blocks and render the results. To understand the supported predicates for Stacks, refer to [how to use chainhook with stacks](how-to-use-chainhooks-with-stacks.md). 
The following are the two examples to walk you through `file_append` and `http_post` `then-that` predicate designs. @@ -178,7 +178,7 @@ A JSON file `print-event.json` is generated. > > You can get blockchain height and current block in the [Explorer](https://explorer.hiro.so/blocks?chain=mainnet). -Now, use the following command to scan the blocks based on the predicates defined in the `mainnet` network block of your `print-event.json` file. +Now, use the following command to scan the blocks based on the predicates defined in the `mainnet` network block of your `print-event.json` file: ```console chainhook predicates scan print-event.json --mainnet @@ -191,13 +191,13 @@ The output of the above command will be a text file `arkadiko.txt` generated bas ``` > **_TIP:_** -> To optimize your experience with scanning, the following are a few knobs you can play with: -> Use of adequate values for `start_block` and `end_block` in predicates will drastically improve the performance. +> To optimize your experience with scanning, there are a few variables you can play with: +> Use of adequate values for `start_block` and `end_block` in predicates will drastically improve performance. > Networking: reducing the number of network hops between the chainhook and the bitcoind processes can also help. ### Example 2 - `http_post` -Run the following command to generate a sample JSON file with predicates in your terminal. +Run the following command to generate a sample JSON file with predicates in your terminal: ```console chainhook predicates new print-event-post.json --stacks @@ -250,7 +250,7 @@ Update the generated JSON file `print-event-post.json` with the following: > > The `start_block` is the required field to use the `http_post` `then-that` predicate. -Now, use the following command to scan the blocks based on the predicates defined in the `print-event-post.json` file. 
+Now, use the following command to scan the blocks based on the predicates defined in the `print-event-post.json` file: ```console chainhook predicates scan print-event-post.json --mainnet @@ -260,9 +260,9 @@ The above command posts events to the URL `http://localhost:3000/events` mention ## Initiate Chainhook Service -In this section, you'll learn how to initiate the chainhook service using the following two ways and use the REST API call to post the events onto a server. +In this section, you'll learn two ways to initiate the Chainhook service as well as how to use the REST API call to post the events onto a server. -- Initiate the chainhook service by passing the predicate path to the command as shown below. +- Initiate the Chainhook service by passing the predicate path to the command as shown below: ```console chainhook service start --predicate-path=print-event.json --config-path=Chainhook.toml @@ -274,7 +274,7 @@ In this section, you'll learn how to initiate the chainhook service using the fo You can also dynamically register new predicates with your Chainhook service. -First, we need to uncomment the following lines of code in the `Chainhook.toml` file to enable the predicate registration server. +First, we need to uncomment the following lines of code in the `Chainhook.toml` file to enable the predicate registration server: ```toml # ... diff --git a/docs/how-to-guides/how-to-use-chainhooks-with-bitcoin.md b/docs/how-to-guides/how-to-use-chainhooks-with-bitcoin.md index 338cc5c0f..1d96f21ea 100644 --- a/docs/how-to-guides/how-to-use-chainhooks-with-bitcoin.md +++ b/docs/how-to-guides/how-to-use-chainhooks-with-bitcoin.md @@ -1,6 +1,4 @@ ---- -title: Use Chainhooks with Bitcoin ---- +# Use Chainhooks with Bitcoin The following guide helps you define predicates to use Chainhook with Bitcoin. The predicates are specified based on `if-this`, `then-that` constructs. 
@@ -23,54 +21,58 @@ Get any transaction matching a given transaction ID (txid): } ``` -Get any transaction, including: +Get any transaction matching a given `OP_RETURN` payload: +Example: Given the following `script_pubkey`: -- OP_RETURN output starting with a set of characters. - - `starts_with` mandatory argument admits: - - ASCII string type. Example: `X2[` - - hex encoded bytes. Example: `0x589403` +``` +OP_RETURN +PUSHDATA(0x03) +0x616263 +``` + +or `0x6a03616263` in hex, the following predicates will match the transaction above. + +Get any transaction, where its `OP_RETURN` payload starts with a set of characters: +- `starts_with` mandatory argument admits: + - ASCII string type. Example: `ab` + - hex encoded bytes. Example: `0x6162` ```json { "if_this": { "scope": "outputs", "op_return": { - "starts_with": "X2[" + "starts_with": "ab" } } } ``` - -`op_return` is used to find blocks starting, ending, or equivalent to a specific string from the list of output blocks. - -Get any transaction, including an OP_RETURN output matching the sequence of bytes specified: - +Get any transaction, where its `OP_RETURN` payload equals a set of characters: - `equals` mandatory argument admits: - - hex encoded bytes. Example: `0x69bd04208265aca9424d0337dac7d9e84371a2c91ece1891d67d3554bd9fdbe60afc6924d4b0773d90000006700010000006600012` + - ASCII string type. Example: `abc` + - hex encoded bytes. Example: `0x616263` ```json { "if_this": { "scope": "outputs", "op_return": { - "equals": "0x69bd04208265aca9424d0337dac7d9e84371a2c91ece1891d67d3554bd9fdbe60afc6924d4b0773d90000006700010000006600012" + "equals": "0x616263" } } } ``` - -Get any transaction, including an OP_RETURN output ending with a set of characters: - +Get any transaction, where its `OP_RETURN` payload ends with a set of characters: - `ends_with` mandatory argument admits: - - ASCII string type. Example: `X2[` - - hex encoded bytes.
Example: `0x76a914000000000000000000000000000000000000000088ac` + - ASCII string type. Example: `bc` + - hex encoded bytes. Example: `0x6263` ```json { "if_this": { "scope": "outputs", "op_return": { - "ends_with": "0x76a914000000000000000000000000000000000000000088ac" + "ends_with": "0x6263" } } } diff --git a/docs/how-to-guides/how-to-use-chainhooks-with-stacks.md b/docs/how-to-guides/how-to-use-chainhooks-with-stacks.md index 0a5b5c506..f95fd213f 100644 --- a/docs/how-to-guides/how-to-use-chainhooks-with-stacks.md +++ b/docs/how-to-guides/how-to-use-chainhooks-with-stacks.md @@ -225,9 +225,12 @@ Following additional configurations can be used to improve the performance of ch - Stop evaluating chainhook after a given number of occurrences found: `"expire_after_occurrence": 1` -- Include decoded clarity values in the payload +- Include decoded clarity values in the payload: `"decode_clarity_values": true` +- Include the contract ABI for transactions that deploy contracts: +`"include_contract_abi": true` + ## Example predicate definition to print events Retrieve and HTTP Post to `http://localhost:3000/api/v1/wrapBtc` the first five transactions interacting with ST1PQHQKV0RJXZFY1DGX8MNSNYVE3VGZJSRTPGZGM.monkey-sip09, emitting print events containing the word 'vault'. diff --git a/docs/overview.md b/docs/overview.md index 6f82710ce..b154a4059 100644 --- a/docs/overview.md +++ b/docs/overview.md @@ -4,7 +4,7 @@ title: Overview # Chainhook Overview -Chainhook is a reorg-aware transaction indexing engine that helps developers get reliable blockchain data, regardless of forks and reorgs. By focusing only on the data devs care about, Chainhook helps developers work with much lighter datasets and build IFTTT logic into their applications. +Chainhook is a reorg-aware transaction indexing engine that helps you get reliable blockchain data, regardless of forks and reorgs. 
By focusing only on the data developers care about, Chainhook helps devs work with much lighter datasets and build IFTTT logic into their applications. Chainhook can be used as a tool in your local development environment and as a service in the cloud environment. @@ -18,15 +18,15 @@ With Chainhook, developers can build consistent, reorg-proof databases that inde ## Features -1. **Faster, More Efficient Indexing:** Instead of working with a generic blockchain indexer, taking hours to process every single transaction of every single block, developers can create their own indexes, build, iterate, and refine them in minutes. Chainhook can help developers avoid massive storage management and storage scaling issues by avoiding full chain indexation. Lighter indexes imply faster query results, which helps minimize end-user response time. This leads to an enhanced Developer Experience and an improved End-User Experience. +1. **Faster, More Efficient Indexing:** Instead of working with a generic blockchain indexer, taking hours to process every single transaction of every single block, you can create your own index, build, iterate, and refine it in minutes. Chainhook can help you avoid massive storage management and storage scaling issues by avoiding full chain indexation. Lighter indexes lead to faster query results, which helps minimize end-user response time. This leads to a better developer experience and a better end-user experience. 2. **Re-org and Fork Aware:** Chainhook stores possible chain forks and checks each new chain event against the forks to maintain the current valid fork. All triggers, also known as **predicates**, are evaluated against the current valid fork. In the event of a reorg, Chainhook computes a list of new blocks to apply and old blocks to rollback and evaluates the registered predicates against those blocks. -3. 
**IFTTT Logic, powering your applications:** Chainhook helps developers create elegant event-based architectures using triggers, also known as **predicates**. Developers can write “if_this / then_that” **predicates**that when triggered, are packaged as events and forwarded to the configured destination. By using cloud functions as destinations, developers can also cut costs on processing by only paying for processing when a block that contains some data relevant to the developer's application is being mined. +3. **IFTTT Logic, powering your applications:** Chainhook helps developers create elegant event-based architectures using triggers, also known as **predicates**. Developers can write “if_this / then_that” **predicates** that when triggered, are packaged as events and forwarded to the configured destination. By using cloud functions as destinations, developers can also cut costs on processing by only paying for processing when a block that contains some data relevant to the developer's application is being mined. ## Chainhooks: Trigger IFTTT Logic in your Application -With Chainhook, developers can trigger actions based on predicates they can define. Chainhooks can be triggered by events such as: +With Chainhook, you can trigger actions based on predicates you define. Chainhooks can be triggered by events such as: - A certain amount of SIP-10 tokens were transferred - A particular blockchain address received some tokens on the Stacks/Bitcoin blockchain