diff --git a/.credo.exs b/.credo.exs index 7f4bcd1e..735fbc48 100644 --- a/.credo.exs +++ b/.credo.exs @@ -159,7 +159,7 @@ # # Controversial and experimental checks (opt-in, just move the check to `:enabled` # and be sure to use `mix credo --strict` to see low priority checks) - # Disabled for rudder repo + # Disabled for brp-refiner repo {Credo.Check.Readability.FunctionNames, []}, {Credo.Check.Refactor.LongQuoteBlocks, []}, {Credo.Check.Design.TagTODO, [exit_status: 2]}, diff --git a/.env_example b/.env_example index be1139e0..91fe7b44 100644 --- a/.env_example +++ b/.env_example @@ -3,12 +3,12 @@ WEB3_JWT= # used to submit block result proofs BLOCK_RESULT_OPERATOR_PRIVATE_KEY= -# if running rudder and all supporting services using docker compose +# if running brp-refiner and all supporting services using docker compose NODE_ETHEREUM_MAINNET=http://hardhat-node:8545/ IPFS_PINNER_URL=http://ipfs-pinner:3001 EVM_SERVER_URL=http://evm-server:3002 -# if running rudder locally and all other services using docker compose +# if running brp-refiner locally and all other services using docker compose NODE_ETHEREUM_MAINNET=http://127.0.0.1:8545/ IPFS_PINNER_URL=http://127.0.0.1:3001 EVM_SERVER_URL=http://127.0.0.1:3002 \ No newline at end of file diff --git a/.github/workflows/docker-ci-test.yaml b/.github/workflows/docker-ci-test.yaml index 58099fd6..89305d81 100644 --- a/.github/workflows/docker-ci-test.yaml +++ b/.github/workflows/docker-ci-test.yaml @@ -29,9 +29,9 @@ jobs: # - uses: satackey/action-docker-layer-caching@v0.0.10 # # Ignore the failure of a step and avoid terminating the job. # continue-on-error: true - - name: Build & Publish the Docker image - run: | - docker buildx create --name builder --use --platform=linux/amd64,linux/arm64 && docker buildx build --platform=linux/amd64,linux/arm64 . 
-t us-docker.pkg.dev/covalent-project/network/rudder:latest --push + # - name: Build & Publish the Docker image + # run: | + # docker buildx create --name builder --use --platform=linux/amd64,linux/arm64 && docker buildx build --platform=linux/amd64,linux/arm64 . -t us-docker.pkg.dev/covalent-project/network/refiner:latest --push - name: Create .env file run: | @@ -48,7 +48,7 @@ jobs: echo "CI=${{ secrets.CI }}" echo "CODECOV_TOKEN=${{ secrets.CODECOV_TOKEN }}" echo "GITHUB_REF=${{ github.ref_name }}" - echo "CODECOV_SLUG=covalenthq/rudder" + echo "CODECOV_SLUG=covalenthq/refiner" echo "GITHUB_SHA=$GITHUB_SHA" echo "GITHUB_HEAD_REF=$GITHUB_HEAD_REF" echo "GITHUB_ENV=$GITHUB_ENV" @@ -62,10 +62,10 @@ jobs: uses: xom9ikk/dotenv@v1.0.2 - name: Run containers - run: docker compose --env-file ".env" -f "docker-compose-ci.yml" up --remove-orphans --exit-code-from rudder + run: docker compose --env-file ".env" -f "docker-compose-ci.yml" up --remove-orphans --exit-code-from refiner - - name: Check running rudder - run: docker inspect rudder + - name: Check running refiner + run: docker inspect refiner - name: Check running containers run: docker ps diff --git a/.github/workflows/tag-release.yaml b/.github/workflows/tag-release.yaml index 3478618c..17b0ab2c 100644 --- a/.github/workflows/tag-release.yaml +++ b/.github/workflows/tag-release.yaml @@ -25,7 +25,7 @@ jobs: - name: Build & Publish the Docker image run: | - docker buildx create --name builder --use --platform=linux/amd64,linux/arm64 && docker buildx build --platform=linux/amd64,linux/arm64 . -t us-docker.pkg.dev/covalent-project/network/rudder:stable -t us-docker.pkg.dev/covalent-project/network/rudder:"${{ env.TAG }}" --push + docker buildx create --name builder --use --platform=linux/amd64,linux/arm64 && docker buildx build --platform=linux/amd64,linux/arm64 . 
-t us-docker.pkg.dev/covalent-project/network/refiner:stable -t us-docker.pkg.dev/covalent-project/network/refiner:"${{ env.TAG }}" --push - uses: "marvinpinto/action-automatic-releases@latest" with: @@ -35,4 +35,3 @@ jobs: files: | *.zip *.tar.gz - diff --git a/.gitignore b/.gitignore index dd7cf9ac..233f243a 100644 --- a/.gitignore +++ b/.gitignore @@ -20,7 +20,7 @@ erl_crash.dump *.ez # Ignore package tarball (built via "mix hex.build"). -rudder-*.tar +brp-refiner-*.tar # Temporary files, for example, from tests. /tmp/ @@ -28,7 +28,7 @@ rudder-*.tar # Generated beam files *.beam -# secrets for rudder app +# secrets for brp-refiner app /config/*.secret.exs #ElixirLS vscode extension diff --git a/Dockerfile b/Dockerfile index ece656a3..97eeb5e3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -43,13 +43,13 @@ COPY --from=builder-elixir /mix/lib /mix/lib COPY --from=builder-elixir /mix/priv /mix/priv COPY --from=builder-elixir /mix/mix.exs /mix/ COPY --from=builder-elixir /mix/mix.lock /mix/ -# COPY --from=builder-elixir /mix/_build/dev/rel/rudder/ /mix/prod/ +# COPY --from=builder-elixir /mix/_build/dev/rel/refiner/ /mix/prod/ COPY --from=builder-elixir /mix/test/ /mix/test COPY --from=builder-elixir /mix/test-data/ /mix/test-data # Used only for testing in compose # CMD [ "mix", "test", "./test/block_specimen_decoder_test.exs", "./test/block_result_uploader_test.exs"] -CMD ["/mix/prod/bin/rudder", "start"] +CMD ["/mix/prod/bin/refiner", "start"] EXPOSE 9568 \ No newline at end of file diff --git a/README.md b/README.md index 39057d75..61b56770 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ [](https://www.covalenthq.com/docs/cqt-network/covalent-network-whitepaper/)
- - Version + + Version - + License: @@ -15,24 +15,24 @@
- - Linter Status + + Linter Status - - CI Tests Status + + CI Tests Status - - Docker Lint Status + + Docker Lint Status - - + + Twitter Follow Covalent
-# Rudder +# Refiner - [Introduction](#introduction) - [Raison d'être](#raison-dêtre) @@ -55,7 +55,7 @@ - [Scripts](#scripts) - [Appendix](#appendix) -## Introduction +## Introduction ![Layers](./docs/network-layers.png) @@ -67,9 +67,9 @@ The [Refiner Whitepaper](https://www.covalenthq.com/docs/cqt-network/refiner-whi Among many of the Refiner's outputs feasible, the Block Result is one. The block result is a one-to-one representation of block data returned from an RPC call to a blockchain node along with the artifacts of block and tx execution like transaction `receipts`. The source of the block result, the block specimen, captures a few extra fields like the [State Specimen](https://github.com/covalenthq/bsp-agent#state-specimen) and `senders` etc. This full specification and its requirement are described well in the [BSP whitepaper](https://www.covalenthq.com/static/documents/Block%20Specimen%20Whitepaper%20V1.2.pdf). -Running the Refiner stack involves running three main pieces of CQT Network OS infrastructure, [`rudder`](https://github.com/covalenthq/rudder), [`ipfs-pinner`](https://github.com/covalenthq/ipfs-pinner) and [`evm-server`](https://github.com/covalenthq/erigon) that coordinate and execute a transformation pipeline per Block Specimen. +Running the Refiner stack involves running three main pieces of CQT Network OS infrastructure, [`refiner`](https://github.com/covalenthq/refiner), [`ipfs-pinner`](https://github.com/covalenthq/ipfs-pinner) and [`evm-server`](https://github.com/covalenthq/erigon) that coordinate and execute a transformation pipeline per Block Specimen. -Here `rudder` serves as the primary orchestrator and supervisor for all transformation pipeline processes that locates a source CQT Network data object to apply a tracing/execution/transformational rule to and outputs a new object generated from using such a rule. 
+Here `refiner` serves as the primary orchestrator and supervisor for all transformation pipeline processes that locates a source CQT Network data object to apply a tracing/execution/transformational rule to and outputs a new object generated from using such a rule. Running these nodes are not as disk I/O (or cpu/memory) intense as running [`bsp-geth`](https://github.com/covalenthq/bsp-geth) and [`bsp-agent`](https://github.com/covalenthq/bsp-agent) on local machines, however they do require sufficient bandwidth for access to distributed store resources and sending transaction proofs using ethereum clients - connecting to our EVM public blockchain partner - [moonbeam](https://moonbeam.network/). We shall setup all of these step-by-step in this guide in two main ways: @@ -77,7 +77,7 @@ Running these nodes are not as disk I/O (or cpu/memory) intense as running [`bsp - [Build & Run from Source](#build-from-source) (Optional method) -### Raison d'être +### Raison d'être ![Phase2](./docs/phase-2.png) @@ -93,13 +93,13 @@ At a very high level, the Refiner locates a source to apply a transformational r - **Target**: The output generated from running the rule over the object that came from the source that is the block result. -## Architecture +## Architecture -![Rudder Pipeline](./docs/components.png) +![Refiner Pipeline](./docs/components.png) -The happy path for `rudder` application in the CQT Network is made up of actor processes spawned through many [Gen Servers](https://elixir-lang.org/getting-started/mix-otp/genserver.html) processes that are loosely coupled, here some maintain state, and some don't. The children processes can be called upon to fulfill responsibilities at different sections in the refinement/transformation process pipeline - under one umbrella [Dynamic Supervisor](https://elixir-lang.org/getting-started/mix-otp/dynamic-supervisor.html), that can bring them back up in case of a failure to continue a given pipeline operation. 
Read more about the components and their operations in the [FULL ARCHITECTURE document](./docs/ARCH.md). +The happy path for `refiner` application in the CQT Network is made up of actor processes spawned through many [Gen Servers](https://elixir-lang.org/getting-started/mix-otp/genserver.html) processes that are loosely coupled, here some maintain state, and some don't. The children processes can be called upon to fulfill responsibilities at different sections in the refinement/transformation process pipeline - under one umbrella [Dynamic Supervisor](https://elixir-lang.org/getting-started/mix-otp/dynamic-supervisor.html), that can bring them back up in case of a failure to continue a given pipeline operation. Read more about the components and their operations in the [FULL ARCHITECTURE document](./docs/ARCH.md). -## Resources +## Resources Production of Block Results forms the core of the cQT network’s functional data objects specs. These result objects are created using six main pieces of open-source software published by Covalent for the CQT Network’s decentralized blockchain data ETL stack. @@ -121,12 +121,12 @@ Open source decentralized storage layer for cQT network block specimen and trans Open source (anyone can call the rewards function on BSP Staking contracts) rewards distributer for cQT network operators. 1. [T8n Server (Transform)](https://github.com/covalenthq/erigon) - Operator run & deployed. -Open source Ethereum virtual machine binary (stateless transition tool - t8n) plugin/http server for the rudder. +Open source Ethereum virtual machine binary (stateless transition tool - t8n) plugin/http server for the refiner. -1. [Rudder (Refine & Prove)]( https://github.com/covalenthq/rudder) - Operator run & deployed. +1. [Refiner (Refine & Prove)]( https://github.com/covalenthq/refiner) - Operator run & deployed. Open Source specialized transformation process and framework for block specimens to block results (Open sourced in Q2 2023). 
-### Additional Resources +### Additional Resources - Reference to understand the functions of the various components can be found in the official [CQT Network Whitepaper](https://www.covalenthq.com/docs/cqt-network/covalent-network-whitepaper/). @@ -136,19 +136,19 @@ - Operator reference for instructions to run BSP Geth with the BSP Agent can be found in [CQT Network: Block Specimen Producer Onboarding Process](https://www.covalenthq.com/docs/cqt-network/operator-onboarding-bsp/). -- Operator reference for instructions to run Rudder/Refiner be found in [CQT Network - Refiner Onboarding Process](https://www.covalenthq.com/docs/cqt-network/operator-onboarding-refiner/). +- Operator reference for instructions to run the Refiner can be found in [CQT Network - Refiner Onboarding Process](https://www.covalenthq.com/docs/cqt-network/operator-onboarding-refiner/). - View [The Refiner: A Web3 Solution for Accessing Granular Blockchain Data for Developers](https://www.youtube.com/watch?v=o7yTUY8s_Yk&pp=ygUQcmVmaW5lciBjb3ZhbGVudA%3D%3D) -- Setting up monitoring and alerting with [Grafana and Prometheus for Refiner](https://github.com/covalenthq/rudder/blob/main/docs/METRICS.md) +- Setting up monitoring and alerting with [Grafana and Prometheus for Refiner](https://github.com/covalenthq/refiner/blob/main/docs/METRICS.md) -- Refiner [contributions and bug bounty guidelines](https://github.com/covalenthq/rudder/blob/main/docs/CONTRIBUTING.md) +- Refiner [contributions and bug bounty guidelines](https://github.com/covalenthq/refiner/blob/main/docs/CONTRIBUTING.md) -- Detailed [Refiner Architecture](https://github.com/covalenthq/rudder/blob/main/docs/ARCH.md) +- Detailed [Refiner Architecture](https://github.com/covalenthq/refiner/blob/main/docs/ARCH.md) - Refiner [Incentivized Testnet Validator-Operator Onboarding](https://www.youtube.com/watch?v=PHx4NT1_1E0) -## Requirements +## Requirements 
**Minimum** @@ -170,7 +170,7 @@ Open Source specialized transformation process and framework for block specimens - SSL certificates - docker, docker-compose, direnv -## Run With Docker Compose +## Run With Docker Compose Install Docker @@ -178,7 +178,7 @@ Follow the instructions for your platform/architecture: https://docs.docker.com/ README instructions will be based on Ubuntu 22.04 LTS x86_64/amd64: https://docs.docker.com/engine/install/ubuntu/. -### Environment +### Environment Install direnv @@ -217,8 +217,8 @@ direnv allow . That will lead to the corresponding logs: ```bash -direnv: loading ~/rudder/.envrc -direnv: loading ~/rudder/.envrc.local +direnv: loading ~/refiner/.envrc +direnv: loading ~/refiner/.envrc.local direnv: export +BLOCK_RESULT_OPERATOR_PRIVATE_KEY +ERIGON_NODE +EVM_SERVER_URL +IPFS_PINNER_URL +NODE_ETHEREUM_MAINNET +W3_AGENT_KEY ``` @@ -234,9 +234,9 @@ Copy over the delegation proof file to ~/.ipfs repo. You should have gotten this mv path_to_delegation_file ~/.ipfs/proof.out ``` -### Pull +### Pull -Run all services including `rudder` with [docker compose](https://docs.docker.com/compose/) with the following. +Run all services including `refiner` with [docker compose](https://docs.docker.com/compose/) with the following. **Note**: the `env` file is not necessary if env vars are already loaded. @@ -252,15 +252,15 @@ For moonbeam. docker compose --env-file ".env" -f "docker-compose-mbeam.yml" up --remove-orphans ``` -**NOTE**: On a system where an `ipfs-pinner` instance is already running, check the instruction in the [Appendix](#appendix) to run `rudder` docker alongside. +**NOTE**: On a system where an `ipfs-pinner` instance is already running, check the instruction in the [Appendix](#appendix) to run `refiner` docker alongside. Running this will pull all the images and services that are ready to run. This will lead to the corresponding logs: ```elixir -Started rudder compose. - rudder Pulling +Started refiner compose. 
+ refiner Pulling ipfs-pinner Pulling evm-server Pulling 4f4fb700ef54 Downloading [==================================================>] 32B/32B @@ -279,19 +279,19 @@ Started rudder compose. 1fd45119e007 Extracting [================================================> ] 14.42MB/14.94MB 1fd45119e007 Extracting [==================================================>] 14.94MB/14.94MB 1fd45119e007 Pull complete - rudder Pulled + refiner Pulled Container evm-server Recreate Container ipfs-pinner Created Container evm-server Recreated - Container rudder Recreated - Attaching to evm-server, ipfs-pinner, rudder + Container refiner Recreated + Attaching to evm-server, ipfs-pinner, refiner ``` -Following this step a `rudder` release is auto compiled within the docker container and executed. +Following this step a `refiner` release is auto compiled within the docker container and executed. -**Note**: The below example is for the `dev` env (`_build/dev/rel/rudder/bin/rudder`) binary referred to as moonbase. +**Note**: The below example is for the `dev` env (`_build/dev/rel/refiner/bin/refiner`) binary referred to as moonbase. -For production the path to the binary would be to the `prod` env (`_build/prod/rel/rudder/bin/rudder`). This distinction is important since in elixir the static env vars (such as proof-chain contract address) are packaged with dynamic env vars (such as rpc to moonbeam/moonbase) along with the application binary. +For production the path to the binary would be to the `prod` env (`_build/prod/rel/refiner/bin/refiner`). This distinction is important since in elixir the static env vars (such as proof-chain contract address) are packaged with dynamic env vars (such as rpc to moonbeam/moonbase) along with the application binary. Hence there is a single binary per "Environment". To understand more about this take a look at the [hex docs](https://hexdocs.pm/elixir/main/Config.html). @@ -308,103 +308,103 @@ Hence there is a single binary per "Environment". 
To understand more about this ipfs-pinner | Run 'ipfs swarm limit all' to see the resulting limits. ipfs-pinner | ipfs-pinner | 2023/04/19 16:53:31 failed to sufficiently increase receive buffer size (was: 208 kiB, wanted: 2048 kiB, got: 416 kiB). See https://github.com/lucas-clemente/quic-go/wiki/UDP-Receive-Buffer-Size for details. - rudder | moonbase-node: https://moonbeam-alphanet.web3.covalenthq.com/alphanet/direct-rpc - rudder | brp-operator: ecf0b636233c6580f60f50ee1d809336c3a76640dbd77f7cdd054a82c6fc0a31 - rudder | evm-server: http://evm-server:3002 - rudder | ipfs-node: http://ipfs-pinner:3001 + refiner | moonbase-node: https://moonbeam-alphanet.web3.covalenthq.com/alphanet/direct-rpc + refiner | brp-operator: ecf0b636233c6580f60f50ee1d809336c3a76640dbd77f7cdd054a82c6fc0a31 + refiner | evm-server: http://evm-server:3002 + refiner | ipfs-node: http://ipfs-pinner:3001 ipfs-pinner | 2023/04/19 16:53:31 Listening... - rudder | ==> nimble_options - rudder | Compiling 3 files (.ex) - rudder | Generated nimble_options app - rudder | ===> Analyzing applications... 
- rudder | ===> Compiling parse_trans - rudder | ==> logger_file_backend - rudder | Compiling 1 file (.ex) - rudder | Generated rudder app - rudder | * assembling rudder-0.2.2 on MIX_ENV=dev - rudder | * skipping runtime configuration (config/runtime.exs not found) - rudder | * skipping elixir.bat for windows (bin/elixir.bat not found in the Elixir installation) - rudder | * skipping iex.bat for windows (bin/iex.bat not found in the Elixir installation) - rudder | - rudder | Release created at _build/dev/rel/rudder - rudder | - rudder | # To start your system - rudder | _build/dev/rel/rudder/bin/rudder start - rudder | - rudder | Once the release is running: - rudder | - rudder | # To connect to it remotely - rudder | _build/dev/rel/rudder/bin/rudder remote - rudder | - rudder | # To stop it gracefully (you may also send SIGINT/SIGTERM) - rudder | _build/dev/rel/rudder/bin/rudder stop - rudder | - rudder | To list all commands: - rudder | - rudder | _build/dev/rel/rudder/bin/rudder - rudder | - rudder | https://hexdocs.pm/telemetry/telemetry.html#attach/4 -``` - -### Docker Run - -Once the binary is compiled. Rudder can start to process block specimens into block results by starting the event listener. + refiner | ==> nimble_options + refiner | Compiling 3 files (.ex) + refiner | Generated nimble_options app + refiner | ===> Analyzing applications... 
+ refiner | ===> Compiling parse_trans + refiner | ==> logger_file_backend + refiner | Compiling 1 file (.ex) + refiner | Generated refiner app + refiner | * assembling refiner-0.2.2 on MIX_ENV=dev + refiner | * skipping runtime configuration (config/runtime.exs not found) + refiner | * skipping elixir.bat for windows (bin/elixir.bat not found in the Elixir installation) + refiner | * skipping iex.bat for windows (bin/iex.bat not found in the Elixir installation) + refiner | + refiner | Release created at _build/dev/rel/refiner + refiner | + refiner | # To start your system + refiner | _build/dev/rel/refiner/bin/refiner start + refiner | + refiner | Once the release is running: + refiner | + refiner | # To connect to it remotely + refiner | _build/dev/rel/refiner/bin/refiner remote + refiner | + refiner | # To stop it gracefully (you may also send SIGINT/SIGTERM) + refiner | _build/dev/rel/refiner/bin/refiner stop + refiner | + refiner | To list all commands: + refiner | + refiner | _build/dev/rel/refiner/bin/refiner + refiner | + refiner | https://hexdocs.pm/telemetry/telemetry.html#attach/4 +``` + +### Docker Run + +Once the binary is compiled. Refiner can start to process block specimens into block results by starting the event listener. 
```elixir - rudder | [info] starting event listener - rudder | [info] getting ids with status=discover - rudder | [info] found 1 bsps to process + refiner | [info] starting event listener + refiner | [info] getting ids with status=discover + refiner | [info] found 1 bsps to process ipfs-pinner | 2023/04/19 16:57:32 unixfsApi.Get: getting the cid: bafybeifkn67rc4lzoabvaglsifjitkhrnshhpwavutdhzeohzkxih25jpi ipfs-pinner | 2023/04/19 16:57:32 trying out https://w3s.link/ipfs/bafybeifkn67rc4lzoabvaglsifjitkhrnshhpwavutdhzeohzkxih25jpi - rudder | [info] Counter for ipfs_metrics - [fetch: 1] - rudder | [info] LastValue for ipfs_metrics - [fetch_last_exec_time: 0.0015149999999999999] - rudder | [info] Sum for ipfs_metrics - [fetch_total_exec_time: 0.0015149999999999999] - rudder | [info] Summary for ipfs_metrics - {0.0015149999999999999, 0.0015149999999999999} - rudder | [debug] reading schema `block-ethereum` from the file /app/priv/schemas/block-ethereum.avsc - rudder | [info] Counter for bsp_metrics - [decode: 1] - rudder | [info] LastValue for bsp_metrics - [decode_last_exec_time: 0.0] - rudder | [info] Sum for bsp_metrics - [decode_total_exec_time: 0.0] - rudder | [info] Summary for bsp_metrics - {0.0, 0.0} - rudder | [info] submitting 17081820 to evm http server... 
+ refiner | [info] Counter for ipfs_metrics - [fetch: 1] + refiner | [info] LastValue for ipfs_metrics - [fetch_last_exec_time: 0.0015149999999999999] + refiner | [info] Sum for ipfs_metrics - [fetch_total_exec_time: 0.0015149999999999999] + refiner | [info] Summary for ipfs_metrics - {0.0015149999999999999, 0.0015149999999999999} + refiner | [debug] reading schema `block-ethereum` from the file /app/priv/schemas/block-ethereum.avsc + refiner | [info] Counter for bsp_metrics - [decode: 1] + refiner | [info] LastValue for bsp_metrics - [decode_last_exec_time: 0.0] + refiner | [info] Sum for bsp_metrics - [decode_total_exec_time: 0.0] + refiner | [info] Summary for bsp_metrics - {0.0, 0.0} + refiner | [info] submitting 17081820 to evm http server... evm-server | [INFO] [04-19|16:57:33.824] input file at loc=/tmp/23851799 evm-server | [INFO] [04-19|16:57:33.828] output file at: loc=/tmp/1143694015 evm-server | [INFO] [04-19|16:57:34.153] Wrote file file=/tmp/1143694015 - rudder | [info] writing block result into "/tmp/briefly-1681/briefly-576460651588718236-AE8SrEl8GLI9jKhCKPk" - rudder | [info] Counter for bsp_metrics - [execute: 1] - rudder | [info] LastValue for bsp_metrics - [execute_last_exec_time: 3.9e-4] - rudder | [info] Sum for bsp_metrics - [execute_total_exec_time: 3.9e-4] - rudder | [info] Summary for bsp_metrics - {3.9e-4, 3.9e-4} + refiner | [info] writing block result into "/tmp/briefly-1681/briefly-576460651588718236-AE8SrEl8GLI9jKhCKPk" + refiner | [info] Counter for bsp_metrics - [execute: 1] + refiner | [info] LastValue for bsp_metrics - [execute_last_exec_time: 3.9e-4] + refiner | [info] Sum for bsp_metrics - [execute_total_exec_time: 3.9e-4] + refiner | [info] Summary for bsp_metrics - {3.9e-4, 3.9e-4} ipfs-pinner | 2023/04/19 16:57:34 generated dag has root cid: bafybeifd6gz6wofk3bwb5uai7zdmmr23q3nz3zt7edfujgj4kjg2es7eee ipfs-pinner | 2023/04/19 16:57:34 car file location: /tmp/1543170755.car ipfs-pinner | 2023/04/19 16:57:35 uploaded file has 
root cid: bafybeifd6gz6wofk3bwb5uai7zdmmr23q3nz3zt7edfujgj4kjg2es7eee - rudder | [info] Counter for ipfs_metrics - [pin: 1] - rudder | [info] LastValue for ipfs_metrics - [pin_last_exec_time: 0.001132] - rudder | [info] Sum for ipfs_metrics - [pin_total_exec_time: 0.001132] - rudder | [info] Summary for ipfs_metrics - {0.001132, 0.001132} - rudder | [info] 17081820:556753def2ff689c6312241a1ca182d58467319b7c2dca250ca50ed6acb31a5d has been successfully uploaded at ipfs://bafybeifd6gz6wofk3bwb5uai7zdmmr23q3nz3zt7edfujgj4kjg2es7eee - rudder | [info] 17081820:556753def2ff689c6312241a1ca182d58467319b7c2dca250ca50ed6acb31a5d proof submitting - rudder | [info] Counter for brp_metrics - [proof: 1] - rudder | [info] LastValue for brp_metrics - [proof_last_exec_time: 3.03e-4] - rudder | [info] Sum for brp_metrics - [proof_total_exec_time: 3.03e-4] - rudder | [info] Summary for brp_metrics - {3.03e-4, 3.03e-4} - rudder | [info] 17081820 txid is 0x01557912a0f7e083cbf6d34a2af21d99d129af386b95edc16162202862c60f8d - rudder | [info] Counter for brp_metrics - [upload_success: 1] - rudder | [info] LastValue for brp_metrics - [upload_success_last_exec_time: 0.0014579999999999999] - rudder | [info] Sum for brp_metrics - [upload_success_total_exec_time: 0.0014579999999999999] - rudder | [info] Summary for brp_metrics - {0.0014579999999999999, 0.0014579999999999999} - rudder | [info] Counter for rudder_metrics - [pipeline_success: 1] - rudder | [info] LastValue for rudder_metrics - [pipeline_success_last_exec_time: 0.0035489999999999996] - rudder | [info] Sum for rudder_metrics - [pipeline_success_total_exec_time: 0.0035489999999999996] - rudder | [info] Summary for rudder_metrics - {0.0035489999999999996, 0.0035489999999999996} - rudder | [info] curr_block: 4180658 and latest_block_num:4180657 -``` -### Monitor - -`rudder` already captures the most relevant performance metrics and execution times for various processes in the pipeline and exports all of it using Prometheus. 
- -See the full document on how to setup Prometheus and Grafana for [rudder metrics collection, monitoring, reporting and alerting](./docs/METRICS.md) - -## Build From Source + refiner | [info] Counter for ipfs_metrics - [pin: 1] + refiner | [info] LastValue for ipfs_metrics - [pin_last_exec_time: 0.001132] + refiner | [info] Sum for ipfs_metrics - [pin_total_exec_time: 0.001132] + refiner | [info] Summary for ipfs_metrics - {0.001132, 0.001132} + refiner | [info] 17081820:556753def2ff689c6312241a1ca182d58467319b7c2dca250ca50ed6acb31a5d has been successfully uploaded at ipfs://bafybeifd6gz6wofk3bwb5uai7zdmmr23q3nz3zt7edfujgj4kjg2es7eee + refiner | [info] 17081820:556753def2ff689c6312241a1ca182d58467319b7c2dca250ca50ed6acb31a5d proof submitting + refiner | [info] Counter for brp_metrics - [proof: 1] + refiner | [info] LastValue for brp_metrics - [proof_last_exec_time: 3.03e-4] + refiner | [info] Sum for brp_metrics - [proof_total_exec_time: 3.03e-4] + refiner | [info] Summary for brp_metrics - {3.03e-4, 3.03e-4} + refiner | [info] 17081820 txid is 0x01557912a0f7e083cbf6d34a2af21d99d129af386b95edc16162202862c60f8d + refiner | [info] Counter for brp_metrics - [upload_success: 1] + refiner | [info] LastValue for brp_metrics - [upload_success_last_exec_time: 0.0014579999999999999] + refiner | [info] Sum for brp_metrics - [upload_success_total_exec_time: 0.0014579999999999999] + refiner | [info] Summary for brp_metrics - {0.0014579999999999999, 0.0014579999999999999} + refiner | [info] Counter for refiner_metrics - [pipeline_success: 1] + refiner | [info] LastValue for refiner_metrics - [pipeline_success_last_exec_time: 0.0035489999999999996] + refiner | [info] Sum for refiner_metrics - [pipeline_success_total_exec_time: 0.0035489999999999996] + refiner | [info] Summary for refiner_metrics - {0.0035489999999999996, 0.0035489999999999996} + refiner | [info] curr_block: 4180658 and latest_block_num:4180657 +``` +### Monitor + +`refiner` already captures the most relevant 
performance metrics and execution times for various processes in the pipeline and exports all of it using Prometheus. + +See the full document on how to setup Prometheus and Grafana for [refiner metrics collection, monitoring, reporting and alerting](./docs/METRICS.md) + +## Build From Source Installation Time: 35-40 mins depending on your machine and network. @@ -418,9 +418,9 @@ Install `git`, `go`, `asdf`, `erlang`, `elixir`, `direnv`, `go-ipfs`. - IPFS as the InterPlanetary File System (IPFS) is a protocol, hypermedia and file sharing peer-to-peer network for storing and sharing data in a distributed file system. - Direnv is used for secret management and control since all the necessary sensitive parameters to the agent cannot be passed into a command line flag. Direnv allows for safe and easy management of secrets like Ethereum private keys for the operator accounts on the CQT network and redis instance access passwords etc. As these applications are exposed to the internet on http ports, it’s essential not to have the information be logged anywhere. To enable “direnv” on your machine, add these to your ~./bash_profile or ~./zshrc depending on which you use as your default shell after installing it using brew. -### Linux x86_64 (Ubuntu 22.04 LTS) Install dependencies +### Linux x86_64 (Ubuntu 22.04 LTS) Install dependencies -Install dependencies for rudder. +Install dependencies for refiner. ```bash sudo apt update @@ -498,13 +498,13 @@ sudo chmod -R 700 ~/.ipfs ipfs config profile apply server ``` -### Env Vars +### Env Vars -Refer to the above existing environment var setup for [rudder docker compose](#environment). +Refer to the above existing environment var setup for [refiner docker compose](#environment). **Note**: When passing the private key into the env vars as above, please remove the 0x prefix so the private key env var has exactly 64 characters. -### Source Run +### Source Run The EVM-Server is a stateless EVM block execution tool. 
It's stateless because in Ethereum nodes like geth, it doesn't need to maintain database of blocks to do a block execution or re-execution. The `evm-server` service transforms Block Specimens to Block Results entirely on the input of underlying capture of Block Specimen data. @@ -556,15 +556,15 @@ Run 'ipfs swarm limit all' to see the resulting limits. 2023/04/20 12:47:49 Listening... ``` -Clone the `rudder` repo +Clone the `refiner` repo ```bash -git clone https://github.com/covalenthq/rudder.git -cd rudder +git clone https://github.com/covalenthq/refiner.git +cd refiner git checkout main ``` -Get your `BLOCK_RESULT_OPERATOR_PRIVATE_KEY` that has `DEV` tokens for Moonbase Alpha and is already whitelisted as Block Result Producer operator. Set the following environment variables for the local rudder by creating an `.envrc.local` file +Get your `BLOCK_RESULT_OPERATOR_PRIVATE_KEY` that has `DEV` tokens for Moonbase Alpha and is already whitelisted as Block Result Producer operator. Set the following environment variables for the local refiner by creating an `.envrc.local` file ```bash touch .envrc.local @@ -584,14 +584,14 @@ Call to load `.envrc.local + .envrc` files with the command below and observe th ```bash direnv allow . -direnv: loading ~/Covalent/rudder/.envrc -direnv: loading ~/Covalent/rudder/.envrc.local +direnv: loading ~/Covalent/refiner/.envrc +direnv: loading ~/Covalent/refiner/.envrc.local direnv: export +BLOCK_RESULT_OPERATOR_PRIVATE_KEY +ERIGON_NODE +IPFS_PINNER_URL +NODE_ETHEREUM_MAINNET ``` -Once the env vars are passed into the `.envrc.local` file and loaded in the shell with `direnv allow .`, build the `rudder` application for the `prod` env i.e moonbeam mainnet or `dev` env for moonbase alpha as discussed before. +Once the env vars are passed into the `.envrc.local` file and loaded in the shell with `direnv allow .`, build the `refiner` application for the `prod` env i.e moonbeam mainnet or `dev` env for moonbase alpha as discussed before. 
-Get all the required dependencies and build the `rudder` app for the `dev` environment (this points to Moonbase Alpha contracts). **Note**: Windows is currently not supported. +Get all the required dependencies and build the `refiner` app for the `dev` environment (this points to Moonbase Alpha contracts). **Note**: Windows is currently not supported. ```bash mix local.hex --force && mix local.rebar --force && mix deps.get @@ -612,94 +612,94 @@ MIX_ENV=dev mix release .... evm-server: http://127.0.0.1:3002 ipfs-node: http://127.0.0.1:3001 -* assembling rudder-0.2.12 on MIX_ENV=dev +* assembling refiner-0.2.12 on MIX_ENV=dev * skipping runtime configuration (config/runtime.exs not found) * skipping elixir.bat for windows (bin/elixir.bat not found in the Elixir installation) * skipping iex.bat for windows (bin/iex.bat not found in the Elixir installation) -Release created at _build/dev/rel/rudder +Release created at _build/dev/rel/refiner # To start your system - _build/dev/rel/rudder/bin/rudder start + _build/dev/rel/refiner/bin/refiner start Once the release is running: # To connect to it remotely - _build/dev/rel/rudder/bin/rudder remote + _build/dev/rel/refiner/bin/refiner remote # To stop it gracefully (you may also send SIGINT/SIGTERM) - _build/dev/rel/rudder/bin/rudder stop + _build/dev/rel/refiner/bin/refiner stop To list all commands: - _build/dev/rel/rudder/bin/rudder + _build/dev/rel/refiner/bin/refiner ``` -Start the `rudder` application and execute the proof-chain block specimen listener call which should run the Refiner pipeline pulling Block Specimens from IPFS using the cids read from recent proof-chain finalized transactions, decoding them, and uploading and proofing Block Results while keeping a track of failed ones and continuing (soft real-time) in case of failure. The erlang concurrent fault tolerance allows each pipeline to be an independent worker that can fail (for any given Block Specimen) without crashing the entire pipeline application. 
Multiple pipeline worker children threads continue their work in the synchronous queue of Block Specimen AVRO binary files running the stateless EVM binary (`evm-server`) re-execution tool. +Start the `refiner` application and execute the proof-chain block specimen listener call which should run the Refiner pipeline pulling Block Specimens from IPFS using the cids read from recent proof-chain finalized transactions, decoding them, and uploading and proofing Block Results while keeping a track of failed ones and continuing (soft real-time) in case of failure. The erlang concurrent fault tolerance allows each pipeline to be an independent worker that can fail (for any given Block Specimen) without crashing the entire pipeline application. Multiple pipeline worker children threads continue their work in the synchronous queue of Block Specimen AVRO binary files running the stateless EVM binary (`evm-server`) re-execution tool. For moonbeam. ```elixir -MIX_ENV=prod mix run --no-halt --eval 'Rudder.ProofChain.BlockSpecimenEventListener.start()'; +MIX_ENV=prod mix run --no-halt --eval 'Refiner.ProofChain.BlockSpecimenEventListener.start()'; ``` For moonbase. ```bash -MIX_ENV=dev mix run --no-halt --eval 'Rudder.ProofChain.BlockSpecimenEventListener.start()'; +MIX_ENV=dev mix run --no-halt --eval 'Refiner.ProofChain.BlockSpecimenEventListener.start()'; .. ... -rudder | [info] found 1 bsps to process +refiner | [info] found 1 bsps to process ipfs-pinner | 2023/06/29 20:28:30 unixfsApi.Get: getting the cid: bafybeiaxl44nbafdmydaojz7krve6lcggvtysk6r3jaotrdhib3wpdb3di ipfs-pinner | 2023/06/29 20:28:30 trying out https://w3s.link/ipfs/bafybeiaxl44nbafdmydaojz7krve6lcggvtysk6r3jaotrdhib3wpdb3di ipfs-pinner | 2023/06/29 20:28:31 got the content! 
-rudder | [info] Counter for ipfs_metrics - [fetch: 1] -rudder | [info] LastValue for ipfs_metrics - [fetch_last_exec_time: 0.001604] -rudder | [info] Sum for ipfs_metrics - [fetch_total_exec_time: 0.001604] -rudder | [info] Summary for ipfs_metrics - {0.001604, 0.001604} -rudder | [debug] reading schema `block-ethereum` from the file /app/priv/schemas/block-ethereum.avsc -rudder | [info] Counter for bsp_metrics - [decode: 1] -rudder | [info] LastValue for bsp_metrics - [decode_last_exec_time: 0.0] -rudder | [info] Sum for bsp_metrics - [decode_total_exec_time: 0.0] -rudder | [info] Summary for bsp_metrics - {0.0, 0.0} -rudder | [info] submitting 17586995 to evm http server... +refiner | [info] Counter for ipfs_metrics - [fetch: 1] +refiner | [info] LastValue for ipfs_metrics - [fetch_last_exec_time: 0.001604] +refiner | [info] Sum for ipfs_metrics - [fetch_total_exec_time: 0.001604] +refiner | [info] Summary for ipfs_metrics - {0.001604, 0.001604} +refiner | [debug] reading schema `block-ethereum` from the file /app/priv/schemas/block-ethereum.avsc +refiner | [info] Counter for bsp_metrics - [decode: 1] +refiner | [info] LastValue for bsp_metrics - [decode_last_exec_time: 0.0] +refiner | [info] Sum for bsp_metrics - [decode_total_exec_time: 0.0] +refiner | [info] Summary for bsp_metrics - {0.0, 0.0} +refiner | [info] submitting 17586995 to evm http server... 
evm-server | [INFO] [06-29|20:28:31.859] input file at loc=/tmp/3082854681 evm-server | [INFO] [06-29|20:28:31.862] output file at: loc=/tmp/1454174090 evm-server | [INFO] [06-29|20:28:32.112] Wrote file file=/tmp/1454174090 -rudder | [info] writing block result into "/tmp/briefly-1688/briefly-576460747542186916-YRw0mRjfExGMk4M672" -rudder | [info] Counter for bsp_metrics - [execute: 1] -rudder | [info] LastValue for bsp_metrics - [execute_last_exec_time: 3.14e-4] -rudder | [info] Sum for bsp_metrics - [execute_total_exec_time: 3.14e-4] -rudder | [info] Summary for bsp_metrics - {3.14e-4, 3.14e-4} +refiner | [info] writing block result into "/tmp/briefly-1688/briefly-576460747542186916-YRw0mRjfExGMk4M672" +refiner | [info] Counter for bsp_metrics - [execute: 1] +refiner | [info] LastValue for bsp_metrics - [execute_last_exec_time: 3.14e-4] +refiner | [info] Sum for bsp_metrics - [execute_total_exec_time: 3.14e-4] +refiner | [info] Summary for bsp_metrics - {3.14e-4, 3.14e-4} ipfs-pinner | 2023/06/29 20:28:32 generated dag has root cid: bafybeic6ernzbb6x4qslwfgklveisyz4vkuqhaafqzwlvto6c2njonxi3e ipfs-pinner | 2023/06/29 20:28:32 car file location: /tmp/249116437.car [119B blob data] ipfs-pinner | 2023/06/29 20:28:34 Received /health request: source= 127.0.0.1:34980 status= OK ipfs-pinner | 2023/06/29 20:28:34 uploaded file has root cid: bafybeic6ernzbb6x4qslwfgklveisyz4vkuqhaafqzwlvto6c2njonxi3e -rudder | [info] Counter for ipfs_metrics - [pin: 1] -rudder | [info] LastValue for ipfs_metrics - [pin_last_exec_time: 0.002728] -rudder | [info] Sum for ipfs_metrics - [pin_total_exec_time: 0.002728] -rudder | [info] Summary for ipfs_metrics - {0.002728, 0.002728} -rudder | [info] 17586995:48f1e992d1ac800baed282e12ef4f2200820061b5b8f01ca0a9ed9a7d6b5ddb3 has been successfully uploaded at ipfs://bafybeic6ernzbb6x4qslwfgklveisyz4vkuqhaafqzwlvto6c2njonxi3e -rudder | [info] 17586995:48f1e992d1ac800baed282e12ef4f2200820061b5b8f01ca0a9ed9a7d6b5ddb3 proof submitting -rudder | 
[info] Counter for brp_metrics - [proof: 1] -rudder | [info] LastValue for brp_metrics - [proof_last_exec_time: 3.6399999999999996e-4] -rudder | [info] Sum for brp_metrics - [proof_total_exec_time: 3.6399999999999996e-4] -rudder | [info] Summary for brp_metrics - {3.6399999999999996e-4, 3.6399999999999996e-4} -rudder | [info] 17586995 txid is 0xd8a8ea410240bb0324433bc26fdc79d496ad0c8bfd18b60314a05e3a0de4fb06 -rudder | [info] Counter for brp_metrics - [upload_success: 1] -rudder | [info] LastValue for brp_metrics - [upload_success_last_exec_time: 0.0031149999999999997] -rudder | [info] Sum for brp_metrics - [upload_success_total_exec_time: 0.0031149999999999997] -rudder | [info] Summary for brp_metrics - {0.0031149999999999997, 0.0031149999999999997} -rudder | [info] Counter for rudder_metrics - [pipeline_success: 1] -rudder | [info] LastValue for rudder_metrics - [pipeline_success_last_exec_time: 0.0052] -rudder | [info] Sum for rudder_metrics - [pipeline_success_total_exec_time: 0.0052] -rudder | [info] Summary for rudder_metrics - {0.0052, 0.0052} -``` - -Check logs for any errors in the pipeline process and note the performance metrics in line with execution. Checkout the documentation on what is being measured and why [here](https://github.com/covalenthq/rudder/blob/main/docs/METRICS.md). 
+refiner | [info] Counter for ipfs_metrics - [pin: 1] +refiner | [info] LastValue for ipfs_metrics - [pin_last_exec_time: 0.002728] +refiner | [info] Sum for ipfs_metrics - [pin_total_exec_time: 0.002728] +refiner | [info] Summary for ipfs_metrics - {0.002728, 0.002728} +refiner | [info] 17586995:48f1e992d1ac800baed282e12ef4f2200820061b5b8f01ca0a9ed9a7d6b5ddb3 has been successfully uploaded at ipfs://bafybeic6ernzbb6x4qslwfgklveisyz4vkuqhaafqzwlvto6c2njonxi3e +refiner | [info] 17586995:48f1e992d1ac800baed282e12ef4f2200820061b5b8f01ca0a9ed9a7d6b5ddb3 proof submitting +refiner | [info] Counter for brp_metrics - [proof: 1] +refiner | [info] LastValue for brp_metrics - [proof_last_exec_time: 3.6399999999999996e-4] +refiner | [info] Sum for brp_metrics - [proof_total_exec_time: 3.6399999999999996e-4] +refiner | [info] Summary for brp_metrics - {3.6399999999999996e-4, 3.6399999999999996e-4} +refiner | [info] 17586995 txid is 0xd8a8ea410240bb0324433bc26fdc79d496ad0c8bfd18b60314a05e3a0de4fb06 +refiner | [info] Counter for brp_metrics - [upload_success: 1] +refiner | [info] LastValue for brp_metrics - [upload_success_last_exec_time: 0.0031149999999999997] +refiner | [info] Sum for brp_metrics - [upload_success_total_exec_time: 0.0031149999999999997] +refiner | [info] Summary for brp_metrics - {0.0031149999999999997, 0.0031149999999999997} +refiner | [info] Counter for refiner_metrics - [pipeline_success: 1] +refiner | [info] LastValue for refiner_metrics - [pipeline_success_last_exec_time: 0.0052] +refiner | [info] Sum for refiner_metrics - [pipeline_success_total_exec_time: 0.0052] +refiner | [info] Summary for refiner_metrics - {0.0052, 0.0052} +``` + +Check logs for any errors in the pipeline process and note the performance metrics in line with execution. Checkout the documentation on what is being measured and why [here](https://github.com/covalenthq/refiner/blob/main/docs/METRICS.md). ```bash tail -f logs/log.log .. ... 
-rudder | [info] Counter for rudder_metrics - [pipeline_success: 1] -rudder | [info] LastValue for rudder_metrics - [pipeline_success_last_exec_time: 0.0052] -rudder | [info] Sum for rudder_metrics - [pipeline_success_total_exec_time: 0.0052] -rudder | [info] Summary for rudder_metrics - {0.0052, 0.0052} +refiner | [info] Counter for refiner_metrics - [pipeline_success: 1] +refiner | [info] LastValue for refiner_metrics - [pipeline_success_last_exec_time: 0.0052] +refiner | [info] Sum for refiner_metrics - [pipeline_success_total_exec_time: 0.0052] +refiner | [info] Summary for refiner_metrics - {0.0052, 0.0052} ``` Alternatively - check proof-chain logs for correct block result proof submissions and transactions made by your block result producer. @@ -708,7 +708,7 @@ Alternatively - check proof-chain logs for correct block result proof submission [For moonbase](https://moonbase.moonscan.io/address/0x19492a5019B30471aA8fa2c6D9d39c99b5Cda20C). -**Note**:For any issues associated with building and re-compiling execute the following commands, that cleans, downloads and re-compiles the dependencies for `rudder`. +**Note**: For any issues associated with building and re-compiling, execute the following commands, which clean, download and re-compile the dependencies for `refiner`. ```bash rm -rf _build deps && mix clean && mix deps.get && mix deps.compile @@ -716,7 +716,7 @@ rm -rf _build deps && mix clean && mix deps.get && mix deps.compile If you got everything working so far, congratulations! You're now a Refiner operator on the CQT Network. Set up Grafana monitoring and alerting from links in the [additional resources](#additional-resources) section. -## Troubleshooting +## Troubleshooting To avoid permission errors with ~/.ipfs folder execute the following in your home directory. 
@@ -743,17 +743,17 @@ chmod +x ./fs-repo-migrations ./fs-repo-migrations ``` -### Bugs Reporting Contributions +### Bugs Reporting Contributions Please follow the guide in docs [contribution guidelines](./docs/CONTRIBUTING.md) for bug reporting and contributions. -## Scripts +## Scripts -In order to run the Refiner docker compose services as a service unit. The example service unit file in [docs](./docs/rudder-compose.service) should suffice. After adding the env vars in their respective fields in the service unit file, enable the service and start it. +In order to run the Refiner docker compose services as a service unit, the example service unit file in [docs](./docs/refiner-compose.service) should suffice. After adding the env vars in their respective fields in the service unit file, enable the service and start it. ```bash -sudo systemctl enable rudder-compose.service -sudo systemctl start rudder-compose.service +sudo systemctl enable refiner-compose.service +sudo systemctl start refiner-compose.service ``` **Note**: To run docker compose as a non-root user for the above shown service unit, you need to create a docker group (if it doesn't exist) and add the user “blockchain” to the docker group. @@ -782,10 +782,10 @@ export EVM_SERVER_URL="http://evm-server:3002" ```bash version: '3' -# runs the entire rudder pipeline with all supporting services (including rudder) in docker +# runs the entire refiner pipeline with all supporting services (including refiner) in docker # set .env such that all services in docker are talking to each other only; ipfs-pinner is assumed # to be hosted on the host machine. It's accessed through http://host.docker.internal:3001/ url from -# inside rudder docker container. +# inside refiner docker container. 
services: evm-server: image: "us-docker.pkg.dev/covalent-project/network/evm-server:stable" @@ -800,9 +800,9 @@ services: ports: - "3002:3002" - rudder: - image: "us-docker.pkg.dev/covalent-project/network/rudder:stable" - container_name: rudder + refiner: + image: "us-docker.pkg.dev/covalent-project/network/refiner:stable" + container_name: refiner links: - "evm-server:evm-server" restart: always @@ -816,7 +816,7 @@ services: echo "ipfs-pinner:" $IPFS_PINNER; cd /app; MIX_ENV=prod mix release --overwrite; - MIX_ENV=prod mix run --no-halt --eval 'Rudder.ProofChain.BlockSpecimenEventListener.start()';" + MIX_ENV=prod mix run --no-halt --eval 'Refiner.ProofChain.BlockSpecimenEventListener.start()';" environment: - NODE_ETHEREUM_MAINNET=${NODE_ETHEREUM_MAINNET} - BLOCK_RESULT_OPERATOR_PRIVATE_KEY=${BLOCK_RESULT_OPERATOR_PRIVATE_KEY} @@ -842,13 +842,13 @@ networks: cqt-net: ``` -and start the `rudder` and `evm-server` services: +and start the `refiner` and `evm-server` services: ```bash $ docker compose -f "docker-compose-mbeam.yml" up --remove-orphans [+] Running 3/3 - ⠿ Network rudder_cqt-net Created 0.0s + ⠿ Network refiner_cqt-net Created 0.0s ⠿ Container evm-server Started 0.7s - ⠿ Container rudder Started 1.5s + ⠿ Container refiner Started 1.5s ``` diff --git a/codecov.yml b/codecov.yml index 510cfacd..d567f2b4 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,2 +1,2 @@ ignore: - - "lib/rudder/proof_chain/block_specimen_event_listener.ex" + - "lib/refiner/proof_chain/block_specimen_event_listener.ex" diff --git a/config/config.exs b/config/config.exs index 7e2c2ea5..40ef3890 100644 --- a/config/config.exs +++ b/config/config.exs @@ -7,7 +7,7 @@ # General application configuration import Config -config :rudder, +config :refiner, ipfs_pinner_url: System.get_env("IPFS_PINNER_URL", "http://127.0.0.1:3001"), operator_private_key: System.get_env("BLOCK_RESULT_OPERATOR_PRIVATE_KEY"), bsp_proofchain_address: "0x4f2E285227D43D9eB52799D0A28299540452446E", diff --git 
a/config/docker.exs b/config/docker.exs index 9e73877c..2eb6aa0d 100644 --- a/config/docker.exs +++ b/config/docker.exs @@ -1,6 +1,6 @@ import Config -config :rudder, +config :refiner, operator_private_key: "8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", bsp_proofchain_address: "0xce44d283b806C62698285D83c2Ca3F1e42Eb7112", brp_proofchain_address: "0x3402ce1e416e082ed3Ba3d9dcba10353F3b64499", diff --git a/config/prod.exs b/config/prod.exs index 72a7c6b2..e3552ba9 100644 --- a/config/prod.exs +++ b/config/prod.exs @@ -48,7 +48,7 @@ config :phoenix, :stacktrace_depth, 20 config :phoenix, :plug_init_mode, :runtime # Default these should point to moonbeam since we do all prod deployments there -config :rudder, +config :refiner, operator_private_key: System.get_env("BLOCK_RESULT_OPERATOR_PRIVATE_KEY"), bsp_proofchain_address: "0x4f2E285227D43D9eB52799D0A28299540452446E", brp_proofchain_address: "0x254E3FA072324fa202577F24147066359947bC23", diff --git a/config/test.exs b/config/test.exs index a91c7dce..65041d50 100644 --- a/config/test.exs +++ b/config/test.exs @@ -1,6 +1,6 @@ import Config -config :rudder, +config :refiner, operator_private_key: "8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", bsp_proofchain_address: "0xce44d283b806C62698285D83c2Ca3F1e42Eb7112", brp_proofchain_address: "0x3402ce1e416e082ed3Ba3d9dcba10353F3b64499", diff --git a/docker-compose-ci.yml b/docker-compose-ci.yml index 9426afce..fbd40233 100644 --- a/docker-compose-ci.yml +++ b/docker-compose-ci.yml @@ -1,9 +1,9 @@ version: "3" -# runs the entire rudder pipeline with all supporting services (including rudder) in docker +# runs the entire refiner pipeline with all supporting services (including refiner) in docker # set .env such that all services in docker are talking to each other only services: eth-node: - image: "us-docker.pkg.dev/covalent-project/network/operational-staking:latest-rudder" + image: 
"us-docker.pkg.dev/covalent-project/network/cqt-staking:latest-refiner" container_name: hardhat-node restart: on-failure expose: @@ -20,8 +20,8 @@ services: ports: - "8545:8545" - operational-staking: - image: "us-docker.pkg.dev/covalent-project/network/operational-staking:latest-rudder" + cqt-staking: + image: "us-docker.pkg.dev/covalent-project/network/cqt-staking:latest-refiner" container_name: proof-chain restart: on-failure entrypoint: | @@ -31,7 +31,7 @@ services: echo hard-hat node started!; echo "web3-jwt:" $WEB3_JWT; npm run docker:deploy; - nc -v rudder 8008; + nc -v refiner 8008; sleep 1000000;" depends_on: - eth-node @@ -79,9 +79,9 @@ services: ports: - "3002:3002" - rudder: - image: "us-docker.pkg.dev/covalent-project/network/rudder:latest" - container_name: rudder + refiner: + image: "us-docker.pkg.dev/covalent-project/network/refiner:latest" + container_name: refiner links: - "ipfs-pinner:ipfs-pinner" # build: diff --git a/docker-compose-mbase.yml b/docker-compose-mbase.yml index a8a6b59a..4d2eaad6 100644 --- a/docker-compose-mbase.yml +++ b/docker-compose-mbase.yml @@ -1,5 +1,5 @@ version: "3" -# runs the entire rudder pipeline with all supporting services (including rudder) in docker +# runs the entire refiner pipeline with all supporting services (including refiner) in docker # set .env such that all services in docker are talking to each other only services: @@ -34,9 +34,9 @@ services: ports: - "3002:3002" - rudder: - image: "us-docker.pkg.dev/covalent-project/network/rudder:latest" - container_name: rudder + refiner: + image: "us-docker.pkg.dev/covalent-project/network/refiner:latest" + container_name: refiner links: - "ipfs-pinner:ipfs-pinner" - "evm-server:evm-server" @@ -56,7 +56,7 @@ services: echo "ipfs-node:" $IPFS_PINNER_URL; cd /mix; MIX_ENV=dev mix release --overwrite; - MIX_ENV=dev mix run --no-halt --eval 'Rudder.ProofChain.BlockSpecimenEventListener.start()';" + MIX_ENV=dev mix run --no-halt --eval 
'Refiner.ProofChain.BlockSpecimenEventListener.start()';" environment: - NODE_ETHEREUM_MAINNET=${NODE_ETHEREUM_MAINNET} - BLOCK_RESULT_OPERATOR_PRIVATE_KEY=${BLOCK_RESULT_OPERATOR_PRIVATE_KEY} diff --git a/docker-compose-mbeam.yml b/docker-compose-mbeam.yml index 99291dbb..57306361 100644 --- a/docker-compose-mbeam.yml +++ b/docker-compose-mbeam.yml @@ -1,5 +1,5 @@ version: "3" -# runs the entire rudder pipeline with all supporting services (including rudder) in docker +# runs the entire refiner pipeline with all supporting services (including refiner) in docker # set .env such that all services in docker are talking to each other only services: ipfs-pinner: @@ -35,9 +35,9 @@ services: ports: - "3002:3002" - rudder: - image: "us-docker.pkg.dev/covalent-project/network/rudder:stable" - container_name: rudder + refiner: + image: "us-docker.pkg.dev/covalent-project/network/refiner:stable" + container_name: refiner links: - "ipfs-pinner:ipfs-pinner" - "evm-server:evm-server" @@ -57,7 +57,7 @@ services: echo "ipfs-node:" $IPFS_PINNER_URL; cd /mix; MIX_ENV=prod mix release --overwrite; - MIX_ENV=prod mix run --no-halt --eval 'Rudder.ProofChain.BlockSpecimenEventListener.start()';" + MIX_ENV=prod mix run --no-halt --eval 'Refiner.ProofChain.BlockSpecimenEventListener.start()';" environment: - NODE_ETHEREUM_MAINNET=${NODE_ETHEREUM_MAINNET} - BLOCK_RESULT_OPERATOR_PRIVATE_KEY=${BLOCK_RESULT_OPERATOR_PRIVATE_KEY} diff --git a/docs/ARCH.md b/docs/ARCH.md index 6214bd23..4dc5fe1e 100644 --- a/docs/ARCH.md +++ b/docs/ARCH.md @@ -1,4 +1,4 @@ -# Architecture +# Architecture - [Architecture](#architecture) - [Block Specimen Event Listener](#block-specimen-event-listener) @@ -10,13 +10,13 @@ - [Pipeline Journal](#pipeline-journal) - [Pipeline Telemetry](#pipeline-telemetry) -![Rudder Pipeline](./pipeline-white.png) +![Refiner Pipeline](./pipeline-white.png) -The happy path for the `rudder` (the refiner) application in the Covalent Network is made up of actor processes spawned 
through many [Gen Servers](https://elixir-lang.org/getting-started/mix-otp/genserver.html) processes that are loosely coupled, here some maintain state, and some don't. +The happy path for the `refiner` application in the Covalent Network is made up of actor processes spawned through many [Gen Servers](https://elixir-lang.org/getting-started/mix-otp/genserver.html) processes that are loosely coupled; some maintain state, and some don't. The children processes can be called upon to fulfill responsibilities at different sections in the refinement/transformation process pipeline - under one umbrella [Dynamic Supervisor](https://elixir-lang.org/getting-started/mix-otp/dynamic-supervisor.html) that can bring them back up in case of a failure to continue a given pipeline operation. -![Rudder Supervisor](./supervisor.png) +![Refiner Supervisor](./supervisor.png) There are currently 8 main components to the refiner. @@ -29,9 +29,9 @@ There are currently 8 main components to the refiner. 7. Pipeline Journal 8. Pipeline Telemetry -## Block Specimen Event Listener +## Block Specimen Event Listener -The block specimen event listener is the first to start in the rudder/refiner pipeline by listening to events happening in the [proof-chain contract](https://github.com/covalenthq/bsp-staking). The events refiner cares about relate to the finalized block specimens and the finalized block results. +The block specimen event listener is the first to start in the refiner pipeline by listening to events happening in the [proof-chain contract](https://github.com/covalenthq/bsp-staking). The events refiner cares about relate to the finalized block specimens and the finalized block results. All block specimen proofs that have been submitted and achieved consensus are ready to be transformed into block results. @@ -42,10 +42,10 @@ Start the listener. 
iex -S mix Erlang/OTP 25 [erts-13.0] [source] [64-bit] [smp:8:8] [ds:8:8:10] [async-threads:1] [jit:ns] [dtrace] - Generated rudder app + Generated refiner app Interactive Elixir (1.13.4) - press Ctrl+C to exit (type h() ENTER for help) -iex(1)> Rudder.ProofChain.BlockSpecimenEventListener.start() +iex(1)> Refiner.ProofChain.BlockSpecimenEventListener.start() ``` This should start listening to on-chain events for reward finalization of submitted block specimens. Once one such event is found, the block specimen will be fetched and processed in the pipeline. @@ -54,14 +54,14 @@ You can tail the logs to check the state: ```elixir tail -f logs/log.log -17:52:11.222 file=lib/rudder/proof_chain/block_specimen_event_listener.ex line=100 [info] listening for events at 3707084 -17:52:11.481 file=lib/rudder/proof_chain/block_specimen_event_listener.ex line=114 [info] found 0 bsps to process -17:52:11.742 file=lib/rudder/proof_chain/block_specimen_event_listener.ex line=125 [info] curr_block: 3707085 and latest_block_num:3769853 +17:52:11.222 file=lib/refiner/proof_chain/block_specimen_event_listener.ex line=100 [info] listening for events at 3707084 +17:52:11.481 file=lib/refiner/proof_chain/block_specimen_event_listener.ex line=114 [info] found 0 bsps to process +17:52:11.742 file=lib/refiner/proof_chain/block_specimen_event_listener.ex line=125 [info] curr_block: 3707085 and latest_block_num:3769853 ``` -## Block Specimen Encoder Decoder +## Block Specimen Encoder Decoder -Once a block specimen that has been finalized has been received, the rudder extracts the specimen directly async in the pipeline process, spawning a block specimen decode process for each specimen separately using AVRO client library `avrora`. +Once a block specimen that has been finalized has been received, the refiner extracts the specimen directly async in the pipeline process, spawning a block specimen decode process for each specimen separately using AVRO client library `avrora`. 
It carries out the following steps - @@ -71,8 +71,8 @@ It carries out the following steps - 4. Streams the binary files (does it async - during stream execution). ```elixir -iex(1)> Rudder.Avro.BlockSpecimenDecoder.decode_file("test-data/1-15127599-replica-0x167a4a9380713f133aa55f251fd307bd88dfd9ad1f2087346e1b741ff47ba7f5") -[debug] reading schema `block-ethereum` from the file /Users/pranay/Documents/covalent/elixir-projects/rudder/priv/schemas/block-ethereum.avsc +iex(1)> Refiner.Avro.BlockSpecimenDecoder.decode_file("test-data/1-15127599-replica-0x167a4a9380713f133aa55f251fd307bd88dfd9ad1f2087346e1b741ff47ba7f5") +[debug] reading schema `block-ethereum` from the file /Users/pranay/Documents/covalent/elixir-projects/refiner/priv/schemas/block-ethereum.avsc {:ok, %{ "codecVersion" => 0.2, @@ -95,7 +95,7 @@ iex(1)> Rudder.Avro.BlockSpecimenDecoder.decode_file("test-data/1-15127599-repli **Note**: The above decoder process only decodes a single specimen. Here below we can extract an entire directory. A stream of specimens files can be passed instead to the avro decode process for lazy eval and further down the pipeline to the erigon evm t8n (transition) tool. ```elixir -iex(2)> Rudder.Avro.BlockSpecimenDecoder.decode_dir("test-data/*") +iex(2)> Refiner.Avro.BlockSpecimenDecoder.decode_dir("test-data/*") [ #Stream<[ enum: ["test-data/1-15127599-replica-0x167a4a9380713f133aa55f251fd307bd88dfd9ad1f2087346e1b741ff47ba7f5"], @@ -112,39 +112,39 @@ iex(2)> Rudder.Avro.BlockSpecimenDecoder.decode_dir("test-data/*") ] ``` -## Block Specimen Processor +## Block Specimen Processor -Next, the block specimen processor is available as an http server with `export EVM_SERVER_URL="http://127.0.0.1:3002"`. This takes the `block_id` and `block_specimen` json object and provides the block result. The stateless transition tool needed to run the specimen is written in `golang`, which is invoked via the http server outside of the rudder. 
+Next, the block specimen processor is available as an http server with `export EVM_SERVER_URL="http://127.0.0.1:3002"`. This takes the `block_id` and `block_specimen` json object and provides the block result. The stateless transition tool needed to run the specimen is written in `golang`, which is invoked via the http server outside of the refiner. -In an earlier version of the rudder, the server was originally a (golang) binary plugin to the rudder application and executed with the block specimen inputs in a `:porcelain` app within a shell process in erlang, but then moved out due to performance and consistency considerations. +In an earlier version of the refiner, the server was originally a (golang) binary plugin to the refiner application and executed with the block specimen inputs in a `:porcelain` app within a shell process in erlang, but then moved out due to performance and consistency considerations. Below is an example of submitting an avro encoded block specimen binary to the specimen processor. ```elixir iex(1)> replica_fp="test-data/1-15127602-replica-0xce9ed851812286e05cd34684c9ce3836ea62ebbfc3764c8d8a131f0fd054ca35" -iex(2)> [replica_fp] |> Stream.map(&Rudder.Avro.BlockSpecimenDecoder.decode_file/1) |> Enum.map(fn {:ok, contents} -> {Integer.to_string(Enum.random('0123456789abcdef')), Poison.encode!(contents)} end) |> Enum.map(&Rudder.BlockProcessor.sync_queue/1) +iex(2)> [replica_fp] |> Stream.map(&Refiner.Avro.BlockSpecimenDecoder.decode_file/1) |> Enum.map(fn {:ok, contents} -> {Integer.to_string(Enum.random('0123456789abcdef')), Poison.encode!(contents)} end) |> Enum.map(&Refiner.BlockProcessor.sync_queue/1) ``` The block processor takes the block specimen, runs it through the stateless evm tool (server) and gives the block result. Another way by which we can achieve the transformation is by calling `push_bsps_to_process` from the event listener. 
```elixir -iex(1)> Rudder.ProofChain.BlockSpecimenEventListener.push_bsps_to_process(["1_16582405_7f85dc42062468a6bbe420ae4fe4455b9c2423b798a6031f8ea7826997046907_402705672e34a250dcc798bb9ae3a14593e7cdc49750d57ef6018100503f3024"]) +iex(1)> Refiner.ProofChain.BlockSpecimenEventListener.push_bsps_to_process(["1_16582405_7f85dc42062468a6bbe420ae4fe4455b9c2423b798a6031f8ea7826997046907_402705672e34a250dcc798bb9ae3a14593e7cdc49750d57ef6018100503f3024"]) ``` That will lead to the corresponding logs: ```elixir -17:56:38.124 file=lib/rudder/evm/block_processor.ex line=38 [info] submitting 16582405 to evm plugin... -17:56:39.028 file=lib/rudder/evm/block_processor.ex line=46 [info] writing block result into "/var/folders/w0/bf3y1c7d6ys15tq97ffk5qhw0000gn/T/briefly-1676/briefly-576460644194238825-5Hm1Jx2ZdSrq7sqPmEsC" -17:56:44.897 file=lib/rudder/block_result/block_result_uploader.ex line=41 [info] 16582405:402705672e34a250dcc798bb9ae3a14593e7cdc49750d57ef6018100503f3024 has been successfully uploaded at ipfs://bafybeif4mnjugrttv4ru337inkrkji4dwe755yphfpogitivuklvmp4cym -17:56:44.921 file=lib/rudder/block_result/block_result_uploader.ex line=47 [info] 16582405:402705672e34a250dcc798bb9ae3a14593e7cdc49750d57ef6018100503f3024 proof submitting +17:56:38.124 file=lib/refiner/evm/block_processor.ex line=38 [info] submitting 16582405 to evm plugin... 
+17:56:39.028 file=lib/refiner/evm/block_processor.ex line=46 [info] writing block result into "/var/folders/w0/bf3y1c7d6ys15tq97ffk5qhw0000gn/T/briefly-1676/briefly-576460644194238825-5Hm1Jx2ZdSrq7sqPmEsC" +17:56:44.897 file=lib/refiner/block_result/block_result_uploader.ex line=41 [info] 16582405:402705672e34a250dcc798bb9ae3a14593e7cdc49750d57ef6018100503f3024 has been successfully uploaded at ipfs://bafybeif4mnjugrttv4ru337inkrkji4dwe755yphfpogitivuklvmp4cym +17:56:44.921 file=lib/refiner/block_result/block_result_uploader.ex line=47 [info] 16582405:402705672e34a250dcc798bb9ae3a14593e7cdc49750d57ef6018100503f3024 proof submitting ``` -## Block Result Uploader +## Block Result Uploader -Once the block results have been produced they need to be proved and uploaded. This ideally happens atomically for the rudder. +Once the block results have been produced they need to be proved and uploaded. This ideally happens atomically for the refiner. Below is an example of how to interact with block result uploader that speaks to `ipfs-pinner` available with `export IPFS_PINNER_URL="http://127.0.0.1:3001"`. The file is directly uploaded to IPFS using the wrapped local IPFS node. 
@@ -155,7 +155,7 @@ Below is an example of how to interact with block result uploader that speaks to <<44, 242, 77, 186, 95, 176, 163, 14, 38, 232, 59, 42, 197, 185, 226, 158, 27, 22, 30, 92, 31, 167, 66, 94, 115, 4, 51, 98, 147, 139, 152, 36>> - block_result_metadata = %Rudder.BlockResultMetadata{ + block_result_metadata = %Refiner.BlockResultMetadata{ chain_id: 1, block_height: 1, block_specimen_hash: "525D191D6492F1E0928d4e816c29778c", @@ -163,27 +163,27 @@ Below is an example of how to interact with block result uploader that speaks to } {error, cid, block_result_hash} = - Rudder.BlockResultUploader.upload_block_result(block_result_metadata) + Refiner.BlockResultUploader.upload_block_result(block_result_metadata) ``` That will lead to the corresponding logs: ```elixir -rudder | * test uploads block result to ipfs and sends the block result hash to proof chain [L#11]08:53:06.401 [info] Counter for ipfs_metrics - [pin: 1] -rudder | 08:53:06.401 [info] LastValue for ipfs_metrics - [pin_last_exec_time: 7.41e-4] -rudder | 08:53:06.402 [info] Sum for ipfs_metrics - [pin_total_exec_time: 7.41e-4] -rudder | 08:53:06.402 [info] Summary for ipfs_metrics - ***7.41e-4, 7.41e-4*** -rudder | 08:53:06.402 [info] 1:525D191D6492F1E0928d4e816c29778c has been successfully uploaded at ipfs://bafkreibm6jg3ux5qumhcn2b3flc3tyu6dmlb4xa7u5bf44yegnrjhc4yeq -rudder | 08:53:06.414 [info] 1:525D191D6492F1E0928d4e816c29778c proof submitting +refiner | * test uploads block result to ipfs and sends the block result hash to proof chain [L#11]08:53:06.401 [info] Counter for ipfs_metrics - [pin: 1] +refiner | 08:53:06.401 [info] LastValue for ipfs_metrics - [pin_last_exec_time: 7.41e-4] +refiner | 08:53:06.402 [info] Sum for ipfs_metrics - [pin_total_exec_time: 7.41e-4] +refiner | 08:53:06.402 [info] Summary for ipfs_metrics - ***7.41e-4, 7.41e-4*** +refiner | 08:53:06.402 [info] 1:525D191D6492F1E0928d4e816c29778c has been successfully uploaded at 
ipfs://bafkreibm6jg3ux5qumhcn2b3flc3tyu6dmlb4xa7u5bf44yegnrjhc4yeq +refiner | 08:53:06.414 [info] 1:525D191D6492F1E0928d4e816c29778c proof submitting ``` -## IPFS Interactor +## IPFS Interactor Underlying the block result uploader is the IPFS interactor module that allows refiner to interact with IPFS cids by listening for them and uploading them. Below is an example of fetching or discovering a block specimen using its uploaded `cid` collected by listening to the log event of a block specimen proof submission. ```elixir iex(1)> urls=["ipfs://bafybeifo5o7zatnudfyvixkziy5aj4fhikv5nq3pbizpgwdcz4fqwarhgu"] -iex(2)> {:ok, specimen} = Rudder.IPFSInteractor.discover_block_specimen(urls) +iex(2)> {:ok, specimen} = Refiner.IPFSInteractor.discover_block_specimen(urls) [info] Counter for ipfs_metrics - [fetch: 1] [info] LastValue for ipfs_metrics - [fetch_last_exec_time: 0.0026019999999999997] [info] Sum for ipfs_metrics - [fetch_total_exec_time: 0.0026019999999999997] @@ -197,8 +197,8 @@ iex(2)> {:ok, specimen} = Rudder.IPFSInteractor.discover_block_specimen(urls) Thereafter we can decode this block specimen and continue to use it in the pipeline process as seen before. ```elixir -iex(3)> {:ok, decoded_specimen} = Rudder.Avro.BlockSpecimen.decode(specimen) -[debug] reading schema `block-ethereum` from the file /Users/pranay/Documents/covalent/elixir-projects/rudder/priv/schemas/block-ethereum.avsc +iex(3)> {:ok, decoded_specimen} = Refiner.Avro.BlockSpecimen.decode(specimen) +[debug] reading schema `block-ethereum` from the file /Users/pranay/Documents/covalent/elixir-projects/refiner/priv/schemas/block-ethereum.avsc {:ok, %{ "codecVersion" => 0.32, @@ -216,18 +216,18 @@ iex(3)> {:ok, decoded_specimen} = Rudder.Avro.BlockSpecimen.decode(specimen) "gasUsed" => 17841359, ``` -## Proof Chain Interactor +## Proof Chain Interactor Many operators submit block results and their respective proofs for consensus to the proof-chain smart contract mentioned earlier. 
This function can be called below as follows. ```elixir -Rudder.ProofChain.Interactor.submit_block_result_proof(chain_id, block_height, block_specimen_hash, block_result_hash, url) +Refiner.ProofChain.Interactor.submit_block_result_proof(chain_id, block_height, block_specimen_hash, block_result_hash, url) ``` That will lead to the corresponding logs: ```elixir -rudder | 08:53:11.689 [info] 16792500:cd218d31ed5b606dae5076d01b649d849746a84735cf0f8481ad34553ee2b4b4 proof submitting +refiner | 08:53:11.689 [info] 16792500:cd218d31ed5b606dae5076d01b649d849746a84735cf0f8481ad34553ee2b4b4 proof submitting hardhat-node | eth_getBlockByNumber hardhat-node | eth_estimateGas hardhat-node | eth_getTransactionCount @@ -241,83 +241,83 @@ hardhat-node | Value: 0 ETH hardhat-node | Gas used: 349532 of 354993 hardhat-node | Block #13182330: 0x663cf0a29213261d40c5665a48ae0c670d9d6362102c094a0298f5eddcf91ce5 hardhat-node | -rudder | 08:53:11.879 [info] Counter for brp_metrics - [proof: 3] -rudder | 08:53:11.879 [info] LastValue for brp_metrics - [proof_last_exec_time: 1.9099999999999998e-4] -rudder | 08:53:11.880 [info] Sum for brp_metrics - [proof_total_exec_time: 5.579999999999999e-4] -rudder | 08:53:11.880 [info] Summary for brp_metrics - ***1.7999999999999998e-4, 1.9099999999999998e-4*** -rudder | 08:53:11.880 [info] 16792500 txid is 0x5ccb44793604049c91f592cf9854f78004ac0e63c3f6e2acb8c55fa64856894f +refiner | 08:53:11.879 [info] Counter for brp_metrics - [proof: 3] +refiner | 08:53:11.879 [info] LastValue for brp_metrics - [proof_last_exec_time: 1.9099999999999998e-4] +refiner | 08:53:11.880 [info] Sum for brp_metrics - [proof_total_exec_time: 5.579999999999999e-4] +refiner | 08:53:11.880 [info] Summary for brp_metrics - ***1.7999999999999998e-4, 1.9099999999999998e-4*** +refiner | 08:53:11.880 [info] 16792500 txid is 0x5ccb44793604049c91f592cf9854f78004ac0e63c3f6e2acb8c55fa64856894f ``` -## Pipeline Journal +## Pipeline Journal The Etfs library is used to implement a 
Write-Ahead Logging (WAL) journal for queued work items (block specimens) as they proceed through the transformer pipeline. This is useful for debugging purposes. To find all aborted work items. ```bash -iex(2)> Rudder.Journal.items_with_status(:abort) +iex(2)> Refiner.Journal.items_with_status(:abort) ["1_16582405_7f85dc42062468a6bbe420ae4fe4455b9c2423b798a6031f8ea7826997046907_402705672e34a250dcc798bb9ae3a14593e7cdc49750d57ef6018100503f3024", "1_16582440_a73dbfde74b0d9b6cf070e4fedb6e625868f00ab58ac1166e912fe1d84b8b19c_c0f8c62fb4447c6957d4fafe5a8471d84a6ed65d18f54ec1a3f42d9c7e0674d2", "1_16582475_bd1ad41c4d8121825822c9c1741fd8d4edba23ff00c82c775306cbdf57811160_3164f2b7839582c8ff9fc0b117d5fb8e452181fb2b803b9eb021776f19b18408", "1_16582510_35f84d62560d271a7fa03cbbb1378e078f2fd4ec78cb1712e2cf060e53b00219_f73a943ec4b6707fb9299908d4a6ddad31311f534c58023380d164299b873755"] ``` -Rudder keeps track of all the queue items that are in the following states of processing - `:discover`, `:commit`, `:abort`, `:skip` and tries to reprocess them from when it left off. +Refiner keeps track of all the queue items that are in the following states of processing - `:discover`, `:commit`, `:abort`, `:skip` and tries to reprocess them from when it left off. -## Pipeline Telemetry +## Pipeline Telemetry -Rudder records metrics like `counter`, `lastvalue`, `sum`, and `summary` during a pipeline process within each module and stores them in an ETF table by modules. This can then be used to understand how the processes have been progressing and where the performance bottlenecks may lay. +Refiner records metrics like `counter`, `lastvalue`, `sum`, and `summary` during a pipeline process within each module and stores them in an ETF table by modules. This can then be used to understand how the processes have been progressing and where the performance bottlenecks may lie.
-The logs that pertain to the telemetry and performance of the rudder can be seen as follows from fetching start (specimen event) to finish (pipeline success). +The logs that pertain to the telemetry and performance of the refiner can be seen as follows from fetching start (specimen event) to finish (pipeline success). ```elixir -rudder | [info] starting event listener -rudder | [info] listening for events at 4168403 -rudder | [info] found 0 bsps to process -rudder | [info] curr_block: 4168408 and latest_block_num:4168408 -rudder | [info] listening for events at 4168408 -rudder | [info] found 1 bsps to process +refiner | [info] starting event listener +refiner | [info] listening for events at 4168403 +refiner | [info] found 0 bsps to process +refiner | [info] curr_block: 4168408 and latest_block_num:4168408 +refiner | [info] listening for events at 4168408 +refiner | [info] found 1 bsps to process ipfs-pinner | 2023/04/17 22:12:49 unixfsApi.Get: getting the cid: bafybeigx7gwkso5iwikf3f2tv2jfgri5naipxavjntejrc24bfusxn6xju ipfs-pinner | 2023/04/17 22:12:49 trying out https://w3s.link/ipfs/bafybeigx7gwkso5iwikf3f2tv2jfgri5naipxavjntejrc24bfusxn6xju -rudder | [info] Counter for ipfs_metrics - [fetch: 1] -rudder | [info] LastValue for ipfs_metrics - [fetch_last_exec_time: 0.001508] -rudder | [info] Sum for ipfs_metrics - [fetch_total_exec_time: 0.001508] -rudder | [info] Summary for ipfs_metrics - {0.001508, 0.001508} -rudder | [debug] reading schema `block-ethereum` from the file /app/priv/schemas/block-ethereum.avsc -rudder | [info] Counter for bsp_metrics - [decode: 1] -rudder | [info] LastValue for bsp_metrics - [decode_last_exec_time: 0.0] -rudder | [info] Sum for bsp_metrics - [decode_total_exec_time: 0.0] -rudder | [info] Summary for bsp_metrics - {0.0, 0.0} -rudder | [info] submitting 17069220 to evm http server... 
+refiner | [info] Counter for ipfs_metrics - [fetch: 1] +refiner | [info] LastValue for ipfs_metrics - [fetch_last_exec_time: 0.001508] +refiner | [info] Sum for ipfs_metrics - [fetch_total_exec_time: 0.001508] +refiner | [info] Summary for ipfs_metrics - {0.001508, 0.001508} +refiner | [debug] reading schema `block-ethereum` from the file /app/priv/schemas/block-ethereum.avsc +refiner | [info] Counter for bsp_metrics - [decode: 1] +refiner | [info] LastValue for bsp_metrics - [decode_last_exec_time: 0.0] +refiner | [info] Sum for bsp_metrics - [decode_total_exec_time: 0.0] +refiner | [info] Summary for bsp_metrics - {0.0, 0.0} +refiner | [info] submitting 17069220 to evm http server... evm-server | [INFO] [04-17|22:12:51.380] input file at loc=/tmp/30064047 evm-server | [INFO] [04-17|22:12:51.389] output file at: loc=/tmp/3478340040 evm-server | [INFO] [04-17|22:12:51.659] Wrote file file=/tmp/3478340040 -rudder | [info] writing block result into "/tmp/briefly-1681/briefly-576460687785480033-OJ3w3e15QdqaGb0t5Z" -rudder | [info] Counter for bsp_metrics - [execute: 1] -rudder | [info] LastValue for bsp_metrics - [execute_last_exec_time: 3.39e-4] -rudder | [info] Sum for bsp_metrics - [execute_total_exec_time: 3.39e-4] -rudder | [info] Summary for bsp_metrics - {3.39e-4, 3.39e-4} +refiner | [info] writing block result into "/tmp/briefly-1681/briefly-576460687785480033-OJ3w3e15QdqaGb0t5Z" +refiner | [info] Counter for bsp_metrics - [execute: 1] +refiner | [info] LastValue for bsp_metrics - [execute_last_exec_time: 3.39e-4] +refiner | [info] Sum for bsp_metrics - [execute_total_exec_time: 3.39e-4] +refiner | [info] Summary for bsp_metrics - {3.39e-4, 3.39e-4} ipfs-pinner | 2023/04/17 22:12:51 generated dag has root cid: bafybeihcj2gkfx4zeilbssby4brs22nnncnkc3wy4vopw3vo7qe5re6tqm ipfs-pinner | 2023/04/17 22:12:51 car file location: /tmp/3248414975.car ipfs-pinner | 2023/04/17 22:12:52 uploaded file has root cid: 
bafybeihcj2gkfx4zeilbssby4brs22nnncnkc3wy4vopw3vo7qe5re6tqm -rudder | [info] Counter for ipfs_metrics - [pin: 1] -rudder | [info] LastValue for ipfs_metrics - [pin_last_exec_time: 0.001248] -rudder | [info] Sum for ipfs_metrics - [pin_total_exec_time: 0.001248] -rudder | [info] Summary for ipfs_metrics - {0.001248, 0.001248} -rudder | [info] 17069220:bc86fcbda627565085932b83c91fa3a9638fe660917c6f96742676dd9b967835 has been successfully uploaded at ipfs://bafybe> -rudder | [info] 17069220:bc86fcbda627565085932b83c91fa3a9638fe660917c6f96742676dd9b967835 proof submitting -rudder | [info] Counter for brp_metrics - [proof: 1] -rudder | [info] LastValue for brp_metrics - [proof_last_exec_time: 3.1e-4] -rudder | [info] Sum for brp_metrics - [proof_total_exec_time: 3.1e-4] -rudder | [info] Summary for brp_metrics - {3.1e-4, 3.1e-4} -rudder | [info] 17069220 txid is 0x0ec13417b62262cc0fff47653d678af8aba082acfc14de364486103180677f3c -rudder | [info] Counter for brp_metrics - [upload_success: 1] -rudder | [info] LastValue for brp_metrics - [upload_success_last_exec_time: 0.001581] -rudder | [info] Sum for brp_metrics - [upload_success_total_exec_time: 0.001581] -rudder | [info] Summary for brp_metrics - {0.001581, 0.001581} -rudder | [info] Counter for rudder_metrics - [pipeline_success: 1] -rudder | [info] LastValue for rudder_metrics - [pipeline_success_last_exec_time: 0.0036019999999999997] -rudder | [info] Sum for rudder_metrics - [pipeline_success_total_exec_time: 0.0036019999999999997] -rudder | [info] Summary for rudder_metrics - {0.0036019999999999997, 0.0036019999999999997} -rudder | [info] curr_block: 4168409 and latest_block_num:4168408 +refiner | [info] Counter for ipfs_metrics - [pin: 1] +refiner | [info] LastValue for ipfs_metrics - [pin_last_exec_time: 0.001248] +refiner | [info] Sum for ipfs_metrics - [pin_total_exec_time: 0.001248] +refiner | [info] Summary for ipfs_metrics - {0.001248, 0.001248} +refiner | [info] 
17069220:bc86fcbda627565085932b83c91fa3a9638fe660917c6f96742676dd9b967835 has been successfully uploaded at ipfs://bafybe> +refiner | [info] 17069220:bc86fcbda627565085932b83c91fa3a9638fe660917c6f96742676dd9b967835 proof submitting +refiner | [info] Counter for brp_metrics - [proof: 1] +refiner | [info] LastValue for brp_metrics - [proof_last_exec_time: 3.1e-4] +refiner | [info] Sum for brp_metrics - [proof_total_exec_time: 3.1e-4] +refiner | [info] Summary for brp_metrics - {3.1e-4, 3.1e-4} +refiner | [info] 17069220 txid is 0x0ec13417b62262cc0fff47653d678af8aba082acfc14de364486103180677f3c +refiner | [info] Counter for brp_metrics - [upload_success: 1] +refiner | [info] LastValue for brp_metrics - [upload_success_last_exec_time: 0.001581] +refiner | [info] Sum for brp_metrics - [upload_success_total_exec_time: 0.001581] +refiner | [info] Summary for brp_metrics - {0.001581, 0.001581} +refiner | [info] Counter for refiner_metrics - [pipeline_success: 1] +refiner | [info] LastValue for refiner_metrics - [pipeline_success_last_exec_time: 0.0036019999999999997] +refiner | [info] Sum for refiner_metrics - [pipeline_success_total_exec_time: 0.0036019999999999997] +refiner | [info] Summary for refiner_metrics - {0.0036019999999999997, 0.0036019999999999997} +refiner | [info] curr_block: 4168409 and latest_block_num:4168408 ``` diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 59c8cadb..09da8b02 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -15,14 +15,14 @@ ## Reporting Bugs Please file bugs in the [GitHub Issue -Tracker](https://github.com/covalenthq/rudder). Include at +Tracker](https://github.com/covalenthq/refiner). Include at least the following: * What happened. * What did you expect to happen instead of what *did* happen, if it's not crazy obvious. * What operating system, operating system version and version of - `rudder` you are running. + `refiner` you are running. * Console log entries, where possible and relevant. 
If you're not sure whether something is relevant, erring on the side of too much information will never be a cause for concern. @@ -31,8 +31,8 @@ If you're not sure whether something is relevant, erring on the side of too much Contributing to this repo can mean many things such as participating in discussion or proposing code changes. To ensure a smooth workflow for all contributors, the following general procedure for contributing has been established: -1. Either [open](https://github.com/covalenthq/rudder/issues/new/choose) - or [find](https://github.com/covalenthq/rudder/issues) an issue you have identified and would like to contribute to +1. Either [open](https://github.com/covalenthq/refiner/issues/new/choose) + or [find](https://github.com/covalenthq/refiner/issues) an issue you have identified and would like to contribute to resolving. 2. Participate in thoughtful discussion on that issue. @@ -45,7 +45,7 @@ Contributing to this repo can mean many things such as participating in discussi if you are eager and do not get a prompt response, feel free to dive on in! 4. Follow standard Github best practices: 1. Fork the repo - 2. Branch from the HEAD of `develop`(For core developers working within the `rudder` repo, to ensure a clear ownership of branches, branches must be named with the convention `{moniker}/{issue#}-branch-name`). + 2. Branch from the HEAD of `develop`(For core developers working within the `refiner` repo, to ensure a clear ownership of branches, branches must be named with the convention `{moniker}/{issue#}-branch-name`). 3. Make commits 4. Submit a PR to `develop` 5. Be sure to submit the PR in `Draft` mode. Submit your PR early, even if it's incomplete as this indicates to the community you're working on something and allows them to provide comments early in the development process. @@ -53,12 +53,12 @@ Contributing to this repo can mean many things such as participating in discussi 7. 
Be sure to include a relevant change log entry in the `Unreleased` section of `CHANGELOG.md` (see file for log format). 8. Please make sure to run `mix format mix.exs "lib/**/*.{ex,exs}" "test/**/*.{ex,exs}" "config/*.{ex,exs}"` before every commit - the easiest way to do this is having your editor run it for you upon saving a file. Additionally, please ensure that your code is lint compliant by running `mix deps.get, mix deps.compile` . - There are CI tests built into the `rudder` repository and all PR’s will require that these tests pass before they are able to be merged. + There are CI tests built into the `refiner` repository and all PRs will require that these tests pass before they are able to be merged. **Note**: for very small or blatantly obvious problems (such as typos), it is not required to open an issue to submit a PR, but be aware that for more complex problems/features, if a PR is opened before an adequate design discussion has taken place in a github issue, that PR runs a high likelihood of being rejected. Looking for a good place to start contributing? How about checking out -some [good first issues](https://github.com/covalenthq/rudder/issues). +some [good first issues](https://github.com/covalenthq/refiner/issues). ### Development Procedure @@ -66,7 +66,7 @@ some [good first issues](https://github.com/covalenthq/rudder/issues). 2. `main` must never fail `mix format --check-formatted, mix credo` 3. No `--force` onto `main` (except when reverting a broken commit, which should seldom happen). -4. Create your feature branch from `main` either on `github.com/covalenthq/rudder`, or your fork ( +4. Create your feature branch from `main` either on `github.com/covalenthq/refiner`, or your fork ( using `git remote add origin`). 5. Before submitting a pull request, begin `git rebase` on top of `main`. 6. Code must adhere to the official elixir [formatting](https://hexdocs.pm/mix/main/Mix.Tasks.Format.html) guidelines.
@@ -79,11 +79,11 @@ some [good first issues](https://github.com/covalenthq/rudder/issues). We use [Mix](https://elixir-lang.org/getting-started/mix-otp/introduction-to-mix.html) to manage dependency versions. -The main branch of every `rudder` repository should just build with `mix deps.get, mix deps.compile`, which means they should be kept up-to-date with their dependencies, so we can get away with telling people they can just `mix deps.get` our software. Since some dependencies are not under our control, a third party may break our build, in which case we can fall back on `rm -rf _build deps && mix clean && mix deps.get && mix deps.compile`. +The main branch of every `refiner` repository should just build with `mix deps.get, mix deps.compile`, which means they should be kept up-to-date with their dependencies, so we can get away with telling people they can just `mix deps.get` our software. Since some dependencies are not under our control, a third party may break our build, in which case we can fall back on `rm -rf _build deps && mix clean && mix deps.get && mix deps.compile`. ### Testing -Covalent uses [GitHub Actions](https://github.com/features/actions) for automated [integration testing](https://github.com/covalenthq/rudder/actions). +Covalent uses [GitHub Actions](https://github.com/features/actions) for automated [integration testing](https://github.com/covalenthq/refiner/actions). ### Linting @@ -97,7 +97,7 @@ The repo uses `credo` to run linters and enforce coding standards. There are two User-facing repos should adhere to the [trunk based development branching model](https://trunkbaseddevelopment.com/). Libraries need not follow the model strictly, but would be wise to. -`rudder` utilizes [semantic versioning](https://semver.org/). +`refiner` utilizes [semantic versioning](https://semver.org/). 
### PR Targeting diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md index 331b355d..3e2012f5 100644 --- a/docs/DEVELOPMENT.md +++ b/docs/DEVELOPMENT.md @@ -14,9 +14,9 @@ Wait a minute then in Terminal 2 run: mix test ``` -### Pull +### Pull -Pull only the latest containerized version of rudder using the following - +Pull only the latest containerized version of refiner using the following - Make sure you're logged into gcr by running @@ -27,10 +27,10 @@ gcloud auth print-access-token | docker login -u oauth2accesstoken --password-st Pull image ```docker -docker pull us-docker.pkg.dev/covalent-project/network/rudder +docker pull us-docker.pkg.dev/covalent-project/network/refiner ``` -### Environment +### Environment Add the env vars to a .env file as below. Ask your node operator about these if you have questions. Check the `.env_example` for the list of required (and optional) environment variables. diff --git a/docs/METRICS.md b/docs/METRICS.md index 817887e8..b81d63b8 100644 --- a/docs/METRICS.md +++ b/docs/METRICS.md @@ -1,6 +1,6 @@ # Metrics Collection and Reporting -`rudder` is proactively enabled with metrics collection via prometheus. +`refiner` is proactively enabled with metrics collection via prometheus. ## Config @@ -10,7 +10,7 @@ Install Prometheus or * Edit `/usr/local/etc/prometheus.yml` for linux/x86. -Add the config for prometheus to pick up exported [rudder telemetry metrics](../lib/rudder/metrics/prometheus.yml). +Add the config for prometheus to pick up exported [refiner telemetry metrics](../lib/refiner/metrics/prometheus.yml). Restart your prometheus server @@ -22,45 +22,45 @@ Monitoring can be setup (for example) by plugging the endpoint serving in promet ## Metrics -The following metrics captured from rudder are exported with `/metrics` endpoint via prometheus. +The following metrics captured from refiner are exported with `/metrics` endpoint via prometheus. 
```elixir -# TYPE rudder_events_rudder_pipeline_success_duration gauge -rudder_events_rudder_pipeline_success_duration{operation="pipeline_success",table="rudder_metrics"} 0.004265 -# TYPE rudder_events_rudder_pipeline_success_count counter -rudder_events_rudder_pipeline_success_count{operation="pipeline_success",table="rudder_metrics"} 4 -# TYPE rudder_events_journal_fetch_items_duration gauge -rudder_events_journal_fetch_items_duration{operation="fetch_items",table="journal_metrics"} 1.2e-5 -# TYPE rudder_events_journal_fetch_items_count counter -rudder_events_journal_fetch_items_count{operation="fetch_items",table="journal_metrics"} 1 -# TYPE rudder_events_journal_fetch_last_duration gauge -rudder_events_journal_fetch_last_duration{operation="fetch_last",table="journal_metrics"} 3.6e-5 -# TYPE rudder_events_journal_fetch_last_count counter -rudder_events_journal_fetch_last_count{operation="fetch_last",table="journal_metrics"} 1 -# TYPE rudder_events_brp_proof_duration gauge -rudder_events_brp_proof_duration{operation="proof",table="brp_metrics"} 6.259999999999999e-4 -# TYPE rudder_events_brp_proof_count counter -rudder_events_brp_proof_count{operation="proof",table="brp_metrics"} 4 -# TYPE rudder_events_brp_upload_success_duration gauge -rudder_events_brp_upload_success_duration{operation="upload_success",table="brp_metrics"} 0.0023769999999999998 -# TYPE rudder_events_brp_upload_success_count counter -rudder_events_brp_upload_success_count{operation="upload_success",table="brp_metrics"} 4 -# TYPE rudder_events_bsp_execute_duration gauge -rudder_events_bsp_execute_duration{operation="execute",table="bsp_metrics"} 2.1799999999999999e-4 -# TYPE rudder_events_bsp_execute_count counter -rudder_events_bsp_execute_count{operation="execute",table="bsp_metrics"} 4 -# TYPE rudder_events_bsp_decode_duration gauge -rudder_events_bsp_decode_duration{operation="decode",table="bsp_metrics"} 0.0 -# TYPE rudder_events_bsp_decode_count counter 
-rudder_events_bsp_decode_count{operation="decode",table="bsp_metrics"} 4 -# TYPE rudder_events_ipfs_fetch_duration gauge -rudder_events_ipfs_fetch_duration{operation="fetch",table="ipfs_metrics"} 0.001588 -# TYPE rudder_events_ipfs_fetch_count counter -rudder_events_ipfs_fetch_count{operation="fetch",table="ipfs_metrics"} 4 -# TYPE rudder_events_ipfs_pin_duration gauge -rudder_events_ipfs_pin_duration{operation="pin",table="ipfs_metrics"} 0.00174 -# TYPE rudder_events_ipfs_pin_count counter -rudder_events_ipfs_pin_count{operation="pin",table="ipfs_metrics"} 4 +# TYPE refiner_events_refiner_pipeline_success_duration gauge +refiner_events_refiner_pipeline_success_duration{operation="pipeline_success",table="refiner_metrics"} 0.004265 +# TYPE refiner_events_refiner_pipeline_success_count counter +refiner_events_refiner_pipeline_success_count{operation="pipeline_success",table="refiner_metrics"} 4 +# TYPE refiner_events_journal_fetch_items_duration gauge +refiner_events_journal_fetch_items_duration{operation="fetch_items",table="journal_metrics"} 1.2e-5 +# TYPE refiner_events_journal_fetch_items_count counter +refiner_events_journal_fetch_items_count{operation="fetch_items",table="journal_metrics"} 1 +# TYPE refiner_events_journal_fetch_last_duration gauge +refiner_events_journal_fetch_last_duration{operation="fetch_last",table="journal_metrics"} 3.6e-5 +# TYPE refiner_events_journal_fetch_last_count counter +refiner_events_journal_fetch_last_count{operation="fetch_last",table="journal_metrics"} 1 +# TYPE refiner_events_brp_proof_duration gauge +refiner_events_brp_proof_duration{operation="proof",table="brp_metrics"} 6.259999999999999e-4 +# TYPE refiner_events_brp_proof_count counter +refiner_events_brp_proof_count{operation="proof",table="brp_metrics"} 4 +# TYPE refiner_events_brp_upload_success_duration gauge +refiner_events_brp_upload_success_duration{operation="upload_success",table="brp_metrics"} 0.0023769999999999998 +# TYPE 
refiner_events_brp_upload_success_count counter +refiner_events_brp_upload_success_count{operation="upload_success",table="brp_metrics"} 4 +# TYPE refiner_events_bsp_execute_duration gauge +refiner_events_bsp_execute_duration{operation="execute",table="bsp_metrics"} 2.1799999999999999e-4 +# TYPE refiner_events_bsp_execute_count counter +refiner_events_bsp_execute_count{operation="execute",table="bsp_metrics"} 4 +# TYPE refiner_events_bsp_decode_duration gauge +refiner_events_bsp_decode_duration{operation="decode",table="bsp_metrics"} 0.0 +# TYPE refiner_events_bsp_decode_count counter +refiner_events_bsp_decode_count{operation="decode",table="bsp_metrics"} 4 +# TYPE refiner_events_ipfs_fetch_duration gauge +refiner_events_ipfs_fetch_duration{operation="fetch",table="ipfs_metrics"} 0.001588 +# TYPE refiner_events_ipfs_fetch_count counter +refiner_events_ipfs_fetch_count{operation="fetch",table="ipfs_metrics"} 4 +# TYPE refiner_events_ipfs_pin_duration gauge +refiner_events_ipfs_pin_duration{operation="pin",table="ipfs_metrics"} 0.00174 +# TYPE refiner_events_ipfs_pin_count counter +refiner_events_ipfs_pin_count{operation="pin",table="ipfs_metrics"} 4 ``` ## API @@ -75,13 +75,13 @@ Docker containers automatically export to this endpoint as well via exposed port ## Graph -Observe live the gauge time series graphs with plots for example with metrics for `pipeline_success` and `ipfs_fetch` -> +Observe live the gauge time series graphs with plots for example with metrics for `pipeline_success` and `ipfs_fetch` -> ![Observe](./prometheus.png) ## Monitor & Alert -For monitoring and alerting we advice using [Grafana (in conjunction with the aggregated prometheus metrics)](https://grafana.com/docs/grafana/latest/getting-started/get-started-grafana-prometheus/). 
Import the prebuilt dashboard for Rudder into Grafana [here](./grafana_rudder_dashboard.json) +For monitoring and alerting we advise using [Grafana (in conjunction with the aggregated prometheus metrics)](https://grafana.com/docs/grafana/latest/getting-started/get-started-grafana-prometheus/). Import the prebuilt dashboard for Refiner into Grafana [here](./grafana_refiner_dashboard.json) ![Dashboard](./dashboard.png) @@ -106,8 +106,8 @@ Login to your Grafana dashboard -> http://localhost:3000/. Make sure prometheus is added as a data source -> http://localhost:3000/datasources with the default values for prometheus. Click on [Explore](http://localhost:3000/explore?left=%7B%22datasource%22:%22lVZwdz8Vz%22,%22queries%22:%5B%7B%22refId%22:%22A%22,%22datasource%22:%7B%22type%22:%22prometheus%22,%22uid%22:%22lVZwdz8Vz%22%7D%7D%5D,%22range%22:%7B%22from%22:%22now-1h%22,%22to%22:%22now%22%7D%7D&orgId=1). Select the metrics and time-series data to view from the dropdown with "Select Metric". -Below is an example of three selections `rudder_events_brp_upload_success_duration`, `rudder_events_rudder_pipeline_success_duration`, `rudder_events_ipfs_fetch_duration`. +Below is an example of three selections `refiner_events_brp_upload_success_duration`, `refiner_events_refiner_pipeline_success_duration`, `refiner_events_ipfs_fetch_duration`.
-This can directly be viewed [here](http://localhost:3000/explore?left=%7B%22datasource%22:%22lVZwdz8Vz%22,%22queries%22:%5B%7B%22refId%22:%22A%22,%22datasource%22:%7B%22type%22:%22prometheus%22,%22uid%22:%22lVZwdz8Vz%22%7D,%22editorMode%22:%22builder%22,%22expr%22:%22rudder_events_brp_upload_success_duration%22,%22legendFormat%22:%22__auto%22,%22range%22:true,%22instant%22:true%7D,%7B%22refId%22:%22B%22,%22datasource%22:%7B%22type%22:%22prometheus%22,%22uid%22:%22lVZwdz8Vz%22%7D,%22editorMode%22:%22builder%22,%22expr%22:%22rudder_events_rudder_pipeline_success_duration%22,%22legendFormat%22:%22__auto%22,%22range%22:true,%22instant%22:true%7D,%7B%22refId%22:%22C%22,%22datasource%22:%7B%22type%22:%22prometheus%22,%22uid%22:%22lVZwdz8Vz%22%7D,%22editorMode%22:%22builder%22,%22expr%22:%22rudder_events_ipfs_fetch_duration%22,%22legendFormat%22:%22__auto%22,%22range%22:true,%22instant%22:true%7D%5D,%22range%22:%7B%22from%22:%22now-15m%22,%22to%22:%22now%22%7D%7D&orgId=1). You can also add operations on the exported data with aggregations like `sum` and range functions like `delta` etc as seen below. 
+This can directly be viewed [here](http://localhost:3000/explore?left=%7B%22datasource%22:%22lVZwdz8Vz%22,%22queries%22:%5B%7B%22refId%22:%22A%22,%22datasource%22:%7B%22type%22:%22prometheus%22,%22uid%22:%22lVZwdz8Vz%22%7D,%22editorMode%22:%22builder%22,%22expr%22:%22refiner_events_brp_upload_success_duration%22,%22legendFormat%22:%22__auto%22,%22range%22:true,%22instant%22:true%7D,%7B%22refId%22:%22B%22,%22datasource%22:%7B%22type%22:%22prometheus%22,%22uid%22:%22lVZwdz8Vz%22%7D,%22editorMode%22:%22builder%22,%22expr%22:%22refiner_events_refiner_pipeline_success_duration%22,%22legendFormat%22:%22__auto%22,%22range%22:true,%22instant%22:true%7D,%7B%22refId%22:%22C%22,%22datasource%22:%7B%22type%22:%22prometheus%22,%22uid%22:%22lVZwdz8Vz%22%7D,%22editorMode%22:%22builder%22,%22expr%22:%22refiner_events_ipfs_fetch_duration%22,%22legendFormat%22:%22__auto%22,%22range%22:true,%22instant%22:true%7D%5D,%22range%22:%7B%22from%22:%22now-15m%22,%22to%22:%22now%22%7D%7D&orgId=1). You can also add operations on the exported data with aggregations like `sum` and range functions like `delta` etc as seen below. ![grafana](./grafana.png) diff --git a/docs/README.md b/docs/README.md index 10c0d431..b787ba5e 100644 --- a/docs/README.md +++ b/docs/README.md @@ -13,8 +13,8 @@ 1. Phase 2 Refiner![Phase 2 Refiner](./phase-2.png) 1. Covalent Network Layers![Covalent Network Layers](./network-layers.png) 1. Full Architecture of Covalent Network![Full Architecture of Covalent Network](./arch-white.png) -1. Rudder/Refiner Components![Rudder/Refiner Components](./components.png) -1. Rudder/Refiner Pipeline![Rudder/Refiner Pipeline](./pipeline-white.png) -1. Rudder Dynamic Supervisor![Rudder Dynamic Supervisor](./supervisor.png) +1. Refiner Components![Refiner Components](./components.png) +1. Refiner Pipeline![Refiner Pipeline](./pipeline-white.png) +1. Refiner Dynamic Supervisor![Refiner Dynamic Supervisor](./supervisor.png) 1. 
Prometheus Dashboard![Prometheus Dashboard](./prometheus.png) 1. Grafana Dashboard![Grafana Dashboard](./dashboard.png) diff --git a/docs/grafana_rudder_dashboard.json b/docs/grafana_rudder_dashboard.json index 91e2c118..64a97a7d 100644 --- a/docs/grafana_rudder_dashboard.json +++ b/docs/grafana_rudder_dashboard.json @@ -113,7 +113,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_brp_upload_success_count", + "expr": "refiner_events_brp_upload_success_count", "legendFormat": "__auto", "range": true, "refId": "A" @@ -175,7 +175,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_rudder_pipeline_success_count", + "expr": "refiner_events_refiner_pipeline_success_count", "legendFormat": "__auto", "range": true, "refId": "A" @@ -237,7 +237,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_rudder_pipeline_failure_count", + "expr": "refiner_events_refiner_pipeline_failure_count", "legendFormat": "__auto", "range": true, "refId": "A" @@ -333,7 +333,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_rudder_pipeline_success_count", + "expr": "refiner_events_refiner_pipeline_success_count", "legendFormat": "__auto", "range": true, "refId": "A" @@ -344,7 +344,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_ipfs_fetch_count", + "expr": "refiner_events_ipfs_fetch_count", "hide": false, "legendFormat": "__auto", "range": true, @@ -356,14 +356,14 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_brp_upload_success_count", + "expr": "refiner_events_brp_upload_success_count", "hide": false, "legendFormat": "__auto", "range": true, "refId": "C" } ], - "title": "Rudder Events Success Count", + "title": "Refiner Events Success Count", "type": "timeseries" }, { @@ -456,7 +456,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": 
"rudder_events_rudder_pipeline_success_duration", + "expr": "refiner_events_refiner_pipeline_success_duration", "legendFormat": "__auto", "range": true, "refId": "A" @@ -467,7 +467,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_brp_proof_duration", + "expr": "refiner_events_brp_proof_duration", "hide": false, "legendFormat": "__auto", "range": true, @@ -479,14 +479,14 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "builder", - "expr": "rudder_events_ipfs_fetch_duration", + "expr": "refiner_events_ipfs_fetch_duration", "hide": false, "legendFormat": "__auto", "range": true, "refId": "C" } ], - "title": "Rudder Pipeline Event Durations", + "title": "Refiner Pipeline Event Durations", "type": "timeseries" } ], @@ -504,7 +504,7 @@ }, "timepicker": {}, "timezone": "", - "title": "Rudder Dashboard", + "title": "Refiner Dashboard", "uid": "poakTV8Vz", "version": 7, "weekStart": "" diff --git a/docs/rudder-compose.service b/docs/rudder-compose.service index 393376ff..1707ca97 100644 --- a/docs/rudder-compose.service +++ b/docs/rudder-compose.service @@ -1,5 +1,5 @@ [Unit] -Description=rudder docker compose +Description=refiner docker compose PartOf=docker.service After=docker.service diff --git a/lib/refiner/application.ex b/lib/refiner/application.ex new file mode 100644 index 00000000..79ee51aa --- /dev/null +++ b/lib/refiner/application.ex @@ -0,0 +1,39 @@ +defmodule Refiner.Application do + # See https://hexdocs.pm/elixir/Application.html + # for more information on OTP Applications + @moduledoc false + + use Application + + @impl true + @spec start(any, any) :: {:error, any} | {:ok, pid} + def start(_type, _args) do + children = [ + {Finch, + name: Refiner.Finch, + pools: %{ + :default => [size: 32] + }}, + {Refiner.IPFSInteractor, name: Refiner.IPFSInteractor}, + {Refiner.Journal, [Application.get_env(:refiner, :journal_path), name: Refiner.Journal]}, + Refiner.Avro.Client, + {Refiner.Avro.BlockSpecimen, name: 
Refiner.Avro.BlockSpecimen}, + {Refiner.BlockResultUploader, name: Refiner.BlockResultUploader}, + {Refiner.BlockProcessor, + [Application.get_env(:refiner, :evm_server_url), name: Refiner.BlockProcessor]}, + {Refiner.Pipeline.Spawner, name: Refiner.Pipeline.Spawner}, + {Refiner.Telemetry, name: Refiner.Telemetry} + ] + + # See https://hexdocs.pm/elixir/Supervisor.html + # for other strategies and supported options + options = [ + strategy: :one_for_one, + name: Refiner.Supervisor, + max_restarts: 3, + max_seconds: 1200 + ] + + Supervisor.start_link(children, options) + end +end diff --git a/lib/rudder/avro/avrora_client.ex b/lib/refiner/avro/avrora_client.ex similarity index 84% rename from lib/rudder/avro/avrora_client.ex rename to lib/refiner/avro/avrora_client.ex index 1e8b715e..f062caec 100644 --- a/lib/rudder/avro/avrora_client.ex +++ b/lib/refiner/avro/avrora_client.ex @@ -1,6 +1,6 @@ -defmodule Rudder.Avro.Client do +defmodule Refiner.Avro.Client do use Avrora.Client, - otp_app: :rudder, + otp_app: :refiner, registry_url: "http://localhost:8081", registry_auth: {:basic, ["username", "password"]}, schemas_path: "priv/schemas/", diff --git a/lib/rudder/avro/block_specimen_decoder_encoder.ex b/lib/refiner/avro/block_specimen_decoder_encoder.ex similarity index 93% rename from lib/rudder/avro/block_specimen_decoder_encoder.ex rename to lib/refiner/avro/block_specimen_decoder_encoder.ex index e55d93b9..c7d70045 100644 --- a/lib/rudder/avro/block_specimen_decoder_encoder.ex +++ b/lib/refiner/avro/block_specimen_decoder_encoder.ex @@ -1,4 +1,4 @@ -defmodule Rudder.Avro.BlockSpecimen do +defmodule Refiner.Avro.BlockSpecimen do use GenServer, restart: :temporary require Logger @@ -91,10 +91,10 @@ defmodule Rudder.Avro.BlockSpecimen do Returns `:ok, %{stream}` for lazy eval if successful, `:error` otherwise. 
""" def decode_dir(dir_path) do - Rudder.Util.get_file_paths(dir_path) + Refiner.Util.get_file_paths(dir_path) |> Enum.map(fn file -> [file] - |> Stream.map(&Rudder.Avro.BlockSpecimen.decode_file/1) + |> Stream.map(&Refiner.Avro.BlockSpecimen.decode_file/1) end) |> List.flatten() |> Enum.sort() @@ -134,10 +134,10 @@ defmodule Rudder.Avro.BlockSpecimen do ) ) :: list def encode_dir(dir_path) do - Rudder.Util.get_file_paths(dir_path) + Refiner.Util.get_file_paths(dir_path) |> Enum.map(fn file -> [file] - |> Stream.map(&Rudder.Avro.BlockSpecimen.encode_file/1) + |> Stream.map(&Refiner.Avro.BlockSpecimen.encode_file/1) end) |> List.flatten() |> Enum.sort() diff --git a/lib/rudder/block_result/block_result_metadata.ex b/lib/refiner/block_result/block_result_metadata.ex similarity index 65% rename from lib/rudder/block_result/block_result_metadata.ex rename to lib/refiner/block_result/block_result_metadata.ex index 5288edf2..de71bde8 100644 --- a/lib/rudder/block_result/block_result_metadata.ex +++ b/lib/refiner/block_result/block_result_metadata.ex @@ -1,3 +1,3 @@ -defmodule Rudder.BlockResultMetadata do +defmodule Refiner.BlockResultMetadata do defstruct [:chain_id, :block_height, :block_specimen_hash, :file_path] end diff --git a/lib/rudder/block_result/block_result_uploader.ex b/lib/refiner/block_result/block_result_uploader.ex similarity index 88% rename from lib/rudder/block_result/block_result_uploader.ex rename to lib/refiner/block_result/block_result_uploader.ex index ecae359a..38ceaae6 100644 --- a/lib/rudder/block_result/block_result_uploader.ex +++ b/lib/refiner/block_result/block_result_uploader.ex @@ -1,6 +1,6 @@ -defmodule Rudder.BlockResultUploader do +defmodule Refiner.BlockResultUploader do require Logger - alias Rudder.Events + alias Refiner.Events use GenServer @spec start_link([ @@ -23,7 +23,7 @@ defmodule Rudder.BlockResultUploader do @impl true def handle_call( {:upload_block_result, - %Rudder.BlockResultMetadata{ + %Refiner.BlockResultMetadata{ 
chain_id: chain_id, block_height: block_height, block_specimen_hash: block_specimen_hash, @@ -34,9 +34,9 @@ defmodule Rudder.BlockResultUploader do ) do start_upload_ms = System.monotonic_time(:millisecond) - case Rudder.IPFSInteractor.pin(file_path) do + case Refiner.IPFSInteractor.pin(file_path) do {:ok, cid} -> - specimen_hash_bytes32 = Rudder.Util.convert_to_bytes32(block_specimen_hash) + specimen_hash_bytes32 = Refiner.Util.convert_to_bytes32(block_specimen_hash) Logger.info( "#{block_height}:#{block_specimen_hash} has been successfully uploaded at ipfs://#{cid}" @@ -46,7 +46,7 @@ defmodule Rudder.BlockResultUploader do Logger.info("#{block_height}:#{block_specimen_hash} proof submitting") - case Rudder.ProofChain.Interactor.submit_block_result_proof( + case Refiner.ProofChain.Interactor.submit_block_result_proof( chain_id, block_height, specimen_hash_bytes32, @@ -78,7 +78,7 @@ defmodule Rudder.BlockResultUploader do @spec upload_block_result(any) :: any def upload_block_result(block_result_metadata) do GenServer.call( - Rudder.BlockResultUploader, + Refiner.BlockResultUploader, {:upload_block_result, block_result_metadata}, :infinity ) diff --git a/lib/rudder/block_result/block_specimen.ex b/lib/refiner/block_result/block_specimen.ex similarity index 60% rename from lib/rudder/block_result/block_specimen.ex rename to lib/refiner/block_result/block_specimen.ex index 60d9465b..78c83285 100644 --- a/lib/rudder/block_result/block_specimen.ex +++ b/lib/refiner/block_result/block_specimen.ex @@ -1,3 +1,3 @@ -defmodule Rudder.BlockSpecimen do +defmodule Refiner.BlockSpecimen do defstruct [:chain_id, :block_height, :contents] end diff --git a/lib/rudder/evm/EVM.md b/lib/refiner/evm/EVM.md similarity index 74% rename from lib/rudder/evm/EVM.md rename to lib/refiner/evm/EVM.md index 91934160..c45bba9a 100644 --- a/lib/rudder/evm/EVM.md +++ b/lib/refiner/evm/EVM.md @@ -9,6 +9,6 @@ e.g. 
(copied from `covalenthq/erigon`) ```bash -➜ curl -v -F filedata=@/Users/user/repos/rudder/test-data/block-specimen/15892740.specimen.json http://127.0.0.1:3002/process +➜ curl -v -F filedata=@/Users/user/repos/refiner/test-data/block-specimen/15892740.specimen.json http://127.0.0.1:3002/process ``` diff --git a/lib/rudder/evm/block_processor.ex b/lib/refiner/evm/block_processor.ex similarity index 90% rename from lib/rudder/evm/block_processor.ex rename to lib/refiner/evm/block_processor.ex index 41883658..ddff1ed3 100644 --- a/lib/rudder/evm/block_processor.ex +++ b/lib/refiner/evm/block_processor.ex @@ -1,8 +1,8 @@ -defmodule Rudder.BlockProcessor do +defmodule Refiner.BlockProcessor do use GenServer require Logger alias Multipart.Part - alias Rudder.Events + alias Refiner.Events def start_link([evm_server_url | opts]) do GenServer.start_link(__MODULE__, evm_server_url, opts) @@ -24,7 +24,7 @@ defmodule Rudder.BlockProcessor do case( Finch.build("POST", evm_server_url, headers, {:stream, body_stream}) - |> Finch.request(Rudder.Finch) + |> Finch.request(Refiner.Finch) ) do {:ok, %Finch.Response{body: body, headers: headers, status: status}} -> case body |> Poison.decode() do @@ -54,12 +54,12 @@ defmodule Rudder.BlockProcessor do end end - def sync_queue(%Rudder.BlockSpecimen{} = block_specimen) do + def sync_queue(%Refiner.BlockSpecimen{} = block_specimen) do Logger.info("submitting #{block_specimen.block_height} to evm http server...") start_execute_ms = System.monotonic_time(:millisecond) - case GenServer.call(Rudder.BlockProcessor, {:process, block_specimen.contents}, 60_000) do + case GenServer.call(Refiner.BlockProcessor, {:process, block_specimen.contents}, 60_000) do {:ok, block_result} -> block_result_path = Briefly.create!() File.write!(block_result_path, block_result) diff --git a/lib/rudder/ipfs/ipfs_interactor.ex b/lib/refiner/ipfs/ipfs_interactor.ex similarity index 85% rename from lib/rudder/ipfs/ipfs_interactor.ex rename to 
lib/refiner/ipfs/ipfs_interactor.ex index 87e353e9..9fff5d33 100644 --- a/lib/rudder/ipfs/ipfs_interactor.ex +++ b/lib/refiner/ipfs/ipfs_interactor.ex @@ -1,8 +1,8 @@ -defmodule Rudder.IPFSInteractor do +defmodule Refiner.IPFSInteractor do use GenServer alias Multipart.Part - alias Rudder.Events + alias Refiner.Events require Logger @@ -26,7 +26,7 @@ defmodule Rudder.IPFSInteractor do @impl true def handle_call({:pin, file_path}, _from, state) do start_pin_ms = System.monotonic_time(:millisecond) - ipfs_url = Application.get_env(:rudder, :ipfs_pinner_url) + ipfs_url = Application.get_env(:refiner, :ipfs_pinner_url) url = "#{ipfs_url}/upload" multipart = Multipart.new() |> Multipart.add_part(Part.file_body(file_path)) @@ -37,7 +37,7 @@ defmodule Rudder.IPFSInteractor do resp = Finch.build("POST", url, headers, {:stream, body_stream}) - |> Finch.request(Rudder.Finch) + |> Finch.request(Refiner.Finch) case resp do {:ok, %Finch.Response{body: body, headers: _, status: _}} -> @@ -63,11 +63,11 @@ defmodule Rudder.IPFSInteractor do def handle_call({:fetch, cid}, _from, state) do start_fetch_ms = System.monotonic_time(:millisecond) - ipfs_url = Application.get_env(:rudder, :ipfs_pinner_url) + ipfs_url = Application.get_env(:refiner, :ipfs_pinner_url) resp = Finch.build(:get, "#{ipfs_url}/get?cid=#{cid}") - |> Finch.request(Rudder.Finch, receive_timeout: 150_000_000, pool_timeout: 150_000_000) + |> Finch.request(Refiner.Finch, receive_timeout: 150_000_000, pool_timeout: 150_000_000) case resp do {:ok, %Finch.Response{body: body, headers: _, status: _}} -> @@ -95,18 +95,18 @@ defmodule Rudder.IPFSInteractor do @spec pin(any) :: any def pin(path) do - GenServer.call(Rudder.IPFSInteractor, {:pin, path}, :infinity) + GenServer.call(Refiner.IPFSInteractor, {:pin, path}, :infinity) end @spec fetch(any) :: any def fetch(cid) do - GenServer.call(Rudder.IPFSInteractor, {:fetch, cid}, :infinity) + GenServer.call(Refiner.IPFSInteractor, {:fetch, cid}, :infinity) end @spec 
discover_block_specimen(nonempty_maybe_improper_list) :: {:ok, any} def discover_block_specimen([url | _]) do ["ipfs", cid] = String.split(url, "://") - Rudder.IPFSInteractor.fetch(cid) + Refiner.IPFSInteractor.fetch(cid) end end diff --git a/lib/rudder/metrics/custom_reporter.ex b/lib/refiner/metrics/custom_reporter.ex similarity index 97% rename from lib/rudder/metrics/custom_reporter.ex rename to lib/refiner/metrics/custom_reporter.ex index 7ef8fc94..6438d747 100644 --- a/lib/rudder/metrics/custom_reporter.ex +++ b/lib/refiner/metrics/custom_reporter.ex @@ -1,4 +1,4 @@ -defmodule Rudder.Telemetry.CustomReporter do +defmodule Refiner.Telemetry.CustomReporter do use GenServer require Logger @@ -25,7 +25,7 @@ defmodule Rudder.Telemetry.CustomReporter do # journal events (instrumented) :ets.new(:journal_metrics, [:named_table, :public, :set, {:write_concurrency, true}]) # application wide events (instrumented) - :ets.new(:rudder_metrics, [:named_table, :public, :set, {:write_concurrency, true}]) + :ets.new(:refiner_metrics, [:named_table, :public, :set, {:write_concurrency, true}]) groups = Enum.group_by(metrics, & &1.event_name) diff --git a/lib/rudder/metrics/events.ex b/lib/refiner/metrics/events.ex similarity index 57% rename from lib/rudder/metrics/events.ex rename to lib/refiner/metrics/events.ex index 8dae1fd1..25fb82ad 100644 --- a/lib/rudder/metrics/events.ex +++ b/lib/refiner/metrics/events.ex @@ -1,4 +1,4 @@ -defmodule Rudder.Events do +defmodule Refiner.Events do @spec emit :: :ok def emit() do start_emit_ms = System.monotonic_time(:millisecond) @@ -6,7 +6,7 @@ defmodule Rudder.Events do :timer.sleep(100 * random_number) stop_emit_ms = System.monotonic_time(:millisecond) - :telemetry.execute([:rudder, :events, :emit], %{duration: stop_emit_ms - start_emit_ms}, %{ + :telemetry.execute([:refiner, :events, :emit], %{duration: stop_emit_ms - start_emit_ms}, %{ table: "emit_metrics", operation: "event" }) @@ -14,7 +14,7 @@ defmodule Rudder.Events do @spec 
ipfs_pin(any) :: :ok def ipfs_pin(duration) do - :telemetry.execute([:rudder, :events, :ipfs_pin], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :ipfs_pin], %{duration: duration}, %{ table: "ipfs_metrics", operation: "pin" }) @@ -22,7 +22,7 @@ defmodule Rudder.Events do @spec ipfs_fetch(any) :: :ok def ipfs_fetch(duration) do - :telemetry.execute([:rudder, :events, :ipfs_fetch], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :ipfs_fetch], %{duration: duration}, %{ table: "ipfs_metrics", operation: "fetch" }) @@ -30,7 +30,7 @@ defmodule Rudder.Events do @spec bsp_decode(any) :: :ok def bsp_decode(duration) do - :telemetry.execute([:rudder, :events, :bsp_decode], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :bsp_decode], %{duration: duration}, %{ table: "bsp_metrics", operation: "decode" }) @@ -38,7 +38,7 @@ defmodule Rudder.Events do @spec bsp_execute(any) :: :ok def bsp_execute(duration) do - :telemetry.execute([:rudder, :events, :bsp_execute], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :bsp_execute], %{duration: duration}, %{ table: "bsp_metrics", operation: "execute" }) @@ -46,7 +46,7 @@ defmodule Rudder.Events do @spec brp_upload_success(any) :: :ok def brp_upload_success(duration) do - :telemetry.execute([:rudder, :events, :brp_upload_success], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :brp_upload_success], %{duration: duration}, %{ table: "brp_metrics", operation: "upload_success" }) @@ -54,7 +54,7 @@ defmodule Rudder.Events do @spec brp_upload_failure(any) :: :ok def brp_upload_failure(duration) do - :telemetry.execute([:rudder, :events, :brp_upload_failure], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :brp_upload_failure], %{duration: duration}, %{ table: "brp_metrics", operation: "upload_failure" }) @@ -62,7 +62,7 @@ defmodule Rudder.Events do @spec brp_proof(any) :: :ok def brp_proof(duration) do - 
:telemetry.execute([:rudder, :events, :brp_proof], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :brp_proof], %{duration: duration}, %{ table: "brp_metrics", operation: "proof" }) @@ -70,7 +70,7 @@ defmodule Rudder.Events do @spec journal_fetch_last(any) :: :ok def journal_fetch_last(duration) do - :telemetry.execute([:rudder, :events, :journal_fetch_last], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :journal_fetch_last], %{duration: duration}, %{ table: "journal_metrics", operation: "fetch_last" }) @@ -78,24 +78,24 @@ defmodule Rudder.Events do @spec journal_fetch_items(any) :: :ok def journal_fetch_items(duration) do - :telemetry.execute([:rudder, :events, :journal_fetch_items], %{duration: duration}, %{ + :telemetry.execute([:refiner, :events, :journal_fetch_items], %{duration: duration}, %{ table: "journal_metrics", operation: "fetch_items" }) end - @spec rudder_pipeline_success(any) :: :ok - def rudder_pipeline_success(duration) do - :telemetry.execute([:rudder, :events, :rudder_pipeline_success], %{duration: duration}, %{ - table: "rudder_metrics", + @spec refiner_pipeline_success(any) :: :ok + def refiner_pipeline_success(duration) do + :telemetry.execute([:refiner, :events, :refiner_pipeline_success], %{duration: duration}, %{ + table: "refiner_metrics", operation: "pipeline_success" }) end - @spec rudder_pipeline_failure(any) :: :ok - def rudder_pipeline_failure(duration) do - :telemetry.execute([:rudder, :events, :rudder_pipeline_failure], %{duration: duration}, %{ - table: "rudder_metrics", + @spec refiner_pipeline_failure(any) :: :ok + def refiner_pipeline_failure(duration) do + :telemetry.execute([:refiner, :events, :refiner_pipeline_failure], %{duration: duration}, %{ + table: "refiner_metrics", operation: "pipeline_failure" }) end diff --git a/lib/rudder/metrics/prometheus.yml b/lib/refiner/metrics/prometheus.yml similarity index 100% rename from lib/rudder/metrics/prometheus.yml rename to 
lib/refiner/metrics/prometheus.yml diff --git a/lib/rudder/metrics/telemetry.ex b/lib/refiner/metrics/telemetry.ex similarity index 61% rename from lib/rudder/metrics/telemetry.ex rename to lib/refiner/metrics/telemetry.ex index ac16571c..5669be35 100644 --- a/lib/rudder/metrics/telemetry.ex +++ b/lib/refiner/metrics/telemetry.ex @@ -1,4 +1,4 @@ -defmodule Rudder.Telemetry do +defmodule Refiner.Telemetry do use Supervisor import Telemetry.Metrics @@ -11,163 +11,163 @@ defmodule Rudder.Telemetry do def init(_arg) do metrics = [ # event_emitting (all available metrics for any event) - counter("rudder.events.emit.count", + counter("refiner.events.emit.count", tags: [:table, :operation] ), - last_value("rudder.events.emit.duration", + last_value("refiner.events.emit.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.emit.duration", + # sum("refiner.events.emit.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), - # distribution("rudder.events.emit.duration", + # distribution("refiner.events.emit.duration", # unit: {:native, :millisecond}, # buckets: [0.0003, 0.0006, 0.0010], # tags: [:table, :operation] # ), # ipfs_pinning - counter("rudder.events.ipfs_pin.count", + counter("refiner.events.ipfs_pin.count", tags: [:table, :operation] ), - last_value("rudder.events.ipfs_pin.duration", + last_value("refiner.events.ipfs_pin.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.ipfs_pin.duration", + # sum("refiner.events.ipfs_pin.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), # ipfs_fetching - counter("rudder.events.ipfs_fetch.count", + counter("refiner.events.ipfs_fetch.count", tags: [:table, :operation] ), - last_value("rudder.events.ipfs_fetch.duration", + last_value("refiner.events.ipfs_fetch.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.ipfs_fetch.duration", + # 
sum("refiner.events.ipfs_fetch.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), # bsp_decoding - counter("rudder.events.bsp_decode.count", + counter("refiner.events.bsp_decode.count", tags: [:table, :operation] ), - last_value("rudder.events.bsp_decode.duration", + last_value("refiner.events.bsp_decode.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.bsp_decode.duration", + # sum("refiner.events.bsp_decode.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), # bsp_executing - counter("rudder.events.bsp_execute.count", + counter("refiner.events.bsp_execute.count", tags: [:table, :operation] ), - last_value("rudder.events.bsp_execute.duration", + last_value("refiner.events.bsp_execute.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.bsp_execute.duration", + # sum("refiner.events.bsp_execute.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), # brp_uploading - counter("rudder.events.brp_upload_success.count", + counter("refiner.events.brp_upload_success.count", tags: [:table, :operation] ), - last_value("rudder.events.brp_upload_success.duration", + last_value("refiner.events.brp_upload_success.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.brp_upload_success.duration", + # sum("refiner.events.brp_upload_success.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), - counter("rudder.events.brp_upload_failure.count", + counter("refiner.events.brp_upload_failure.count", tags: [:table, :operation] ), - last_value("rudder.events.brp_upload_failure.duration", + last_value("refiner.events.brp_upload_failure.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.brp_upload_failure.duration", + # sum("refiner.events.brp_upload_failure.duration", # unit: {:native, :millisecond}, # tags: [:table, 
:operation] # ), # brp_proofing - counter("rudder.events.brp_proof.count", + counter("refiner.events.brp_proof.count", tags: [:table, :operation] ), - last_value("rudder.events.brp_proof.duration", + last_value("refiner.events.brp_proof.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.brp_proof.duration", + # sum("refiner.events.brp_proof.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), # journal_fetching - counter("rudder.events.journal_fetch_last.count", + counter("refiner.events.journal_fetch_last.count", tags: [:table, :operation] ), - last_value("rudder.events.journal_fetch_last.duration", + last_value("refiner.events.journal_fetch_last.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.journal_fetch_last.duration", + # sum("refiner.events.journal_fetch_last.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), - counter("rudder.events.journal_fetch_items.count", + counter("refiner.events.journal_fetch_items.count", tags: [:table, :operation] ), - last_value("rudder.events.journal_fetch_items.duration", + last_value("refiner.events.journal_fetch_items.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.journal_fetch_items.duration", + # sum("refiner.events.journal_fetch_items.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ), - # rudder_pipelining - counter("rudder.events.rudder_pipeline_success.count", + # refiner_pipelining + counter("refiner.events.refiner_pipeline_success.count", tags: [:table, :operation] ), - last_value("rudder.events.rudder_pipeline_success.duration", + last_value("refiner.events.refiner_pipeline_success.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ), - # sum("rudder.events.rudder_pipeline_success.duration", + # sum("refiner.events.refiner_pipeline_success.duration", # unit: {:native, :millisecond}, # 
tags: [:table, :operation] # ), - counter("rudder.events.rudder_pipeline_failure.count", + counter("refiner.events.refiner_pipeline_failure.count", tags: [:table, :operation] ), - last_value("rudder.events.rudder_pipeline_failure.duration", + last_value("refiner.events.refiner_pipeline_failure.duration", unit: {:native, :millisecond}, tags: [:table, :operation] ) - # sum("rudder.events.rudder_pipeline_failure.duration", + # sum("refiner.events.refiner_pipeline_failure.duration", # unit: {:native, :millisecond}, # tags: [:table, :operation] # ) ] children = [ - {Rudder.Telemetry.CustomReporter, metrics: metrics}, + {Refiner.Telemetry.CustomReporter, metrics: metrics}, {TelemetryMetricsPrometheus, metrics: metrics} ] diff --git a/lib/rudder/pipeline.ex b/lib/refiner/pipeline.ex similarity index 86% rename from lib/rudder/pipeline.ex rename to lib/refiner/pipeline.ex index ba231991..3ba32dfa 100644 --- a/lib/rudder/pipeline.ex +++ b/lib/refiner/pipeline.ex @@ -1,7 +1,7 @@ -defmodule Rudder.Pipeline do - alias Rudder.Events +defmodule Refiner.Pipeline do + alias Refiner.Events require Logger - alias Rudder.Util.GVA + alias Refiner.Util.GVA require GVA use Task @@ -39,7 +39,7 @@ defmodule Rudder.Pipeline do __MODULE__, {Task, fn -> - return_val = Rudder.Pipeline.process_specimen(bsp_key, urls) + return_val = Refiner.Pipeline.process_specimen(bsp_key, urls) if reply do send(caller_pid, {:result, return_val}) @@ -56,23 +56,23 @@ defmodule Rudder.Pipeline do try do with [_chain_id, _block_height, _block_hash, specimen_hash] <- String.split(bsp_key, "_"), - {:ok, specimen} <- Rudder.IPFSInteractor.discover_block_specimen(urls), - {:ok, decoded_specimen} <- Rudder.Avro.BlockSpecimen.decode(specimen), + {:ok, specimen} <- Refiner.IPFSInteractor.discover_block_specimen(urls), + {:ok, decoded_specimen} <- Refiner.Avro.BlockSpecimen.decode(specimen), {:ok, block_specimen} <- extract_block_specimen(decoded_specimen), {:ok, block_result_file_path} <- - 
Rudder.BlockProcessor.sync_queue(block_specimen), + Refiner.BlockProcessor.sync_queue(block_specimen), {block_height, ""} <- Integer.parse(block_specimen.block_height), block_result_metadata <- - %Rudder.BlockResultMetadata{ + %Refiner.BlockResultMetadata{ chain_id: block_specimen.chain_id, block_height: block_height, block_specimen_hash: specimen_hash, file_path: block_result_file_path } do return_val = - case Rudder.BlockResultUploader.upload_block_result(block_result_metadata) do + case Refiner.BlockResultUploader.upload_block_result(block_result_metadata) do {:ok, cid, block_result_hash} -> - :ok = Rudder.Journal.commit(bsp_key) + :ok = Refiner.Journal.commit(bsp_key) rec_uploader_success() if !is_prev_uploader_state_ok && is_uploader_status_ok() do @@ -80,7 +80,7 @@ defmodule Rudder.Pipeline do set_retry_failed_bsp() end - Events.rudder_pipeline_success( + Events.refiner_pipeline_success( System.monotonic_time(:millisecond) - start_pipeline_ms ) @@ -89,11 +89,11 @@ defmodule Rudder.Pipeline do {:error, :irreparable, errormsg} -> rec_uploader_failure() - Events.rudder_pipeline_failure( + Events.refiner_pipeline_failure( System.monotonic_time(:millisecond) - start_pipeline_ms ) - raise(Rudder.Pipeline.ProofSubmissionIrreparableError, errormsg) + raise(Refiner.Pipeline.ProofSubmissionIrreparableError, errormsg) {:error, error, _block_result_hash} -> rec_uploader_failure() @@ -104,7 +104,7 @@ defmodule Rudder.Pipeline do log_error_info(bsp_key, urls, error) - Events.rudder_pipeline_failure( + Events.refiner_pipeline_failure( System.monotonic_time(:millisecond) - start_pipeline_ms ) @@ -121,7 +121,7 @@ defmodule Rudder.Pipeline do # resource cleanups Briefly.cleanup() rescue - e in Rudder.Pipeline.ProofSubmissionIrreparableError -> + e in Refiner.Pipeline.ProofSubmissionIrreparableError -> log_error_info(bsp_key, urls, e) Logger.error(Exception.format(:error, e, __STACKTRACE__)) Process.exit(Process.whereis(:bspec_listener), :irreparable) @@ -141,7 +141,7 @@ 
defmodule Rudder.Pipeline do :ok = Events.bsp_decode(System.monotonic_time(:millisecond) - start_decode_ms) {:ok, - %Rudder.BlockSpecimen{ + %Refiner.BlockSpecimen{ chain_id: chain_id, block_height: Integer.to_string(block_height), contents: Poison.encode!(data) diff --git a/lib/rudder/proof_chain/block_specimen_event_listener.ex b/lib/refiner/proof_chain/block_specimen_event_listener.ex similarity index 73% rename from lib/rudder/proof_chain/block_specimen_event_listener.ex rename to lib/refiner/proof_chain/block_specimen_event_listener.ex index ac271ffe..9ad9b5ea 100644 --- a/lib/rudder/proof_chain/block_specimen_event_listener.ex +++ b/lib/refiner/proof_chain/block_specimen_event_listener.ex @@ -1,4 +1,4 @@ -defmodule Rudder.ProofChain.BlockSpecimenEventListener do +defmodule Refiner.ProofChain.BlockSpecimenEventListener do require Logger use GenServer @@ -21,10 +21,10 @@ defmodule Rudder.ProofChain.BlockSpecimenEventListener do def start() do initialize() Logger.info("starting event listener") - Application.ensure_all_started(:rudder) - proofchain_address = Application.get_env(:rudder, :bsp_proofchain_address) + Application.ensure_all_started(:refiner) + proofchain_address = Application.get_env(:refiner, :bsp_proofchain_address) Logger.info("retrying older uprocessed bsps (if any) before starting to listen") - push_bsps_to_process(Rudder.Journal.items_with_status(:discover), true) + push_bsps_to_process(Refiner.Journal.items_with_status(:discover), true) block_height = load_last_checked_block() listen_for_event(proofchain_address, block_height) end @@ -47,10 +47,10 @@ defmodule Rudder.ProofChain.BlockSpecimenEventListener do @spec load_last_checked_block :: any def load_last_checked_block() do - {:ok, block_height} = Rudder.Journal.last_started_block() + {:ok, block_height} = Refiner.Journal.last_started_block() if block_height == 1 do - {:ok, block_height} = Rudder.Network.EthereumMainnet.eth_blockNumber() + {:ok, block_height} = 
Refiner.Network.EthereumMainnet.eth_blockNumber() block_height else block_height @@ -65,7 +65,7 @@ defmodule Rudder.ProofChain.BlockSpecimenEventListener do Map.fetch(log_event, "topics") [_validator_bit_map, specimen_hash_raw] = - Rudder.Util.extract_data(log_event, "(uint256,bytes32)") + Refiner.Util.extract_data(log_event, "(uint256,bytes32)") # prepare data to generate key specimen_hash = Base.encode16(specimen_hash_raw, case: :lower) @@ -86,36 +86,38 @@ defmodule Rudder.ProofChain.BlockSpecimenEventListener do def push_bsps_to_process(bsp_keys, mark_discover \\ false) do Enum.map(bsp_keys, fn bsp_key -> if !mark_discover do - Rudder.Journal.discover(bsp_key) + Refiner.Journal.discover(bsp_key) end Logger.info("processing specimen #{bsp_key}") [_chain_id, block_height, _block_hash, specimen_hash] = String.split(bsp_key, "_") - is_brp_sesion_open = Rudder.ProofChain.Interactor.is_block_result_session_open(block_height) + + is_brp_sesion_open = + Refiner.ProofChain.Interactor.is_block_result_session_open(block_height) if is_brp_sesion_open do - specimen_hash_bytes32 = Rudder.Util.convert_to_bytes32(specimen_hash) - bsp_urls = Rudder.ProofChain.Interactor.get_urls(specimen_hash_bytes32) - Rudder.Pipeline.Spawner.push_hash(bsp_key, bsp_urls) + specimen_hash_bytes32 = Refiner.Util.convert_to_bytes32(specimen_hash) + bsp_urls = Refiner.ProofChain.Interactor.get_urls(specimen_hash_bytes32) + Refiner.Pipeline.Spawner.push_hash(bsp_key, bsp_urls) else - Rudder.Journal.skip(bsp_key) + Refiner.Journal.skip(bsp_key) end end) end defp listen_for_event(proofchain_address, block_height) do - if Rudder.Pipeline.is_retry_failed_bsp() do - Rudder.Pipeline.clear_retry_failed_bsp() + if Refiner.Pipeline.is_retry_failed_bsp() do + Refiner.Pipeline.clear_retry_failed_bsp() Logger.info("retrying older unprocessed bsps (if any)") - push_bsps_to_process(Rudder.Journal.items_with_status(:discover), true) + push_bsps_to_process(Refiner.Journal.items_with_status(:discover), true) end 
Logger.info("listening for events at #{block_height}") - Rudder.Journal.block_height_started(block_height) + Refiner.Journal.block_height_started(block_height) {:ok, bsp_awarded_logs} = - Rudder.Network.EthereumMainnet.eth_getLogs([ + Refiner.Network.EthereumMainnet.eth_getLogs([ %{ address: proofchain_address, fromBlock: "0x" <> Integer.to_string(block_height, 16), @@ -127,7 +129,7 @@ defmodule Rudder.ProofChain.BlockSpecimenEventListener do bsps_to_process = extract_awarded_specimens(bsp_awarded_logs) Logger.info("found #{length(bsps_to_process)} bsps to process") push_bsps_to_process(bsps_to_process) - Rudder.Journal.block_height_committed(block_height) + Refiner.Journal.block_height_committed(block_height) next_block_height = block_height + 1 loop(next_block_height) @@ -135,7 +137,7 @@ defmodule Rudder.ProofChain.BlockSpecimenEventListener do end defp loop(curr_block_height) do - {:ok, latest_block_number} = Rudder.Network.EthereumMainnet.eth_blockNumber() + {:ok, latest_block_number} = Refiner.Network.EthereumMainnet.eth_blockNumber() if curr_block_height > latest_block_number do Logger.info("synced to latest; waiting for #{curr_block_height} to be mined") diff --git a/lib/rudder/proof_chain/proof_chain_interactor.ex b/lib/refiner/proof_chain/proof_chain_interactor.ex similarity index 76% rename from lib/rudder/proof_chain/proof_chain_interactor.ex rename to lib/refiner/proof_chain/proof_chain_interactor.ex index 174df644..835c1536 100644 --- a/lib/rudder/proof_chain/proof_chain_interactor.ex +++ b/lib/refiner/proof_chain/proof_chain_interactor.ex @@ -1,6 +1,6 @@ -defmodule Rudder.ProofChain.Interactor do +defmodule Refiner.ProofChain.Interactor do require Logger - alias Rudder.Events + alias Refiner.Events use GenServer @impl true @@ -21,14 +21,14 @@ defmodule Rudder.ProofChain.Interactor do end defp get_bsp_proofchain() do - proofchain_address = Application.get_env(:rudder, :bsp_proofchain_address) - {:ok, proofchain} = 
Rudder.RPC.PublicKeyHash.parse(proofchain_address) + proofchain_address = Application.get_env(:refiner, :bsp_proofchain_address) + {:ok, proofchain} = Refiner.RPC.PublicKeyHash.parse(proofchain_address) proofchain end defp get_brp_proofchain() do - proofchain_address = Application.get_env(:rudder, :brp_proofchain_address) - {:ok, proofchain} = Rudder.RPC.PublicKeyHash.parse(proofchain_address) + proofchain_address = Application.get_env(:refiner, :brp_proofchain_address) + {:ok, proofchain} = Refiner.RPC.PublicKeyHash.parse(proofchain_address) proofchain end @@ -40,7 +40,7 @@ defmodule Rudder.ProofChain.Interactor do data: data ] - with {:ok, res} <- Rudder.Network.EthereumMainnet.eth_call(tx) do + with {:ok, res} <- Refiner.Network.EthereumMainnet.eth_call(tx) do res end end @@ -57,7 +57,7 @@ defmodule Rudder.ProofChain.Interactor do def is_block_result_session_open(block_height) do {block_height, _} = Integer.parse(block_height) operator = get_operator_wallet() - chain_id = Application.get_env(:rudder, :block_specimen_chain_id) + chain_id = Application.get_env(:refiner, :block_specimen_chain_id) rpc_params = [chain_id, block_height, operator.address.bytes] data = {@is_session_open_selector, rpc_params} brp_proofchain = get_brp_proofchain() @@ -65,8 +65,8 @@ defmodule Rudder.ProofChain.Interactor do end defp get_operator_wallet() do - operator_private_key = Application.get_env(:rudder, :operator_private_key) - Rudder.Wallet.load(Base.decode16!(operator_private_key, case: :lower)) + operator_private_key = Application.get_env(:refiner, :operator_private_key) + Refiner.Wallet.load(Base.decode16!(operator_private_key, case: :lower)) end defp send_eip1559_signed_tx( @@ -78,13 +78,13 @@ defmodule Rudder.ProofChain.Interactor do proofchain_chain_id, max_priority_fee_per_gas_hex ) do - {:ok, block} = Rudder.Network.EthereumMainnet.eth_getBlockByNumber(:latest) + {:ok, block} = Refiner.Network.EthereumMainnet.eth_getBlockByNumber(:latest) base_fee = block.base_fee_per_gas 
"0x" <> max_priority_fee_per_gas_hex = max_priority_fee_per_gas_hex {max_priority_fee_per_gas, _} = Integer.parse(max_priority_fee_per_gas_hex, 16) max_fee_per_gas = 2 * base_fee + max_priority_fee_per_gas - tx = %Rudder.RPC.EthereumClient.TransactionEIP1559{ + tx = %Refiner.RPC.EthereumClient.TransactionEIP1559{ type: 2, nonce: nonce, to: to, @@ -96,14 +96,14 @@ defmodule Rudder.ProofChain.Interactor do chain_id: proofchain_chain_id } - Rudder.RPC.EthereumClient.TransactionEIP1559.signed_by(tx, sender) + Refiner.RPC.EthereumClient.TransactionEIP1559.signed_by(tx, sender) end defp get_eip1559_signed_tx(sender, nonce, to, estimated_gas_limit, data, proofchain_chain_id) do case proofchain_chain_id do # case for testing via hardhat node in absence of maxPriorityFeePerGas support 31_337 -> - {:ok, fee_history} = Rudder.Network.EthereumMainnet.eth_feeHistory() + {:ok, fee_history} = Refiner.Network.EthereumMainnet.eth_feeHistory() fee_history_list = Map.to_list(fee_history) max_priority_fee_per_gas_hex = @@ -121,7 +121,7 @@ defmodule Rudder.ProofChain.Interactor do _ -> {:ok, max_priority_fee_per_gas_hex} = - Rudder.Network.EthereumMainnet.eth_maxPriorityFeePerGas() + Refiner.Network.EthereumMainnet.eth_maxPriorityFeePerGas() send_eip1559_signed_tx( sender, @@ -136,9 +136,9 @@ defmodule Rudder.ProofChain.Interactor do end defp get_legacy_signed_tx(sender, nonce, to, estimated_gas_limit, data, proofchain_chain_id) do - gas_price = Rudder.Network.EthereumMainnet.eth_gasPrice!() + gas_price = Refiner.Network.EthereumMainnet.eth_gasPrice!() - tx = %Rudder.RPC.EthereumClient.Transaction{ + tx = %Refiner.RPC.EthereumClient.Transaction{ nonce: nonce, gas_price: gas_price, gas_limit: estimated_gas_limit, @@ -148,7 +148,7 @@ defmodule Rudder.ProofChain.Interactor do chain_id: proofchain_chain_id } - Rudder.RPC.EthereumClient.Transaction.signed_by(tx, sender) + Refiner.RPC.EthereumClient.Transaction.signed_by(tx, sender) end @spec submit_block_result_proof(any, any, any, any, 
any) :: any @@ -173,30 +173,30 @@ defmodule Rudder.ProofChain.Interactor do sender = get_operator_wallet() to = get_brp_proofchain() - {:ok, recent_gas_limit} = Rudder.Network.EthereumMainnet.gas_limit(:latest) + {:ok, recent_gas_limit} = Refiner.Network.EthereumMainnet.gas_limit(:latest) try do estimated_gas_limit = - Rudder.Network.EthereumMainnet.eth_estimateGas!( + Refiner.Network.EthereumMainnet.eth_estimateGas!( from: sender.address, to: to, data: data, gas: recent_gas_limit ) - nonce = Rudder.Network.EthereumMainnet.next_nonce(sender.address) - proofchain_chain_id = Application.get_env(:rudder, :proofchain_chain_id) + nonce = Refiner.Network.EthereumMainnet.next_nonce(sender.address) + proofchain_chain_id = Application.get_env(:refiner, :proofchain_chain_id) signed_tx = get_eip1559_signed_tx(sender, nonce, to, estimated_gas_limit, data, proofchain_chain_id) - with {:ok, txid} <- Rudder.Network.EthereumMainnet.eth_sendTransaction(signed_tx) do + with {:ok, txid} <- Refiner.Network.EthereumMainnet.eth_sendTransaction(signed_tx) do :ok = Events.brp_proof(System.monotonic_time(:millisecond) - start_proof_ms) Logger.info("#{block_height} txid is #{txid}") {:ok, :submitted} end rescue - e in Rudder.RPCError -> + e in Refiner.RPCError -> cond do String.contains?(e.message, "Operator already submitted for the provided block hash") -> {:ok, :submitted} diff --git a/lib/rudder/rpc/ethereum_client/codec.ex b/lib/refiner/rpc/ethereum_client/codec.ex similarity index 93% rename from lib/rudder/rpc/ethereum_client/codec.ex rename to lib/refiner/rpc/ethereum_client/codec.ex index bbd95d9d..e531f05f 100644 --- a/lib/rudder/rpc/ethereum_client/codec.ex +++ b/lib/refiner/rpc/ethereum_client/codec.ex @@ -1,9 +1,9 @@ -defmodule Rudder.RPC.EthereumClient.Codec do - alias Rudder.RPC.EthereumClient.Transaction - alias Rudder.RPC.EthereumClient.TransactionEIP1559 +defmodule Refiner.RPC.EthereumClient.Codec do + alias Refiner.RPC.EthereumClient.Transaction + alias 
Refiner.RPC.EthereumClient.TransactionEIP1559 def encode_sha256(nil), do: nil - def encode_sha256(%Rudder.SHA256{bytes: bytes}), do: encode_sha256(bytes) + def encode_sha256(%Refiner.SHA256{bytes: bytes}), do: encode_sha256(bytes) def encode_sha256(hash) when is_binary(hash), do: encode_bin(hash) def encode_address(nil), do: nil @@ -13,7 +13,7 @@ defmodule Rudder.RPC.EthereumClient.Codec do end def encode_address( - %Rudder.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: bytes}, + %Refiner.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: bytes}, opts \\ [] ) do if Keyword.get(opts, :raw, false) do @@ -31,7 +31,7 @@ defmodule Rudder.RPC.EthereumClient.Codec do @spec decode_address(nil | bitstring) :: nil - | %Rudder.RPC.PublicKeyHash{ + | %Refiner.RPC.PublicKeyHash{ bytes: bitstring, chain_id: nil, format: :ethpub, @@ -40,11 +40,11 @@ defmodule Rudder.RPC.EthereumClient.Codec do def decode_address(nil), do: nil def decode_address("") do - %Rudder.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: <<0::160>>} + %Refiner.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: <<0::160>>} end def decode_address(bytes) when byte_size(bytes) == 20 do - %Rudder.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: bytes} + %Refiner.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: bytes} end def decode_address(<<"0x", _::binary>> = bin) do @@ -127,8 +127,8 @@ defmodule Rudder.RPC.EthereumClient.Codec do |> List.to_tuple() end - defp normalize_hl_call_payload_part(%Rudder.SHA256{bytes: bytes}), do: bytes - defp normalize_hl_call_payload_part(%Rudder.RPC.PublicKeyHash{bytes: bytes}), do: bytes + defp normalize_hl_call_payload_part(%Refiner.SHA256{bytes: bytes}), do: bytes + defp normalize_hl_call_payload_part(%Refiner.RPC.PublicKeyHash{bytes: bytes}), do: bytes defp normalize_hl_call_payload_part(other), do: other @spec encode_call_payload(binary | {binary | ABI.FunctionSelector.t(), any}) :: @@ -233,7 +233,7 @@ defmodule 
Rudder.RPC.EthereumClient.Codec do {transaction_ids, transactions} = case Enum.map(block["transactions"], &decode_transaction/1) do [] -> {[], []} - [%Rudder.SHA256{} | _] = txids -> {txids, nil} + [%Refiner.SHA256{} | _] = txids -> {txids, nil} txs -> {nil, txs} end @@ -350,11 +350,11 @@ defmodule Rudder.RPC.EthereumClient.Codec do def decode_log_topic(bin) do decode_bin(bin) - |> Rudder.SHA256.new() + |> Refiner.SHA256.new() end - def extract_address_from_log_topic(%Rudder.SHA256{bytes: <<0::96, addr::binary-size(20)>>}), - do: %Rudder.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: addr} + def extract_address_from_log_topic(%Refiner.SHA256{bytes: <<0::96, addr::binary-size(20)>>}), + do: %Refiner.RPC.PublicKeyHash{format: :ethpub, namespace: 0, bytes: addr} def linearize_log_offsets(logs) do Enum.sort_by(logs, fn log -> @@ -386,7 +386,7 @@ defmodule Rudder.RPC.EthereumClient.Codec do def decode_sha256(bin) do case decode_bin(bin) do <<0::256>> -> nil - bytes -> Rudder.SHA256.new(bytes) + bytes -> Refiner.SHA256.new(bytes) end end diff --git a/lib/rudder/rpc/ethereum_client/ethereum_client.ex b/lib/refiner/rpc/ethereum_client/ethereum_client.ex similarity index 93% rename from lib/rudder/rpc/ethereum_client/ethereum_client.ex rename to lib/refiner/rpc/ethereum_client/ethereum_client.ex index 58fa1a1d..18682950 100644 --- a/lib/rudder/rpc/ethereum_client/ethereum_client.ex +++ b/lib/refiner/rpc/ethereum_client/ethereum_client.ex @@ -1,17 +1,17 @@ -defmodule Rudder.RPC.EthereumClient do +defmodule Refiner.RPC.EthereumClient do @spec __using__(any) :: {:__block__, [], [{:=, [], [...]} | {:__block__, [...], [...]}, ...]} defmacro __using__(opts) do quote location: :keep, bind_quoted: [used_with_opts: opts] do use Confex, used_with_opts - alias Rudder.RPC.EthereumClient.Codec - alias Rudder.RPC.EthereumClient.Transaction - alias Rudder.RPC.EthereumClient.TransactionEIP1559 + alias Refiner.RPC.EthereumClient.Codec + alias Refiner.RPC.EthereumClient.Transaction 
+ alias Refiner.RPC.EthereumClient.TransactionEIP1559 - @default_client_module Rudder.RPC.JSONRPC.HTTPClient + @default_client_module Refiner.RPC.JSONRPC.HTTPClient @default_client_opts "http://localhost:8545" @default_selector List.last(Module.split(__MODULE__)) - @default_adapter Rudder.RPC.JSONRPC.HTTPAdapter + @default_adapter Refiner.RPC.JSONRPC.HTTPAdapter @batch_load_path "var/batch_loads" @@ -50,7 +50,7 @@ defmodule Rudder.RPC.EthereumClient do def sealer do case Keyword.fetch(config(), :sealer) do {:ok, sealer_addr_str} -> - Rudder.RPC.PublicKeyHash.parse!(sealer_addr_str) + Refiner.RPC.PublicKeyHash.parse!(sealer_addr_str) :error -> nil @@ -75,7 +75,7 @@ defmodule Rudder.RPC.EthereumClient do try do is_binary(web3_clientVersion!()) rescue - e in Rudder.RPCError -> + e in Refiner.RPCError -> false end end @@ -86,7 +86,7 @@ defmodule Rudder.RPC.EthereumClient do request_timeout: request_timeout_millis() ] - resp = client() |> Rudder.RPC.JSONRPC.Client.call(rpc_method, rpc_params, default_opts) + resp = client() |> Refiner.RPC.JSONRPC.Client.call(rpc_method, rpc_params, default_opts) case {resp, decode_fun} do {{:ok, value}, f} when is_function(f, 1) -> {:ok, f.(value)} @@ -98,7 +98,7 @@ defmodule Rudder.RPC.EthereumClient do quote do case unquote(rpc_resp) do {:ok, value} -> value - error -> raise Rudder.RPCError, error + error -> raise Refiner.RPCError, error end end end @@ -108,7 +108,7 @@ defmodule Rudder.RPC.EthereumClient do Stream.map(unquote(rpc_resp_stream), fn rpc_resp -> case rpc_resp do {:ok, value} -> value - error -> raise Rudder.RPCError, error + error -> raise Refiner.RPCError, error end end) end @@ -308,7 +308,7 @@ defmodule Rudder.RPC.EthereumClient do pos when is_atom(pos) or is_integer(pos) -> call(:eth_getBlockByNumber, [Codec.encode_qty(pos), full_transactions?]) - %Rudder.SHA256{} = hash -> + %Refiner.SHA256{} = hash -> call(:eth_getBlockByHash, [Codec.encode_sha256(hash), full_transactions?]) hash when is_binary(hash) -> @@ -339,7 
+339,7 @@ defmodule Rudder.RPC.EthereumClient do def next_nonce(nil), do: 0 - def next_nonce(%Rudder.RPC.PublicKeyHash{} = pkh) do + def next_nonce(%Refiner.RPC.PublicKeyHash{} = pkh) do eth_getTransactionCount!(pkh, :pending) end diff --git a/lib/rudder/rpc/ethereum_client/hash.ex b/lib/refiner/rpc/ethereum_client/hash.ex similarity index 78% rename from lib/rudder/rpc/ethereum_client/hash.ex rename to lib/refiner/rpc/ethereum_client/hash.ex index 4682d560..46fcaf27 100644 --- a/lib/rudder/rpc/ethereum_client/hash.ex +++ b/lib/refiner/rpc/ethereum_client/hash.ex @@ -1,4 +1,4 @@ -defmodule Rudder.SHA256 do +defmodule Refiner.SHA256 do defstruct [:bytes] def sigil_h(maybe_pfx_hexhash, []), do: parse(maybe_pfx_hexhash) @@ -37,7 +37,7 @@ defmodule Rudder.SHA256 do def cast(%__MODULE__{} = h), do: {:ok, h} def cast(_), do: :error - @spec load(any) :: :error | {:ok, %Rudder.SHA256{bytes: binary}} + @spec load(any) :: :error | {:ok, %Refiner.SHA256{bytes: binary}} @doc false def load(data) when is_binary(data) and byte_size(data) == 32 do {:ok, %__MODULE__{bytes: data}} @@ -51,23 +51,23 @@ defmodule Rudder.SHA256 do def dump(_), do: :error end -defimpl Jason.Encoder, for: Rudder.SHA256 do - def encode(%Rudder.SHA256{} = hash, opts) do - Rudder.SHA256.as_string(hash) +defimpl Jason.Encoder, for: Refiner.SHA256 do + def encode(%Refiner.SHA256{} = hash, opts) do + Refiner.SHA256.as_string(hash) |> Jason.Encode.string(opts) end end -defimpl String.Chars, for: Rudder.SHA256 do - def to_string(block_hash), do: Rudder.SHA256.as_string(block_hash) +defimpl String.Chars, for: Refiner.SHA256 do + def to_string(block_hash), do: Refiner.SHA256.as_string(block_hash) end -defimpl Inspect, for: Rudder.SHA256 do +defimpl Inspect, for: Refiner.SHA256 do import Inspect.Algebra case Mix.env() do :prod -> - def inspect(%Rudder.SHA256{bytes: hash}, opts) do + def inspect(%Refiner.SHA256{bytes: hash}, opts) do hash_hex = Base.encode16(hash, case: :lower) hash_start = String.slice(hash_hex, 
0..1) hash_end = String.slice(hash_hex, 52..63) @@ -77,7 +77,7 @@ defimpl Inspect, for: Rudder.SHA256 do end _ -> - @spec inspect(%Rudder.SHA256{}, Inspect.Opts.t()) :: + @spec inspect(%Refiner.SHA256{}, Inspect.Opts.t()) :: :doc_line | :doc_nil | binary @@ -86,7 +86,7 @@ defimpl Inspect, for: Rudder.SHA256 do | {:doc_break | :doc_color | :doc_cons | :doc_fits | :doc_group | :doc_string, any, any} | {:doc_nest, any, :cursor | :reset | non_neg_integer, :always | :break} - def inspect(%Rudder.SHA256{bytes: hash}, opts) do + def inspect(%Refiner.SHA256{bytes: hash}, opts) do hash_hex = Base.encode16(hash, case: :lower) concat(["♯", hash_hex]) diff --git a/lib/rudder/rpc/ethereum_client/public_key_hash.ex b/lib/refiner/rpc/ethereum_client/public_key_hash.ex similarity index 80% rename from lib/rudder/rpc/ethereum_client/public_key_hash.ex rename to lib/refiner/rpc/ethereum_client/public_key_hash.ex index 4afcc998..5c52887a 100644 --- a/lib/rudder/rpc/ethereum_client/public_key_hash.ex +++ b/lib/refiner/rpc/ethereum_client/public_key_hash.ex @@ -1,10 +1,10 @@ -defmodule Rudder.RPC.PublicKeyHash do +defmodule Refiner.RPC.PublicKeyHash do defstruct format: :p2pkh, namespace: 0, chain_id: nil, bytes: <<0::160>> - alias Rudder.RPC.EthereumClient.Codec, as: EthereumCodec + alias Refiner.RPC.EthereumClient.Codec, as: EthereumCodec @codecs_by_format %{ ethpub: EthereumCodec @@ -12,7 +12,7 @@ defmodule Rudder.RPC.PublicKeyHash do @zero_address EthereumCodec.decode_address("0x0000000000000000000000000000000000000000") - @spec zero_address :: %Rudder.RPC.PublicKeyHash{ + @spec zero_address :: %Refiner.RPC.PublicKeyHash{ bytes: <<_::160>>, chain_id: nil, format: :ethpub, @@ -43,7 +43,7 @@ defmodule Rudder.RPC.PublicKeyHash do :error | {:ok, nil - | %Rudder.RPC.PublicKeyHash{ + | %Refiner.RPC.PublicKeyHash{ bytes: bitstring, chain_id: nil, format: :ethpub, @@ -57,7 +57,7 @@ defmodule Rudder.RPC.PublicKeyHash do @spec parse_raw!(any) :: nil - | %Rudder.RPC.PublicKeyHash{ + | 
%Refiner.RPC.PublicKeyHash{ bytes: bitstring, chain_id: nil, format: :ethpub, @@ -68,7 +68,7 @@ defmodule Rudder.RPC.PublicKeyHash do pkh end - @spec new(any, any, any, any) :: %Rudder.RPC.PublicKeyHash{ + @spec new(any, any, any, any) :: %Refiner.RPC.PublicKeyHash{ bytes: any, chain_id: any, format: any, @@ -111,7 +111,7 @@ defmodule Rudder.RPC.PublicKeyHash do :error | {:ok, nil - | %Rudder.RPC.PublicKeyHash{ + | %Refiner.RPC.PublicKeyHash{ bytes: bitstring, chain_id: nil, format: :ethpub, @@ -130,26 +130,26 @@ defmodule Rudder.RPC.PublicKeyHash do def equal?(a, b), do: a == b end -defimpl Jason.Encoder, for: Rudder.RPC.PublicKeyHash do - @spec encode(%Rudder.RPC.PublicKeyHash{}, Jason.Encode.opts()) :: [ +defimpl Jason.Encoder, for: Refiner.RPC.PublicKeyHash do + @spec encode(%Refiner.RPC.PublicKeyHash{}, Jason.Encode.opts()) :: [ binary | maybe_improper_list(any, binary | []) | byte, ... ] - def encode(%Rudder.RPC.PublicKeyHash{} = pkh, opts) do - Rudder.RPC.PublicKeyHash.as_string!(pkh) + def encode(%Refiner.RPC.PublicKeyHash{} = pkh, opts) do + Refiner.RPC.PublicKeyHash.as_string!(pkh) |> Jason.Encode.string(opts) end end -defimpl String.Chars, for: Rudder.RPC.PublicKeyHash do - @spec to_string(%Rudder.RPC.PublicKeyHash{}) :: any - def to_string(pkh), do: Rudder.RPC.PublicKeyHash.as_string!(pkh) +defimpl String.Chars, for: Refiner.RPC.PublicKeyHash do + @spec to_string(%Refiner.RPC.PublicKeyHash{}) :: any + def to_string(pkh), do: Refiner.RPC.PublicKeyHash.as_string!(pkh) end -defimpl Inspect, for: Rudder.RPC.PublicKeyHash do +defimpl Inspect, for: Refiner.RPC.PublicKeyHash do import Inspect.Algebra - @spec inspect(%Rudder.RPC.PublicKeyHash{}, Inspect.Opts.t()) :: + @spec inspect(%Refiner.RPC.PublicKeyHash{}, Inspect.Opts.t()) :: :doc_line | :doc_nil | binary @@ -158,10 +158,11 @@ defimpl Inspect, for: Rudder.RPC.PublicKeyHash do | {:doc_break | :doc_color | :doc_cons | :doc_fits | :doc_group | :doc_string, any, any} | {:doc_nest, any, :cursor | :reset | 
non_neg_integer, :always | :break} def inspect( - %Rudder.RPC.PublicKeyHash{format: format, namespace: namespace, chain_id: chain_id} = pkh, + %Refiner.RPC.PublicKeyHash{format: format, namespace: namespace, chain_id: chain_id} = + pkh, opts ) do - {:ok, str_repr} = Rudder.RPC.PublicKeyHash.as_string(pkh, hide_prefix: true) + {:ok, str_repr} = Refiner.RPC.PublicKeyHash.as_string(pkh, hide_prefix: true) str_repr_doc = str_repr_doc(str_repr, opts) diff --git a/lib/rudder/rpc/ethereum_client/transaction.ex b/lib/refiner/rpc/ethereum_client/transaction.ex similarity index 88% rename from lib/rudder/rpc/ethereum_client/transaction.ex rename to lib/refiner/rpc/ethereum_client/transaction.ex index 7c627b38..d0c6e257 100644 --- a/lib/rudder/rpc/ethereum_client/transaction.ex +++ b/lib/refiner/rpc/ethereum_client/transaction.ex @@ -1,5 +1,5 @@ -defmodule Rudder.RPC.EthereumClient.Transaction do - alias Rudder.Wallet +defmodule Refiner.RPC.EthereumClient.Transaction do + alias Refiner.Wallet defstruct [ :nonce, @@ -95,12 +95,12 @@ defmodule Rudder.RPC.EthereumClient.Transaction do defp normalize_bin("0x" <> hex), do: Base.decode16!(hex, case: :mixed) defp normalize_bin(bin) when is_binary(bin), do: bin - def normalize_address(%Wallet{address: %Rudder.RPC.PublicKeyHash{} = pkh}), do: pkh - def normalize_address(other), do: Rudder.RPC.PublicKeyHash.parse_raw!(normalize_bin(other)) + def normalize_address(%Wallet{address: %Refiner.RPC.PublicKeyHash{} = pkh}), do: pkh + def normalize_address(other), do: Refiner.RPC.PublicKeyHash.parse_raw!(normalize_bin(other)) defp term_to_rlpable(nil), do: "" defp term_to_rlpable(0), do: "" - defp term_to_rlpable(%Rudder.RPC.PublicKeyHash{bytes: bin}), do: bin + defp term_to_rlpable(%Refiner.RPC.PublicKeyHash{bytes: bin}), do: bin defp term_to_rlpable(data) when is_integer(data), do: :binary.encode_unsigned(data) defp term_to_rlpable(data) when is_binary(data), do: data diff --git a/lib/rudder/rpc/ethereum_client/transaction_eip1559.ex 
b/lib/refiner/rpc/ethereum_client/transaction_eip1559.ex similarity index 89% rename from lib/rudder/rpc/ethereum_client/transaction_eip1559.ex rename to lib/refiner/rpc/ethereum_client/transaction_eip1559.ex index f0992961..665d319c 100644 --- a/lib/rudder/rpc/ethereum_client/transaction_eip1559.ex +++ b/lib/refiner/rpc/ethereum_client/transaction_eip1559.ex @@ -1,5 +1,5 @@ -defmodule Rudder.RPC.EthereumClient.TransactionEIP1559 do - alias Rudder.Wallet +defmodule Refiner.RPC.EthereumClient.TransactionEIP1559 do + alias Refiner.Wallet defstruct [ :type, @@ -117,12 +117,12 @@ defmodule Rudder.RPC.EthereumClient.TransactionEIP1559 do defp normalize_bin("0x" <> hex), do: Base.decode16!(hex, case: :mixed) defp normalize_bin(bin) when is_binary(bin), do: bin - def normalize_address(%Wallet{address: %Rudder.RPC.PublicKeyHash{} = pkh}), do: pkh - def normalize_address(other), do: Rudder.RPC.PublicKeyHash.parse_raw!(normalize_bin(other)) + def normalize_address(%Wallet{address: %Refiner.RPC.PublicKeyHash{} = pkh}), do: pkh + def normalize_address(other), do: Refiner.RPC.PublicKeyHash.parse_raw!(normalize_bin(other)) defp term_to_rlpable(nil), do: "" defp term_to_rlpable(0), do: 0 - defp term_to_rlpable(%Rudder.RPC.PublicKeyHash{bytes: bin}), do: bin + defp term_to_rlpable(%Refiner.RPC.PublicKeyHash{bytes: bin}), do: bin defp term_to_rlpable(data) when is_integer(data), do: :binary.encode_unsigned(data) defp term_to_rlpable(data) when is_binary(data), do: data defp term_to_rlpable([]), do: [] diff --git a/lib/rudder/rpc/ethereum_client/wallet.ex b/lib/refiner/rpc/ethereum_client/wallet.ex similarity index 85% rename from lib/rudder/rpc/ethereum_client/wallet.ex rename to lib/refiner/rpc/ethereum_client/wallet.ex index 8cbd19d4..9ef3e498 100644 --- a/lib/rudder/rpc/ethereum_client/wallet.ex +++ b/lib/refiner/rpc/ethereum_client/wallet.ex @@ -1,4 +1,4 @@ -defmodule Rudder.Wallet do +defmodule Refiner.Wallet do defstruct [:private_key, :public_key, :address, :mnemonic_phrase] 
def new(), do: load(:crypto.strong_rand_bytes(32)) @@ -23,11 +23,11 @@ defmodule Rudder.Wallet do end def sign(%__MODULE__{} = sender, tx) do - Rudder.RPC.EthereumClient.Transaction.signed_by(tx, sender) + Refiner.RPC.EthereumClient.Transaction.signed_by(tx, sender) end defp pubkey_to_address(<<4::size(8), key::binary-size(64)>>) do <<_::binary-size(12), raw_address::binary-size(20)>> = ExKeccak.hash_256(key) - Rudder.RPC.PublicKeyHash.parse_raw!(raw_address) + Refiner.RPC.PublicKeyHash.parse_raw!(raw_address) end end diff --git a/lib/refiner/rpc/ethereum_mainnet.ex b/lib/refiner/rpc/ethereum_mainnet.ex new file mode 100644 index 00000000..ef24c8ac --- /dev/null +++ b/lib/refiner/rpc/ethereum_mainnet.ex @@ -0,0 +1,8 @@ +defmodule Refiner.Network.EthereumMainnet do + use Refiner.RPC.EthereumClient, + otp_app: :refiner, + client_opts: Application.get_env(:refiner, :proofchain_node), + chain_id: Application.get_env(:refiner, :proofchain_chain_id), + description: "Ethereum Foundation Mainnet", + currency: [name: "Ether", ticker_symbol: "ETH"] +end diff --git a/lib/rudder/rpc/jsonrpc/client.ex b/lib/refiner/rpc/jsonrpc/client.ex similarity index 85% rename from lib/rudder/rpc/jsonrpc/client.ex rename to lib/refiner/rpc/jsonrpc/client.ex index a58e5eb4..57a89e15 100644 --- a/lib/rudder/rpc/jsonrpc/client.ex +++ b/lib/refiner/rpc/jsonrpc/client.ex @@ -1,4 +1,4 @@ -defprotocol Rudder.RPC.JSONRPC.Client do +defprotocol Refiner.RPC.JSONRPC.Client do @spec call(t, any, any, any) :: any def call(client, rpc_method, rpc_params, opts \\ []) @spec long_call(t, any, any) :: any diff --git a/lib/rudder/rpc/jsonrpc/http_adapter.ex b/lib/refiner/rpc/jsonrpc/http_adapter.ex similarity index 87% rename from lib/rudder/rpc/jsonrpc/http_adapter.ex rename to lib/refiner/rpc/jsonrpc/http_adapter.ex index 31bad9ec..d349daef 100644 --- a/lib/rudder/rpc/jsonrpc/http_adapter.ex +++ b/lib/refiner/rpc/jsonrpc/http_adapter.ex @@ -1,4 +1,4 @@ -defmodule Rudder.RPC.JSONRPC.HTTPAdapter do +defmodule 
Refiner.RPC.JSONRPC.HTTPAdapter do require Logger @http_retries 5 @@ -12,7 +12,7 @@ defmodule Rudder.RPC.JSONRPC.HTTPAdapter do req_uri, req_headers ) - |> Finch.request(Rudder.Finch) + |> Finch.request(Refiner.Finch) end @spec call(any, any, any, any, any) :: none @@ -63,10 +63,10 @@ defmodule Rudder.RPC.JSONRPC.HTTPAdapter do defp submit_and_decode_http_req_with_retry(req_body, retries_left) defp submit_and_decode_http_req_with_retry(_req_body, 0), - do: raise(Rudder.RPCError, "retries exceeded") + do: raise(Refiner.RPCError, "retries exceeded") defp submit_and_decode_http_req_with_retry(req_body, retries_left) do - case decode_http_resp(Finch.request(req_body, Rudder.Finch)) do + case decode_http_resp(Finch.request(req_body, Refiner.Finch)) do :connection_closed -> submit_and_decode_http_req_with_retry(req_body, retries_left - 1) @@ -155,10 +155,10 @@ defmodule Rudder.RPC.JSONRPC.HTTPAdapter do do: :connection_closed def decode_http_resp({:error, %Finch.Error{} = e}), - do: raise(Rudder.RPCError, e) + do: raise(Refiner.RPCError, e) def decode_http_resp({:error, %Mint.TransportError{reason: :econnrefused} = e}), - do: raise(Rudder.RPCError, e) + do: raise(Refiner.RPCError, e) def decode_jsonrpc_resp(%{"id" => seq, "error" => %{"code" => code, "message" => msg}}), do: {seq, req_error(code, msg)} @@ -182,11 +182,13 @@ defmodule Rudder.RPC.JSONRPC.HTTPAdapter do def batch_error(502, _), do: :gateway_error def batch_error(503, _), do: :overload def batch_error(524, _), do: :overload - def batch_error({_, -32_700}, _), do: raise(Rudder.RPCError, "client sent malformed JSON") - def batch_error({_, -32_600}, _), do: raise(Rudder.RPCError, "invalid request") - def batch_error({_, -32_601}, _), do: raise(Rudder.RPCError, "RPC endpoint not available") - def batch_error({_, -32_602}, e), do: raise(Rudder.RPCError, {"invalid parameters", e}) - def batch_error({_, -32_603}, msg), do: raise(Rudder.RPCError, {"internal JSON-RPC error", msg}) + def batch_error({_, -32_700}, _), 
do: raise(Refiner.RPCError, "client sent malformed JSON") + def batch_error({_, -32_600}, _), do: raise(Refiner.RPCError, "invalid request") + def batch_error({_, -32_601}, _), do: raise(Refiner.RPCError, "RPC endpoint not available") + def batch_error({_, -32_602}, e), do: raise(Refiner.RPCError, {"invalid parameters", e}) + + def batch_error({_, -32_603}, msg), + do: raise(Refiner.RPCError, {"internal JSON-RPC error", msg}) def batch_error({_, code}, error_msg) when code >= -32_099 and code <= -32_000 do {:server_error, code, error_msg} @@ -203,7 +205,7 @@ defmodule Rudder.RPC.JSONRPC.HTTPAdapter do def batch_error(400, ""), do: :gateway_error def batch_error(code, msg) when code >= 400 and code < 500 do - raise Rudder.RPCError, {:client_error, code, msg} + raise Refiner.RPCError, {:client_error, code, msg} end def batch_error(code, msg) when code >= 500 and code < 600 do @@ -219,8 +221,8 @@ defmodule Rudder.RPC.JSONRPC.HTTPAdapter do | {:rpc_error, any} | {:server_error, any, any} | {:unknown_error, any, any} - def req_error(-32_700, _), do: raise(Rudder.RPCError, "client sent malformed JSON") - def req_error(-32_600, _), do: raise(Rudder.RPCError, "invalid request") + def req_error(-32_700, _), do: raise(Refiner.RPCError, "client sent malformed JSON") + def req_error(-32_600, _), do: raise(Refiner.RPCError, "invalid request") def req_error(-32_601, _), do: :notfound def req_error(-32_602, e), do: {:invalid_parameters, e} def req_error(-32_603, msg), do: {:rpc_error, msg} diff --git a/lib/rudder/rpc/jsonrpc/http_client.ex b/lib/refiner/rpc/jsonrpc/http_client.ex similarity index 79% rename from lib/rudder/rpc/jsonrpc/http_client.ex rename to lib/refiner/rpc/jsonrpc/http_client.ex index 63494e54..5e375c9d 100644 --- a/lib/rudder/rpc/jsonrpc/http_client.ex +++ b/lib/refiner/rpc/jsonrpc/http_client.ex @@ -1,9 +1,9 @@ -defmodule Rudder.RPC.JSONRPC.HTTPClient do +defmodule Refiner.RPC.JSONRPC.HTTPClient do defstruct [:request_uri, :ws_request_uri, :request_headers] - 
alias Rudder.RPC.JSONRPC.HTTPAdapter, as: Adapter - alias Rudder.RPC.JSONRPC.WSAdapter, as: LongCallAdapter + alias Refiner.RPC.JSONRPC.HTTPAdapter, as: Adapter + alias Refiner.RPC.JSONRPC.WSAdapter, as: LongCallAdapter - @spec get_or_create(any, binary | URI.t()) :: %Rudder.RPC.JSONRPC.HTTPClient{ + @spec get_or_create(any, binary | URI.t()) :: %Refiner.RPC.JSONRPC.HTTPClient{ request_headers: [{any, any}, ...], request_uri: binary, ws_request_uri: binary @@ -42,7 +42,7 @@ defmodule Rudder.RPC.JSONRPC.HTTPClient do other_headers = [ {"host", host_header}, - {"user-agent", "covalent/rudder"}, + {"user-agent", "covalent/refiner"}, {"content-type", "application/json"} ] @@ -59,13 +59,13 @@ defmodule Rudder.RPC.JSONRPC.HTTPClient do Regex.match?(~r/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/, addr) end - defimpl Rudder.RPC.JSONRPC.Client do - @spec call(%Rudder.RPC.JSONRPC.HTTPClient{}, any, any, any) :: none + defimpl Refiner.RPC.JSONRPC.Client do + @spec call(%Refiner.RPC.JSONRPC.HTTPClient{}, any, any, any) :: none def call(client, rpc_method, rpc_params, opts \\ []) do Adapter.call(client.request_uri, client.request_headers, rpc_method, rpc_params, opts) end - @spec long_call(%Rudder.RPC.JSONRPC.HTTPClient{}, any, any) :: none + @spec long_call(%Refiner.RPC.JSONRPC.HTTPClient{}, any, any) :: none def long_call(client, rpc_method, rpc_params) do # Adapter.call(client.request_uri, client.request_headers, rpc_method, rpc_params) LongCallAdapter.long_call( @@ -76,7 +76,7 @@ defmodule Rudder.RPC.JSONRPC.HTTPClient do ) end - @spec delete(%Rudder.RPC.JSONRPC.HTTPClient{}, binary | URI.t()) :: + @spec delete(%Refiner.RPC.JSONRPC.HTTPClient{}, binary | URI.t()) :: {:error, %{:__exception__ => true, :__struct__ => atom, optional(atom) => any}} | {:ok, Finch.Response.t()} def delete(client, uri_ref) do diff --git a/lib/rudder/rpc/jsonrpc/message_format.ex b/lib/refiner/rpc/jsonrpc/message_format.ex similarity index 98% rename from lib/rudder/rpc/jsonrpc/message_format.ex rename 
to lib/refiner/rpc/jsonrpc/message_format.ex index 1f74f2eb..3ef4f261 100644 --- a/lib/rudder/rpc/jsonrpc/message_format.ex +++ b/lib/refiner/rpc/jsonrpc/message_format.ex @@ -1,4 +1,4 @@ -defmodule Rudder.RPC.JSONRPC.MessageFormat do +defmodule Refiner.RPC.JSONRPC.MessageFormat do @spec build_request(:batch | :single, any, any, number) :: {binary | maybe_improper_list( diff --git a/lib/rudder/rpc/jsonrpc/ws_adapter.ex b/lib/refiner/rpc/jsonrpc/ws_adapter.ex similarity index 96% rename from lib/rudder/rpc/jsonrpc/ws_adapter.ex rename to lib/refiner/rpc/jsonrpc/ws_adapter.ex index da92f7e5..3bf7e830 100644 --- a/lib/rudder/rpc/jsonrpc/ws_adapter.ex +++ b/lib/refiner/rpc/jsonrpc/ws_adapter.ex @@ -1,5 +1,5 @@ -defmodule Rudder.RPC.JSONRPC.WSAdapter do - alias Rudder.RPC.JSONRPC.MessageFormat +defmodule Refiner.RPC.JSONRPC.WSAdapter do + alias Refiner.RPC.JSONRPC.MessageFormat use WebSockex require Logger diff --git a/lib/refiner/rpc/moonbeam_mainnet.ex b/lib/refiner/rpc/moonbeam_mainnet.ex new file mode 100644 index 00000000..1b1b4d0b --- /dev/null +++ b/lib/refiner/rpc/moonbeam_mainnet.ex @@ -0,0 +1,8 @@ +defmodule Refiner.Network.MoonbeamMainnet do + use Refiner.RPC.EthereumClient, + otp_app: :refiner, + client_opts: Application.get_env(:refiner, :proofchain_node), + chain_id: Application.get_env(:refiner, :proofchain_chain_id), + description: "Moonbeam Foundation Mainnet", + currency: [name: "Glimmer", ticker_symbol: "GLMR"] +end diff --git a/lib/rudder/rpc/rpc.ex b/lib/refiner/rpc/rpc.ex similarity index 80% rename from lib/rudder/rpc/rpc.ex rename to lib/refiner/rpc/rpc.ex index 9f2f3a65..edb60cd0 100644 --- a/lib/rudder/rpc/rpc.ex +++ b/lib/refiner/rpc/rpc.ex @@ -1,4 +1,4 @@ -defmodule Rudder.RPC.DecodeError do +defmodule Refiner.RPC.DecodeError do defexception [:message] def exception(error) do @@ -7,7 +7,7 @@ defmodule Rudder.RPC.DecodeError do end end -defmodule Rudder.RPCError do +defmodule Refiner.RPCError do defexception [:message] def exception(error) do 
@@ -16,7 +16,7 @@ defmodule Rudder.RPCError do end end -defmodule Rudder.RPC.Semaphore do +defmodule Refiner.RPC.Semaphore do use GenServer def start_link(_) do diff --git a/lib/rudder/util/processor_journal.ex b/lib/refiner/util/processor_journal.ex similarity index 88% rename from lib/rudder/util/processor_journal.ex rename to lib/refiner/util/processor_journal.ex index bc3f4b23..e7de11c4 100644 --- a/lib/rudder/util/processor_journal.ex +++ b/lib/refiner/util/processor_journal.ex @@ -1,11 +1,11 @@ -defmodule Rudder.Journal do +defmodule Refiner.Journal do @moduledoc """ A journal for recording work done by the processing engine. Before the engine starts processing, the work item, identified by some id is "discovered"-ed. When the processing is done, the work item is either "commit"-ed or "abort"-ed. """ use GenServer - alias Rudder.Events + alias Refiner.Events require Logger require Application @@ -118,7 +118,7 @@ defmodule Rudder.Journal do """ def items_with_status(status) do if status in [:commit, :abort, :discover, :skip] do - {:ok, items} = GenServer.call(Rudder.Journal, {:workitem, :fetch, status}, :infinity) + {:ok, items} = GenServer.call(Refiner.Journal, {:workitem, :fetch, status}, :infinity) items else Logger.info("status not supported provided #{status}") @@ -132,39 +132,39 @@ defmodule Rudder.Journal do Returns 1 + last_process_block_height in case no such block exists """ def last_started_block() do - GenServer.call(Rudder.Journal, {:blockh, :fetch}) + GenServer.call(Refiner.Journal, {:blockh, :fetch}) end # ethereum block ids status logging @spec discover(any) :: any def discover(id) do - GenServer.call(Rudder.Journal, {:workitem, :discover, id}, 500_000) + GenServer.call(Refiner.Journal, {:workitem, :discover, id}, 500_000) end @spec commit(any) :: any def commit(id) do - GenServer.call(Rudder.Journal, {:workitem, :commit, id}, 500_000) + GenServer.call(Refiner.Journal, {:workitem, :commit, id}, 500_000) end @spec abort(any) :: any def abort(id) 
do - GenServer.call(Rudder.Journal, {:workitem, :abort, id}, 500_000) + GenServer.call(Refiner.Journal, {:workitem, :abort, id}, 500_000) end @spec skip(any) :: any def skip(id) do - GenServer.call(Rudder.Journal, {:workitem, :skip, id}, 500_000) + GenServer.call(Refiner.Journal, {:workitem, :skip, id}, 500_000) end # moonbeam block_height status logging APIs @spec block_height_started(any) :: any def block_height_started(height) do - GenServer.call(Rudder.Journal, {:blockh, :start, height}, 500_000) + GenServer.call(Refiner.Journal, {:blockh, :start, height}, 500_000) end @spec block_height_committed(any) :: any def block_height_committed(height) do - GenServer.call(Rudder.Journal, {:blockh, :commit, height}, 500_000) + GenServer.call(Refiner.Journal, {:blockh, :commit, height}, 500_000) end end diff --git a/lib/rudder/util/redis_event_consumer.ex b/lib/refiner/util/redis_event_consumer.ex similarity index 94% rename from lib/rudder/util/redis_event_consumer.ex rename to lib/refiner/util/redis_event_consumer.ex index c09dcb21..6b17f5ec 100644 --- a/lib/rudder/util/redis_event_consumer.ex +++ b/lib/refiner/util/redis_event_consumer.ex @@ -1,4 +1,4 @@ -defmodule Rudder.RedisEventConsumer do +defmodule Refiner.RedisEventConsumer do # This is required so that `A` knows how to start and restart this module # def child_spec(_) do # %{ diff --git a/lib/rudder/util/util.ex b/lib/refiner/util/util.ex similarity index 98% rename from lib/rudder/util/util.ex rename to lib/refiner/util/util.ex index ae79f231..ca5da140 100644 --- a/lib/rudder/util/util.ex +++ b/lib/refiner/util/util.ex @@ -1,4 +1,4 @@ -defmodule Rudder.Util do +defmodule Refiner.Util do @spec typeof(any) :: <<_::24, _::_*8>> def typeof(a) do cond do diff --git a/lib/rudder/application.ex b/lib/rudder/application.ex deleted file mode 100644 index 4d8954f8..00000000 --- a/lib/rudder/application.ex +++ /dev/null @@ -1,39 +0,0 @@ -defmodule Rudder.Application do - # See https://hexdocs.pm/elixir/Application.html - 
# for more information on OTP Applications - @moduledoc false - - use Application - - @impl true - @spec start(any, any) :: {:error, any} | {:ok, pid} - def start(_type, _args) do - children = [ - {Finch, - name: Rudder.Finch, - pools: %{ - :default => [size: 32] - }}, - {Rudder.IPFSInteractor, name: Rudder.IPFSInteractor}, - {Rudder.Journal, [Application.get_env(:rudder, :journal_path), name: Rudder.Journal]}, - Rudder.Avro.Client, - {Rudder.Avro.BlockSpecimen, name: Rudder.Avro.BlockSpecimen}, - {Rudder.BlockResultUploader, name: Rudder.BlockResultUploader}, - {Rudder.BlockProcessor, - [Application.get_env(:rudder, :evm_server_url), name: Rudder.BlockProcessor]}, - {Rudder.Pipeline.Spawner, name: Rudder.Pipeline.Spawner}, - {Rudder.Telemetry, name: Rudder.Telemetry} - ] - - # See https://hexdocs.pm/elixir/Supervisor.html - # for other strategies and supported options - options = [ - strategy: :one_for_one, - name: Rudder.Supervisor, - max_restarts: 3, - max_seconds: 1200 - ] - - Supervisor.start_link(children, options) - end -end diff --git a/lib/rudder/rpc/ethereum_mainnet.ex b/lib/rudder/rpc/ethereum_mainnet.ex deleted file mode 100644 index 55cd7663..00000000 --- a/lib/rudder/rpc/ethereum_mainnet.ex +++ /dev/null @@ -1,8 +0,0 @@ -defmodule Rudder.Network.EthereumMainnet do - use Rudder.RPC.EthereumClient, - otp_app: :rudder, - client_opts: Application.get_env(:rudder, :proofchain_node), - chain_id: Application.get_env(:rudder, :proofchain_chain_id), - description: "Ethereum Foundation Mainnet", - currency: [name: "Ether", ticker_symbol: "ETH"] -end diff --git a/lib/rudder/rpc/moonbeam_mainnet.ex b/lib/rudder/rpc/moonbeam_mainnet.ex deleted file mode 100644 index 35f91d21..00000000 --- a/lib/rudder/rpc/moonbeam_mainnet.ex +++ /dev/null @@ -1,8 +0,0 @@ -defmodule Rudder.Network.MoonbeamMainnet do - use Rudder.RPC.EthereumClient, - otp_app: :rudder, - client_opts: Application.get_env(:rudder, :proofchain_node), - chain_id: Application.get_env(:rudder, 
:proofchain_chain_id), - description: "Moonbeam Foundation Mainnet", - currency: [name: "Glimmer", ticker_symbol: "GLMR"] -end diff --git a/mix.exs b/mix.exs index c1040306..97e1a7ee 100644 --- a/mix.exs +++ b/mix.exs @@ -1,9 +1,9 @@ -defmodule Rudder.MixProject do +defmodule Refiner.MixProject do use Mix.Project def project do [ - app: :rudder, + app: :refiner, version: "0.4.0", elixir: "~> 1.14.3", start_permanent: Mix.env() == :prod, @@ -24,7 +24,7 @@ defmodule Rudder.MixProject do def application do [ extra_applications: [:logger_file_backend, :runtime_tools, :poison, :avrora], - mod: {Rudder.Application, []} + mod: {Refiner.Application, []} ] end diff --git a/rel/config.exs b/rel/config.exs index c204dd2c..52dc23a4 100644 --- a/rel/config.exs +++ b/rel/config.exs @@ -48,8 +48,8 @@ end # when running `mix distillery.release`, the first release in the file # will be used by default -release :rudder do - set version: current_version(:rudder) +release :refiner do + set version: current_version(:refiner) set applications: [ :runtime_tools ] diff --git a/test/block_result_uploader_test.exs b/test/block_result_uploader_test.exs index 43ec691f..6cf6d57d 100644 --- a/test/block_result_uploader_test.exs +++ b/test/block_result_uploader_test.exs @@ -1,9 +1,9 @@ -defmodule Rudder.BlockResultUploaderTest do +defmodule Refiner.BlockResultUploaderTest do use ExUnit.Case, async: true setup_all do - block_result_uploader = start_supervised!(Rudder.BlockResultUploader) - ipfs_interactor = start_supervised!(Rudder.IPFSInteractor) + block_result_uploader = start_supervised!(Refiner.BlockResultUploader) + ipfs_interactor = start_supervised!(Refiner.IPFSInteractor) %{block_result_uploader: block_result_uploader, ipfs_interactor: ipfs_interactor} end @@ -22,7 +22,7 @@ defmodule Rudder.BlockResultUploaderTest do <<44, 242, 77, 186, 95, 176, 163, 14, 38, 232, 59, 42, 197, 185, 226, 158, 27, 22, 30, 92, 31, 167, 66, 94, 115, 4, 51, 98, 147, 139, 152, 36>> - block_result_metadata = 
%Rudder.BlockResultMetadata{ + block_result_metadata = %Refiner.BlockResultMetadata{ chain_id: 1, block_height: 1, block_specimen_hash: "525D191D6492F1E0928d4e816c29778c", @@ -30,7 +30,7 @@ defmodule Rudder.BlockResultUploaderTest do } {error, cid, block_result_hash} = - Rudder.BlockResultUploader.upload_block_result(block_result_metadata) + Refiner.BlockResultUploader.upload_block_result(block_result_metadata) assert error == :ok assert cid == expected_cid @@ -42,7 +42,7 @@ defmodule Rudder.BlockResultUploaderTest do block_result_uploader: _block_result_uploader } do file_path = Path.expand(Path.absname(Path.relative_to_cwd("test-data/temp.txt"))) - {err, cid} = Rudder.IPFSInteractor.pin(file_path) + {err, cid} = Refiner.IPFSInteractor.pin(file_path) expected_cid = "bafkreiehfggmf4y7xjzrqhvcvhto6eg44ipnsxuyxwwjytqvatvbn5eg4q" assert err == :ok @@ -53,12 +53,12 @@ defmodule Rudder.BlockResultUploaderTest do ipfs_interactor: _ipfs_interactor, block_result_uploader: _block_result_uploader } do - ipfs_url = Application.get_env(:rudder, :ipfs_pinner_url) + ipfs_url = Application.get_env(:refiner, :ipfs_pinner_url) url = "#{ipfs_url}/upload" {err, _} = Finch.build(:get, "#{url}/upload") - |> Finch.request(Rudder.Finch) + |> Finch.request(Refiner.Finch) assert err != :error end diff --git a/test/block_specimen_avro_test.exs b/test/block_specimen_avro_test.exs index 78eb3388..2c78d929 100644 --- a/test/block_specimen_avro_test.exs +++ b/test/block_specimen_avro_test.exs @@ -1,25 +1,25 @@ -defmodule Rudder.BlockSpecimenDecoderEncoderTest do +defmodule Refiner.BlockSpecimenDecoderEncoderTest do use ExUnit.Case, async: true setup do - block_specimen_avro = start_supervised(Rudder.Avro.BlockSpecimen) + block_specimen_avro = start_supervised(Refiner.Avro.BlockSpecimen) %{block_specimen_avro: block_specimen_avro} end - test "Rudder.Avro.BlockSpecimen.list/0 returns an empty list", %{ + test "Refiner.Avro.BlockSpecimen.list/0 returns an empty list", %{ block_specimen_avro: 
_block_specimen_avro } do - assert Rudder.Avro.BlockSpecimen.list() == :ok + assert Refiner.Avro.BlockSpecimen.list() == :ok end - test "Rudder.Avro.BlockSpecimen.get_schema/0 returns correct schema", %{ + test "Refiner.Avro.BlockSpecimen.get_schema/0 returns correct schema", %{ block_specimen_avro: _block_specimen_avro } do - assert Rudder.Avro.BlockSpecimen.get_schema() == + assert Refiner.Avro.BlockSpecimen.get_schema() == "com.covalenthq.brp.avro.ReplicationSegment" end - test "Rudder.Avro.BlockSpecimen.decode_file/1 decodes binary specimen file", %{ + test "Refiner.Avro.BlockSpecimen.decode_file/1 decodes binary specimen file", %{ block_specimen_avro: _block_specimen_avro } do specimen_path = @@ -28,7 +28,7 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do expected_start_block = 17_090_940 expected_hash = "0x54245042c6cc9a9d80888db816525d097984c3c2ba4f11d64e9cdf6aaefe5e8d" - {:ok, decoded_specimen} = Rudder.Avro.BlockSpecimen.decode_file(specimen_path) + {:ok, decoded_specimen} = Refiner.Avro.BlockSpecimen.decode_file(specimen_path) {:ok, decoded_specimen_start_block} = Map.fetch(decoded_specimen, "startBlock") {:ok, specimen_event} = Map.fetch(decoded_specimen, "replicaEvent") @@ -40,7 +40,7 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do assert decoded_specimen_hash == expected_hash end - test "Rudder.Avro.BlockSpecimen.decode_dir/1 streams directory binary files", %{ + test "Refiner.Avro.BlockSpecimen.decode_dir/1 streams directory binary files", %{ block_specimen_avro: _block_specimen_avro } do dir_path = "./test-data/codec-0.35/encoded/*" @@ -51,7 +51,7 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do expected_start_hash = "0x54245042c6cc9a9d80888db816525d097984c3c2ba4f11d64e9cdf6aaefe5e8d" expected_last_hash = "0x6a1a24cfbee3d64c7f6c7fd478ec0e1112176d1340f18d0ba933352c6ce2026a" - decode_specimen_stream = Rudder.Avro.BlockSpecimen.decode_dir(dir_path) + decode_specimen_stream = Refiner.Avro.BlockSpecimen.decode_dir(dir_path) 
start_block_stream = List.first(decode_specimen_stream) last_block_stream = List.last(decode_specimen_stream) @@ -99,14 +99,14 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do assert decoded_last_hash == expected_last_hash end - test "Rudder.Avro.BlockSpecimen.decode_dir/1 decodes all binary files", %{ + test "Refiner.Avro.BlockSpecimen.decode_dir/1 decodes all binary files", %{ block_specimen_avro: _block_specimen_avro } do dir_path = "./test-data/codec-0.35/encoded/*" expected_specimens = 3 - decode_specimen_stream = Rudder.Avro.BlockSpecimen.decode_dir(dir_path) + decode_specimen_stream = Refiner.Avro.BlockSpecimen.decode_dir(dir_path) # stream resolved earlier to full specimen list resolved_stream = decode_specimen_stream |> Enum.map(fn x -> Enum.to_list(x) end) @@ -115,7 +115,7 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do assert resolved_specimens == expected_specimens end - test "Rudder.Avro.BlockSpecimen.encode_file/1 encodes segment json file", %{ + test "Refiner.Avro.BlockSpecimen.encode_file/1 encodes segment json file", %{ block_specimen_avro: _block_specimen_avro } do segment_path = "./test-data/codec-0.35/segment/17090940.segment.json" @@ -123,7 +123,7 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do expected_start_block = 17_090_940 expected_hash = "0x54245042c6cc9a9d80888db816525d097984c3c2ba4f11d64e9cdf6aaefe5e8d" - {:ok, encoded_segment_avro} = Rudder.Avro.BlockSpecimen.encode_file(segment_path) + {:ok, encoded_segment_avro} = Refiner.Avro.BlockSpecimen.encode_file(segment_path) {:ok, decoded_segment_avro} = Avrora.decode_plain(encoded_segment_avro, schema_name: "block-ethereum") @@ -138,7 +138,7 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do assert decoded_segment_hash == expected_hash end - test "Rudder.Avro.BlockSpecimen.encode_dir/1 streams encoded .json files", %{ + test "Refiner.Avro.BlockSpecimen.encode_dir/1 streams encoded .json files", %{ block_specimen_avro: _block_specimen_avro } do dir_path = 
"./test-data/codec-0.35/segment/*" @@ -146,7 +146,7 @@ defmodule Rudder.BlockSpecimenDecoderEncoderTest do expected_start_block = 17_090_940 expected_last_block = 17_090_960 - encoded_segment_stream = Rudder.Avro.BlockSpecimen.encode_dir(dir_path) + encoded_segment_stream = Refiner.Avro.BlockSpecimen.encode_dir(dir_path) start_segment_stream = List.first(encoded_segment_stream) last_segment_stream = List.last(encoded_segment_stream) diff --git a/test/evm/evm_test.exs b/test/evm/evm_test.exs index 96af8b42..9536fa22 100644 --- a/test/evm/evm_test.exs +++ b/test/evm/evm_test.exs @@ -4,7 +4,7 @@ defmodule SupervisionTreeTest do use ExUnit.Case, async: false @moduletag :spawn require Logger - alias Rudder.BlockProcessor.Struct + alias Refiner.BlockProcessor.Struct alias TestHelper.EVMInputGenerator alias TestHelper.SupervisorUtils @@ -14,7 +14,7 @@ defmodule SupervisionTreeTest do contents = get_sample_specimen!() - {:ok, filepath} = Rudder.BlockProcessor.sync_queue(contents) + {:ok, filepath} = Refiner.BlockProcessor.sync_queue(contents) File.rm(filepath) end @@ -24,7 +24,7 @@ defmodule SupervisionTreeTest do specimen = get_sample_specimen!() {:error, errormsg} = - Rudder.BlockProcessor.sync_queue(%Rudder.BlockSpecimen{ + Refiner.BlockProcessor.sync_queue(%Refiner.BlockSpecimen{ chain_id: specimen.chain_id, block_height: specimen.block_height, contents: "[" <> specimen.contents <> "]" @@ -35,7 +35,7 @@ defmodule SupervisionTreeTest do # block_id = "1234_f_" # specimen = get_sample_specimen!() - # {:ok, bpid} = Rudder.BlockProcessor.start_link(["http://127.0.0.1:3100"]) + # {:ok, bpid} = Refiner.BlockProcessor.start_link(["http://127.0.0.1:3100"]) # {:error, errormsg} = GenServer.call(bpid, {:process, "dfjkejkjfd"}, 60_000) # end @@ -45,7 +45,7 @@ defmodule SupervisionTreeTest do specimen = get_sample_specimen!() {:ok, bpid} = - Rudder.BlockProcessor.start_link([Application.get_env(:rudder, :evm_server_url)]) + 
Refiner.BlockProcessor.start_link([Application.get_env(:refiner, :evm_server_url)]) {:error, error_msg} = GenServer.call(bpid, {:process, "invalid content"}, 60_000) @@ -56,7 +56,7 @@ defmodule SupervisionTreeTest do def get_sample_specimen!() do path = Path.join([__DIR__, "data", "15548376-segment.json"]) - %Rudder.BlockSpecimen{ + %Refiner.BlockSpecimen{ chain_id: 1, block_height: 15_548_376, contents: File.read!(path) diff --git a/test/pipeline_test.exs b/test/pipeline_test.exs index e86671ab..7a0bb111 100644 --- a/test/pipeline_test.exs +++ b/test/pipeline_test.exs @@ -1,4 +1,4 @@ -defmodule Rudder.PipelineTest do +defmodule Refiner.PipelineTest do use ExUnit.Case, async: true test "returns the cid and hash of the processed block hash", %{} do @@ -12,7 +12,7 @@ defmodule Rudder.PipelineTest do <<105, 50, 175, 90, 71, 36, 11, 89, 40, 141, 86, 97, 77, 37, 70, 218, 93, 72, 45, 15, 41, 190, 77, 26, 60, 229, 65, 201, 154, 114, 47, 253>> - {status, cid, block_result_hash} = Rudder.Pipeline.process_specimen(test_bsp_key, test_urls) + {status, cid, block_result_hash} = Refiner.Pipeline.process_specimen(test_bsp_key, test_urls) assert status == :ok assert cid == expected_block_result_cid @@ -24,7 +24,7 @@ defmodule Rudder.PipelineTest do test_urls = ["ipfs://bafybeihfjhxfr3r2ti7phs7gzwbx5oimzf6ainhccrk2hlzoozcmcsu36q"] test_block_specimen_hash = "6a1a24cfbee3d64c7f6c7fd478ec0e1112176d1340f18d0ba933352c6ce2026a" test_bsp_key = "1_1_1_" <> test_block_specimen_hash - {:ok, task} = Rudder.Pipeline.Spawner.push_hash(test_bsp_key, test_urls, true) + {:ok, task} = Refiner.Pipeline.Spawner.push_hash(test_bsp_key, test_urls, true) {status, cid, block_result_hash} = receive do diff --git a/test/util_test.exs b/test/util_test.exs index c7768ee8..82b352dd 100644 --- a/test/util_test.exs +++ b/test/util_test.exs @@ -1,37 +1,37 @@ -defmodule Rudder.UtilTest do +defmodule Refiner.UtilTest do use ExUnit.Case test "returns 'float' for a float" do - assert Rudder.Util.typeof(1.0) == 
"float" + assert Refiner.Util.typeof(1.0) == "float" end test "returns 'number' for an integer" do - assert Rudder.Util.typeof(1) == "number" + assert Refiner.Util.typeof(1) == "number" end test "returns 'atom' for an atom" do - assert Rudder.Util.typeof(:my_atom) == "atom" + assert Refiner.Util.typeof(:my_atom) == "atom" end test "returns 'boolean' for a boolean" do - assert Rudder.Util.typeof(true) == "boolean" - assert Rudder.Util.typeof(false) == "boolean" + assert Refiner.Util.typeof(true) == "boolean" + assert Refiner.Util.typeof(false) == "boolean" end test "returns 'binary' for a binary" do - assert Rudder.Util.typeof("hello world") == "binary" + assert Refiner.Util.typeof("hello world") == "binary" end test "returns 'function' for a function" do - assert Rudder.Util.typeof(fn -> :ok end) == "function" + assert Refiner.Util.typeof(fn -> :ok end) == "function" end test "returns 'list' for a list" do - assert Rudder.Util.typeof([1, 2, 3]) == "list" + assert Refiner.Util.typeof([1, 2, 3]) == "list" end test "returns 'tuple' for a tuple" do - assert Rudder.Util.typeof({1, 2, 3}) == "tuple" + assert Refiner.Util.typeof({1, 2, 3}) == "tuple" end test "returns 'map' for a map" do @@ -43,51 +43,51 @@ defmodule Rudder.UtilTest do specimen_path = "./test-data/codec-0.35/encoded/1-17090940-replica-0x7b8e1d463a0fbc6fce05b31c5c30e605aa13efaca14a1f3ba991d33ea979b12b" - {:ok, decoded_specimen} = Rudder.Avro.BlockSpecimen.decode_file(specimen_path) + {:ok, decoded_specimen} = Refiner.Avro.BlockSpecimen.decode_file(specimen_path) - assert Rudder.Util.typeof(decoded_specimen) == "map" - assert Rudder.Util.typeof(result_decoded_map) == "map" + assert Refiner.Util.typeof(decoded_specimen) == "map" + assert Refiner.Util.typeof(result_decoded_map) == "map" end test "returns 'pid' for a pid" do - assert Rudder.Util.typeof(self()) == "pid" + assert Refiner.Util.typeof(self()) == "pid" end test "returns 'bitstring' for a bitstring" do - assert Rudder.Util.typeof(<<3::4>>) == 
"bitstring" + assert Refiner.Util.typeof(<<3::4>>) == "bitstring" end test "returns 'nil' for an nil" do - assert Rudder.Util.typeof(nil) == "nil" + assert Refiner.Util.typeof(nil) == "nil" end test "returns 'struct' for a struct" do - block_result_metadata = %Rudder.BlockResultMetadata{ + block_result_metadata = %Refiner.BlockResultMetadata{ chain_id: 1, block_height: 17_090_940, block_specimen_hash: 0x54245042C6CC9A9D80888DB816525D097984C3C2BA4F11D64E9CDF6AAEFE5E8D, file_path: "./test-data/codec-0.35/block-result/17090940.result.json" } - assert Rudder.Util.typeof(block_result_metadata) == "struct" + assert Refiner.Util.typeof(block_result_metadata) == "struct" end test "returns 'exception' for an exception" do - assert Rudder.Util.typeof(%RuntimeError{}) == "exception" + assert Refiner.Util.typeof(%RuntimeError{}) == "exception" end test "returns 'reference' for a reference" do ref_1 = Kernel.make_ref() - assert Rudder.Util.typeof(ref_1) == "reference" + assert Refiner.Util.typeof(ref_1) == "reference" end test "returns 'port' for a port" do port = Port.open({:spawn, "cat"}, [:binary]) - assert Rudder.Util.typeof(port) == "port" + assert Refiner.Util.typeof(port) == "port" end test "get_file_paths returns a list of files in the given directory" do - assert Rudder.Util.get_file_paths("./test-data/codec-0.35/block-specimen/*") == [ + assert Refiner.Util.get_file_paths("./test-data/codec-0.35/block-specimen/*") == [ "test-data/codec-0.35/block-specimen/17090940.specimen.json", "test-data/codec-0.35/block-specimen/17090950.specimen.json", "test-data/codec-0.35/block-specimen/17090960.specimen.json" @@ -95,17 +95,17 @@ defmodule Rudder.UtilTest do end test "get_file_paths returns an empty list when given a directory with no files or invalid path" do - assert Rudder.Util.get_file_paths("./evm") == [] + assert Refiner.Util.get_file_paths("./evm") == [] end test "converts a hexadecimal string to a 32-byte binary string" do - assert 
Rudder.Util.convert_to_bytes32("0123456789abcdef") == + assert Refiner.Util.convert_to_bytes32("0123456789abcdef") == <<1, 35, 69, 103, 137, 171, 205, 239>> end test "raises an error if the input string is not a hexadecimal string" do assert_raise ArgumentError, "non-alphabet character found: \"n\" (byte 110)", fn -> - Rudder.Util.convert_to_bytes32("not-a-hex-string") + Refiner.Util.convert_to_bytes32("not-a-hex-string") end end end