diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 65ae1e59ace..a9af11921fc 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -35,7 +35,6 @@ Explain your changes:
Explain how you tested your changes:
*
-
Checklist:
- [ ] Dependency versions are unchanged
diff --git a/.prettierignore b/.prettierignore
deleted file mode 100644
index 66279798c55..00000000000
--- a/.prettierignore
+++ /dev/null
@@ -1,2 +0,0 @@
-src/lib/crypto/**/*.js
-src/lib/snarkyjs/src/bindings/kimchi/js/**/*.js
diff --git a/Makefile b/Makefile
index df2d8cc73b9..f9d59753d00 100644
--- a/Makefile
+++ b/Makefile
@@ -73,7 +73,7 @@ endif
genesis_ledger: ocaml_checks
$(info Building runtime_genesis_ledger)
- ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --genesis-dir $(GENESIS_DIR)
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/app/runtime_genesis_ledger/runtime_genesis_ledger.exe -- --genesis-dir $(GENESIS_DIR)
$(info Genesis ledger and genesis proof generated)
# Checks that every OCaml packages in the project build without issues
@@ -82,32 +82,32 @@ check: ocaml_checks libp2p_helper
build: ocaml_checks reformat-diff libp2p_helper
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe --profile=$(DUNE_PROFILE)
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe --profile=$(DUNE_PROFILE)
$(info Build complete)
build_all_sigs: ocaml_checks reformat-diff libp2p_helper
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe src/app/cli/src/mina_testnet_signatures.exe src/app/cli/src/mina_mainnet_signatures.exe --profile=$(DUNE_PROFILE)
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune build src/app/logproc/logproc.exe src/app/cli/src/mina.exe src/app/cli/src/mina_testnet_signatures.exe src/app/cli/src/mina_mainnet_signatures.exe --profile=$(DUNE_PROFILE)
$(info Build complete)
build_archive: ocaml_checks reformat-diff
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe --profile=$(DUNE_PROFILE)
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe --profile=$(DUNE_PROFILE)
$(info Build complete)
build_archive_all_sigs: ocaml_checks reformat-diff
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe src/app/archive/archive_mainnet_signatures.exe --profile=$(DUNE_PROFILE)
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe src/app/archive/archive_mainnet_signatures.exe --profile=$(DUNE_PROFILE)
$(info Build complete)
build_rosetta: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/rosetta/rosetta.exe src/app/rosetta/ocaml-signer/signer.exe --profile=$(DUNE_PROFILE)
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/rosetta/rosetta.exe src/app/rosetta/ocaml-signer/signer.exe --profile=$(DUNE_PROFILE)
$(info Build complete)
build_rosetta_all_sigs: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe src/app/archive/archive_mainnet_signatures.exe src/app/rosetta/rosetta.exe src/app/rosetta/rosetta_testnet_signatures.exe src/app/rosetta/rosetta_mainnet_signatures.exe src/app/rosetta/ocaml-signer/signer.exe src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe src/app/rosetta/ocaml-signer/signer_mainnet_signatures.exe --profile=$(DUNE_PROFILE)
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive/archive.exe src/app/archive/archive_testnet_signatures.exe src/app/archive/archive_mainnet_signatures.exe src/app/rosetta/rosetta.exe src/app/rosetta/rosetta_testnet_signatures.exe src/app/rosetta/rosetta_mainnet_signatures.exe src/app/rosetta/ocaml-signer/signer.exe src/app/rosetta/ocaml-signer/signer_testnet_signatures.exe src/app/rosetta/ocaml-signer/signer_mainnet_signatures.exe --profile=$(DUNE_PROFILE)
$(info Build complete)
build_intgtest: ocaml_checks
@@ -115,24 +115,9 @@ build_intgtest: ocaml_checks
dune build --profile=$(DUNE_PROFILE) src/app/test_executive/test_executive.exe src/app/logproc/logproc.exe
$(info Build complete)
-client_sdk: ocaml_checks
- $(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/client_sdk/client_sdk.bc.js
- $(info Build complete)
-
-client_sdk_test_sigs: ocaml_checks
- $(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/client_sdk/tests/test_signatures.exe --profile=mainnet
- $(info Build complete)
-
-client_sdk_test_sigs_nonconsensus: ocaml_checks
- $(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/client_sdk/tests/test_signatures_nonconsensus.exe --profile=nonconsensus_mainnet
- $(info Build complete)
-
snarkyjs: ocaml_checks
$(info Starting Build)
- (ulimit -s 65532) && (ulimit -n 10240 || true) \
+ ((ulimit -s 65532) || true) && (ulimit -n 10240 || true) \
&& bash ./src/lib/snarkyjs/src/bindings/scripts/build-snarkyjs-node.sh
$(info Build complete)
@@ -144,17 +129,17 @@ snarkyjs_no_types: ocaml_checks
rosetta_lib_encodings: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/lib/rosetta_lib/test/test_encodings.exe --profile=mainnet
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/lib/rosetta_lib/test/test_encodings.exe --profile=mainnet
$(info Build complete)
rosetta_lib_encodings_nonconsensus: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/nonconsensus/rosetta_lib/test/test_encodings.exe --profile=nonconsensus_mainnet
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/nonconsensus/rosetta_lib/test/test_encodings.exe --profile=nonconsensus_mainnet
$(info Build complete)
dhall_types: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/dhall_types/dump_dhall_types.exe --profile=dev
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/dhall_types/dump_dhall_types.exe --profile=dev
$(info Build complete)
replayer: ocaml_checks
@@ -164,37 +149,37 @@ replayer: ocaml_checks
delegation_compliance: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/delegation_compliance/delegation_compliance.exe --profile=testnet_postake_medium_curves
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/delegation_compliance/delegation_compliance.exe --profile=testnet_postake_medium_curves
$(info Build complete)
missing_blocks_auditor: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/missing_blocks_auditor/missing_blocks_auditor.exe --profile=testnet_postake_medium_curves
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/missing_blocks_auditor/missing_blocks_auditor.exe --profile=testnet_postake_medium_curves
$(info Build complete)
extract_blocks: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/extract_blocks/extract_blocks.exe --profile=testnet_postake_medium_curves
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/extract_blocks/extract_blocks.exe --profile=testnet_postake_medium_curves
$(info Build complete)
archive_blocks: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/archive_blocks/archive_blocks.exe --profile=testnet_postake_medium_curves
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/archive_blocks/archive_blocks.exe --profile=testnet_postake_medium_curves
$(info Build complete)
patch_archive_test: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/patch_archive_test/patch_archive_test.exe --profile=testnet_postake_medium_curves
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/patch_archive_test/patch_archive_test.exe --profile=testnet_postake_medium_curves
$(info Build complete)
genesis_ledger_from_tsv: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.exe --profile=testnet_postake_medium_curves
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/genesis_ledger_from_tsv/genesis_ledger_from_tsv.exe --profile=testnet_postake_medium_curves
$(info Build complete)
swap_bad_balances: ocaml_checks
$(info Starting Build)
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build src/app/swap_bad_balances/swap_bad_balances.exe --profile=testnet_postake_medium_curves
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build src/app/swap_bad_balances/swap_bad_balances.exe --profile=testnet_postake_medium_curves
$(info Build complete)
heap_usage: ocaml_checks
@@ -218,7 +203,7 @@ macos-portable:
@echo Find coda-daemon-macos.zip inside _build/
update-graphql:
- ulimit -s 65532 && (ulimit -n 10240 || true) && dune build --profile=$(DUNE_PROFILE) graphql_schema.json
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && dune build --profile=$(DUNE_PROFILE) graphql_schema.json
########################################
## Lint
@@ -241,7 +226,7 @@ check-proof-systems-submodule:
#######################################
## Environment setup
-macos-setup-download:
+macos-setup:
./scripts/macos-setup-brew.sh
########################################
@@ -264,12 +249,12 @@ deb_optimized:
build_pv_keys: ocaml_checks
$(info Building keys)
- ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only
$(info Keys built)
build_or_download_pv_keys: ocaml_checks
$(info Building keys)
- ulimit -s 65532 && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only
+ (ulimit -s 65532 || true) && (ulimit -n 10240 || true) && env MINA_COMMIT_SHA1=$(GITLONGHASH) dune exec --profile=$(DUNE_PROFILE) src/lib/snark_keys/gen_keys/gen_keys.exe -- --generate-keys-only
$(info Keys built)
publish_debs:
@@ -352,4 +337,4 @@ ml-docs: ocaml_checks
# https://www.gnu.org/software/make/manual/html_node/Phony-Targets.html
# HACK: cat Makefile | egrep '^\w.*' | sed 's/:/ /' | awk '{print $1}' | grep -v myprocs | sort | xargs
-.PHONY: all build check-format clean deb dev mina-docker reformat doc_diagrams ml-docs macos-setup macos-setup-download setup-opam libp2p_helper dhall_types replayer missing_blocks_auditor extract_blocks archive_blocks genesis_ledger_from_tsv ocaml_version ocaml_word_size ocaml_checks
+.PHONY: all build check-format clean deb dev mina-docker reformat doc_diagrams ml-docs macos-setup setup-opam libp2p_helper dhall_types replayer missing_blocks_auditor extract_blocks archive_blocks genesis_ledger_from_tsv ocaml_version ocaml_word_size ocaml_checks
diff --git a/README-ci-failures.md b/README-ci-failures.md
index 7cd528d5a87..358cbd6992a 100644
--- a/README-ci-failures.md
+++ b/README-ci-failures.md
@@ -5,6 +5,9 @@ label and comments by MinaProtocol organization members containing exactly
`!ci-build-me`. If your CI job has not started after adding the `ci-build-me`
label, please comment on the pull request with `!ci-build-me` to attempt to
re-trigger the script.
+If no CI jobs have started, check that your membership in the O(1) Labs/Mina organization
+is public. If your membership is private, the jobs will not start and
+`!ci-build-me` will have no effect.
If CI jobs are not running after applying both the `ci-build-me` label and
comment, you may be able to find and fix the error in the script. The script
diff --git a/README-dev.md b/README-dev.md
index 5151e17c579..149459d4e80 100644
--- a/README-dev.md
+++ b/README-dev.md
@@ -26,13 +26,13 @@ Quick start instructions:
git clone git@github.com:MinaProtocol/mina.git
```
-If you have already done that, remember that the MinaProtocol and o1-labs repositories do not accept the password authentication used by the https URLs. You must set GitHub repos to pull and push over ssh:
+If you have already done that, remember that the MinaProtocol and o1-labs repositories do not accept the password authentication used by the https URLs. You must set GitHub repos to pull and push over ssh:
```sh
git config --global url.ssh://git@github.com/.insteadOf https://github.com/
```
-3. Pull in the submodules:
+3. Pull in the submodules:
```sh
git submodule update --init --recursive
@@ -52,13 +52,73 @@ You can build Mina using Docker. Using Docker works in any dev environment. See
### Developer Setup (MacOS)
-- Invoke `make macos-setup`
- - You will be prompted to add a number of `export`s in your shell config file. Do so.
- - If this is your first time using OCaml, be sure to run `eval $(opam config env)`
-- Install [rustup](https://rustup.rs/).
-- Invoke `make build`
-- Jump to [customizing your editor for autocomplete](#customizing-your-dev-environment-for-autocompletemerlin)
-- Note: If you are seeing conf-openssl install errors, try running `export PKG_CONFIG_PATH=$(brew --prefix openssl@1.1)/lib/pkgconfig` and try `opam switch import opam.export` again.
+1. Upgrade to the latest version of macOS.
+2. Install Xcode Command Line Tools:
+
+ ```sh
+ xcode-select --install
+ ```
+
+3. Invoke `make macos-setup`.
+ - When prompted, confirm that you want to add a number of exports in your shell config file.
+ - Make sure to `source` your shell config file or create a new terminal.
+ - If this is your first time using OCaml, be sure to run:
+
+ ```sh
+ eval $(opam config env)
+ ```
+
+4. Install [rustup](https://rustup.rs/).
+5. Create your switch with deps `opam switch import --switch mina opam.export`
+
+   M1 and M2 (Apple silicon) Macs experience issues because Homebrew does not link include files automatically.
+
+ If you get an error about failing to find `gmp.h`, update your `~/.zshrc` or `~/.bashrc` with:
+
+ `export CFLAGS="-I/opt/homebrew/Cellar/gmp/6.2.1_1/include/"`
+
+ or run:
+
+ `env CFLAGS="/opt/homebrew/Cellar/gmp/6.2.1_1/include/" opam install conf-gmp.2`
+
+ If you get an error about failing to find `lmdb.h`, update your `~/.zshrc` or `~/.bashrc` with:
+
+ ```text
+ export CPATH="$HOMEBREW_PREFIX/include:$CPATH"
+ export LIBRARY_PATH="$HOMEBREW_PREFIX/lib:$LIBRARY_PATH"
+ export PATH="$(brew --prefix lmdb)/bin:$PATH"
+ export PKG_CONFIG_PATH=$(brew --prefix lmdb)/lib/pkgconfig:$PKG_CONFIG_PATH
+ ```
+
+- Note: If you get conf-openssl install errors, try running `export PKG_CONFIG_PATH=$(brew --prefix openssl@1.1)/lib/pkgconfig` and try `opam switch import opam.export` again.
+- If prompted, run `opam user-setup install` to enable opam-user-setup support for Merlin.
+6. Pin dependencies that override opam versions:
+
+ ```sh
+ scripts/pin-external-packages.sh
+ ```
+
+7. Install the correct version of golang:
+ - `goenv init`
+ - To make sure the right `goenv` is used, update your shell env script with:
+
+ ```text
+ eval "$(goenv init -)"
+ export PATH="/Users/$USER/.goenv/shims:$PATH"
+ ```
+ - `goenv install 1.18.10`
+ - `goenv global 1.18.10`
+ - Check that the `go version` returns the right version, otherwise you see the message `compile: version "go1.18.10" does not match go tool version "go1.20.2"`. If so, run `brew remove go` or get the matching version.
+8. Invoke `make build`.
+
+ If you get errors about `libp2p` and `capnp`, try with `brew install capnp`.
+9. For better IDE support, install the OCaml-LSP language server for OCaml:
+
+ ```sh
+ opam install ocaml-lsp-server
+ ```
+
+10. Set up your IDE. See [Customizing your dev environment for autocomplete/merlin](https://github.com/MinaProtocol/mina/blob/develop/README-dev.md#customizing-your-dev-environment-for-autocompletemerlin).
### Developer Setup (Linux)
@@ -72,7 +132,7 @@ To get all of the required opam dependencies, run:
opam switch import opam.export
```
-_*NOTE:*_ The `switch` command provides a `dune_wrapper` binary that you can use instead of dune and fails early if your switch becomes out of sync with the `opam.export` file.
+*_NOTE:_* The `switch` command provides a `dune_wrapper` binary that you can use instead of dune and fails early if your switch becomes out of sync with the `opam.export` file.
Some dependencies that are not taken from `opam` or integrated with `dune` must be added manually. Run the `scripts/pin-external-packages.sh` script.
@@ -83,7 +143,6 @@ A number of C libraries are expected to be available in the system and are also
- [Ubuntu Setup Instructions](https://docs.docker.com/install/linux/docker-ce/ubuntu/)
#### Customizing your dev environment for autocomplete/merlin
-
[dev-env]: #dev-env
If you use vim, add this snippet in your `.vimrc` file to use Merlin. (Note: Be sure to change the HOME directory to match yours.)
@@ -136,6 +195,7 @@ The source code for the Mina node is located in `src/app/cli/`. After it is comp
$ dune exec src/app/cli/src/mina.exe -- daemon --libp2p-keypair /path/to/key
```
+
The results of a successful build appear in `_build/default/src/app/cli/src/mina.exe`.
The default configuration of the node depends on the build profile that is used during compilation. To connect to some networks, you need to compile the daemon with a specific profile.
@@ -144,11 +204,11 @@ Some setup is required:
1. Generate a key pair so that the daemon can create an account to issue blocks from using the same `mina.exe` binary:
-```shell
-$ dune exec src/app/cli/src/mina.exe -- libp2p generate-keypair --privkey-path /path/to/key
-```
+ ```shell
+ $ dune exec src/app/cli/src/mina.exe -- libp2p generate-keypair --privkey-path /path/to/key
+ ```
-When prompted, enter a passphrase. During development, you can leave it blank for convenience, but using a passphrase is strongly encouraged when running a real node!
+When prompted, enter a passphrase. During development, you can leave it blank for convenience, but using a passphrase is strongly encouraged when running a real node!
The running daemon expects to find this passphrase in
an environment variable `MINA_LIBP2P_PASS`, which must be defined even if the passphrase is empty.
@@ -184,14 +244,14 @@ The command line help is the place to learn about other options to the Mina CLI
## Using the Makefile
-The Makefile contains placeholder targets for all the common tasks that need to be done and automatically knows how to use Docker.
+The Makefile contains placeholder targets for all the common tasks that need to be done and automatically knows how to use Docker.
The most important `make` targets are:
- `build`: build everything
- `libp2p_helper`: build the libp2p helper
- `reformat`: automatically use `ocamlformat` to reformat the source files (use
- it if the hook fails during a commit)
+ it if the hook fails during a commit)
We use the [Dune](https://github.com/ocaml/dune/) build system for OCaml code.
@@ -206,7 +266,7 @@ $ opam switch create mina_fresh 4.14.0
$ opam switch import opam.export
```
-After that, install your dependency. You might have to specify versions of current dependencies to avoid having to upgrade dependencies. For example:
+After that, install your dependency. You might have to specify versions of current dependencies to avoid having to upgrade dependencies. For example:
```console
$ opam install alcotest cmdliner=1.0.3 fmt=0.8.6
diff --git a/README.md b/README.md
index 49c0cf54099..b848104e1d8 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,9 @@
+### Build status
+
+| Develop | Berkeley | Compatible |
+| ------- | -------- | ---------- |
+| [![Build status - develop](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=develop)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - berkeley](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=berkeley)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies) | [![Build status - compatible](https://badge.buildkite.com/0c47452f3ea619d3217d388e0de522b218db28c3e161887a9a.svg?branch=compatible)](https://buildkite.com/o-1-labs-2/mina-end-to-end-nightlies)
+
diff --git a/automation/scripts/github_branch_autosync/.gitignore b/automation/scripts/github_branch_autosync/.gitignore
new file mode 100644
index 00000000000..97af16c72d8
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/.gitignore
@@ -0,0 +1,127 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/automation/scripts/github_branch_autosync/Makefile b/automation/scripts/github_branch_autosync/Makefile
new file mode 100644
index 00000000000..d70046813ce
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/Makefile
@@ -0,0 +1,47 @@
+clean:
+ find . -type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete
+ find . -type f -name '*.zip' -delete
+
+# This hack address issue on gcloud functions framework when importing local module
+# in python. We need to remove leading '.' from import
+# https://github.com/GoogleCloudPlatform/functions-framework-python/pull/169
+fix_comp_on_gcloud:
+ sed -i 's/from .lib import /from lib import /' github_autosync/gcloud_entrypoint/main.py
+
+zip_package: clean fix_comp_on_gcloud
+ zip -r github_autosync.zip github_autosync/gcloud_entrypoint
+
+run-tests:
+ python3 -X tracemalloc=25 -m unittest discover -t tests -s tests
+
+deploy: zip_package check-env
+ @gcloud functions deploy AutoSyncBranches \
+ --project=o1labs-192920 \
+ --region=us-central1 \
+ --runtime=python311 \
+ --source=github_autosync/gcloud_entrypoint \
+ --memory=1024MB \
+ --timeout=300 \
+ --trigger-http \
+ --allow-unauthenticated \
+ --entry-point=handle_incoming_commit_push \
+ --set-env-vars=WEBHOOK_APP_USER=$(WEBHOOK_APP_USER),WEBHOOK_APP_REPO=$(WEBHOOK_APP_REPO),WEBHOOK_APP_TOKEN=$(WEBHOOK_APP_TOKEN),WEBHOOK_APP_GITHUB_SECRET=$(WEBHOOK_APP_GITHUB_SECRET)
+
+ @echo --- reverts import fixing ---
+ sed -i 's/from lib import /from .lib import /' github_autosync/gcloud_entrypoint/main.py
+
+check-env:
+# Lack of indentation is required:
+# https://stackoverflow.com/questions/4728810/how-to-ensure-makefile-variable-is-set-as-a-prerequisite
+ifndef WEBHOOK_APP_USER
+ $(error WEBHOOK_APP_USER is undefined)
+endif
+ifndef WEBHOOK_APP_REPO
+ $(error WEBHOOK_APP_REPO is undefined)
+endif
+ifndef WEBHOOK_APP_TOKEN
+ $(error WEBHOOK_APP_TOKEN is undefined)
+endif
+ifndef WEBHOOK_APP_GITHUB_SECRET
+ $(error WEBHOOK_APP_GITHUB_SECRET is undefined)
+endif
\ No newline at end of file
diff --git a/automation/scripts/github_branch_autosync/README.md b/automation/scripts/github_branch_autosync/README.md
new file mode 100644
index 00000000000..3876c1fdec8
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/README.md
@@ -0,0 +1,235 @@
+# GITHUB Auto sync tool
+
+The aim of this project is to satisfy the need for automatic branch synchronization between important branches in GitHub. So far, this has been a manual process of detecting changes between branches, creating pull requests, checking that there are no merge conflicts, and pushing changes to the target branch.
+This tool automates that process. The project can be deployed as a Google Cloud Function.
+
+## Business logic
+
+### Requirements:
+
+For MVP we want only merge conflicts detection.
+
+- [x] There should NOT be a PR created if there are no merge conflicts.
+- [x] There should be a PR created with an assignee list tagging who should fix the PR.
+- [x] Program should detect changes immediately and perform merging attempt.
+- [ ] There should be a solution for updating already existing PR with new conflicting changes
+- [ ] In the future, we may want to attach a small buildkite pipeline for testing purposes (TBD)
+
+### Design
+
+Program mainly operates on Github REST API. It creates a thin layer of configuration and logic on top of python library (PyGithub).
+
+It is prepared to receive the GitHub webhook payload JSON on a new commit to the specified branches, and to be deployed as a Google Cloud Function
+
+#### Basic flow:
+- Perform diff between incoming source and target branches
+- Create branch containing commits
+
+   a) If the branch already exists, push the commit, notify the assignees that there was yet another commit, and check if there are merge conflicts
+
+   b) If there are conflicts: create a PR from the temp branch to the target branch. Add a proper description. Add the assignees who should fix the PR
+
+   c) If there are no conflicts: start a buildkite pipeline (TBD) to verify the changes. If it passes, merge the PR and exit
+
+##### Examples:
+
+###### No conflict
+
+![No conflict](./docs/res/CASE1.jpg)
+
+###### Conflict
+
+![Conflict](./docs/res/CASE2.jpg)
+
+###### Update sync branch while on conflict
+
+![Update branch while conflict](./docs/res/CASE3.jpg)
+
+# Configuration
+
+Configuration is defined as module in `./github_autosync/gcloud_entrypoint/lib/config.py`
+
+Below more detailed description of each section
+
+## Branches
+
+Controls relation between branches. Dictionary key is a branch name on which change we will try to merge to branch with name as value.
+
+For example tuple master -> develop:
+
+If there is a new commit on master branch, program will attempt to merge new changes to develop branch. We can have more than one branch mapping:
+```
+branches = dict(
+ master = 'develop',
+ develop = 'featureA'
+)
+```
+
+## Github
+
+GitHub access settings. These point to the user (or organization), the repository, and the access token. The access token can be classic or fine-grained. However, if the latter is used, an issue can be encountered during the e2e test run, since it uses the GraphQL API.
+Implementation for fine-grained tokens is still TBD: (https://github.blog/2022-10-18-introducing-fine-grained-personal-access-tokens-for-github/)
+
+The token needs permission to:
+- list prs,
+- list branches,
+- create new branch,
+- create new pr,
+- delete branch,
+- merge branch.
+
+Example:
+
+```
+github = {
+ "token": "....",
+ "username": "dkijania",
+ "repo": "myproject",
+ "new_branch_name_format": "sync-{source_branch}-with-{target_branch}"
+}
+```
+
+## Pull Request Configuration
+
+Specific settings for PR creation (if there is necessity to do it based on branch merge conflict).
+
+example:
+
+```
+pr = {
+ "title_prefix": "[Branches auto sync failure] ",
+ "assignees": ["dkijania"],
+ "body_prefix": "This is auto-generated PR in order to solve merge conflicts between two branches.",
+ "draft": 'false',
+ "labels": ["auto-sync"]
+}
+```
+
+## Buildkite (TBD)
+
+
+# CLI
+
+For debugging purposes, the CLI entry point can be used. All it needs is a properly configured program and a payload.json file.
+
+Example:
+
+```
+python3 github_autosync payload.json
+```
+
+Where `payload.json` is a webhook event json payload.
+
+**WARNING:**
+
+**Changes made in such run will also be persistent (as running tool on gcloud)**
+
+
+# Tests
+
+## Setup
+
+Test run requires below setup:
+
+- A classic GitHub token needs to be used,
+- A sample GitHub project needs to be created. Alternatively, the existing project (https://github.com/dkijania/webhook_test) can be used. Please contact dariusz@o1labs.org in order to gain access.
+- Set environment variables:
+ - WEBHOOK_APP_USER - owner of repo
+ - WEBHOOK_APP_REPO - repository name
+ - WEBHOOK_APP_TOKEN - classic token with access to above repo
+
+## Run
+
+```
+ make run-tests
+```
+
+### Warnings during test execution
+
+Test execution may produce warnings which are related to known issue:
+https://github.com/PyGithub/PyGithub/issues/1372
+
+They manifest as warnings in console or log output similar to:
+
+```
+sys:1: ResourceWarning: unclosed
+sys:1: ResourceWarning: unclosed
+```
+
+# GCloud Deployment
+
+## Setup
+
+Your gcloud account needs to be configured. Please run:
+
+```
+$ gcloud auth login
+```
+
+and follow the instructions if you are not logged in to the gcloud CLI.
+
+### Set env variables
+
+```
+$export WEBHOOK_APP_USER=owner of repo
+$export WEBHOOK_APP_REPO=repository name
+$export WEBHOOK_APP_TOKEN=classic token or fine grained token
+$export WEBHOOK_APP_GITHUB_SECRET=webhook github secret
+```
+
+#### Notes on WEBHOOK_APP_GITHUB_SECRET
+Github secret can be acquired from existing gcloud storage:
+
+`https://console.cloud.google.com/security/secret-manager/secret/WEBHOOK_APP_GITHUB_SECRET/versions?project=o1labs-192920`
+
+Usually we don't want to update it, as doing so requires updating the webhook secret token in GitHub.
+However, if there is such a necessity, the steps below will help perform this operation:
+
+1. Generate token locally
+```
+$ openssl rand -hex 20
+```
+
+2. Copy token to github webhook event settings:
+
+Follow instructions on: https://docs.github.com/en/webhooks-and-events/webhooks/securing-your-webhooks#setting-your-secret-token
+
+3. Set environment variable
+```
+$set WEBHOOK_APP_GITHUB_SECRET={output from command 1.}
+```
+
+#### Notes on WEBHOOK_APP_TOKEN
+
+Valid github token (classic or fine-grained) should have following permissions:
+- list prs,
+- list branches,
+- create new branch,
+- create new pr,
+- delete branch,
+- merge branch.
+
+Both fine-grained and classic tokens are acceptable. However, when running tests please ensure that a classic token is used, as we are
+using the GitHub GraphQL API (for creating commits), which does not support fine-grained tokens yet
+
+### Run
+
+In order to deploy application to gcloud first run:
+
+```
+make deploy
+```
+
+This deploys to https://console.cloud.google.com/functions/details/us-central1/AutoSyncBranches
+
+### Post deploy checks
+
+Please ensure that proper permissions are set for the cloud function. The GitHub webhook needs the permission below:
+
+| Role | Group |
+|-------|-------|
+| Cloud Functions Invoker | allUsers |
+
+**Note:**
+
+While it is generally unsafe to allow all users to invoke a cloud function, we have a safeguard in the form of validating the response with the GitHub secret.
diff --git a/automation/scripts/github_branch_autosync/docs/res/CASE1.jpg b/automation/scripts/github_branch_autosync/docs/res/CASE1.jpg
new file mode 100644
index 00000000000..825fa5bb7e7
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/docs/res/CASE1.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87bb8575ed49da4b98d0494f2117d85b2ef922f2c74c821b3c62110b3e879774
+size 33159
diff --git a/automation/scripts/github_branch_autosync/docs/res/CASE2.jpg b/automation/scripts/github_branch_autosync/docs/res/CASE2.jpg
new file mode 100644
index 00000000000..8c0f0406ab8
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/docs/res/CASE2.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcd36ff4f1257411a7c08b4b861964aaa3e61e77568f986cb9dcf94a76cca1bf
+size 41727
diff --git a/automation/scripts/github_branch_autosync/docs/res/CASE3.jpg b/automation/scripts/github_branch_autosync/docs/res/CASE3.jpg
new file mode 100644
index 00000000000..de005df392a
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/docs/res/CASE3.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b52e0ad98bf3c366495ef08172b82a3a40cecd25e5901b33311f07435cc7c07
+size 38677
diff --git a/automation/scripts/github_branch_autosync/github_autosync/__main__.py b/automation/scripts/github_branch_autosync/github_autosync/__main__.py
new file mode 100644
index 00000000000..0336cfda6fb
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/__main__.py
@@ -0,0 +1,22 @@
+""" Cli & Debug entrypoint """
+
+import json
+import argparse
+import os
+import sys
+from gcloud_entrypoint import handle_incoming_commit_push_json,config,verify_signature
+
parser = argparse.ArgumentParser()
parser.add_argument('payload', help='test file from github webhook push event')
parser.add_argument('secret', help='secret for calculating signature')
parser.add_argument('incoming_signature', help='payload signature')

args = parser.parse_args()

if not os.path.isfile(args.payload):
    # sys.exit() accepts a single status/message argument; the original
    # passed two positional arguments, which raises TypeError.
    sys.exit(f'cannot find test file : {args.payload}')

with open(args.payload, encoding="utf-8") as file:
    data = json.load(file)
    json_payload = json.dumps(data)
    # The validator computes "sha256=<hexdigest>", so the header prefix must
    # be "sha256=" (the original "sha=" could never match). HMAC also
    # requires a bytes message, hence the explicit encoding.
    verify_signature(json_payload.encode('utf-8'), args.secret, "sha256=" + args.incoming_signature)
\ No newline at end of file
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/__init__.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/__init__.py
new file mode 100644
index 00000000000..ba37eac5fe2
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/__init__.py
@@ -0,0 +1,2 @@
+"""Entrypoint init"""
+from .main import handle_incoming_commit_push_json,config,verify_signature
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/__init__.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/__init__.py
new file mode 100644
index 00000000000..e966ff1a8ba
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/__init__.py
@@ -0,0 +1,5 @@
+from .buildkite import BuildkiteApi
+from .config import *
+from .github import GithubApi, GithubException
+from .request_parser import CommitInfo,GithubPayloadInfo
+from .request_validator import verify_signature,is_push_event
\ No newline at end of file
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/buildkite.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/buildkite.py
new file mode 100644
index 00000000000..3674c636be1
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/buildkite.py
@@ -0,0 +1,28 @@
+'''
+ Module for Buildkite operations
+'''
+from pybuildkite.buildkite import Buildkite
+
class BuildkiteApi:
    """Thin wrapper around the Buildkite client for triggering pipeline builds.

    Currently not used by the auto-sync flow.
    """

    def __init__(self, config):
        # authenticated client plus the org/pipeline coordinates to build
        self.buildkite = Buildkite()
        self.buildkite.set_access_token(config["token"])
        self.org = config["org"]
        self.pipeline = config["pipeline"]

    def run_pipeline(self, sha, branch, message):
        """Trigger a build of the configured pipeline.

        Parameters:
            sha (str): Commit sha to build.
            branch (str): Branch name.
            message (str): Message shown on the buildkite job.
        Returns:
            Buildkite pipeline handle.
        """
        builds = self.buildkite.builds()
        return builds.create_build(
            self.org,
            self.pipeline,
            sha,
            branch,
            clean_checkout=True,
            message=message,
        )
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/config.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/config.py
new file mode 100644
index 00000000000..db4c7c4f1e1
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/config.py
@@ -0,0 +1,70 @@
+'''
+Main configuration file for auto-sync logic.
+One can define relation between branches in section 'branches' or accessibility settings
+for Buildkite and Github
+'''
import os

'''
Controls relation between branches. Dictionary Key is a branch name
on which change we will try to merge to branch with name as value.
For example tuple compatible -> rampup:
If there is a new commit on compatible branch,
program will attempt to merge new changes to rampup branch
'''
branches = dict(
    compatible = 'rampup',
    rampup = 'berkeley',
    berkeley = 'develop'
)

'''
  Settings for github repository.
  dryrun: if set to true, program will not perform any operations but will printout
  token: valid github token (classic or fine-grained) used for api calls
  username: owner of repo
  repo: repo name
  secret: github webhook secret (for validation of incoming webhook requests)
  WARNING:

  Token need to have permission to:
  - list prs
  - list branches
  - create new branch
  - create new pr
  - delete branch
  - merge branch
'''
# NOTE(review): values are read eagerly, so a missing environment variable
# raises KeyError at import time — confirm that is the desired behavior.
github = {
    "dryrun": False,
    "token": os.environ["WEBHOOK_APP_TOKEN"],
    "username": os.environ["WEBHOOK_APP_USER"],
    "repo": os.environ["WEBHOOK_APP_REPO"],
    "secret": os.environ["WEBHOOK_APP_GITHUB_SECRET"]
}
+
def tmp_branch_name(source_branch, target_branch):
    """Name of the temporary branch used to check whether two branches can merge."""
    return "sync-{}-with-{}".format(source_branch, target_branch)
+
'''
Specific settings for PR creation (if there is necessity to do it based on current repo situation).
'''
pr = {
    "title_prefix": "[Branches auto sync failure] ",
    "assignees": ["dkijania"],
    "body_prefix": "This is auto-generated PR in order to solve merge conflicts between two branches.",
    # Real boolean: the original string 'false' was truthy (bool('false') is
    # True), which silently turned every auto-sync PR into a draft.
    "draft": False,
    "labels": ["auto-sync"]
}

'''
  Buildkite specific settings
'''
buildkite = {
    "token": "...",
    "org": "mina-foundation",
    "pipeline": "test-buildkite"
}
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/github.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/github.py
new file mode 100644
index 00000000000..e9a3f30b0da
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/github.py
@@ -0,0 +1,295 @@
+''' Github api tailored for auto-sync needs'''
+
+import json
+from github import Github,PullRequest
+import requests
+
class GithubException(Exception):
    """Raised when an interaction with the Github REST api fails.

    The exception message (first positional argument) explains the error.
    """
+
class GithubApi:
    '''
    Responsible for various operations on the github rest api,
    like creating new branches or merging changes.
    Is tightly coupled with the config module.
    '''

    def __init__(self, config):
        self.token = config["token"]
        self.username = config["username"]
        self.repo = config["repo"]
        self.github = Github(self.token)
        # default timeout (seconds) for raw REST calls that bypass pygithub
        self.default_timeout = 60
        self.dryrun = bool(config["dryrun"])

    def repository(self):
        '''
        Retrieves github repository based on configuration
        '''
        return Repository(self.github,
                          self.username,
                          self.repo,
                          self.dryrun,
                          self.get_authorization_header,
                          self.default_timeout)

    def branch(self, name):
        '''
        Retrieves github branch from configured repository with given name

        Parameters:
            name (string): Branch name

        Returns:
            branch object
        '''
        return self.repository().get_branch(branch=name)

    def get_diff_commits(self, left_branch, right_branch):
        '''
        Retrieves differences between two branches

        Parameters:
            left_branch (string): Left branch name
            right_branch (string): Right branch name

        Returns:
            commit compare object
        '''
        left_branch_ref = self.branch(left_branch).commit.sha
        right_branch_ref = self.branch(right_branch).commit.sha
        return self.repository().compare(left_branch_ref, right_branch_ref)

    def has_merge_conflict(self, base_branch, head_branch):
        '''
        Detects if two branches have merge conflict.
        It doesn't use github rest api for this purpose, but a little 'hack'
        by not accessing REST api but sending request to part of github web which is
        only indicating mergeability. Then, it scrapes text visible on page to detect if
        branches are mergeable or not. It uses 60s. of timeout for response. However, usually
        the response is immediate.

        Parameters:
            base_branch (string): Branch name to which we want to merge
            head_branch (string): Branch name from which we want to merge

        Returns:
            True when the branches have a merge conflict, False when they are
            mergeable. (The original docstring stated the inverse of what the
            code returns.)
        '''
        res = requests.get(f'https://github.com/{self.username}/{self.repo}/branches/pre_mergeable/{base_branch}...{head_branch}',
            timeout=self.default_timeout)
        # github renders "Able to merge" only when the branches are mergeable
        return "Able to merge" not in res.text

    def create_new_branch(self, branch_name, from_branch):
        '''
        Creates new branch

        Parameters:
            branch_name (string): New branch name
            from_branch (string): Branch name from which we create new branch

        Returns:
            new branch object
        '''
        from_branch_sha = self.branch(from_branch).commit.sha
        branch_ref_name = f"refs/heads/{branch_name}"

        return self.repository().create_git_ref(branch_ref_name, from_branch_sha)

    def fast_forward(self, source, target):
        '''
        Fast forward source branch name to target branch commit. Method extracts head commit sha
        from target branch and updates reference sha of source branch.

        Unfortunately this method is not available in pygithub library.
        Therefore we are accessing REST api directly

        Parameters:
            source (string): Branch name to update
            target (string): Branch name to which head commit we want to update

        Returns:
            fast forward response json

        Raises:
            GithubException: On request failure.
        '''
        target_sha = self.branch(name=target).commit.sha
        return self.repository().fast_forward(source, target_sha)

    @property
    def get_authorization_header(self):
        """
        Gets authorization header for situation when we need to bypass pygithub library
        """
        return {'Authorization': "Bearer " + self.token}

    def delete_branch(self, branch_name):
        '''
        Deletes branch. According to github documentation this operation will also remove
        all PRs that relate to given branch

        Parameters:
            branch_name (string): Branch name to delete

        Raises:
            GithubException: On request failure.
        '''
        self.repository().delete_branch(branch_name)

    def create_pull_request(self, config, source_branch, target_branch, new_branch):
        """
        Creates new pull request from the temporary sync branch into the target branch.

        Parameters:
            config (config): Config module
            source_branch (string): Branch name from which new branch was created
            target_branch (string): Branch name to which we want to merge changes
            new_branch (string): temporary branch which will be used to check mergeability and perform merge

        Returns:
            the title of the created pull request (not a PullRequest object)
        """
        title = config.pr["title_prefix"] + f"into {source_branch} from {target_branch}"
        # "@login" mentions belong in the PR body; the api expects bare logins
        # as assignees (the original passed the "@"-prefixed mentions).
        assignee_tags = list(map(lambda x: "@" + x, config.pr["assignees"]))
        separator = ", "
        body = config.pr["body_prefix"] + "\n" + separator.join(assignee_tags)
        self.repository().create_pull(title=title, body=body,
                                      base=target_branch, head=new_branch,
                                      draft=bool(config.pr["draft"]),
                                      assignees=config.pr["assignees"],
                                      labels=config.pr["labels"])
        return title

    def create_pull_request_for_tmp_branch(self, config, source_branch, temp_branch):
        """
        Creates new pull request from the source branch into the temporary sync branch.

        Parameters:
            config (config): Config module
            source_branch (string): Branch name from which new branch was created
            temp_branch (string): temporary branch which will be used to check mergeability and perform merge

        Returns:
            the title of the created pull request (not a PullRequest object)
        """
        title = config.pr["title_prefix"] + f"into {source_branch} from {temp_branch} for commit {self.branch(source_branch).commit.sha[0:6]}"
        assignee_tags = list(map(lambda x: "@" + x, config.pr["assignees"]))
        separator = ", "
        body = config.pr["body_prefix"] + "\n" + separator.join(assignee_tags)
        # fixed: the original read config.pr["assigness"] (typo), which raised
        # KeyError at runtime; it also passed the arguments positionally,
        # unlike its sibling method above.
        self.repository().create_pull(title=title, body=body,
                                      base=temp_branch, head=source_branch,
                                      draft=bool(config.pr["draft"]),
                                      assignees=config.pr["assignees"],
                                      labels=config.pr["labels"])
        return title

    def branch_exists(self, branch):
        """
        Returns true if branch by given name exists. False otherwise

        Parameters:
            branch (string): branch name
        """
        return any(x.name == branch for x in self.repository().get_branches())

    def merge(self, base, head, message):
        """
        Merges head branch to base branch

        Parameters:
            base (string): base branch name
            head (string): head branch name
            message (string): commit message
        """
        self.repository().merge(base, head, message)
+
class Repository:
    """
    Low-level wrapper over github operations (via pygithub and raw REST calls).
    For testing purposes it can be configured (dryrun) to only print the
    operations it would perform instead of executing them.
    """

    def __init__(self,github,username,repo,dryrun,authorization_header,timeout):
        # pygithub repository handle for "<username>/<repo>"
        self.inner = github.get_repo(username + "/" + repo)
        self.username = username
        self.repo = repo
        # when True, mutating operations are only printed, not executed
        self.dryrun = dryrun
        self.dryrun_suffix = "[DRYRUN]"
        # dict with the Authorization header, used by the raw REST calls below
        self.authorization_header = authorization_header
        # timeout (seconds) for raw REST requests
        self.timeout = timeout

    def get_branches(self):
        # paginated list of branches from pygithub
        return self.inner.get_branches()

    def merge(self,base,head,message):
        # Merge 'head' into 'base' with the given commit message (no-op print in dryrun).
        if self.dryrun:
            print(f'{self.dryrun_suffix} Merge {head} to {base} with message {message}')
        else:
            self.inner.merge(base,head,message)

    def create_pull(self,title,body,base,head,draft,assignees,labels):
        # Create a PR, then attach assignees and labels one by one.
        # NOTE(review): returns None on both paths, so callers never get the
        # PR handle — confirm that is intentional.
        if self.dryrun:
            print(f'{self.dryrun_suffix} Pull request created:')
            print(f"{self.dryrun_suffix} title: '{title}'")
            print(f"{self.dryrun_suffix} body: '{body}'")
            print(f"{self.dryrun_suffix} base: '{base}'")
            print(f"{self.dryrun_suffix} head: '{head}'")
            print(f"{self.dryrun_suffix} is draft: '{draft}'")
            print(f"{self.dryrun_suffix} assignees: '{assignees}'")
            print(f"{self.dryrun_suffix} labels: '{labels}'")
        else:
            # NOTE(review): arguments are passed positionally to pygithub's
            # create_pull; verify that the fifth positional parameter maps to
            # 'draft' (and not 'maintainer_can_modify') in the pinned
            # PyGithub version (1.58.x) — TODO confirm.
            pull = self.inner.create_pull(title,body,base,head,draft)
            for assignee in assignees:
                pull.add_to_assignees(assignee)

            for label in labels:
                pull.add_to_labels(label)

    def create_git_ref(self,branch_ref_name,from_branch_sha):
        # Create a new git ref (branch) pointing at the given sha (print-only in dryrun).
        if self.dryrun:
            print(f'{self.dryrun_suffix} New branch created:')
            print(f"{self.dryrun_suffix} name: '{branch_ref_name}'")
            print(f"{self.dryrun_suffix} head: '{from_branch_sha}'")
        else:
            self.inner.create_git_ref(branch_ref_name,from_branch_sha)

    def compare(self,left_branch_ref, right_branch_ref):
        # pygithub comparison object between two commit shas
        return self.inner.compare(left_branch_ref,right_branch_ref)

    def get_branch(self,branch):
        # Wraps pygithub lookup so a missing branch surfaces as GithubException.
        try:
            return self.inner.get_branch(branch)
        except Exception as ex:
            raise GithubException(f'unable to find branch "{branch}" due to {ex}') from ex

    def fast_forward(self,source,target_sha):
        # Move the 'source' branch ref to 'target_sha' via the raw REST api
        # (not exposed by pygithub). Returns the new head sha.
        if self.dryrun:
            print(f"{self.dryrun_suffix} Fast forward '{source}' to '{target_sha}'")
            return target_sha
        res = requests.patch(f"https://api.github.com/repos/{self.username}/{self.repo}/git/refs/heads/{source}",
            json={"sha": target_sha},
            headers=self.authorization_header,
            timeout=self.timeout
        )
        if res.status_code == 200:
            output = json.loads(res.text)
            return output["object"]["sha"]
        raise GithubException(f'unable to fast forward branch {source} due to : {res.text}')

    def delete_branch(self,branch_name):
        # Delete the branch ref via the raw REST api; github answers 204 on success.
        if self.dryrun:
            print(f"{self.dryrun_suffix} Delete branch '{branch_name}'")
        else:
            res = requests.delete(f"https://api.github.com/repos/{self.username}/{self.repo}/git/refs/heads/{branch_name}",
                headers=self.authorization_header,timeout=self.timeout)
            if not res.status_code == 204:
                raise GithubException(f"unable to delete branch '{branch_name}' due to : '{res.text}'. Status code: '{res.status_code}'")
new file mode 100644
index 00000000000..35af76ed702
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_parser.py
@@ -0,0 +1,49 @@
+"""
+ Module responsible for extracting information from github webhook event payload json
+"""
+
class GithubPayloadInfo(object):
    """
    Class responsible for parsing webhook event payload json
    """
    def __init__(self, json):
        self.data = json

    @property
    def incoming_branch(self):
        """
        Gets the branch name from the full ref id ("refs/heads/{branch}")
        """
        branch_id = self.data["ref"]
        # Split at most twice so branch names containing '/' (e.g.
        # "feature/foo") are preserved; the original split("/")[2] truncated
        # them to the first segment.
        return branch_id.split("/", 2)[2]

    @property
    def commits(self):
        """
        Gets commits info as a list of CommitInfo objects
        """
        return list(map(CommitInfo, self.data["commits"]))
+
class CommitInfo(object):
    """
    Read-only view over a single commit entry of a webhook payload
    """
    def __init__(self, json):
        self.data = json

    @property
    def files(self):
        """
        All files touched by this commit (added, then removed, then modified)
        """
        touched = []
        for key in ("added", "removed", "modified"):
            touched += self.data[key]
        return touched

    @property
    def message(self):
        """
        The commit message
        """
        return self.data["message"]
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_validator.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_validator.py
new file mode 100644
index 00000000000..f612304e320
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/lib/request_validator.py
@@ -0,0 +1,26 @@
+import hashlib
+import hmac
+from http.client import HTTPException
+
def verify_signature(payload_body, secret_token, signature_header):
    """Verify that the payload was sent from GitHub by validating its HMAC-SHA256 signature.

    Adapted from: https://docs.github.com/en/webhooks-and-events/webhooks/securing-your-webhooks

    Args:
        payload_body: original request body to verify (bytes, or str which is utf-8 encoded)
        secret_token: GitHub app webhook token (WEBHOOK_SECRET)
        signature_header: header received from GitHub (x-hub-signature-256),
            expected to be "sha256=<hexdigest>"

    Raises:
        HTTPException: if the header is missing or the signature does not match.
    """
    if not signature_header:
        # http.client.HTTPException takes a plain message; the original passed
        # FastAPI-style kwargs (status_code=, detail=), which raises TypeError.
        raise HTTPException("403 Forbidden: x-hub-signature-256 header is missing!")
    if isinstance(payload_body, str):
        # hmac requires a bytes message; accept str for CLI convenience
        payload_body = payload_body.encode('utf-8')
    hash_object = hmac.new(secret_token.encode('utf-8'), msg=payload_body, digestmod=hashlib.sha256)
    expected_signature = "sha256=" + hash_object.hexdigest()
    if not hmac.compare_digest(expected_signature, signature_header):
        raise HTTPException("403 Forbidden: Request signatures didn't match!")
+
def is_push_event(request):
    """ Returns True when the incoming request is a github "push" webhook event """
    event = request.headers.get("X-GitHub-Event", "")
    return event == "push"
\ No newline at end of file
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/main.py b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/main.py
new file mode 100644
index 00000000000..5c975e0e425
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/main.py
@@ -0,0 +1,80 @@
+''' Main module for handling incoming github webhook event'''
+
+from .lib import GithubPayloadInfo, config, GithubApi, GithubException, verify_signature,is_push_event
+
def handle_incoming_commit_push(request):
    """Cloud-function entrypoint: validates and dispatches a github webhook request.

    Args:
        request (flask.Request): incoming HTTP request object.
    Returns:
        None. All work (merges, PRs) happens as side effects through the
        github api; non-push events are skipped.
    """
    verify_signature(request.data, config.github["secret"], request.headers['x-hub-signature-256'])
    if is_push_event(request):
        handle_incoming_commit_push_json(request.json, config=config)
        print("done")
    else:
        print("not a push event. skipping...")
+
def handle_incoming_commit_push_json(json,config):
    """
    Main logic for handling an incoming github webhook push event.

    Depending on how the target branch relates to the pushed-to (source)
    branch it either: does nothing (identical or behind), fast-forwards the
    target (ahead), or — when the branches diverged — creates a temporary
    sync branch and merges it, opening a PR if there is a merge conflict.

    Parameters:
        json (dict): webhook push-event payload
        config (module): configuration (branches mapping, github and pr settings)
    """
    payload_info = GithubPayloadInfo(json)

    source_branch = payload_info.incoming_branch

    # only branches listed in config.branches take part in auto-sync
    if source_branch not in config.branches:
        print(f"change in '{source_branch}' is not supported ")
        return

    target_branch = config.branches[source_branch]
    github = GithubApi(config.github)
    print(f"generating diff between {source_branch} and '{target_branch}'...")
    cmp = github.get_diff_commits(target_branch,source_branch)

    if cmp.status == "identical":
        print(f"'{source_branch}' and '{target_branch}' branches are identical. skipping merge...")
        return
    if cmp.status == "behind":
        print(f"'{source_branch}' is behind '{target_branch}'. skipping merge...")
        return

    if cmp.status == "ahead":
        print(f"'{source_branch}' is ahead of '{target_branch}'. It is enough just to fast-forward...")
        new_sha = github.fast_forward(target_branch,source_branch)
        print(f'branch {target_branch} successfully fast-forward. It is now on commit: {new_sha}')
        return

    # remaining status is "diverged": both branches contain commits the other lacks
    print(f"'{source_branch}' and '{target_branch}' branches are not identical, both branches contains different commits (they are 'diverged'). approaching merge...")
    new_branch = config.tmp_branch_name(source_branch,target_branch)

    if github.branch_exists(new_branch):
        print(f'temporary sync branch {new_branch} already exists. fast-forwarding or creating yet another pr for new changes')

        try:
            new_sha = github.fast_forward(new_branch,source_branch)
            print(f'branch {new_branch} successfully fast-forward. It is now on commit: {new_sha}')
        except GithubException:
            # the sync branch cannot be fast-forwarded; ask a human to resolve
            title = github.create_pull_request_for_tmp_branch(config,source_branch,new_branch)
            print(f"new PR: '{title}' created. Please resolve it before merge...")

    else:
        print(f'creating new sync branch {new_branch} to incorporate changes from {source_branch} to {target_branch}')
        github.create_new_branch(new_branch,source_branch)

        print("checking mergeability...")

        if github.has_merge_conflict(new_branch,target_branch):
            print("branches have a merge conflict! creating PR to address those changes...")
            title = github.create_pull_request(config,source_branch,target_branch,new_branch)
            print(f"new PR: '{title}' created. Please resolve it before merge...")

        else:
            print(f"there is no merge conflict. merging {new_branch} into {target_branch}...")
            github.merge(target_branch,new_branch, f"Github Autosync: {source_branch} -> {target_branch}")
            github.delete_branch(new_branch)
diff --git a/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/requirements.txt b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/requirements.txt
new file mode 100644
index 00000000000..5cfa315a3ab
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/github_autosync/gcloud_entrypoint/requirements.txt
@@ -0,0 +1,4 @@
+pybuildkite==1.2.2
+PyGithub==1.58.1
+requests==2.22.0
+
diff --git a/automation/scripts/github_branch_autosync/tests/config.py b/automation/scripts/github_branch_autosync/tests/config.py
new file mode 100644
index 00000000000..1b48375789d
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/tests/config.py
@@ -0,0 +1,22 @@
+""" Test config """
+import os
+
+branches = {}
+
+github = {
+ "token": os.environ["WEBHOOK_APP_TOKEN"],
+ "username": os.environ["WEBHOOK_APP_USER"],
+ "repo": os.environ["WEBHOOK_APP_REPO"],
+}
+
def tmp_branch_name(source_branch, target_branch):
    """Name of the temporary branch used when syncing source into target."""
    return "sync-%s-with-%s" % (source_branch, target_branch)
+
# Pull-request settings asserted against by the e2e tests.
pr = {
    "title_prefix": "[Branches auto sync failure] ",
    "assignees": [os.environ["WEBHOOK_APP_USER"]],
    "body_prefix": "This is auto-generated PR in order to solve merge conflicts between two branches.",
    # Real booleans: the original string 'false' was truthy (bool('false') is
    # True), inverting the intended draft/maintainer_can_modify settings.
    "draft": False,
    "maintainer_can_modify": False,
    "labels": ["auto-sync"]
}
\ No newline at end of file
diff --git a/automation/scripts/github_branch_autosync/tests/payload.json b/automation/scripts/github_branch_autosync/tests/payload.json
new file mode 100644
index 00000000000..75dabc729da
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/tests/payload.json
@@ -0,0 +1,192 @@
+{
+ "ref": "refs/heads/rampup",
+ "before": "b1c422e8f098a7312fd68560bddc283746c24bda",
+ "after": "d7b348d83c39bf94bdeaac6cb644b2c65088164d",
+ "repository": {
+ "id": 353642475,
+ "node_id": "MDEwOlJlcG9zaXRvcnkzNTM2NDI0NzU=",
+ "name": "rust-bdd",
+ "full_name": "dkijania/rust-bdd",
+ "private": false,
+ "owner": {
+ "name": "dkijania",
+ "email": "dariusz.kijania@gmail.com",
+ "login": "dkijania",
+ "id": 20424186,
+ "node_id": "MDQ6VXNlcjIwNDI0MTg2",
+ "avatar_url": "https://avatars.githubusercontent.com/u/20424186?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/dkijania",
+ "html_url": "https://github.com/dkijania",
+ "followers_url": "https://api.github.com/users/dkijania/followers",
+ "following_url": "https://api.github.com/users/dkijania/following{/other_user}",
+ "gists_url": "https://api.github.com/users/dkijania/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/dkijania/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/dkijania/subscriptions",
+ "organizations_url": "https://api.github.com/users/dkijania/orgs",
+ "repos_url": "https://api.github.com/users/dkijania/repos",
+ "events_url": "https://api.github.com/users/dkijania/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/dkijania/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "html_url": "https://github.com/dkijania/rust-bdd",
+ "description": "Example of bdd in rust",
+ "fork": false,
+ "url": "https://github.com/dkijania/rust-bdd",
+ "forks_url": "https://api.github.com/repos/dkijania/rust-bdd/forks",
+ "keys_url": "https://api.github.com/repos/dkijania/rust-bdd/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/dkijania/rust-bdd/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/dkijania/rust-bdd/teams",
+ "hooks_url": "https://api.github.com/repos/dkijania/rust-bdd/hooks",
+ "issue_events_url": "https://api.github.com/repos/dkijania/rust-bdd/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/dkijania/rust-bdd/events",
+ "assignees_url": "https://api.github.com/repos/dkijania/rust-bdd/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/dkijania/rust-bdd/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/dkijania/rust-bdd/tags",
+ "blobs_url": "https://api.github.com/repos/dkijania/rust-bdd/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/dkijania/rust-bdd/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/dkijania/rust-bdd/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/dkijania/rust-bdd/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/dkijania/rust-bdd/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/dkijania/rust-bdd/languages",
+ "stargazers_url": "https://api.github.com/repos/dkijania/rust-bdd/stargazers",
+ "contributors_url": "https://api.github.com/repos/dkijania/rust-bdd/contributors",
+ "subscribers_url": "https://api.github.com/repos/dkijania/rust-bdd/subscribers",
+ "subscription_url": "https://api.github.com/repos/dkijania/rust-bdd/subscription",
+ "commits_url": "https://api.github.com/repos/dkijania/rust-bdd/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/dkijania/rust-bdd/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/dkijania/rust-bdd/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/dkijania/rust-bdd/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/dkijania/rust-bdd/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/dkijania/rust-bdd/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/dkijania/rust-bdd/merges",
+ "archive_url": "https://api.github.com/repos/dkijania/rust-bdd/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/dkijania/rust-bdd/downloads",
+ "issues_url": "https://api.github.com/repos/dkijania/rust-bdd/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/dkijania/rust-bdd/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/dkijania/rust-bdd/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/dkijania/rust-bdd/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/dkijania/rust-bdd/labels{/name}",
+ "releases_url": "https://api.github.com/repos/dkijania/rust-bdd/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/dkijania/rust-bdd/deployments",
+ "created_at": 1617268643,
+ "updated_at": "2023-03-28T04:59:17Z",
+ "pushed_at": 1680164399,
+ "git_url": "git://github.com/dkijania/rust-bdd.git",
+ "ssh_url": "git@github.com:dkijania/rust-bdd.git",
+ "clone_url": "https://github.com/dkijania/rust-bdd.git",
+ "svn_url": "https://github.com/dkijania/rust-bdd",
+ "homepage": null,
+ "size": 66,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "Rust",
+ "has_issues": true,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": true,
+ "has_pages": false,
+ "has_discussions": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "disabled": false,
+ "open_issues_count": 1,
+ "license": null,
+ "allow_forking": true,
+ "is_template": false,
+ "web_commit_signoff_required": false,
+ "topics": [
+
+ ],
+ "visibility": "public",
+ "forks": 0,
+ "open_issues": 1,
+ "watchers": 0,
+ "default_branch": "main",
+ "stargazers": 0,
+ "master_branch": "main"
+ },
+ "pusher": {
+ "name": "dkijania",
+ "email": "dariusz.kijania@gmail.com"
+ },
+ "sender": {
+ "login": "dkijania",
+ "id": 20424186,
+ "node_id": "MDQ6VXNlcjIwNDI0MTg2",
+ "avatar_url": "https://avatars.githubusercontent.com/u/20424186?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/dkijania",
+ "html_url": "https://github.com/dkijania",
+ "followers_url": "https://api.github.com/users/dkijania/followers",
+ "following_url": "https://api.github.com/users/dkijania/following{/other_user}",
+ "gists_url": "https://api.github.com/users/dkijania/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/dkijania/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/dkijania/subscriptions",
+ "organizations_url": "https://api.github.com/users/dkijania/orgs",
+ "repos_url": "https://api.github.com/users/dkijania/repos",
+ "events_url": "https://api.github.com/users/dkijania/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/dkijania/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "created": false,
+ "deleted": false,
+ "forced": true,
+ "base_ref": null,
+ "compare": "https://github.com/dkijania/rust-bdd/compare/b1c422e8f098...d7b348d83c39",
+ "commits": [
+ {
+ "id": "d7b348d83c39bf94bdeaac6cb644b2c65088164d",
+ "tree_id": "ec62c6eedbfff5457f2b683205db4dc0333d827c",
+ "distinct": true,
+ "message": "change in berkeley",
+ "timestamp": "2023-03-30T10:19:43+02:00",
+ "url": "https://github.com/dkijania/rust-bdd/commit/d7b348d83c39bf94bdeaac6cb644b2c65088164d",
+ "author": {
+ "name": "dkijania",
+ "email": "dariusz@o1labs.org"
+ },
+ "committer": {
+ "name": "dkijania",
+ "email": "dariusz@o1labs.org"
+ },
+ "added": [
+
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+ "src/cucumber/debug.rs"
+ ]
+ }
+ ],
+ "head_commit": {
+ "id": "d7b348d83c39bf94bdeaac6cb644b2c65088164d",
+ "tree_id": "ec62c6eedbfff5457f2b683205db4dc0333d827c",
+ "distinct": true,
+ "message": "change in berkeley",
+ "timestamp": "2023-03-30T10:19:43+02:00",
+ "url": "https://github.com/dkijania/rust-bdd/commit/d7b348d83c39bf94bdeaac6cb644b2c65088164d",
+ "author": {
+ "name": "dkijania",
+ "email": "dariusz@o1labs.org"
+ },
+ "committer": {
+ "name": "dkijania",
+ "email": "dariusz@o1labs.org"
+ },
+ "added": [
+
+ ],
+ "removed": [
+
+ ],
+ "modified": [
+ "src/cucumber/debug.rs"
+ ]
+ }
+ }
\ No newline at end of file
diff --git a/automation/scripts/github_branch_autosync/tests/test_e2e.py b/automation/scripts/github_branch_autosync/tests/test_e2e.py
new file mode 100644
index 00000000000..d754f64e600
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/tests/test_e2e.py
@@ -0,0 +1,129 @@
+""" E2E tests for auto-sync merges"""
+
+import random
+import unittest
+from github_autosync.gcloud_entrypoint.main import handle_incoming_commit_push_json
+from tests import config,utils
+from github_autosync.gcloud_entrypoint.lib.github import GithubApi
+
+class TestEndToEndFlow(unittest.TestCase):
+
+ generator = None
+ github = None
+
+ @classmethod
+ def setUpClass(cls):
+ cls.generator = utils.BranchNamesGenerator()
+ cls.github = GithubApi(config.github)
+
+ def push_commit_to(self, branch,some_source_file):
+ change = "change" + str(random.randint(0, 100_000))
+ utils.create_simple_commit(self.github, config.github,branch,"commit", some_source_file, change)
+
+ def assert_on_the_same_commit(self, left, right):
+ left_sha = self.github.branch(left).commit.sha
+ right_sha = self.github.branch(right).commit.sha
+
+ self.assertEqual(left_sha,right_sha)
+
+ def assert_temp_sync_branch_created(self, new_branch):
+ self.assertTrue(self.github.branch_exists(new_branch))
+
+ def assert_temp_sync_branch_was_cleaned(self,base,head):
+ self.assertFalse(self.github.branch_exists(config.tmp_branch_name(base,head)))
+
+ def assert_pr_created(self,base,head):
+ prs = self.github.repository().get_pulls(base,head).get_page(0)
+
+ self.assertEqual(len(prs),1)
+
+ pr = prs[0]
+ self.assertEqual(config.pr["assignees"],list(map(lambda x: x.login, pr.assignees)))
+ self.assertTrue(config.pr["title_prefix"] in pr.title)
+ self.assertEqual(config.pr["labels"],list(map(lambda x: x.name, pr.labels)))
+ self.assertTrue(config.pr["body_prefix"] in pr.body)
+ self.assertEqual(bool(config.pr["draft"]),pr.draft)
+
+ def handle_commit_event(self,branch):
+ handle_incoming_commit_push_json(json={ "ref": "refs/heads/" + branch},config=config)
+
+ def test_no_conflict(self):
+ compatible,develop,some_source_file = self.generator.generate_unique_names()
+
+
+ self.push_commit_to(compatible,some_source_file)
+ self.handle_commit_event(compatible)
+
+ self.assert_on_the_same_commit(compatible,develop)
+ self.assert_temp_sync_branch_was_cleaned(compatible,develop)
+
+ def test_conflict(self):
+ compatible,develop,some_source_file = self.generator.generate_unique_names()
+
+ # Creating conflict
+ self.push_commit_to(develop,some_source_file)
+ self.push_commit_to(compatible,some_source_file)
+
+ self.handle_commit_event(compatible)
+
+ temp_sync_branch = config.tmp_branch_name(compatible,develop)
+ self.assert_temp_sync_branch_created(temp_sync_branch)
+ self.assert_pr_created(base=develop,head=temp_sync_branch)
+
+ def test_update_stable_branch_while_conflict(self):
+ compatible,develop,some_source_file = self.generator.generate_unique_names()
+
+ # Creating conflict
+ self.push_commit_to(develop,some_source_file)
+ self.push_commit_to(compatible,some_source_file)
+
+ self.handle_commit_event(compatible)
+
+ temp_sync_branch = config.tmp_branch_name(compatible,develop)
+ self.assert_pr_created(base=develop,head=temp_sync_branch)
+
+ self.push_commit_to(compatible,some_source_file)
+ self.handle_commit_event(compatible)
+
+ # sync branch should fast forward to compatible head
+ temp_branch_head = self.github.branch(temp_sync_branch).commit.sha
+ compatible_head = self.github.branch(compatible).commit.sha
+ develop_head = self.github.branch(develop).commit.sha
+
+ self.assertEqual(temp_branch_head,compatible_head)
+ self.assertNotEqual(compatible_head,develop_head)
+
+ def test_update_stable_branch_while_conflict_causes_conflict_with_temp_branch(self):
+ compatible,develop,some_source_file = self.generator.generate_unique_names()
+ temp_branch = config.tmp_branch_name(compatible,develop)
+
+ # Creating conflict
+ self.push_commit_to(develop,some_source_file)
+ self.push_commit_to(compatible,some_source_file)
+
+ self.handle_commit_event(compatible)
+
+ # attempt to fix merge conflict
+ self.push_commit_to(temp_branch,some_source_file)
+
+ # but then compatible got yet another commit which now creates conflict not only with develop
+ # but also with sync branch
+ self.push_commit_to(compatible,some_source_file)
+
+ self.handle_commit_event(compatible)
+
+ # as a result we should have two prs original one and new for fixing intermittent conflict
+ self.assert_pr_exist(base=temp_branch,head=compatible)
+ self.assert_pr_exist(base=temp_branch,head=develop)
+
+ def assert_pr_exist(self,base,head):
+ prs = self.github.repository().get_pulls(base,head).get_page(0)
+ self.assertEqual(1,len(prs))
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.generator.tear_down()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/automation/scripts/github_branch_autosync/tests/test_payload_parser.py b/automation/scripts/github_branch_autosync/tests/test_payload_parser.py
new file mode 100644
index 00000000000..9c499679c0e
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/tests/test_payload_parser.py
@@ -0,0 +1,30 @@
+import unittest
+import json
+
+from github_autosync.gcloud_entrypoint.lib.request_parser import GithubPayloadInfo
+
+class TestPayloadParser(unittest.TestCase):
+
+ data = None
+
+ @classmethod
+ def setUpClass(cls):
+ with open("tests/payload.json",encoding="utf-8") as file:
+ data = json.load(file)
+ cls.data = data
+
+ def test_incoming_branch(self):
+ info = GithubPayloadInfo(self.data)
+ self.assertEqual("rampup",info.incoming_branch)
+
+ def test_commits(self):
+ info = GithubPayloadInfo(self.data)
+ commits = info.commits
+ self.assertEqual(1,len(commits))
+ commit = commits[0]
+ self.assertEqual(["src/cucumber/debug.rs"],commit.files)
+ self.assertEqual("change in berkeley",commit.message)
+
+if __name__ == '__main__':
+ unittest.main()
+
diff --git a/automation/scripts/github_branch_autosync/tests/utils.py b/automation/scripts/github_branch_autosync/tests/utils.py
new file mode 100644
index 00000000000..10ac1f17453
--- /dev/null
+++ b/automation/scripts/github_branch_autosync/tests/utils.py
@@ -0,0 +1,97 @@
+""" Test utility module """
+
+import random
+import base64
+from gql import gql, Client
+from gql.transport.requests import RequestsHTTPTransport
+from github_autosync.gcloud_entrypoint.lib.github import GithubApi
+from tests import config
+
+
+def create_simple_commit(github_api,config,branch, message, path, content ):
+ """
+ Creates simple commit.
+
+ Parameters:
+ github_api (GithubApi): Github api
+ config (config): Test config module
+ branch (string): Branch name to which we commit
+ message (string): commit message
+ path (string): path to file which will receive new content
+ content (string): new content for file in 'path' argument
+
+ Returns:
+ Graphql response
+ """
+ head = github_api.branch(name=branch).commit.sha
+ sample_string_bytes = content.encode("ascii")
+ base64_bytes = base64.b64encode(sample_string_bytes)
+ base64_string = base64_bytes.decode("ascii")
+
+ transport = RequestsHTTPTransport(url="https://api.github.com/graphql",headers=github_api.get_authorization_header)
+
+ client = Client(transport=transport)
+ mutation = gql(
+ """
+ mutation ($input: CreateCommitOnBranchInput!) {
+ createCommitOnBranch(input: $input) {
+ commit { url }
+ }
+ }
+ """
+ )
+
+ variables = {
+ "input": {
+ "branch": {
+ "repositoryNameWithOwner": config["username"] +"/" + config["repo"],
+ "branchName": branch
+ },
+ "message": {"headline": message },
+ "fileChanges": {
+ "additions": [{
+ "path": path,
+ "contents": base64_string
+ }]
+ },
+ "expectedHeadOid": head
+ }}
+
+ res = client.execute(mutation, variable_values=variables)
+ transport.close()
+ client.close_sync()
+ return res
+
+class BranchNamesGenerator(object):
+ """
+ Utility class to generate unique (with reasonable uniqueness) names for branches
+ and files which are about to be edited
+ """
+ def __init__(self):
+ self.store = []
+ self.github= GithubApi(config.github)
+
+ def generate_unique_names(self):
+ """
+ Generates unique tuple of two branches and file to edit.
+ Then stores branches in inner dict for later clean up
+ """
+ rand = str(random.randint(0, 100_000))
+ compatible_branch = "compatible" + rand
+ develop_branch = "develop_" + rand
+ file_to_edit = f"README_{rand}.md"
+ self.github.create_new_branch(compatible_branch,"main")
+ self.github.create_new_branch(develop_branch,"main")
+
+ config.branches[compatible_branch] = develop_branch
+ self.store.extend([compatible_branch,develop_branch,config.tmp_branch_name(compatible_branch,develop_branch)])
+ return (compatible_branch,develop_branch,file_to_edit)
+
+ def tear_down(self):
+ """
+ Deletes all branches that class is aware of
+ """
+ all_branches = self.github.repository().get_branches().get_page(0)
+ for branch in self.store:
+ if any(x.name == branch for x in all_branches):
+ self.github.delete_branch(branch)
diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/README.md b/automation/terraform/modules/google-cloud/cloud-postgres/README.md
new file mode 100644
index 00000000000..7ac9d424959
--- /dev/null
+++ b/automation/terraform/modules/google-cloud/cloud-postgres/README.md
@@ -0,0 +1,26 @@
+# Google Cloud Postgres Deployment
+
+This terraform configuration is used to deploy an instance of Google Cloud Postgres. Although the default configuration works without creating a conflict, it is recommended to deploy the postgres instance as a module within a larger terraform deployment (which passes it unique var values).
+
+The default configuration uses Google Secret Manager to pull in a password for the default `postgres` user. After deployment, the assigned IP addresses, username, and password will be printed to the terminal as shown below:
+
+```
+Outputs:
+
+cloud_postgres_ip = tolist([
+ {
+ "ip_address" = "35.35.35.35" <---- example IP
+ "time_to_retire" = ""
+ "type" = "PRIMARY"
+ },
+ {
+ "ip_address" = "34.34.34.34" <---- example IP
+ "time_to_retire" = ""
+ "type" = "OUTGOING"
+ },
+])
+db_password = "PASSWORD_HERE"
+db_user = "postgres"
+```
+
+The `PRIMARY` IP should be used when connecting to the new instance. By default, no database or schema is defined on the newly deployed db.
diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/main.tf b/automation/terraform/modules/google-cloud/cloud-postgres/main.tf
new file mode 100644
index 00000000000..bcf21243df1
--- /dev/null
+++ b/automation/terraform/modules/google-cloud/cloud-postgres/main.tf
@@ -0,0 +1,36 @@
+# Configure the Google Cloud provider
+provider "google" {
+ project = var.gcp_project
+ region = var.gcp_region
+}
+
+resource "random_id" "instance_id" {
+ byte_length = 4
+}
+
+data "google_secret_manager_secret_version" "db_password" {
+ provider = google
+ secret = var.db_pass
+}
+
+# Create a Google Cloud SQL PostgreSQL instance
+resource "google_sql_database_instance" "postgres_instance" {
+ name = "${var.db_name}-${random_id.instance_id.hex}"
+ database_version = var.postgres_version
+ project = var.gcp_project
+ region = var.gcp_region
+ settings {
+ tier = var.db_spec
+ user_labels = {
+ service = var.service_label
+ }
+ }
+ deletion_protection = var.deletion_protection
+}
+
+# Define the database user
+resource "google_sql_user" "database_user" {
+ name = var.db_user
+ instance = google_sql_database_instance.postgres_instance.name
+ password = data.google_secret_manager_secret_version.db_password.secret_data
+}
diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/output.tf b/automation/terraform/modules/google-cloud/cloud-postgres/output.tf
new file mode 100644
index 00000000000..b6f2e78cd34
--- /dev/null
+++ b/automation/terraform/modules/google-cloud/cloud-postgres/output.tf
@@ -0,0 +1,13 @@
+output "cloud_postgres_ip" {
+ value = google_sql_database_instance.postgres_instance.ip_address
+}
+
+output "db_user" {
+ value = google_sql_user.database_user.name
+}
+
+output "db_password" {
+ value = data.google_secret_manager_secret_version.db_password.secret_data
+}
+
+
diff --git a/automation/terraform/modules/google-cloud/cloud-postgres/vars.tf b/automation/terraform/modules/google-cloud/cloud-postgres/vars.tf
new file mode 100644
index 00000000000..fe6c59fbdd6
--- /dev/null
+++ b/automation/terraform/modules/google-cloud/cloud-postgres/vars.tf
@@ -0,0 +1,39 @@
+variable "gcp_project" {
+ default = "o1labs-192920"
+}
+
+variable "gcp_region" {
+ default = "us-east4"
+}
+
+variable "gcp_zone" {
+ default = "us-east4-b"
+}
+
+variable "db_name" {
+ default = "o1db"
+}
+
+variable "db_user" {
+ default = "postgres"
+}
+
+variable "db_pass" {
+ default = "o1db-pass"
+}
+
+variable "deletion_protection" {
+ default = false
+}
+
+variable "postgres_version" {
+ default = "POSTGRES_14"
+}
+
+variable "db_spec" {
+ default = "db-g1-small"
+}
+
+variable "service_label" {
+ default = "none"
+}
diff --git a/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl b/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl
index 7a9a175383d..9812696c78a 100644
--- a/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl
+++ b/automation/terraform/modules/testnet-alerts/templates/testnet-alert-rules.yml.tpl
@@ -255,6 +255,24 @@ groups:
description: "{{ $value }} blocks have been validated on network {{ $labels.testnet }} in the last hour (according to some node)."
runbook: "https://www.notion.so/minaprotocol/FewBlocksPerHour-47a6356f093242d988b0d9527ce23478"
+ - alert: StuckInBootstrap
+ expr: count by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{syncStatus = "BOOTSTRAP"}[2h]) >= 7200000) > 0
+ for: ${alert_evaluation_duration}
+ labels:
+ testnet: "{{ $labels.testnet }}"
+ severity: critical
+ annotations:
+ summary: "One or more {{ $labels.testnet }} nodes are stuck at bootstrap for more than 2 hours"
+
+ - alert: StuckInCatchup
+ expr: count by (testnet) (increase(Coda_Runtime_process_uptime_ms_total{syncStatus = "CATCHUP"}[2h]) >= 7200000) > 0
+ for: ${alert_evaluation_duration}
+ labels:
+ testnet: "{{ $labels.testnet }}"
+ severity: critical
+ annotations:
+ summary: "One or more {{ $labels.testnet }} nodes are stuck at catchup for more than 2 hours"
+
- name: Warnings
rules:
diff --git a/buildkite/scripts/build-js-tests.sh b/buildkite/scripts/build-js-tests.sh
index 880dffd6b1a..ef33ac983ff 100755
--- a/buildkite/scripts/build-js-tests.sh
+++ b/buildkite/scripts/build-js-tests.sh
@@ -8,9 +8,9 @@ make snarkyjs
echo "Prepare SnarkyJS test module and pack into archive"
npm pack src/lib/snarkyjs
-mv snarkyjs-*.tgz snarkyjs.tgz
+mv o1js-*.tgz o1js.tgz
cd src/lib/snarkyjs/tests/integration
-npm i ../../../../../snarkyjs.tgz
+npm i ../../../../../o1js.tgz
cp $(which node) ./node
cd ../../../../..
tar -chzf snarkyjs_test.tar.gz src/lib/snarkyjs/tests/integration
diff --git a/buildkite/scripts/check-compatibility.sh b/buildkite/scripts/check-compatibility.sh
new file mode 100755
index 00000000000..dcccdd97ba9
--- /dev/null
+++ b/buildkite/scripts/check-compatibility.sh
@@ -0,0 +1,213 @@
+#!/bin/bash
+
+# start mainline branch daemon as seed, see if PR branch daemon can sync to it
+
+# don't exit if docker download fails
+set +e
+
+function get_shas {
+ SHAS=$(git log -n 10 --format="%h" --abbrev=7 --no-merges)
+}
+
+function image_tag {
+ SHA=$1
+ IMAGE_TAG="$SHA-bullseye-berkeley"
+}
+
+function download-docker {
+ SHA=$1
+ image_tag $SHA
+ docker pull gcr.io/o1labs-192920/mina-daemon:$IMAGE_TAG
+}
+
+function try_docker_shas {
+ DOCKER_SHAS=$1
+ GOT_DOCKER=0
+
+ for sha in $DOCKER_SHAS; do
+ download-docker $sha
+ if [ $? -eq 0 ] ; then
+ GOT_DOCKER=1
+ image_tag $sha
+ break
+ else
+ echo "No docker available for SHA=$sha"
+ fi
+ done
+}
+
+function image_id {
+ TAG=$1
+ IMAGE_ID=$(docker images | grep $TAG | head -n 1 | awk '{print $3}')
+}
+
+function gen_libp2p_keypair {
+ IMAGE_ID=$1
+ DOCKER_TAG=$2
+
+ CONTAINER=$(docker run -d -e MINA_LIBP2P_PASS='' --entrypoint mina $IMAGE_ID libp2p generate-keypair --privkey-path libp2p)
+
+ # allow time for key to be written
+ sleep 10
+
+ docker commit $CONTAINER "mina_ci":$DOCKER_TAG
+
+ image_id $DOCKER_TAG
+
+ COMMITTED_IMAGE_ID=$IMAGE_ID
+
+ echo "Committed image:" $DOCKER_TAG:$COMMITTED_IMAGE_ID
+}
+
+function boot_and_sync {
+ IMAGE_ID=$1
+ EXTERNAL_PORT=$2
+ REST_PORT=$3
+ PEER_ID=$4
+ PEER_PORT=$5
+
+ if [ ! -z $PEER_ID ] && [ ! -z $PEER_PORT ]; then
+ echo "Running with peer" $PEER_ID "on port" $PEER_PORT
+ PEER_FLAG="--peer /ip4/127.0.0.1/tcp/"$PEER_PORT"/p2p/"$PEER_ID
+ SEED_FLAG=""
+ else
+ echo "Running as seed"
+ PEER_FLAG=""
+ SEED_FLAG="--seed"
+ fi
+
+ DAEMON_CONTAINER=$(docker run --entrypoint mina -d -e MINA_LIBP2P_PASS='' $IMAGE_ID daemon \
+ --libp2p-keypair ./libp2p --external-port $EXTERNAL_PORT --rest-port $REST_PORT $PEER_FLAG $SEED_FLAG)
+
+ # allow time to boot
+ sleep 20
+
+ SYNCED=0
+ REST_SERVER="http://127.0.0.1:$REST_PORT/graphql"
+
+ while [ $SYNCED -eq 0 ]; do
+ SYNC_STATUS=$(docker container exec -it $DAEMON_CONTAINER \
+ curl -g -X POST -H "Content-Type: application/json" -d '{"query":"query { syncStatus }"}' ${REST_SERVER})
+
+ # print logs
+ docker container logs $DAEMON_CONTAINER --tail 10
+
+ # "connection refused" until GraphQL server up
+ GOT_SYNC_STATUS=$(echo ${SYNC_STATUS} | grep "syncStatus")
+ if [ ! -z $GOT_SYNC_STATUS ]; then
+ echo $(date +'%Y-%m-%d %H:%M:%S') ". Sync status:" $GOT_SYNC_STATUS
+ fi
+
+ SYNCED=$(echo ${SYNC_STATUS} | grep -c "SYNCED")
+ sleep 5
+ done
+}
+
+function rm_docker_container {
+ IMAGE_ID=$1
+
+ DOCKER_CONTAINER=$(docker ps -a | grep $IMAGE_ID | awk '{print $1}')
+
+ docker kill $DOCKER_CONTAINER
+ docker rm $DOCKER_CONTAINER
+}
+
+### start of code
+
+if [[ $# -ne 1 ]]; then
+ echo "Usage: $0 "
+ exit 1
+fi
+
+MAINLINE_BRANCH=$1
+
+case "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" in
+ develop) ;;
+ *)
+ echo "PR is not against develop, not running the $MAINLINE_BRANCH compatibility test"
+ exit 0
+esac
+
+### Download docker images
+
+echo "Current branch is $BUILDKITE_BRANCH"
+
+echo "Checking out $MAINLINE_BRANCH branch"
+git checkout $MAINLINE_BRANCH
+git pull
+
+echo "Getting $MAINLINE_BRANCH docker"
+get_shas
+try_docker_shas "$SHAS"
+
+if [ $GOT_DOCKER -eq 1 ] ; then
+ echo "Got $MAINLINE_BRANCH docker"
+else
+ echo "Could not find $MAINLINE_BRANCH docker"
+ exit 1
+fi
+
+MAIN_BRANCH_IMAGE_TAG=$IMAGE_TAG
+
+CURR_BRANCH=$(git rev-parse --symbolic-full-name --abbrev-ref HEAD)
+
+echo "Checking out PR branch"
+git checkout $CURR_BRANCH
+
+echo "Getting PR docker"
+get_shas
+try_docker_shas "$SHAS"
+
+if [ $GOT_DOCKER -eq 1 ] ; then
+ echo "Got docker for PR branch"
+else
+ echo "Could not find a docker for PR branch"
+ exit 1
+fi
+
+PR_IMAGE_TAG=$IMAGE_TAG
+
+echo "${MAINLINE_BRANCH} image tag:" $MAIN_BRANCH_IMAGE_TAG
+echo "PR image tag:" $PR_IMAGE_TAG
+
+image_id $MAIN_BRANCH_IMAGE_TAG
+MAIN_BRANCH_IMAGE_ID=$IMAGE_ID
+
+echo "${MAINLINE_BRANCH} image id:" $MAIN_BRANCH_IMAGE_ID
+
+image_id $PR_IMAGE_TAG
+PR_IMAGE_ID=$IMAGE_ID
+
+echo "PR image id:" $PR_IMAGE_ID
+
+### Run docker images
+
+# generate libp2p keypair for mainline branch
+gen_libp2p_keypair $MAIN_BRANCH_IMAGE_ID "${MAINLINE_BRANCH}_docker"
+
+MAIN_BRANCH_COMMITTED_IMAGE_ID=$COMMITTED_IMAGE_ID
+MAIN_BRANCH_LIBP2P_PEER_ID=$(docker run -e MINA_LIBP2P_PASS='' --entrypoint mina $MAIN_BRANCH_COMMITTED_IMAGE_ID \
+ libp2p dump-keypair --privkey-path libp2p | awk -F , '(NR==2){print $3}')
+
+echo "${MAINLINE_BRANCH} libp2p peer id:" $MAIN_BRANCH_LIBP2P_PEER_ID
+
+echo "Booting ${MAINLINE_BRANCH} daemon"
+boot_and_sync $MAIN_BRANCH_COMMITTED_IMAGE_ID 8302 3085
+
+echo "${MAINLINE_BRANCH} seed done bootstrapping"
+
+# generate PR libp2p keypair
+gen_libp2p_keypair $PR_IMAGE_ID "pr_docker"
+
+PR_COMMITTED_IMAGE_ID=$COMMITTED_IMAGE_ID
+
+echo "Booting PR daemon"
+
+boot_and_sync $PR_COMMITTED_IMAGE_ID 8305 3086 $MAIN_BRANCH_LIBP2P_PEER_ID 8302
+
+echo "PR daemon synced to ${MAINLINE_BRANCH} daemon!"
+
+echo "Removing docker containers"
+
+rm_docker_container $MAIN_BRANCH_COMMITTED_IMAGE_ID
+rm_docker_container $PR_COMMITTED_IMAGE_ID
diff --git a/buildkite/scripts/merges-cleanly.sh b/buildkite/scripts/merges-cleanly.sh
index f98234c92c6..29693d980d4 100755
--- a/buildkite/scripts/merges-cleanly.sh
+++ b/buildkite/scripts/merges-cleanly.sh
@@ -8,8 +8,14 @@ echo 'Testing for conflicts between the current branch `'"${CURRENT}"'` and `'"$
# The git merge-tree command shows the content of a 3-way merge without
# touching the index, which we can then search for conflict markers.
-# Tell git where to find ssl certs
-git config --global http.sslCAInfo /etc/ssl/certs/ca-bundle.crt
+# Only execute in the CI. If the script is run locally, it messes up the user's
+# config
+if [ "${BUILDKITE:-false}" == true ]
+then
+ # Tell git where to find ssl certs
+ git config --global http.sslCAInfo /etc/ssl/certs/ca-bundle.crt
+fi
+
# Fetch a fresh copy of the repo
git fetch origin
git config --global user.email "hello@ci.com"
diff --git a/buildkite/scripts/rosetta-integration-tests.sh b/buildkite/scripts/rosetta-integration-tests.sh
index 1efca1562ef..b48b07b1a89 100755
--- a/buildkite/scripts/rosetta-integration-tests.sh
+++ b/buildkite/scripts/rosetta-integration-tests.sh
@@ -93,7 +93,7 @@ ROSETTA_CLI_CONFIG_FILES=${ROSETTA_CLI_CONFIG_FILES:="config.json mina.ros"}
ROSETTA_CLI_MAIN_CONFIG_FILE=${ROSETTA_CLI_MAIN_CONFIG_FILE:="config.json"}
# Frequency (in seconds) at which payment operations will be sent
-TRANSACTION_FREQUENCY=60
+TRANSACTION_FREQUENCY=10
# Fetch zkApps
curl -Ls https://github.com/MinaProtocol/rosetta-integration-test-zkapps/tarball/$ROSETTA_INT_TEST_ZKAPPS_VERSION | tar xz -C /tmp
@@ -122,8 +122,8 @@ cat <"$MINA_CONFIG_FILE"
"ledger": {
"name": "${MINA_NETWORK}",
"accounts": [
- { "pk": "${BLOCK_PRODUCER_PK}", "balance": "1000", "delegate": null, "sk": null },
- { "pk": "${SNARK_PRODUCER_PK}", "balance": "2000", "delegate": "${BLOCK_PRODUCER_PK}", "sk": null }
+ { "pk": "${BLOCK_PRODUCER_PK}", "balance": "1000000", "delegate": null, "sk": null },
+ { "pk": "${SNARK_PRODUCER_PK}", "balance": "2000000", "delegate": "${BLOCK_PRODUCER_PK}", "sk": null }
]
}
}
diff --git a/buildkite/scripts/run-snark-transaction-profiler.sh b/buildkite/scripts/run-snark-transaction-profiler.sh
index 31d7bd8777a..1dfd01e008a 100755
--- a/buildkite/scripts/run-snark-transaction-profiler.sh
+++ b/buildkite/scripts/run-snark-transaction-profiler.sh
@@ -8,13 +8,7 @@ export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y git apt-transport-https ca-certificates tzdata curl python3
-case "$BUILDKITE_PULL_REQUEST_BASE_BRANCH" in
- rampup|berkeley|release/2.0.0|develop)
- TESTNET_NAME="berkeley"
- ;;
- *)
- TESTNET_NAME="mainnet"
-esac
+TESTNET_NAME="berkeley"
git config --global --add safe.directory /workdir
@@ -30,4 +24,4 @@ MAX_NUM_UPDATES=4
MIN_NUM_UPDATES=2
echo "--- Run Snark Transaction Profiler with parameters: --zkapps --k ${K} --max-num-updates ${MAX_NUM_UPDATES} --min-num-updates ${MIN_NUM_UPDATES}"
-python3 ./scripts/snark_transaction_profiler.py ${K} ${MAX_NUM_UPDATES} ${MIN_NUM_UPDATES}
\ No newline at end of file
+python3 ./scripts/snark_transaction_profiler.py ${K} ${MAX_NUM_UPDATES} ${MIN_NUM_UPDATES}
diff --git a/buildkite/scripts/unit-test.sh b/buildkite/scripts/unit-test.sh
index c1cada9a38b..5046e143ec0 100755
--- a/buildkite/scripts/unit-test.sh
+++ b/buildkite/scripts/unit-test.sh
@@ -22,12 +22,8 @@ time make build
echo "--- Build all targets"
dune build "${path}" --profile="${profile}" -j16
-# Note: By attempting a re-run on failure here, we can avoid rebuilding and
-# skip running all of the tests that have already succeeded, since dune will
-# only retry those tests that failed.
+echo "--- Check for changes to verification keys"
+time dune runtest "src/app/print_blockchain_snark_vk" --profile="${profile}" -j16
+
echo "--- Run unit tests"
-time dune runtest "${path}" --profile="${profile}" -j16 || \
-(./scripts/link-coredumps.sh && \
- echo "--- Retrying failed unit tests" && \
- time dune runtest "${path}" --profile="${profile}" -j16 || \
- (./scripts/link-coredumps.sh && false))
+time dune runtest "${path}" --profile="${profile}" -j16 || (./scripts/link-coredumps.sh)
diff --git a/buildkite/src/Command/Base.dhall b/buildkite/src/Command/Base.dhall
index c669425a09f..5c84d1367e4 100644
--- a/buildkite/src/Command/Base.dhall
+++ b/buildkite/src/Command/Base.dhall
@@ -101,10 +101,11 @@ let Config =
, docker_login : Optional DockerLogin.Type
, summon : Optional Summon.Type
, retries : List Retry.Type
+ , flake_retry_limit: Optional Natural
, soft_fail : Optional B/SoftFail
, skip: Optional B/Skip
, `if` : Optional B/If
- , timeout_in_minutes : Optional Natural
+ , timeout_in_minutes : Optional Integer
}
, default =
{ depends_on = [] : List TaggedKey.Type
@@ -114,10 +115,11 @@ let Config =
, artifact_paths = [] : List SelectFiles.Type
, env = [] : List TaggedKey.Type
, retries = [] : List Retry.Type
+ , flake_retry_limit = Some 4
, soft_fail = None B/SoftFail
, skip = None B/Skip
, `if` = None B/If
- , timeout_in_minutes = None Natural
+ , timeout_in_minutes = None Integer
}
}
@@ -154,6 +156,7 @@ let build : Config.Type -> B/Command.Type = \(c : Config.Type) ->
else Some (B/ArtifactPaths.String (SelectFiles.compile c.artifact_paths)),
key = Some c.key,
label = Some c.label,
+ timeout_in_minutes = c.timeout_in_minutes,
retry =
Some {
-- we only consider automatic retries
@@ -180,13 +183,14 @@ let build : Config.Type -> B/Command.Type = \(c : Config.Type) ->
retry.limit
})
-- per https://buildkite.com/docs/agent/v3#exit-codes:
- ([
+ (
+ [
-- infra error
Retry::{ exit_status = ExitStatus.Code -1, limit = Some 4 },
-- infra error
Retry::{ exit_status = ExitStatus.Code +255, limit = Some 4 },
-- common/flake error
- Retry::{ exit_status = ExitStatus.Code +1, limit = Some 4 },
+ Retry::{ exit_status = ExitStatus.Code +1, limit = c.flake_retry_limit },
-- apt-get update race condition error
Retry::{ exit_status = ExitStatus.Code +100, limit = Some 4 },
-- Git checkout error
diff --git a/buildkite/src/Command/MinaArtifact.dhall b/buildkite/src/Command/MinaArtifact.dhall
index 89ed25e662b..6ef33dfd9ff 100644
--- a/buildkite/src/Command/MinaArtifact.dhall
+++ b/buildkite/src/Command/MinaArtifact.dhall
@@ -5,6 +5,9 @@ let S = ../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../Pipeline/Dsl.dhall
+let PipelineTag = ../Pipeline/Tag.dhall
+let PipelineMode = ../Pipeline/Mode.dhall
+
let JobSpec = ../Pipeline/JobSpec.dhall
let Command = ./Base.dhall
@@ -16,13 +19,16 @@ let DebianVersions = ../Constants/DebianVersions.dhall
in
-let pipeline : DebianVersions.DebVersion -> Pipeline.Config.Type = \(debVersion : DebianVersions.DebVersion) ->
+let pipeline : DebianVersions.DebVersion -> PipelineMode.Type -> Pipeline.Config.Type = \(debVersion : DebianVersions.DebVersion) ->
+\(mode: PipelineMode.Type) ->
Pipeline.Config::{
spec =
JobSpec::{
dirtyWhen = DebianVersions.dirtyWhen debVersion,
path = "Release",
- name = "MinaArtifact${DebianVersions.capitalName debVersion}"
+ name = "MinaArtifact${DebianVersions.capitalName debVersion}",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Release ],
+ mode = mode
},
steps = [
Libp2p.step debVersion,
@@ -119,7 +125,7 @@ let pipeline : DebianVersions.DebVersion -> Pipeline.Config.Type = \(debVersion
in
{
- bullseye = pipeline DebianVersions.DebVersion.Bullseye
- , buster = pipeline DebianVersions.DebVersion.Buster
- , focal = pipeline DebianVersions.DebVersion.Focal
+ bullseye = pipeline DebianVersions.DebVersion.Bullseye PipelineMode.Type.PullRequest
+ , buster = pipeline DebianVersions.DebVersion.Buster PipelineMode.Type.PullRequest
+ , focal = pipeline DebianVersions.DebVersion.Focal PipelineMode.Type.PullRequest
}
diff --git a/buildkite/src/Constants/ContainerImages.dhall b/buildkite/src/Constants/ContainerImages.dhall
index f687849ca59..9c06bf956f3 100644
--- a/buildkite/src/Constants/ContainerImages.dhall
+++ b/buildkite/src/Constants/ContainerImages.dhall
@@ -4,10 +4,10 @@
-- NOTE: minaToolchainBookworm is also used for building Ubuntu Jammy packages in CI
{
toolchainBase = "codaprotocol/ci-toolchain-base:v3",
- minaToolchainBuster = "gcr.io/o1labs-192920/mina-toolchain@sha256:563fd7adda282fb3b6765c1811a3566e0fa0560f5d1c5270003483030d82d394",
- minaToolchainBullseye = "gcr.io/o1labs-192920/mina-toolchain@sha256:49891eb46089f937f054afa464ce9868529981b92b30740cce32ef60957a1098",
- minaToolchainBookworm = "gcr.io/o1labs-192920/mina-toolchain@sha256:49891eb46089f937f054afa464ce9868529981b92b30740cce32ef60957a1098",
- minaToolchain = "gcr.io/o1labs-192920/mina-toolchain@sha256:49891eb46089f937f054afa464ce9868529981b92b30740cce32ef60957a1098",
+ minaToolchainBuster = "gcr.io/o1labs-192920/mina-toolchain@sha256:71173ebccf6af3e24d27262a5071f3dd0bd2c40b9de1c258422fdb9419507d3c",
+ minaToolchainBullseye = "gcr.io/o1labs-192920/mina-toolchain@sha256:9c4062e76fcd910ad60d3f1f58e2395f6a5e70f16fbef422442aedb70112ac73",
+ minaToolchainBookworm = "gcr.io/o1labs-192920/mina-toolchain@sha256:9c4062e76fcd910ad60d3f1f58e2395f6a5e70f16fbef422442aedb70112ac73",
+ minaToolchain = "gcr.io/o1labs-192920/mina-toolchain@sha256:9c4062e76fcd910ad60d3f1f58e2395f6a5e70f16fbef422442aedb70112ac73",
delegationBackendToolchain = "gcr.io/o1labs-192920/delegation-backend-production@sha256:12ffd0a9016819c720687f440c7a46b8815f8d3ad06d306d342ee5f8dd4375f5",
elixirToolchain = "elixir:1.10-alpine",
nodeToolchain = "node:14.13.1-stretch-slim",
diff --git a/buildkite/src/Constants/DebianVersions.dhall b/buildkite/src/Constants/DebianVersions.dhall
index 6f9bbb30803..39895baa5a3 100644
--- a/buildkite/src/Constants/DebianVersions.dhall
+++ b/buildkite/src/Constants/DebianVersions.dhall
@@ -66,7 +66,12 @@ let minimalDirtyWhen = [
S.strictlyStart (S.contains "dockerfiles/stages"),
S.exactly "scripts/rebuild-deb" "sh",
S.exactly "scripts/release-docker" "sh",
- S.exactly "buildkite/scripts/build-artifact" "sh"
+ S.exactly "buildkite/scripts/build-artifact" "sh",
+ S.exactly "buildkite/scripts/check-compatibility" "sh",
+ -- Snark profiler dirtyWhen
+ S.exactly "buildkite/src/Jobs/Test/RunSnarkProfiler" "dhall",
+ S.exactly "buildkite/scripts/run-snark-transaction-profiler" "sh",
+ S.exactly "scripts/snark_transaction_profiler" "py"
]
-- The default debian version (Bullseye) is used in all downstream CI jobs
diff --git a/buildkite/src/Jobs/Lint/Fast.dhall b/buildkite/src/Jobs/Lint/Fast.dhall
index d9645e5b545..fc96486a180 100644
--- a/buildkite/src/Jobs/Lint/Fast.dhall
+++ b/buildkite/src/Jobs/Lint/Fast.dhall
@@ -5,6 +5,7 @@ let B = ../../External/Buildkite.dhall
let S = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
@@ -36,6 +37,7 @@ in Pipeline.build
]
, path = "Lint"
, name = "Fast"
+ , tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
}
, steps =
[ Command.build
diff --git a/buildkite/src/Jobs/Lint/HelmChart.dhall b/buildkite/src/Jobs/Lint/HelmChart.dhall
index 2b0e602117c..f0f93b132f1 100644
--- a/buildkite/src/Jobs/Lint/HelmChart.dhall
+++ b/buildkite/src/Jobs/Lint/HelmChart.dhall
@@ -4,6 +4,7 @@ let S = ../../Lib/SelectFiles.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -23,7 +24,8 @@ Pipeline.build
S.exactly "buildkite/scripts/helm-ci" "sh"
],
path = "Lint",
- name = "HelmChart"
+ name = "HelmChart",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Lint/Merge.dhall b/buildkite/src/Jobs/Lint/Merge.dhall
index f6d06707e46..7701fffd052 100644
--- a/buildkite/src/Jobs/Lint/Merge.dhall
+++ b/buildkite/src/Jobs/Lint/Merge.dhall
@@ -4,6 +4,7 @@ let B = ../../External/Buildkite.dhall
let SelectFiles = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Cmd = ../../Lib/Cmds.dhall
@@ -20,7 +21,8 @@ Pipeline.build
spec = JobSpec::{
dirtyWhen = [ SelectFiles.everything ],
path = "Lint",
- name = "Merge"
+ name = "Merge",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
},
steps = [
Command.build
@@ -49,6 +51,7 @@ Pipeline.build
commands = [ Cmd.run "buildkite/scripts/merges-cleanly.sh berkeley"]
, label = "Check merges cleanly into berkeley"
, key = "clean-merge-berkeley"
+ , soft_fail = Some (B/SoftFail.Boolean True)
, target = Size.Small
, docker = Some Docker::{
image = (../../Constants/ContainerImages.dhall).toolchainBase
diff --git a/buildkite/src/Jobs/Lint/OCaml.dhall b/buildkite/src/Jobs/Lint/OCaml.dhall
index a79f1e00026..3dec98290af 100644
--- a/buildkite/src/Jobs/Lint/OCaml.dhall
+++ b/buildkite/src/Jobs/Lint/OCaml.dhall
@@ -7,6 +7,7 @@ let JobSpec = ../../Pipeline/JobSpec.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let RunInToolchain = ../../Command/RunInToolchain.dhall
@@ -31,6 +32,7 @@ in Pipeline.build
[ dirtyDhallDir, S.strictlyStart (S.contains "src/") ]
, path = "Lint"
, name = "OCaml"
+ , tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
}
, steps =
[ Command.build
diff --git a/buildkite/src/Jobs/Lint/Rust.dhall b/buildkite/src/Jobs/Lint/Rust.dhall
index a551272308d..c099d225646 100644
--- a/buildkite/src/Jobs/Lint/Rust.dhall
+++ b/buildkite/src/Jobs/Lint/Rust.dhall
@@ -4,6 +4,8 @@ let S = ../../Lib/SelectFiles.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -19,7 +21,8 @@ Pipeline.build
spec = JobSpec::{
dirtyWhen = [ S.contains "src/app/trace-tool", S.strictlyStart (S.contains "buildkite/src/Jobs/Lint/Rust") ],
path = "Lint",
- name = "Rust"
+ name = "Rust",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Lint/TestnetAlerts.dhall b/buildkite/src/Jobs/Lint/TestnetAlerts.dhall
index 90b69b3f91e..c60772c198b 100644
--- a/buildkite/src/Jobs/Lint/TestnetAlerts.dhall
+++ b/buildkite/src/Jobs/Lint/TestnetAlerts.dhall
@@ -7,6 +7,8 @@ let S = ../../Lib/SelectFiles.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -25,7 +27,8 @@ Pipeline.build
S.strictlyStart (S.contains "buildkite/src/Jobs/Release/TestnetAlerts")
],
path = "Lint",
- name = "TestnetAlerts"
+ name = "TestnetAlerts",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Lint/ValidationService.dhall b/buildkite/src/Jobs/Lint/ValidationService.dhall
index 96fe9d19bf8..a2469a29ef0 100644
--- a/buildkite/src/Jobs/Lint/ValidationService.dhall
+++ b/buildkite/src/Jobs/Lint/ValidationService.dhall
@@ -4,6 +4,8 @@ let List/map = Prelude.List.map
let S = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let Cmd = ../../Lib/Cmds.dhall
let Command = ../../Command/Base.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
@@ -52,7 +54,8 @@ in Pipeline.build Pipeline.Config::{
S.strictlyStart (S.contains ValidationService.rootPath)
],
path = "Lint",
- name = "ValidationService"
+ name = "ValidationService",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
},
steps = [
Command.build Command.Config::{
diff --git a/buildkite/src/Jobs/Lint/Xrefcheck.dhall b/buildkite/src/Jobs/Lint/Xrefcheck.dhall
index 139687b4a5b..28a9268420c 100644
--- a/buildkite/src/Jobs/Lint/Xrefcheck.dhall
+++ b/buildkite/src/Jobs/Lint/Xrefcheck.dhall
@@ -4,6 +4,8 @@ let B = ../../External/Buildkite.dhall
let SelectFiles = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Cmd = ../../Lib/Cmds.dhall
@@ -23,7 +25,8 @@ Pipeline.build
SelectFiles.strictly (SelectFiles.contains ".xrefcheck.yml")
],
path = "Lint",
- name = "Xrefcheck"
+ name = "Xrefcheck",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Lint ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Release/HelmRelease.dhall b/buildkite/src/Jobs/Release/HelmRelease.dhall
index 7fa74079dda..8e843171ab7 100644
--- a/buildkite/src/Jobs/Release/HelmRelease.dhall
+++ b/buildkite/src/Jobs/Release/HelmRelease.dhall
@@ -4,6 +4,8 @@ let S = ../../Lib/SelectFiles.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -21,7 +23,8 @@ Pipeline.build
S.exactly "buildkite/scripts/helm-ci" "sh"
],
path = "Release",
- name = "HelmRelease"
+ name = "HelmRelease",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Release ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Release/ItnOrchestratorArtifact.dhall b/buildkite/src/Jobs/Release/ItnOrchestratorArtifact.dhall
index e1232b6278e..99e193cf299 100644
--- a/buildkite/src/Jobs/Release/ItnOrchestratorArtifact.dhall
+++ b/buildkite/src/Jobs/Release/ItnOrchestratorArtifact.dhall
@@ -5,6 +5,8 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -29,7 +31,8 @@ Pipeline.build
S.strictlyStart (S.contains "src/app/itn_orchestrator")
],
path = "Release",
- name = "ItnOrchestratorArtifact"
+ name = "ItnOrchestratorArtifact",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Release ]
},
steps = [
DockerImage.generateStep spec
diff --git a/buildkite/src/Jobs/Release/LeaderboardArtifact.dhall b/buildkite/src/Jobs/Release/LeaderboardArtifact.dhall
index f8b657359fd..252cd012e22 100644
--- a/buildkite/src/Jobs/Release/LeaderboardArtifact.dhall
+++ b/buildkite/src/Jobs/Release/LeaderboardArtifact.dhall
@@ -5,6 +5,8 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -27,7 +29,8 @@ Pipeline.build
S.strictlyStart (S.contains "frontend/leaderboard")
],
path = "Release",
- name = "LeaderboardArtifact"
+ name = "LeaderboardArtifact",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Release ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Release/MinaToolchainArtifactBullseye.dhall b/buildkite/src/Jobs/Release/MinaToolchainArtifactBullseye.dhall
new file mode 100644
index 00000000000..0e2b4f198ea
--- /dev/null
+++ b/buildkite/src/Jobs/Release/MinaToolchainArtifactBullseye.dhall
@@ -0,0 +1,50 @@
+let Prelude = ../../External/Prelude.dhall
+
+let Cmd = ../../Lib/Cmds.dhall
+let S = ../../Lib/SelectFiles.dhall
+
+let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+let JobSpec = ../../Pipeline/JobSpec.dhall
+
+let Command = ../../Command/Base.dhall
+let Size = ../../Command/Size.dhall
+let DockerImage = ../../Command/DockerImage.dhall
+let DockerLogin = ../../Command/DockerLogin/Type.dhall
+
+
+in
+
+Pipeline.build
+ Pipeline.Config::{
+ spec =
+ JobSpec::{
+ dirtyWhen = [
+ S.strictlyStart (S.contains "dockerfiles/stages/1-"),
+ S.strictlyStart (S.contains "dockerfiles/stages/2-"),
+ S.strictlyStart (S.contains "dockerfiles/stages/3-"),
+ S.strictlyStart (S.contains "buildkite/src/Jobs/Release/MinaToolchainArtifact"),
+ S.strictly (S.contains "opam.export"),
+ -- Rust version has changed
+ S.strictlyEnd (S.contains "rust-toolchain.toml")
+ ],
+ path = "Release",
+ name = "MinaToolchainArtifactBullseye",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Release ]
+ },
+ steps = [
+
+ -- mina-toolchain Debian 11 "Bullseye" Toolchain
+ let toolchainBullseyeSpec = DockerImage.ReleaseSpec::{
+ service="mina-toolchain",
+ deb_codename="bullseye",
+ extra_args="--no-cache",
+ step_key="toolchain-bullseye-docker-image"
+ }
+
+ in
+
+ DockerImage.generateStep toolchainBullseyeSpec
+
+ ]
+ }
\ No newline at end of file
diff --git a/buildkite/src/Jobs/Release/MinaToolchainArtifact.dhall b/buildkite/src/Jobs/Release/MinaToolchainArtifactBuster.dhall
similarity index 77%
rename from buildkite/src/Jobs/Release/MinaToolchainArtifact.dhall
rename to buildkite/src/Jobs/Release/MinaToolchainArtifactBuster.dhall
index 3780943bb12..f478a107890 100644
--- a/buildkite/src/Jobs/Release/MinaToolchainArtifact.dhall
+++ b/buildkite/src/Jobs/Release/MinaToolchainArtifactBuster.dhall
@@ -4,6 +4,8 @@ let Cmd = ../../Lib/Cmds.dhall
let S = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+let PipelineMode = ../../Pipeline/Mode.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -28,22 +30,12 @@ Pipeline.build
S.strictlyEnd (S.contains "rust-toolchain.toml")
],
path = "Release",
- name = "MinaToolchainArtifact"
+ name = "MinaToolchainArtifactBuster",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Release ],
+ mode = PipelineMode.Type.Stable
},
steps = [
- -- mina-toolchain Debian 11 "Bullseye" Toolchain
- let toolchainBullseyeSpec = DockerImage.ReleaseSpec::{
- service="mina-toolchain",
- deb_codename="bullseye",
- extra_args="--no-cache",
- step_key="toolchain-bullseye-docker-image"
- }
-
- in
-
- DockerImage.generateStep toolchainBullseyeSpec,
-
-- mina-toolchain Debian 10 "Buster" Toolchain
let toolchainBusterSpec = DockerImage.ReleaseSpec::{
service="mina-toolchain",
diff --git a/buildkite/src/Jobs/Release/TestnetAlerts.dhall b/buildkite/src/Jobs/Release/TestnetAlerts.dhall
index 06125455a8c..e444c4e7a33 100644
--- a/buildkite/src/Jobs/Release/TestnetAlerts.dhall
+++ b/buildkite/src/Jobs/Release/TestnetAlerts.dhall
@@ -7,6 +7,8 @@ let S = ../../Lib/SelectFiles.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -24,7 +26,8 @@ Pipeline.build
S.strictlyStart (S.contains "buildkite/src/Jobs/Release/TestnetAlerts")
],
path = "Release",
- name = "TestnetAlerts"
+ name = "TestnetAlerts",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Release ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Release/TraceTool.dhall b/buildkite/src/Jobs/Release/TraceTool.dhall
index d4a0e4997f4..0d6770523e5 100644
--- a/buildkite/src/Jobs/Release/TraceTool.dhall
+++ b/buildkite/src/Jobs/Release/TraceTool.dhall
@@ -4,6 +4,8 @@ let S = ../../Lib/SelectFiles.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -20,7 +22,8 @@ Pipeline.build
spec = JobSpec::{
dirtyWhen = [ S.contains "src/app/trace-tool", S.strictlyStart (S.contains "buildkite/src/Jobs/TraceTool") ],
path = "Release",
- name = "TraceTool"
+ name = "TraceTool",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Release ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall
index e92cf6be77b..16fa19b1cc1 100644
--- a/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall
+++ b/buildkite/src/Jobs/Test/ArchiveNodeUnitTest.dhall
@@ -2,6 +2,7 @@ let Prelude = ../../External/Prelude.dhall
let Cmd = ../../Lib/Cmds.dhall
let S = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
let RunInToolchain = ../../Command/RunInToolchain.dhall
@@ -27,6 +28,7 @@ Pipeline.build
]
, path = "Test"
, name = "ArchiveNodeUnitTest"
+ , tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
}
, steps =
let outerDir : Text =
diff --git a/buildkite/src/Jobs/Test/BerkeleyCompatibility.dhall b/buildkite/src/Jobs/Test/BerkeleyCompatibility.dhall
new file mode 100644
index 00000000000..fbe72a1751c
--- /dev/null
+++ b/buildkite/src/Jobs/Test/BerkeleyCompatibility.dhall
@@ -0,0 +1,47 @@
+let JobSpec = ../../Pipeline/JobSpec.dhall
+let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineMode = ../../Pipeline/Mode.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+let Prelude = ../../External/Prelude.dhall
+
+let Cmd = ../../Lib/Cmds.dhall
+let S = ../../Lib/SelectFiles.dhall
+let D = S.PathPattern
+
+let Command = ../../Command/Base.dhall
+let RunInToolchain = ../../Command/RunInToolchain.dhall
+let Docker = ../../Command/Docker/Type.dhall
+let Size = ../../Command/Size.dhall
+
+let dependsOn = [
+ { name = "MinaArtifactBullseye", key = "daemon-berkeley-bullseye-docker-image" }
+]
+
+in Pipeline.build Pipeline.Config::{
+ spec =
+ JobSpec::{
+ dirtyWhen = [
+ S.strictlyStart (S.contains "src"),
+ S.exactly "buildkite/scripts/check-compatibility" "sh",
+ S.exactly "buildkite/src/Jobs/Test/BerkeleyCompatibility" "dhall"
+ ],
+ path = "Test",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ],
+ name = "BerkeleyCompatibility"
+ },
+ steps = [
+ Command.build Command.Config::{
+ commands = [
+ Cmd.run "buildkite/scripts/check-compatibility.sh berkeley"
+ ],
+      label = "Test: berkeley compatibility test",
+ key = "berkeley-compatibilty-test",
+ target = Size.XLarge,
+ docker = None Docker.Type,
+ depends_on = dependsOn,
+ timeout_in_minutes = Some +60
+ }
+ ]
+}
+
+
diff --git a/buildkite/src/Jobs/Test/CheckDhall.dhall b/buildkite/src/Jobs/Test/CheckDhall.dhall
index 34f8c78476e..18cace107aa 100644
--- a/buildkite/src/Jobs/Test/CheckDhall.dhall
+++ b/buildkite/src/Jobs/Test/CheckDhall.dhall
@@ -3,6 +3,7 @@ let D = S.PathPattern
let JobSpec = ../../Pipeline/JobSpec.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let Command = ../../Command/Base.dhall
let Docker = ../../Command/Docker/Type.dhall
let Size = ../../Command/Size.dhall
@@ -23,7 +24,8 @@ Pipeline.build
S.exactly "buildkite/scripts/generate-jobs" "sh"
],
path = "Test",
- name = "CheckDhall"
+ name = "CheckDhall",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Test/CheckGraphQLSchema.dhall b/buildkite/src/Jobs/Test/CheckGraphQLSchema.dhall
index be248e22210..4583a5b38bc 100644
--- a/buildkite/src/Jobs/Test/CheckGraphQLSchema.dhall
+++ b/buildkite/src/Jobs/Test/CheckGraphQLSchema.dhall
@@ -2,6 +2,7 @@ let S = ../../Lib/SelectFiles.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let CheckGraphQLSchema = ../../Command/CheckGraphQLSchema.dhall
@@ -18,7 +19,8 @@ in Pipeline.build Pipeline.Config::{
S.strictly (S.contains "Makefile")
],
path = "Test",
- name = "CheckGraphQLSchema"
+ name = "CheckGraphQLSchema",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
CheckGraphQLSchema.step dependsOn
diff --git a/buildkite/src/Jobs/Test/ConnectToBerkeley.dhall b/buildkite/src/Jobs/Test/ConnectToBerkeley.dhall
index d6f404bfcf5..4537de014e9 100644
--- a/buildkite/src/Jobs/Test/ConnectToBerkeley.dhall
+++ b/buildkite/src/Jobs/Test/ConnectToBerkeley.dhall
@@ -2,6 +2,7 @@ let S = ../../Lib/SelectFiles.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let ConnectToTestnet = ../../Command/ConnectToTestnet.dhall
@@ -19,7 +20,8 @@ in Pipeline.build Pipeline.Config::{
S.exactly "buildkite/src/Command/ConnectToTestnet" "dhall"
],
path = "Test",
- name = "ConnectToBerkeley"
+ name = "ConnectToBerkeley",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
ConnectToTestnet.step dependsOn
diff --git a/buildkite/src/Jobs/Test/CoverageTearDown.dhall b/buildkite/src/Jobs/Test/CoverageTearDown.dhall
index bba020d8025..aa10f2a5ff8 100644
--- a/buildkite/src/Jobs/Test/CoverageTearDown.dhall
+++ b/buildkite/src/Jobs/Test/CoverageTearDown.dhall
@@ -8,7 +8,7 @@ let Size = ../../Command/Size.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
let PipelineMode = ../../Pipeline/Mode.dhall
-let PipelineStage = ../../Pipeline/Stage.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
in Pipeline.build
Pipeline.Config::{
@@ -16,7 +16,7 @@ in Pipeline.build
JobSpec::{
dirtyWhen = [ S.everything ],
path = "Test",
- stage = PipelineStage.Type.TearDown,
+ tags = [ PipelineTag.Type.TearDown ],
name = "CoverageTearDown"
}
, steps = [
diff --git a/buildkite/src/Jobs/Test/DaemonUnitTest.dhall b/buildkite/src/Jobs/Test/DaemonUnitTest.dhall
index 7b769901aa7..a21bc935750 100644
--- a/buildkite/src/Jobs/Test/DaemonUnitTest.dhall
+++ b/buildkite/src/Jobs/Test/DaemonUnitTest.dhall
@@ -5,6 +5,8 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -44,7 +46,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = unitDirtyWhen,
path = "Test",
- name = "DaemonUnitTest"
+ name = "DaemonUnitTest",
+ tags = [ PipelineTag.Type.VeryLong, PipelineTag.Type.Test ]
},
steps = [
buildTestCmd "dev" "src/lib" Size.XLarge
diff --git a/buildkite/src/Jobs/Test/DelegationBackendUnitTest.dhall b/buildkite/src/Jobs/Test/DelegationBackendUnitTest.dhall
index f0b8e42f78e..62d6432e127 100644
--- a/buildkite/src/Jobs/Test/DelegationBackendUnitTest.dhall
+++ b/buildkite/src/Jobs/Test/DelegationBackendUnitTest.dhall
@@ -4,12 +4,17 @@ let Cmd = ../../Lib/Cmds.dhall
let S = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
let Docker = ../../Command/Docker/Type.dhall
let Size = ../../Command/Size.dhall
+let B = ../../External/Buildkite.dhall
+let B/SoftFail = B.definitions/commandStep/properties/soft_fail/Type
+
in
Pipeline.build
@@ -22,7 +27,8 @@ Pipeline.build
S.exactly "buildkite/src/Jobs/Test/DelegationBackendUnitTest" "dhall"
],
path = "Test",
- name = "DelegationBackendUnitTest"
+ name = "DelegationBackendUnitTest",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
},
steps = [
Command.build
@@ -31,6 +37,7 @@ Pipeline.build
Cmd.runInDocker Cmd.Docker::{image = ContainerImages.delegationBackendToolchain} "cd src/app/delegation_backend && mkdir -p result && cp -R /headers result && cd src/delegation_backend && go test"
],
label = "delegation backend unit-tests",
+ soft_fail = Some (B/SoftFail.Boolean True),
key = "delegation-backend-unit-tests",
target = Size.Small,
docker = None Docker.Type
diff --git a/buildkite/src/Jobs/Test/DevelopCompatibility.dhall b/buildkite/src/Jobs/Test/DevelopCompatibility.dhall
new file mode 100644
index 00000000000..ba907ed0d5e
--- /dev/null
+++ b/buildkite/src/Jobs/Test/DevelopCompatibility.dhall
@@ -0,0 +1,47 @@
+let JobSpec = ../../Pipeline/JobSpec.dhall
+let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineMode = ../../Pipeline/Mode.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+let Prelude = ../../External/Prelude.dhall
+
+let Cmd = ../../Lib/Cmds.dhall
+let S = ../../Lib/SelectFiles.dhall
+let D = S.PathPattern
+
+let Command = ../../Command/Base.dhall
+let RunInToolchain = ../../Command/RunInToolchain.dhall
+let Docker = ../../Command/Docker/Type.dhall
+let Size = ../../Command/Size.dhall
+
+let dependsOn = [
+ { name = "MinaArtifactBullseye", key = "daemon-berkeley-bullseye-docker-image" }
+]
+
+in Pipeline.build Pipeline.Config::{
+ spec =
+ JobSpec::{
+ dirtyWhen = [
+ S.strictlyStart (S.contains "src"),
+ S.exactly "buildkite/scripts/check-compatibility" "sh",
+ S.exactly "buildkite/src/Jobs/Test/DevelopCompatibility" "dhall"
+ ],
+ path = "Test",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ],
+ name = "DevelopCompatibility"
+ },
+ steps = [
+ Command.build Command.Config::{
+ commands = [
+ Cmd.run "buildkite/scripts/check-compatibility.sh develop"
+ ],
+      label = "Test: develop compatibility test",
+ key = "develop-compatibilty-test",
+ target = Size.XLarge,
+ docker = None Docker.Type,
+ depends_on = dependsOn,
+ timeout_in_minutes = Some +60
+ }
+ ]
+}
+
+
diff --git a/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall b/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall
index 62ea69f2724..826f38be20d 100644
--- a/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall
+++ b/buildkite/src/Jobs/Test/FuzzyZkappTest.dhall
@@ -6,8 +6,9 @@ let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
let PipelineMode = ../../Pipeline/Mode.dhall
-let JobSpec = ../../Pipeline/JobSpec.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
let RunInToolchain = ../../Command/RunInToolchain.dhall
let Docker = ../../Command/Docker/Type.dhall
@@ -25,9 +26,9 @@ let buildTestCmd : Text -> Text -> Natural -> Natural -> Size -> Command.Type =
key = key,
target = cmd_target,
docker = None Docker.Type,
- artifact_paths = [ S.contains "core_dumps/*" ]
+ artifact_paths = [ S.contains "core_dumps/*" ],
+ flake_retry_limit = Some 0
}
-
in
Pipeline.build
@@ -46,9 +47,10 @@ Pipeline.build
dirtyWhen = unitDirtyWhen,
path = "Test",
name = "FuzzyZkappTest",
+ tags = [ PipelineTag.Type.VeryLong, PipelineTag.Type.Test ],
mode = PipelineMode.Type.Stable
},
steps = [
- buildTestCmd "dev" "src/lib/transaction_snark/test/zkapp_fuzzy/zkapp_fuzzy.exe" 3600 150 Size.Small
+ buildTestCmd "dev" "src/lib/transaction_snark/test/zkapp_fuzzy/zkapp_fuzzy.exe" 4200 150 Size.Small
]
}
diff --git a/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall b/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall
index e22384dc2ba..ce5a974ee22 100644
--- a/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall
+++ b/buildkite/src/Jobs/Test/Libp2pUnitTest.dhall
@@ -4,6 +4,7 @@ let Cmd = ../../Lib/Cmds.dhall
let S = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -23,7 +24,8 @@ Pipeline.build
S.exactly "buildkite/src/Jobs/Test/Libp2pUnitTest" "dhall"
],
path = "Test",
- name = "Libp2pUnitTest"
+ name = "Libp2pUnitTest",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
},
steps = [
Command.build
@@ -55,7 +57,7 @@ Pipeline.build
key = "libp2p-bs-qc",
target = Size.Large,
docker = None Docker.Type,
- timeout_in_minutes = Some 45
+ timeout_in_minutes = Some +45
}
]
diff --git a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall
index 4b8d0c73302..06f2b20473c 100644
--- a/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall
+++ b/buildkite/src/Jobs/Test/RosettaIntegrationTests.dhall
@@ -5,6 +5,7 @@ let Cmd = ../../Lib/Cmds.dhall
let S = ../../Lib/SelectFiles.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -32,7 +33,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = dirtyWhen,
path = "Test",
- name = "RosettaIntegrationTests"
+ name = "RosettaIntegrationTests",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
}
, steps = [
Command.build
diff --git a/buildkite/src/Jobs/Test/RosettaUnitTest.dhall b/buildkite/src/Jobs/Test/RosettaUnitTest.dhall
index 7e802917eff..6375974d206 100644
--- a/buildkite/src/Jobs/Test/RosettaUnitTest.dhall
+++ b/buildkite/src/Jobs/Test/RosettaUnitTest.dhall
@@ -5,6 +5,7 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -41,7 +42,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = unitDirtyWhen,
path = "Test",
- name = "RosettaUnitTest"
+ name = "RosettaUnitTest",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
buildTestCmd "dev" "src/app/rosetta" Size.Small
diff --git a/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall b/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall
index dee246854be..7a8b0251f5a 100644
--- a/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall
+++ b/buildkite/src/Jobs/Test/RunSnarkProfiler.dhall
@@ -5,6 +5,7 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -54,7 +55,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = lintDirtyWhen,
path = "Test",
- name = "RunSnarkProfiler"
+ name = "RunSnarkProfiler",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
buildTestCmd Size.Small dependsOn
diff --git a/buildkite/src/Jobs/Test/SingleNodeTest.dhall b/buildkite/src/Jobs/Test/SingleNodeTest.dhall
index 06ed548a2f3..8f85ec04140 100644
--- a/buildkite/src/Jobs/Test/SingleNodeTest.dhall
+++ b/buildkite/src/Jobs/Test/SingleNodeTest.dhall
@@ -5,6 +5,8 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -56,7 +58,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = unitDirtyWhen,
path = "Test",
- name = "SingleNodeTest"
+ name = "SingleNodeTest",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
buildTestCmd "dev" "src/test/command_line_tests/command_line_tests.exe" Size.XLarge
diff --git a/buildkite/src/Jobs/Test/SnarkyJSTest.dhall b/buildkite/src/Jobs/Test/SnarkyJSTest.dhall
index d5054981a12..b2027f9863b 100644
--- a/buildkite/src/Jobs/Test/SnarkyJSTest.dhall
+++ b/buildkite/src/Jobs/Test/SnarkyJSTest.dhall
@@ -2,6 +2,7 @@ let S = ../../Lib/SelectFiles.dhall
let B = ../../External/Buildkite.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -24,7 +25,8 @@ Pipeline.build
S.strictlyStart (S.contains "src/lib")
],
path = "Test",
- name = "SnarkyJSTest"
+ name = "SnarkyJSTest",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
Command.build
diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall
index 96f6ab5e835..2e918ff3432 100644
--- a/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall
+++ b/buildkite/src/Jobs/Test/TestnetIntegrationTests.dhall
@@ -3,6 +3,8 @@ let S = ../../Lib/SelectFiles.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
let PipelineMode = ../../Pipeline/Mode.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
+
let TestExecutive = ../../Command/TestExecutive.dhall
let dependsOn = [
@@ -28,6 +30,7 @@ in Pipeline.build Pipeline.Config::{
],
path = "Test",
name = "TestnetIntegrationTests",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ],
mode = PipelineMode.Type.Stable
},
steps = [
diff --git a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall
index 04dc26cef28..271a99d08e9 100644
--- a/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall
+++ b/buildkite/src/Jobs/Test/TestnetIntegrationTestsLong.dhall
@@ -3,6 +3,7 @@ let S = ../../Lib/SelectFiles.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
let PipelineMode = ../../Pipeline/Mode.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let TestExecutive = ../../Command/TestExecutive.dhall
let dependsOn = [
@@ -23,7 +24,8 @@ in Pipeline.build Pipeline.Config::{
],
path = "Test",
name = "TestnetIntegrationTestsLong",
- mode = PipelineMode.Type.Stable
+ mode = PipelineMode.Type.Stable,
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
TestExecutive.execute "hard-fork" dependsOn
diff --git a/buildkite/src/Jobs/Test/ValidationService.dhall b/buildkite/src/Jobs/Test/ValidationService.dhall
index 1c2ee6fa571..cce41331433 100644
--- a/buildkite/src/Jobs/Test/ValidationService.dhall
+++ b/buildkite/src/Jobs/Test/ValidationService.dhall
@@ -1,6 +1,7 @@
let S = ../../Lib/SelectFiles.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let Command = ../../Command/Base.dhall
let Docker = ../../Command/Docker/Type.dhall
let Size = ../../Command/Size.dhall
@@ -15,7 +16,8 @@ in Pipeline.build Pipeline.Config::{
S.strictlyStart (S.contains ValidationService.rootPath)
],
path = "Test",
- name = "ValidationService"
+ name = "ValidationService",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
},
steps = [
Command.build Command.Config::{
diff --git a/buildkite/src/Jobs/Test/VersionLint.dhall b/buildkite/src/Jobs/Test/VersionLint.dhall
index fcbdc34f21a..a0aa132649a 100644
--- a/buildkite/src/Jobs/Test/VersionLint.dhall
+++ b/buildkite/src/Jobs/Test/VersionLint.dhall
@@ -5,6 +5,7 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -56,7 +57,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = lintDirtyWhen,
path = "Test",
- name = "VersionLint"
+ name = "VersionLint",
+ tags = [ PipelineTag.Type.Long, PipelineTag.Type.Test ]
},
steps = [
buildTestCmd "develop" Size.Small dependsOn
diff --git a/buildkite/src/Jobs/Test/ZkappMetrics.dhall b/buildkite/src/Jobs/Test/ZkappMetrics.dhall
index 4bf912a70c4..31ec5072d7b 100644
--- a/buildkite/src/Jobs/Test/ZkappMetrics.dhall
+++ b/buildkite/src/Jobs/Test/ZkappMetrics.dhall
@@ -2,6 +2,7 @@ let Prelude = ../../External/Prelude.dhall
let S = ../../Lib/SelectFiles.dhall
let Cmd = ../../Lib/Cmds.dhall
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let Command = ../../Command/Base.dhall
let RunInToolchain = ../../Command/RunInToolchain.dhall
let WithCargo = ../../Command/WithCargo.dhall
@@ -20,6 +21,7 @@ Pipeline.build
]
, path = "Test"
, name = "ZkappMetrics"
+ , tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
}
, steps =
[ Command.build
diff --git a/buildkite/src/Jobs/Test/ZkappTestToolUnitTest.dhall b/buildkite/src/Jobs/Test/ZkappTestToolUnitTest.dhall
index 45b00d8c2b5..7d0996abe30 100644
--- a/buildkite/src/Jobs/Test/ZkappTestToolUnitTest.dhall
+++ b/buildkite/src/Jobs/Test/ZkappTestToolUnitTest.dhall
@@ -5,6 +5,7 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -41,7 +42,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = unitDirtyWhen,
path = "Test",
- name = "ZkappTestToolUnitTest"
+ name = "ZkappTestToolUnitTest",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
},
steps = [
buildTestCmd "dev" "src/app/zkapp_test_transaction" Size.Small
diff --git a/buildkite/src/Jobs/Test/ZkappsExamplesTest.dhall b/buildkite/src/Jobs/Test/ZkappsExamplesTest.dhall
index 0fcdc592d98..9a6e4be6c95 100644
--- a/buildkite/src/Jobs/Test/ZkappsExamplesTest.dhall
+++ b/buildkite/src/Jobs/Test/ZkappsExamplesTest.dhall
@@ -5,6 +5,7 @@ let S = ../../Lib/SelectFiles.dhall
let D = S.PathPattern
let Pipeline = ../../Pipeline/Dsl.dhall
+let PipelineTag = ../../Pipeline/Tag.dhall
let JobSpec = ../../Pipeline/JobSpec.dhall
let Command = ../../Command/Base.dhall
@@ -43,7 +44,8 @@ Pipeline.build
JobSpec::{
dirtyWhen = unitDirtyWhen,
path = "Test",
- name = "ZkappsExamplesTest"
+ name = "ZkappsExamplesTest",
+ tags = [ PipelineTag.Type.Fast, PipelineTag.Type.Test ]
},
steps = [
buildTestCmd "dev" Size.XLarge
diff --git a/buildkite/src/Monorepo.dhall b/buildkite/src/Monorepo.dhall
index 64783a0582f..3281333b8e6 100644
--- a/buildkite/src/Monorepo.dhall
+++ b/buildkite/src/Monorepo.dhall
@@ -11,12 +11,14 @@ let Docker = ./Command/Docker/Type.dhall
let JobSpec = ./Pipeline/JobSpec.dhall
let Pipeline = ./Pipeline/Dsl.dhall
let PipelineMode = ./Pipeline/Mode.dhall
-let PipelineStage = ./Pipeline/Stage.dhall
+let PipelineFilter = ./Pipeline/Filter.dhall
+let PipelineTag = ./Pipeline/Tag.dhall
let Size = ./Command/Size.dhall
let triggerCommand = ./Pipeline/TriggerCommand.dhall
let mode = env:BUILDKITE_PIPELINE_MODE as Text ? "PullRequest"
-let stage = env:BUILDKITE_PIPELINE_STAGE as Text ? "Test"
+let include_tags = env:BUILDKITE_PIPELINE_TAGS_INCL as Text ? "Fast"
+let exclude_tags = env:BUILDKITE_PIPELINE_TAGS_EXCL as Text ? ""
let jobs : List JobSpec.Type =
List/map
@@ -33,34 +35,36 @@ let prefixCommands = [
-- Run a job if we touched a dirty path
-let commands: Text -> Text -> List Cmd.Type = \(targetStage: Text) -> \(targetMode: Text) ->
+let commands: PipelineFilter.Type -> PipelineMode.Type -> List Cmd.Type = \(filter: PipelineFilter.Type) -> \(mode: PipelineMode.Type) ->
Prelude.List.map
JobSpec.Type
Cmd.Type
(\(job: JobSpec.Type) ->
let jobMode = PipelineMode.capitalName job.mode
- let jobStage = PipelineStage.capitalName job.stage
-
+ let targetMode = PipelineMode.capitalName mode
+ let targetTags = PipelineFilter.tags filter
+ let filter = PipelineFilter.show filter
+ let isIncluded = Prelude.Bool.show (PipelineTag.contains job.tags targetTags)
let dirtyWhen = SelectFiles.compile job.dirtyWhen
let trigger = triggerCommand "src/Jobs/${job.path}/${job.name}.dhall"
let pipelineHandlers = {
PullRequest = ''
if [ "${targetMode}" == "PullRequest" ]; then
- if [ "${jobStage}" == "${targetStage}" ]; then
+ if [ "${isIncluded}" == "True" ]; then
if (cat _computed_diff.txt | egrep -q '${dirtyWhen}'); then
echo "Triggering ${job.name} for reason:"
cat _computed_diff.txt | egrep '${dirtyWhen}'
${Cmd.format trigger}
fi
else
- echo "Skipping ${job.name} because this is a ${targetStage} stage"
+ echo "Skipping ${job.name} because this is a ${filter} stage"
fi
else
- if [ "${jobStage}" == "${targetStage}" ]; then
+ if [ "${isIncluded}" == "True" ]; then
echo "Triggering ${job.name} because this is a stable buildkite run"
${Cmd.format trigger}
else
- echo "Skipping ${job.name} because this is a ${targetStage} stage"
+ echo "Skipping ${job.name} because this is a ${filter} stage"
fi
fi
'',
@@ -68,11 +72,11 @@ let commands: Text -> Text -> List Cmd.Type = \(targetStage: Text) -> \(target
if [ "${targetMode}" == "PullRequest" ]; then
echo "Skipping ${job.name} because this is a PR buildkite run"
else
- if [ "${jobStage}" == "${targetStage}" ]; then
+ if [ "${isIncluded}" == "True" ]; then
echo "Triggering ${job.name} because this is a stable buildkite run"
${Cmd.format trigger}
else
- echo "Skipping ${job.name} because this is a ${targetStage} stage"
+ echo "Skipping ${job.name} because this is a ${filter} stage"
fi
fi
''
@@ -81,23 +85,29 @@ let commands: Text -> Text -> List Cmd.Type = \(targetStage: Text) -> \(target
)
jobs
-in Pipeline.build Pipeline.Config::{
- spec = JobSpec::{
- name = "monorepo-triage",
- -- TODO: Clean up this code so we don't need an unused dirtyWhen here
- dirtyWhen = [ SelectFiles.everything ]
- },
- steps = [
- Command.build
- Command.Config::{
- commands = prefixCommands # (commands stage mode),
- label = "Monorepo triage ${stage}",
- key = "cmds-${stage}",
- target = Size.Small,
- docker = Some Docker::{
- image = (./Constants/ContainerImages.dhall).toolchainBase,
- environment = ["BUILDKITE_AGENT_ACCESS_TOKEN", "BUILDKITE_INCREMENTAL"]
- }
- }
- ]
-}
+in
+
+(\(args : { filter : PipelineFilter.Type, mode: PipelineMode.Type }) ->
+ let pipelineType = Pipeline.build Pipeline.Config::{
+ spec = JobSpec::{
+ name = "monorepo-triage-${PipelineFilter.show args.filter}",
+ -- TODO: Clean up this code so we don't need an unused dirtyWhen here
+ dirtyWhen = [ SelectFiles.everything ]
+ },
+ steps = [
+ Command.build
+ Command.Config::{
+ commands = prefixCommands # (commands args.filter args.mode),
+ label = "Monorepo triage ${PipelineFilter.show args.filter}",
+ key = "cmds-${PipelineFilter.show args.filter}",
+ target = Size.Small,
+ docker = Some Docker::{
+ image = (./Constants/ContainerImages.dhall).toolchainBase,
+ environment = ["BUILDKITE_AGENT_ACCESS_TOKEN", "BUILDKITE_INCREMENTAL"]
+ }
+ }
+ ]
+ }
+ in pipelineType.pipeline
+)
+
diff --git a/buildkite/src/Pipeline/Filter.dhall b/buildkite/src/Pipeline/Filter.dhall
new file mode 100644
index 00000000000..fc9cd7dd5ac
--- /dev/null
+++ b/buildkite/src/Pipeline/Filter.dhall
@@ -0,0 +1,33 @@
+-- Filter defines a named group of tags used to select which jobs run in a pipeline.
+-- Using filters one can tailor a pipeline for any need. Each job should be tagged with one or several tags,
+-- then in the pipeline settings we can define which tagged jobs to include or exclude in the pipeline.
+
+let Prelude = ../External/Prelude.dhall
+let Tag = ./Tag.dhall
+
+let Filter : Type = < FastOnly | Long | LongAndVeryLong | TearDownOnly | AllTests >
+
+let tags: Filter -> List Tag.Type = \(filter: Filter) ->
+ merge {
+ FastOnly = [ Tag.Type.Fast ]
+ , LongAndVeryLong = [ Tag.Type.Long, Tag.Type.VeryLong ]
+ , Long = [ Tag.Type.Long ]
+ , TearDownOnly = [ Tag.Type.TearDown ]
+ , AllTests = [ Tag.Type.Lint, Tag.Type.Release, Tag.Type.Test ]
+ } filter
+
+let show: Filter -> Text = \(filter: Filter) ->
+ merge {
+ FastOnly = "FastOnly"
+ , LongAndVeryLong = "LongAndVeryLong"
+ , Long = "Long"
+ , TearDownOnly = "TearDownOnly"
+ , AllTests = "AllTests"
+ } filter
+
+in
+{
+ Type = Filter,
+ tags = tags,
+ show = show
+}
\ No newline at end of file
diff --git a/buildkite/src/Pipeline/JobSpec.dhall b/buildkite/src/Pipeline/JobSpec.dhall
index 187835c7d90..ea0373a0c2d 100644
--- a/buildkite/src/Pipeline/JobSpec.dhall
+++ b/buildkite/src/Pipeline/JobSpec.dhall
@@ -1,6 +1,6 @@
let SelectFiles = ../Lib/SelectFiles.dhall
let PipelineMode = ./Mode.dhall
-let PipelineStage = ./Stage.dhall
+let PipelineTag = ./Tag.dhall
in
@@ -11,12 +11,12 @@ in
path: Text,
name: Text,
mode: PipelineMode.Type,
- stage: PipelineStage.Type,
+ tags: List PipelineTag.Type,
dirtyWhen: List SelectFiles.Type
},
default = {
path = ".",
mode = PipelineMode.Type.PullRequest,
- stage = PipelineStage.Type.Test
+ tags = [ PipelineTag.Type.Fast ]
}
}
diff --git a/buildkite/src/Pipeline/Stage.dhall b/buildkite/src/Pipeline/Stage.dhall
deleted file mode 100644
index fffad54c7a2..00000000000
--- a/buildkite/src/Pipeline/Stage.dhall
+++ /dev/null
@@ -1,44 +0,0 @@
--- Mode defines pipeline stages
---
--- A pipeline in order to be faster and more cost efficient can have up to 3 stages
--- Between each stages there is a '- wait' step defined which cause buildkite to wait
--- for ALL jobs to complete before running any job from next stage.
--- Current design defines three stages:
--- - Stage 1 -> contains fastest and most independent jobs which are supposed to provide quickest feedback possible
--- - Stage 2 -> contains heavy jobs that should be run only on clean code (no merges issues or lints problems)
--- - Tear down -> should contains all clean up or reporting jobs. For example test coverage gathering
-
-let Prelude = ../External/Prelude.dhall
-
-let Stage : Type = < Test | TearDown >
-
-let toNatural: Stage -> Natural = \(stage: Stage) ->
- merge {
- Test = 1
- , TearDown = 2
- } stage
-
-let equal: Stage -> Stage -> Bool = \(left: Stage) -> \(right: Stage) ->
- Prelude.Natural.equal (toNatural left) (toNatural right)
-
-let capitalName = \(stage : Stage) ->
- merge {
- Test = "Test"
- , TearDown = "TearDown"
- } stage
-
-let lowerName = \(stage : Stage) ->
- merge {
- Test = "test"
- , TearDown = "tearDown"
- } stage
-
-
-in
-{
- Type = Stage,
- capitalName = capitalName,
- lowerName = lowerName,
- toNatural = toNatural,
- equal = equal
-}
\ No newline at end of file
diff --git a/buildkite/src/Pipeline/Tag.dhall b/buildkite/src/Pipeline/Tag.dhall
new file mode 100644
index 00000000000..b6018062e67
--- /dev/null
+++ b/buildkite/src/Pipeline/Tag.dhall
@@ -0,0 +1,63 @@
+-- Tag defines a label attached to pipeline jobs.
+-- Using tags one can tailor a pipeline for any need. Each job should be tagged with one or several tags,
+-- then in the pipeline settings we can define which tagged jobs to include or exclude in the pipeline.
+
+let Prelude = ../External/Prelude.dhall
+let List/any = Prelude.List.any
+
+let Tag : Type = < Fast | Long | VeryLong | TearDown | Lint | Release | Test >
+
+let toNatural: Tag -> Natural = \(tag: Tag) ->
+ merge {
+ Fast = 1
+ , Long = 2
+ , VeryLong = 3
+ , TearDown = 4
+ , Lint = 5
+ , Release = 6
+ , Test = 7
+ } tag
+
+let equal: Tag -> Tag -> Bool = \(left: Tag) -> \(right: Tag) ->
+ Prelude.Natural.equal (toNatural left) (toNatural right)
+
+
+let hasAny: Tag -> List Tag -> Bool = \(input: Tag) -> \(tags: List Tag) ->
+ List/any Tag (\(x: Tag) -> equal x input ) tags
+
+let contains: List Tag -> List Tag -> Bool = \(input: List Tag) -> \(tags: List Tag) ->
+ List/any Tag (\(x: Tag) -> hasAny x tags ) input
+
+let capitalName = \(tag : Tag) ->
+ merge {
+ Fast = "Fast"
+ , Long = "Long"
+ , VeryLong = "VeryLong"
+ , TearDown = "TearDown"
+ , Lint = "Lint"
+ , Release = "Release"
+ , Test = "Test"
+ } tag
+
+let lowerName = \(tag : Tag) ->
+ merge {
+ Fast = "fast"
+ , Long = "long"
+ , VeryLong = "veryLong"
+ , TearDown = "tearDown"
+ , Lint = "lint"
+ , Release = "release"
+ , Test = "test"
+ } tag
+
+
+in
+{
+ Type = Tag,
+ capitalName = capitalName,
+ lowerName = lowerName,
+ toNatural = toNatural,
+ equal = equal,
+ hasAny = hasAny,
+ contains = contains
+}
\ No newline at end of file
diff --git a/buildkite/src/Prepare.dhall b/buildkite/src/Prepare.dhall
index 101e22be770..5b78ff24369 100644
--- a/buildkite/src/Prepare.dhall
+++ b/buildkite/src/Prepare.dhall
@@ -8,11 +8,14 @@ let Command = ./Command/Base.dhall
let Docker = ./Command/Docker/Type.dhall
let JobSpec = ./Pipeline/JobSpec.dhall
let Pipeline = ./Pipeline/Dsl.dhall
+let PipelineMode = ./Pipeline/Mode.dhall
+let PipelineFilter = ./Pipeline/Filter.dhall
+let PipelineTag = ./Pipeline/Tag.dhall
let Size = ./Command/Size.dhall
let triggerCommand = ./Pipeline/TriggerCommand.dhall
let mode = env:BUILDKITE_PIPELINE_MODE as Text ? "PullRequest"
-let stage = env:BUILDKITE_PIPELINE_STAGE as Text ? "Test"
+let filter = env:BUILDKITE_PIPELINE_FILTER as Text ? "FastOnly"
let config : Pipeline.Config.Type = Pipeline.Config::{
spec = JobSpec::{
@@ -24,12 +27,12 @@ let config : Pipeline.Config.Type = Pipeline.Config::{
Command.build Command.Config::{
commands = [
Cmd.run "export BUILDKITE_PIPELINE_MODE=${mode}",
- Cmd.run "export BUILDKITE_PIPELINE_STAGE=${stage}",
+ Cmd.run "export BUILDKITE_PIPELINE_FILTER=${filter}",
Cmd.run "./buildkite/scripts/generate-jobs.sh > buildkite/src/gen/Jobs.dhall",
- triggerCommand "src/Monorepo.dhall"
+ Cmd.quietly "dhall-to-yaml --quoted <<< '(./buildkite/src/Monorepo.dhall) { mode=(./buildkite/src/Pipeline/Mode.dhall).Type.${mode}, filter=(./buildkite/src/Pipeline/Filter.dhall).Type.${filter} }' | buildkite-agent pipeline upload"
],
label = "Prepare monorepo triage",
- key = "monorepo-${stage}",
+ key = "monorepo-${mode}-${filter}",
target = Size.Small,
docker = Some Docker::{
image = (./Constants/ContainerImages.dhall).toolchainBase,
diff --git a/dockerfiles/stages/1-build-deps b/dockerfiles/stages/1-build-deps
index 8cef1afb96f..5202a830a59 100644
--- a/dockerfiles/stages/1-build-deps
+++ b/dockerfiles/stages/1-build-deps
@@ -20,11 +20,15 @@ ARG GO_VERSION=1.19.11
ARG GO_CAPNP_VERSION=v3.0.0-alpha.5
# Rust Version passed into rustup-init, can also be "stable", "nightly" or similar
-ARG RUST_VERSION=1.63.0
+# This should stay in line with:
+# - src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml
+# - src/lib/crypto/proof-systems/rust-toolchain.toml
+ARG RUST_VERSION=1.72
# Nightly Rust Version used for WebAssembly builds
-ARG RUST_NIGHTLY=2022-09-12
+# - src/lib/snarkyjs/src/bindings/kimchi/wasm/rust-toolchain.toml
+ARG RUST_NIGHTLY=2023-09-01
# wasm-pack version
-ARG WASM_PACK_VERSION=v0.10.3
+ARG WASM_PACK_VERSION=v0.12.1
# Rocksdb commit tag/branch to clone
ARG ROCKSDB_VERSION=v5.18.4
diff --git a/docs/README.md b/docs/README.md
index e12901ca674..b617246d2e7 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -2,4 +2,4 @@
The docs for the Mina Protocol website are published on [docs.minaprotocol.com](https://docs.minaprotocol.com/).
-The docs repository is [https://github.com/o1-labs/docs2/)https://github.com/o1-labs/docs2/](https://github.com/o1-labs/docs2/)https://github.com/o1-labs/docs2/).
+The docs repository is [https://github.com/o1-labs/docs2/](https://github.com/o1-labs/docs2/).
diff --git a/flake.nix b/flake.nix
index 1bae798d9b0..a27a93805ab 100644
--- a/flake.nix
+++ b/flake.nix
@@ -287,12 +287,12 @@
# Main user-facing binaries.
packages = rec {
inherit (ocamlPackages)
- mina mina_tests mina-ocaml-format test_executive;
+ mina mina_tests mina-ocaml-format mina_client_sdk test_executive with-instrumentation;
inherit (pkgs)
libp2p_helper kimchi_bindings_stubs snarky_js leaderboard
validation trace-tool zkapp-cli;
inherit (dockerImages)
- mina-image-slim mina-image-full mina-archive-image-full;
+ mina-image-slim mina-image-full mina-archive-image-full mina-image-instr-full;
mina-deb = debianPackages.mina;
default = mina;
};
diff --git a/frontend/ci-build-me/README.md b/frontend/ci-build-me/README.md
index 5b3de329115..bf33574f65b 100644
--- a/frontend/ci-build-me/README.md
+++ b/frontend/ci-build-me/README.md
@@ -27,3 +27,14 @@ gcloud functions deploy githubWebhookHandler \
```
This deploys to https://us-central1-o1labs-192920.cloudfunctions.net/githubWebhookHandler
+
+## Update Branch Protection Rules
+
+In order to gate a new branch with this mechanism, GitHub needs to see this job run at least once (generally we don't actually run a job here,
+ we just block on its existence). This means that if months pass between changes, GitHub will stop showing the buildkite/mina-pr-gating job
+ in its UI, and therefore you cannot block new branches on it.
+
+To fix this, run the PR gating job manually in the buildkite UI here: https://buildkite.com/o-1-labs-2/mina-pr-gating
+
+Just running the job once will re-populate it in github's dropdown menus so that you can add the gate to a new branch.
+This does not require a redeploy unless you're also intending to change the mechanism of activation or the list of users with this power.
diff --git a/frontend/ci-build-me/src/index.js b/frontend/ci-build-me/src/index.js
index fcfa0dd796f..9873982f2cd 100644
--- a/frontend/ci-build-me/src/index.js
+++ b/frontend/ci-build-me/src/index.js
@@ -64,9 +64,8 @@ const getRequest = async (url) => {
const handler = async (event, req) => {
const buildkiteTrigger = {};
- if (event == "issue_comment") {
- // PR Gating Lifting section
- if (
+ // PR Gating Lifting section
+ if (
// we are creating the comment
req.body.action == "created" &&
// and this is actually a pull request
@@ -92,11 +91,11 @@ const handler = async (event, req) => {
"mina-pr-gating",
{ PR_GATE: "lifted" }
);
- return [buildkite, null];
+ return buildkite;
} else {
return [
- "comment author is not (publically) a member of the core team",
- "comment author is not (publically) a member of the core team",
+ "comment author is not authorized to approve for mainnet",
+ "comment author is not authorized to approve for mainnet",
];
}
}
@@ -171,9 +170,8 @@ const handler = async (event, req) => {
];
}
}
- }
- return [null, null];
-};
+ return null;
+ };
/**
* HTTP Cloud Function for GitHub Webhook events.
@@ -201,24 +199,16 @@ exports.githubWebhookHandler = async (req, res) => {
github.validateWebhook(req);
const githubEvent = req.headers["x-github-event"];
- const [buildkite, circle] = await handler(githubEvent, req);
+ const buildkite = await handler(githubEvent, req);
if (buildkite && buildkite.web_url) {
console.info(`Triggered buildkite build at ${buildkite.web_url}`);
} else {
console.error(`Failed to trigger buildkite build for some reason:`);
console.error(buildkite);
}
-
- if (circle && circle.number) {
- console.info(`Triggered circle build #${circle.number}`);
- } else {
- console.error(`Failed to trigger circle build for some reason:`);
- console.error(circle);
- }
-
res.status(200);
console.info(`HTTP 200: ${githubEvent} event`);
- res.send({ buildkite, circle } || {});
+ res.send({ buildkite } || {});
} catch (e) {
if (e instanceof HTTPError) {
res.status(e.statusCode).send(e.message);
diff --git a/graphql_schema.json b/graphql_schema.json
index 0bd86aa2767..e5a343e5f07 100644
--- a/graphql_schema.json
+++ b/graphql_schema.json
@@ -6375,68 +6375,6 @@
"enumValues": null,
"possibleTypes": null
},
- {
- "kind": "ENUM",
- "name": "sign",
- "description": null,
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": [
- {
- "name": "PLUS",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "MINUS",
- "description": null,
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "possibleTypes": null
- },
- {
- "kind": "OBJECT",
- "name": "SignedFee",
- "description": "Signed fee",
- "fields": [
- {
- "name": "sign",
- "description": "+/-",
- "args": [],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": { "kind": "ENUM", "name": "sign", "ofType": null }
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
- {
- "name": "feeMagnitude",
- "description": "Fee",
- "args": [],
- "type": {
- "kind": "NON_NULL",
- "name": null,
- "ofType": {
- "kind": "SCALAR",
- "name": "Amount",
- "ofType": null
- }
- },
- "isDeprecated": false,
- "deprecationReason": null
- }
- ],
- "inputFields": null,
- "interfaces": [],
- "enumValues": null,
- "possibleTypes": null
- },
{
"kind": "OBJECT",
"name": "WorkDescription",
@@ -6521,7 +6459,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "SignedFee",
+ "name": "FeeExcess",
"ofType": null
}
},
@@ -6553,7 +6491,7 @@
"name": null,
"ofType": {
"kind": "OBJECT",
- "name": "SignedFee",
+ "name": "SignedAmount",
"ofType": null
}
},
@@ -10467,81 +10405,192 @@
"possibleTypes": null
},
{
- "kind": "SCALAR",
- "name": "PendingCoinbaseHash",
- "description":
- "Base58Check-encoded hash of a pending coinbase hash",
- "fields": null,
- "inputFields": null,
- "interfaces": null,
- "enumValues": null,
- "possibleTypes": null
- },
- {
- "kind": "SCALAR",
- "name": "PendingCoinbaseAuxHash",
- "description":
- "Base58Check-encoded hash of a pending coinbase auxiliary hash",
- "fields": null,
+ "kind": "OBJECT",
+ "name": "SignedFee",
+ "description": "Signed fee",
+ "fields": [
+ {
+ "name": "sign",
+ "description": "+/-",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": { "kind": "ENUM", "name": "sign", "ofType": null }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "feeMagnitude",
+ "description": "Fee",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": { "kind": "SCALAR", "name": "Fee", "ofType": null }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
"inputFields": null,
- "interfaces": null,
+ "interfaces": [],
"enumValues": null,
"possibleTypes": null
},
{
- "kind": "SCALAR",
- "name": "StagedLedgerAuxHash",
- "description":
- "Base58Check-encoded hash of the staged ledger hash's aux_hash",
- "fields": null,
+ "kind": "OBJECT",
+ "name": "FeeExcess",
+ "description": "Fee excess divided into left, right components",
+ "fields": [
+ {
+ "name": "feeTokenLeft",
+ "description": "Token id for left component of fee excess",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "TokenId",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "feeExcessLeft",
+ "description": "Fee for left component of fee excess",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SignedFee",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "feeTokenRight",
+ "description": "Token id for right component of fee excess",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "TokenId",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "feeExcessRight",
+ "description": "Fee for right component of fee excess",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SignedFee",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
"inputFields": null,
- "interfaces": null,
+ "interfaces": [],
"enumValues": null,
"possibleTypes": null
},
{
- "kind": "SCALAR",
- "name": "LedgerHash",
- "description": "Base58Check-encoded ledger hash",
+ "kind": "ENUM",
+ "name": "sign",
+ "description": null,
"fields": null,
"inputFields": null,
"interfaces": null,
- "enumValues": null,
+ "enumValues": [
+ {
+ "name": "PLUS",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "MINUS",
+ "description": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
"possibleTypes": null
},
{
"kind": "OBJECT",
- "name": "BlockchainState",
- "description": null,
+ "name": "SignedAmount",
+ "description": "Signed amount",
"fields": [
{
- "name": "date",
- "description":
- "date (stringified Unix time - number of milliseconds since January 1, 1970)",
+ "name": "sign",
+ "description": "+/-",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": { "kind": "ENUM", "name": "sign", "ofType": null }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "amountMagnitude",
+ "description": "Amount",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "BlockTime",
+ "name": "Amount",
"ofType": null
}
},
"isDeprecated": false,
"deprecationReason": null
- },
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "LocalState",
+ "description": null,
+ "fields": [
{
- "name": "utcDate",
- "description":
- "utcDate (stringified Unix time - number of milliseconds since January 1, 1970). Time offsets are adjusted to reflect true wall-clock time instead of genesis time.",
+ "name": "stackFrame",
+ "description": "Stack frame component of local state",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "BlockTime",
+ "name": "FieldElem",
"ofType": null
}
},
@@ -10549,15 +10598,15 @@
"deprecationReason": null
},
{
- "name": "snarkedLedgerHash",
- "description": "Base58Check-encoded hash of the snarked ledger",
+ "name": "callStack",
+ "description": "Call stack component of local state",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "LedgerHash",
+ "name": "FieldElem",
"ofType": null
}
},
@@ -10565,16 +10614,16 @@
"deprecationReason": null
},
{
- "name": "stagedLedgerHash",
+ "name": "transactionCommitment",
"description":
- "Base58Check-encoded hash of the staged ledger hash's main ledger hash",
+ "Transaction commitment component of local state",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "LedgerHash",
+ "name": "FieldElem",
"ofType": null
}
},
@@ -10582,16 +10631,16 @@
"deprecationReason": null
},
{
- "name": "stagedLedgerAuxHash",
+ "name": "fullTransactionCommitment",
"description":
- "Base58Check-encoded hash of the staged ledger hash's aux_hash",
+ "Full transaction commitment component of local state",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
- "name": "StagedLedgerAuxHash",
+ "name": "FieldElem",
"ofType": null
}
},
@@ -10599,16 +10648,15 @@
"deprecationReason": null
},
{
- "name": "stagedLedgerPendingCoinbaseAux",
- "description":
- "Base58Check-encoded staged ledger hash's pending_coinbase_aux",
+ "name": "excess",
+ "description": "Excess component of local state",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "PendingCoinbaseAuxHash",
+ "kind": "OBJECT",
+ "name": "SignedAmount",
"ofType": null
}
},
@@ -10616,16 +10664,15 @@
"deprecationReason": null
},
{
- "name": "stagedLedgerPendingCoinbaseHash",
- "description":
- "Base58Check-encoded hash of the staged ledger hash's pending_coinbase_hash",
+ "name": "supplyIncrease",
+ "description": "Supply increase component of local state",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
- "kind": "SCALAR",
- "name": "PendingCoinbaseHash",
+ "kind": "OBJECT",
+ "name": "SignedAmount",
"ofType": null
}
},
@@ -10633,7 +10680,551 @@
"deprecationReason": null
},
{
- "name": "stagedLedgerProofEmitted",
+ "name": "ledger",
+ "description": "Ledger component of local state",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "success",
+ "description": "Success component of local state",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "accountUpdateIndex",
+ "description": "Account update index component of local state",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "UInt32",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "failureStatusTable",
+ "description": "Failure status table component of local state",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
+ }
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "willSucceed",
+ "description": "Will-succeed component of local state",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "StateStack",
+ "description": null,
+ "fields": [
+ {
+ "name": "initial",
+ "description": "Initial hash",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "FieldElem",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "current",
+ "description": "Current hash",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "FieldElem",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "PendingCoinbaseStack",
+ "description": null,
+ "fields": [
+ {
+ "name": "dataStack",
+ "description": "Data component of pending coinbase stack",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "FieldElem",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "stateStack",
+ "description": "State component of pending coinbase stack",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "StateStack",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "Registers",
+ "description": null,
+ "fields": [
+ {
+ "name": "firstPassLedger",
+ "description": "First pass ledger hash",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "secondPassLedger",
+ "description": "Second pass ledger hash",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "pendingCoinbaseStack",
+ "description": "Pending coinbase stack",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "PendingCoinbaseStack",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "localState",
+ "description": "Local state",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "LocalState",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "SnarkedLedgerState",
+ "description": null,
+ "fields": [
+ {
+ "name": "sourceRegisters",
+ "description": null,
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Registers",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "targetRegisters",
+ "description": null,
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "Registers",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "connectingLedgerLeft",
+ "description": null,
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "connectingLedgerRight",
+ "description": null,
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "supplyIncrease",
+ "description": null,
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SignedAmount",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "feeExcess",
+ "description": null,
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "FeeExcess",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "sokDigest",
+ "description": "Placeholder for SOK digest",
+ "args": [],
+ "type": { "kind": "SCALAR", "name": "String", "ofType": null },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "PendingCoinbaseHash",
+ "description":
+ "Base58Check-encoded hash of a pending coinbase hash",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "PendingCoinbaseAuxHash",
+ "description":
+ "Base58Check-encoded hash of a pending coinbase auxiliary hash",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "StagedLedgerAuxHash",
+ "description":
+ "Base58Check-encoded hash of the staged ledger hash's aux_hash",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "description": "Base58Check-encoded ledger hash",
+ "fields": null,
+ "inputFields": null,
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "BlockchainState",
+ "description": null,
+ "fields": [
+ {
+ "name": "date",
+ "description":
+ "date (stringified Unix time - number of milliseconds since January 1, 1970)",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "BlockTime",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "utcDate",
+ "description":
+ "utcDate (stringified Unix time - number of milliseconds since January 1, 1970). Time offsets are adjusted to reflect true wall-clock time instead of genesis time.",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "BlockTime",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "snarkedLedgerHash",
+ "description": "Base58Check-encoded hash of the snarked ledger",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "stagedLedgerHash",
+ "description":
+ "Base58Check-encoded hash of the staged ledger hash's main ledger hash",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "LedgerHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "stagedLedgerAuxHash",
+ "description":
+ "Base58Check-encoded hash of the staged ledger hash's aux_hash",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "StagedLedgerAuxHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "stagedLedgerPendingCoinbaseAux",
+ "description":
+ "Base58Check-encoded staged ledger hash's pending_coinbase_aux",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "PendingCoinbaseAuxHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "stagedLedgerPendingCoinbaseHash",
+ "description":
+ "Base58Check-encoded hash of the staged ledger hash's pending_coinbase_hash",
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "PendingCoinbaseHash",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "stagedLedgerProofEmitted",
"description":
"Block finished a staged ledger, and a proof was emitted from it and included into this block's proof. If there is no transition frontier available or no block found, this will return null.",
"args": [],
@@ -10641,6 +11232,22 @@
"isDeprecated": false,
"deprecationReason": null
},
+ {
+ "name": "ledgerProofStatement",
+ "description": null,
+ "args": [],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "SnarkedLedgerState",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
"name": "bodyReference",
"description":
diff --git a/helm/archive-node/templates/db-bootstrap.yaml b/helm/archive-node/templates/db-bootstrap.yaml
index 8267a0cff7d..0735a8bb267 100644
--- a/helm/archive-node/templates/db-bootstrap.yaml
+++ b/helm/archive-node/templates/db-bootstrap.yaml
@@ -9,7 +9,7 @@ spec:
containers:
{{- if .Values.archive.initFromDump }}
- name: import-dump
- image: gcr.io/o1labs-192920/postgresql-curl:latest
+ image: postgres:15-alpine
env:
- name: PGPASSWORD
valueFrom:
@@ -19,6 +19,7 @@ spec:
command: ["bash", "-c"]
args:
- 'sleep 30
+ && apk add curl
&& cd /tmp
&& curl https://storage.googleapis.com/mina-archive-dumps/{{ .Values.testnetName }}-archive-dump-$(date -Idate)_0000.sql.tar.gz -o {{ .Values.testnetName }}-archive-dump.tar.gz
&& tar -xvf {{ .Values.testnetName }}-archive-dump.tar.gz
@@ -37,7 +38,7 @@ spec:
-c "ALTER DATABASE {{ .Values.postgresql.auth.database }} SET DEFAULT_TRANSACTION_ISOLATION TO SERIALIZABLE;"'
{{- else }}
- name: import-schema
- image: gcr.io/o1labs-192920/postgresql-curl:latest
+ image: postgres:15-alpine
env:
- name: PGPASSWORD
valueFrom:
@@ -47,6 +48,7 @@ spec:
command: ["bash", "-c"]
args:
- 'sleep 30
+ && apk add curl
&& cd /tmp
&& {{ range .Values.archive.remoteSchemaAuxFiles }} curl -O {{.}} && {{ end }}
psql
diff --git a/helm/cron_jobs/README.md b/helm/cron_jobs/README.md
new file mode 100644
index 00000000000..edd63378621
--- /dev/null
+++ b/helm/cron_jobs/README.md
@@ -0,0 +1,18 @@
+Replayer cron jobs
+==================
+
+There are replayer cron jobs for Mainnet, Devnet, and Berkeley. These
+jobs are run daily, to replay a day's worth of transactions.
+
+Each cron job downloads the most recent archive dump corresponding to
+a network, and loads the data into PostgreSQL. That results in an
+archive database. The most recent replayer checkpoint file is
+downloaded, which provides the starting point for the replayer. When
+the replayer runs, it creates new checkpoint files every 50
+blocks. When the replayer finishes, it uploads the most recent
+checkpoint file, so it can be used in the following day's run. If
+there are any errors, the replayer logs are also uploaded.
+
+There is a separate checkpoint file bucket for each network. Both the
+checkpoint files and error files for a given network are uploaded to
+the same bucket.
diff --git a/nix/README.md b/nix/README.md
index 725f98976ac..5e07ffee572 100644
--- a/nix/README.md
+++ b/nix/README.md
@@ -169,7 +169,11 @@ Now, whenever you start vim from `nix develop mina#with-lsp`, it should just wor
##### Emacs
You need to install [tuareg](https://github.com/ocaml/tuareg) and a LSP client, like [lsp-mode](https://github.com/emacs-lsp/lsp-mode) or [eglot](https://github.com/joaotavora/eglot).
+You do not need to use [merlin](https://github.com/ocaml/merlin) directly (through `merlin-mode`), as the `ocaml-lsp-server` which the LSP client uses is backed by `merlin`.
+Note that LSP with flycheck and similar tools will not provide global project compilation functionality; they will focus on individual buffers instead.
+To compile the whole project you can still use `M-x compile` or anything else; compilation results will then be seen by LSP/flycheck.
This should just work without any configuration, as long as you start it from `nix develop mina#with-lsp`.
+If you prefer to have just one instance of `emacs` running, consider installing `direnv` as explained in the sections below: emacs packages [envrc](https://github.com/purcell/envrc) and [emacs-direnv](https://github.com/wbolster/emacs-direnv) (just `direnv` in MELPA) provide integration with the tool, allowing emacs to use nix-defined sandbox variables when the open buffer is a repository file.
### "Pure" build
@@ -210,7 +214,7 @@ branches, or otherwise changing the dependency tree of Mina.
TL;DR:
```
$(nix build mina#mina-image-full) | docker load
-# Also available: mina-image-slim, mina-archive-image-full
+# Also available: mina-image-slim, mina-image-instr, mina-archive-image-full
```
Since a "pure" build can happen entirely inside the Nix sandbox, we can use its
@@ -228,6 +232,8 @@ us-west2-docker.pkg.dev/o1labs-192920/nix-containers/mina-image-full:develop` .
The `slim` image only has the Mina daemon itself, whereas `full` images also
contain many useful tools, such as coreutils, fake init, jq, etc.
+The `instr` image is a replica of the `full` image with additional instrumentation data.
+
### Debian package
TL;DR:
@@ -376,6 +382,14 @@ networking inside the Nix sandbox (in order to vendor all the dependencies using
specified explicitly. This is the hash you're updating by running
`./nix/update-libp2p-hashes.sh`.
+### Notes on the instrumentation package
+
+`nix build mina#mina_with_instrumentation` allows building a special version of mina
+with instrumentation enabled. This can be helpful if one would like to verify
+the code coverage of end-to-end/manual tests performed over mina under development.
+Additionally there is a docker image available which wraps up the above mina build into a full mina image.
+One can prepare it using the command: `$(nix build mina#mina-image-instr-full --print-out-paths) | docker load`
+
### Discovering all the packages this Flake provides
`nix flake show` doesn't work due to
@@ -601,4 +615,4 @@ Before running any `dune` commands.
Alternatively, you can just run your commands inside `nix develop
--ignore-environment mina`, which unsets all the outside environment variables,
-resulting in a more reproducible but less convenient environment.
\ No newline at end of file
+resulting in a more reproducible but less convenient environment.
diff --git a/nix/docker.nix b/nix/docker.nix
index 748c2336952..6904a3f9681 100644
--- a/nix/docker.nix
+++ b/nix/docker.nix
@@ -1,5 +1,5 @@
{ lib, dockerTools, buildEnv, ocamlPackages_mina, runCommand, dumb-init
-, coreutils, bashInteractive, python3, libp2p_helper, procps, postgresql, curl
+, coreutils, findutils, bashInteractive, python3, libp2p_helper, procps, postgresql, curl
, jq, stdenv, rsync, bash, gnutar, gzip, currentTime, flockenzeit }:
let
created = flockenzeit.lib.ISO-8601 currentTime;
@@ -47,12 +47,13 @@ let
'';
};
- mkFullImage = name: packages: dockerTools.streamLayeredImage {
+ mkFullImage = name: packages: additional_envs: dockerTools.streamLayeredImage {
name = "${name}-full";
inherit created;
contents = [
dumb-init
coreutils
+ findutils
bashInteractive
python3
libp2p_helper
@@ -65,7 +66,7 @@ let
chmod 777 tmp
'';
config = {
- env = [ "MINA_TIME_OFFSET=0" ];
+ env = [ "MINA_TIME_OFFSET=0" ] ++ additional_envs;
WorkingDir = "/root";
cmd = [ "/bin/dumb-init" "/entrypoint.sh" ];
};
@@ -77,6 +78,7 @@ in {
inherit created;
contents = [ ocamlPackages_mina.mina.out ];
};
+
mina-image-full = mkFullImage "mina" (with ocamlPackages_mina; [
mina-build-config
mina-daemon-scripts
@@ -85,6 +87,18 @@ in {
mina.mainnet
mina.genesis
]);
+
+ # Image with enhanced binary capable of generating coverage report on mina exit
+ # For more details please visit: https://github.com/aantron/bisect_ppx/blob/master/doc/advanced.md#sigterm-handling
+ mina-image-instr-full = mkFullImage "mina-instr" (with ocamlPackages_mina; [
+ mina-build-config
+ mina-daemon-scripts
+
+ with_instrumentation.out
+ mina.mainnet
+ mina.genesis
+ ]) ["BISECT_SIGTERM=yes"];
+
mina-archive-image-full = mkFullImage "mina-archive" (with ocamlPackages_mina; [
mina-archive-scripts
gnutar
@@ -92,4 +106,4 @@ in {
mina.archive
]);
-}
+}
\ No newline at end of file
diff --git a/nix/go.nix b/nix/go.nix
index dc3619b5104..6e4aa6ec2b1 100644
--- a/nix/go.nix
+++ b/nix/go.nix
@@ -37,22 +37,22 @@ final: prev: {
src = ../src/app/libp2p_helper/src;
doCheck = false; # TODO: tests hang
vendorSha256 = let hashes = final.lib.importJSON ./libp2p_helper.json; in
- # sanity check, to make sure the fixed output drv doesn't keep working
+ # sanity check, to make sure the fixed output drv doesn't keep working
# when the inputs change
if builtins.hashFile "sha256" ../src/app/libp2p_helper/src/go.mod
- == hashes."go.mod"
- && builtins.hashFile "sha256" ../src/app/libp2p_helper/src/go.sum
- == hashes."go.sum" then
+ == hashes."go.mod"
+ && builtins.hashFile "sha256" ../src/app/libp2p_helper/src/go.sum
+ == hashes."go.sum" then
hashes.vendorSha256
else
final.lib.warn
- ''
- Below, you will find an error about a hash mismatch.
- This is likely because you have updated go.mod and/or go.sum in libp2p_helper.
- Please, locate the "got: " hash in the aforementioned error. If it's in SRI format ([35;1msha256-<...>[31;1m), copy the entire hash, including the `[35;1msha256-[31;1m'. Otherwise (if it's in the base32 format, like `[35;1msha256:<...>[31;1m'), copy only the base32 part, without `[35;1msha256:[31;1m'.
- Then, run [37;1m./nix/update-libp2p-hashes.sh [35;1m""[31;0m
- ''
- final.lib.fakeHash;
+ ''
+ Below, you will find an error about a hash mismatch.
+ This is likely because you have updated go.mod and/or go.sum in libp2p_helper.
+ Please, locate the "got: " hash in the aforementioned error. If it's in SRI format ([35;1msha256-<...>[31;1m), copy the entire hash, including the `[35;1msha256-[31;1m'. Otherwise (if it's in the base32 format, like `[35;1msha256:<...>[31;1m'), copy only the base32 part, without `[35;1msha256:[31;1m'.
+ Then, run [37;1m./nix/update-libp2p-hashes.sh [35;1m""[31;0m
+ ''
+ final.lib.fakeHash;
NO_MDNS_TEST = 1; # no multicast support inside the nix sandbox
overrideModAttrs = n: {
# Yo dawg
diff --git a/nix/javascript.nix b/nix/javascript.nix
index 68f451c1106..12791560f42 100644
--- a/nix/javascript.nix
+++ b/nix/javascript.nix
@@ -36,9 +36,9 @@ in {
# better error messages
# TODO: find a less hacky way to make adjustments to jsoo compiler output
# `s` is the jsoo representation of the error message string, and `s.c` is the actual JS string
- sed -i 's/function failwith(s){throw \[0,Failure,s\]/function failwith(s){throw joo_global_object.Error(s.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js
- sed -i 's/function invalid_arg(s){throw \[0,Invalid_argument,s\]/function invalid_arg(s){throw joo_global_object.Error(s.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js
- sed -i 's/return \[0,Exn,t\]/return joo_global_object.Error(t.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js
+ sed -i 's/function failwith(s){throw \[0,Failure,s\]/function failwith(s){throw globalThis.Error(s.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js
+ sed -i 's/function invalid_arg(s){throw \[0,Invalid_argument,s\]/function invalid_arg(s){throw globalThis.Error(s.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js
+ sed -i 's/return \[0,Exn,t\]/return globalThis.Error(t.c)/' "$BINDINGS_PATH"/snarky_js_node.bc.js
'';
npmBuild = "npm run build";
};
diff --git a/nix/ocaml.nix b/nix/ocaml.nix
index d6a0be0a9e8..95d5699d4f0 100644
--- a/nix/ocaml.nix
+++ b/nix/ocaml.nix
@@ -256,6 +256,22 @@ let
# Same as above, but wrapped with version info.
mina = wrapMina self.mina-dev { };
+ # Mina with additional instrumentation info.
+ with-instrumentation-dev = self.mina-dev.overrideAttrs (oa: {
+ pname = "with-instrumentation";
+ outputs = [ "out" ];
+
+ buildPhase = ''
+ dune build --display=short --profile=testnet_postake_medium_curves --instrument-with bisect_ppx src/app/cli/src/mina.exe
+ '';
+ installPhase = ''
+ mkdir -p $out/bin
+ mv _build/default/src/app/cli/src/mina.exe $out/bin/mina
+ '';
+ });
+
+ with-instrumentation = wrapMina self.with-instrumentation-dev { };
+
# Unit tests
mina_tests = runMinaCheck {
name = "tests";
@@ -338,4 +354,4 @@ let
test_executive = wrapMina self.test_executive-dev { };
};
-in scope.overrideScope' overlay
+in scope.overrideScope' overlay
\ No newline at end of file
diff --git a/nix/rust.nix b/nix/rust.nix
index 51726d84501..5619593f0f2 100644
--- a/nix/rust.nix
+++ b/nix/rust.nix
@@ -8,10 +8,16 @@ let
# override stdenv.targetPlatform here, if neccesary
};
toolchainHashes = {
- "1.67.0" = "sha256-riZUc+R9V35c/9e8KJUE+8pzpXyl0lRXt3ZkKlxoY0g=";
- "nightly-2023-02-05" =
- "sha256-MM8fdvveBEWzpwjH7u6C0F7qSWGPIMpfZWLgVxSqtxY=";
- # copy this line with the correct toolchain name
+ "1.72" = "sha256-dxE7lmCFWlq0nl/wKcmYvpP9zqQbBitAQgZ1zx9Ooik=";
+ "nightly-2023-09-01" = "sha256-zek9JAnRaoX8V0U2Y5ssXVe9tvoQ0ERGXfUCUGYdrMA=";
+ # copy the placeholder line with the correct toolchain name when adding a new toolchain
+ # That is,
+ # 1. Put the correct version name;
+ #
+ # 2. Put the hash you get in line "got" from the error you obtain, which looks like
+ # error: hash mismatch in fixed-output derivation '/nix/store/XXXXX'
+ # specified: sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
+ # got: sha256-Q9UgzzvxLi4x9aWUJTn+/5EXekC98ODRU1TwhUs9RnY=
"placeholder" = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
};
# rust-toolchain.toml -> { rustc, cargo, rust-analyzer, ... }
@@ -135,10 +141,10 @@ in
version = deps.wasm-bindgen.version;
src = final.fetchCrate {
inherit pname version;
- sha256 = "sha256-0rK+Yx4/Jy44Fw5VwJ3tG243ZsyOIBBehYU54XP/JGk=";
+ sha256 = "sha256-0u9bl+FkXEK2b54n7/l9JOCtKo+pb42GF9E1EnAUQa0=";
};
- cargoSha256 = "sha256-vcpxcRlW1OKoD64owFF6mkxSqmNrvY+y3Ckn5UwEQ50=";
+ cargoSha256 = "sha256-AsZBtE2qHJqQtuCt/wCAgOoxYMfvDh8IzBPAOkYSYko=";
nativeBuildInputs = [ final.pkg-config ];
buildInputs = with final;
@@ -150,8 +156,9 @@ in
checkInputs = [ final.nodejs ];
- # other tests require it to be ran in the wasm-bindgen monorepo
- cargoTestFlags = [ "--test=interface-types" ];
+ # other tests, like --test=wasm-bindgen, require it to be ran in the
+ # wasm-bindgen monorepo
+ cargoTestFlags = [ "--test=reference" ];
};
in
rustPlatform.buildRustPackage {
@@ -200,4 +207,3 @@ in
cargoLock.lockFile = ../src/app/trace-tool/Cargo.lock;
};
}
-
diff --git a/rfcs/0050-genesis-ledger-export.md b/rfcs/0050-genesis-ledger-export.md
new file mode 100644
index 00000000000..5b784076353
--- /dev/null
+++ b/rfcs/0050-genesis-ledger-export.md
@@ -0,0 +1,176 @@
+## Summary
+
+This RFC describes the procedure to generate a genesis ledger from a
+running network, using a node connected to that network.
+
+## Motivation
+
+The procedure described here is a part of the hard fork procedure,
+which aims at spawning a new network, being a direct continuation of
+the mainnet (or any other Mina network for that matter). To enable
+this, the ledger of the old network must be exported in some form and
+then fed into the newly created network. Because the new network's
+initial state can be fed into nodes in a configuration file, it makes
+sense to generate that file directly from the old node. Then necessary
+updates can be made to it manually to update various protocol
+constants, and then the new configuration file can be handed over to
+node operators.
+
+## Detailed design
+
+The genesis ledger export is achieved using a GraphQL field named
+`fork_config`. Asking for this field requires providing a slot or a
+state hash of the block that we want to base the exported ledger on.
+This field, if asked for, contains a new runtime configuration,
+automatically updated with:
+
+* the dump of the **staged ledger** at the fork point
+* updated values of `Fork_config`, i.e. previous state hash, previous
+blockchain length and previous global slot;
+* Current epoch ledger;
+* Current epoch data (total currency and seed);
+* Next epoch ledger;
+* Next epoch data (total currency and seed);
+* Protocol state at the fork point;
+
+**IMPORTANT**: as of now the `genesis_ledger_timestamp` is **not**
+being updated and must be manually set to the right value (which is at
+the moment unknown).
+
+By the fork point above we mean the last block before the slot where
+no more transactions were accepted (transaction-stop slot).
+
+The configuration thus generated can be saved to a file, modified if
+needed, and fed directly into a new node running a different protocol
+version, using `--config-file` flag. As of the moment of writing this,
+`compatible` and `berkeley` branches' configuration files are
+compatible with each other (see: [PR #13768](https://github.com/MinaProtocol/mina/pull/13768)).
+Sadly since then that compatibility has been broken by [PR #14014](https://github.com/MinaProtocol/mina/pull/14014).
+We need to either port this change back to `compatible` or create a
+migration script which will adapt a `mainnet` config file to the
+format required by `berkeley`. The former solution would probably
+be better.
+
+The `fork_config` field has been added to GraphQL in [PR
+#13787](https://github.com/MinaProtocol/mina/pull/13787). It needs to
+be extended to return the blockchain state for a given block (height
+or state hash) so that we can export the desired ledger after the
+blockchain has moved on.
+
+## Drawbacks
+
+This RFC provides a simple enough procedure to generate the genesis
+ledger for the new network. However, it's not without its problems.
+
+### File size
+
+At the moment the mainnet has more than 100 000 accounts created.
+Each account takes at least 4 lines in the configuration, which adds
+up to around 600kB of JSON data. The daemon can take considerable time
+at startup to parse it and load its contents into memory. If we move
+on with this approach, it might be desirable to make a dedicated
+effort to improve the configuration parsing speed, as these files
+will only grow larger in subsequent hard forks. Alternatively, we
+might want to devise a better (less verbose) storage mechanism for the
+genesis ledger.
+
+### Security concerns
+
+The generated genesis ledger is prone to malevolent manual
+modifications. Beyond containing the hash of the previous ledger, it's
+unprotected from tampering.
+
+One way to improve this is to provide an external program, capable of
+computing hash of the ledger as it will be after the config is loaded
+into a node. Users will be able to obtain a raw fork config file from
+their nodes. Later, given the official config for the new network,
+they will be able to run the program against both files and compute
+ledger hashes. The reason why this is needed is that the configuration
+file will likely contain some manual updates. For instance the genesis
+ledger timestamp will need to be updated manually when the start time
+of the new network is known. Further changes may concern genesis
+constants and other network configuration. All these changes should be
+ignored during the hash computation and only the genesis ledger itself
+should be taken into consideration. This way a user seeing that the
+configuration file is not identical to the one they computed can still
+verify that it does not contain any changes to the genesis ledger.
+
+We gain further protection against tampering with the ledger from the
+fact that all the nodes must use the same one, or they'll be kicked
+out of the network.
+
+## Rationale and alternatives
+
+The presented way of handling the ledger export is the simplest one
+and the easiest to implement. The security concern indicated above
+cannot be mitigated with any method currently available. In order to
+overcome it, we would have to re-think the whole procedure and somehow
+continue the existing network with the changed protocol instead of
+creating a new one.
+
+It seems reasonable to export the ledger in binary form instead, but
+currently the node does not persist the staged ledger in any way that
+could survive the existing node and could be loaded by another one.
+Even if we had such a process, the encoding of the ledger would have
+to be compatible between `compatible` and `berkeley`, which could be
+difficult to maintain in any binary format.
+
+Otherwise there's no reasonable alternative to the process described.
+
+## Prior art
+
+Some of the existing blockchains, like Tezos, deal with the protocol
+upgrade problem, avoiding hard-forking entirely, and therefore
+avoiding the ledger export in particular. They achieve it by careful
+software design in which the protocol (containing in particular the
+consensus mechanism and transaction logic) consists of a plugin to the
+daemon, which can be loaded and unloaded at runtime. Thus the protocol
+update is as simple as loading another plugin at runtime and does not
+even require a node restart.
+
+It would certainly be beneficial to Mina to implement a similar
+solution, but this is obviously a huge amount of work (involving
+redesigning the whole code base), which makes it infeasible for the
+moment.
+
+## Unresolved questions
+
+The genesis timestamp of the new network needs to be specified in the
+runtime configuration, but it is as of now (and will probably remain
+for some time still) unknown. This makes it hard to put it into the
+configuration in any automated fashion. Relying on personnel
+performing the hard fork to update it is far from ideal, but there
+seems to be no better solution available at the moment.
+
+Also epoch seeds from mainnet are incompatible with those on berkeley.
+When epoch ledgers are being exported from a compatible node and
+transferred into a berkeley node, the latter cannot load them, because
+Base58check fails to decode them. This is a problem we need to overcome
+or decide that we won't export the epoch ledgers and assume they're
+the same as the genesis ledger for the purpose of hard fork.
+
+## Testing
+
+An automatic integration test will be written to check that the data is
+being exported properly. The procedure is to start a fresh network and
+generate a couple of transactions. Then the transactions are stopped.
+Finally the ledger export is performed and the test compares the
+exported state to the current state of the blockchain as obtained
+through GraphQL. These checks must take into account the fact that
+the state has changed slightly since the transaction stop (a couple of
+additional blocks might have been produced). However, all balances should
+definitely be the same (after the transaction stop no transactions are
+allowed, so there are no fees or coinbase rewards anymore).
+
+The procedure can also be tested manually as follows:
+* Sync up with the mainnet.
+* Export the genesis ledger at any point in time.
+* The program mentioned in a previous section can be
+used to verify the exported ledger.
+* Possibly add an account you control and change everyone's
+delegation to point at that account so that you can produce
+blocks.
+* Start a new network with the exported state.
+* The new network should be able to produce blocks.
+* All the accounts should have the same balances and
+delegates as on the mainnet at the moment of export.
diff --git a/scripts/Brewfile b/scripts/Brewfile
index af4aabb93d3..7c95594ff22 100644
--- a/scripts/Brewfile
+++ b/scripts/Brewfile
@@ -14,6 +14,6 @@ brew "openssl@1.1"
brew "python@3.8"
brew "zlib"
brew "libpq"
-brew "postgresql"
-brew "go"
+brew "postgresql@14"
brew "gnu-sed"
+brew "goenv"
diff --git a/scripts/Brewfile.lock.json b/scripts/Brewfile.lock.json
index d281f7d6fea..133b7e46426 100644
--- a/scripts/Brewfile.lock.json
+++ b/scripts/Brewfile.lock.json
@@ -2,126 +2,201 @@
"entries": {
"brew": {
"bash": {
- "version": "5.1.4",
+ "version": "5.2.15",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:f3a42b9282e6779504034485634a2f3e6e3bddfc70b9990e09e66e3c8c926b7d",
+ "sha256": "f3a42b9282e6779504034485634a2f3e6e3bddfc70b9990e09e66e3c8c926b7d"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:5e7e3e3387fc60e907683b437ac6e64879e117a3c5c1421fe6e6257f6aaa3c69",
+ "sha256": "5e7e3e3387fc60e907683b437ac6e64879e117a3c5c1421fe6e6257f6aaa3c69"
+ },
"arm64_big_sur": {
"cellar": "/opt/homebrew/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:253a8f71bb8ca1444fa5951caa3e4d0e6f51ca6cd6d7c9fc9f79f0c58dc3e693",
- "sha256": "253a8f71bb8ca1444fa5951caa3e4d0e6f51ca6cd6d7c9fc9f79f0c58dc3e693"
+ "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:d19858831275271cc8aa9a1a28de6223faa44c6ebbc88e83898fd559de5b627e",
+ "sha256": "d19858831275271cc8aa9a1a28de6223faa44c6ebbc88e83898fd559de5b627e"
},
- "big_sur": {
+ "ventura": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:1c7c13309368474e6f7b3afd9c6ba13b213b00caeb9b990e171cf5e097e8e5e1",
- "sha256": "1c7c13309368474e6f7b3afd9c6ba13b213b00caeb9b990e171cf5e097e8e5e1"
+ "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:fd01a9dbdc56f6313a725cb345a3b991cfdaa9e1a91b08fd9791a0e695b55723",
+ "sha256": "fd01a9dbdc56f6313a725cb345a3b991cfdaa9e1a91b08fd9791a0e695b55723"
},
- "catalina": {
+ "monterey": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:2195ea39cf6607ec440addd6aed524c5a66719e998d74d5f9595f594f6593b21",
- "sha256": "2195ea39cf6607ec440addd6aed524c5a66719e998d74d5f9595f594f6593b21"
+ "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:05a5f9435c9e9ffe8377b03e0ca6b27bbb32cc01aff47dd1692cd8d7e735ab3a",
+ "sha256": "05a5f9435c9e9ffe8377b03e0ca6b27bbb32cc01aff47dd1692cd8d7e735ab3a"
},
- "mojave": {
+ "big_sur": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:4a294caec86652221a9901b9d892723a84e60d05bc91155efcb661829b13a898",
- "sha256": "4a294caec86652221a9901b9d892723a84e60d05bc91155efcb661829b13a898"
+ "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:680dd3b37e17cc4fa1af6dd8c51c774dd0c9aa3e594e96527020845516b1ea77",
+ "sha256": "680dd3b37e17cc4fa1af6dd8c51c774dd0c9aa3e594e96527020845516b1ea77"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/bash/blobs/sha256:6185e7cdba0e671528c9f38b104c4af58a670240672f83537bfc95983476fbc2",
+ "sha256": "6185e7cdba0e671528c9f38b104c4af58a670240672f83537bfc95983476fbc2"
}
}
}
},
"yarn": {
- "version": "1.22.10",
- "bottle": false
+ "version": "1.22.19",
+ "bottle": {
+ "rebuild": 0,
+ "root_url": "https://ghcr.io/v2/homebrew/core",
+ "files": {
+ "all": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/yarn/blobs/sha256:79b90324a5365189a144b786e9bdb3bf32be3823e9041d5f3250ea7b804dcd0b",
+ "sha256": "79b90324a5365189a144b786e9bdb3bf32be3823e9041d5f3250ea7b804dcd0b"
+ }
+ }
+ }
},
"boost": {
- "version": "1.75.0_2",
+ "version": "1.81.0_1",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:8a4a21f28eea820cdfb2ca94d6a9c2ecad40592b145de06698283dc3c7ae0eeb",
+ "sha256": "8a4a21f28eea820cdfb2ca94d6a9c2ecad40592b145de06698283dc3c7ae0eeb"
+ },
+ "arm64_monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:da47f5dce669699eb052452fe166e5cd118a6f6d3f64abe4cae53461743a2cc2",
+ "sha256": "da47f5dce669699eb052452fe166e5cd118a6f6d3f64abe4cae53461743a2cc2"
+ },
"arm64_big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:a6ca6c43f67270378ae0400e66095c329ebe90a1989a4a9c4606f1b8e72a692f",
- "sha256": "a6ca6c43f67270378ae0400e66095c329ebe90a1989a4a9c4606f1b8e72a692f"
+ "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:640b02baab8cf76935b79203660de45e0721f1428697b9916327b06e86b9300a",
+ "sha256": "640b02baab8cf76935b79203660de45e0721f1428697b9916327b06e86b9300a"
},
- "big_sur": {
+ "ventura": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:be8564844a1e5bb58c26287453617458db6e886f85197c8ce35c21cfa74b1bc0",
- "sha256": "be8564844a1e5bb58c26287453617458db6e886f85197c8ce35c21cfa74b1bc0"
+ "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:b3fc7aade48d9a8bec56ac3cc57a3c5ead36d67365cf3447c578cd31ddb8fbee",
+ "sha256": "b3fc7aade48d9a8bec56ac3cc57a3c5ead36d67365cf3447c578cd31ddb8fbee"
},
- "catalina": {
+ "monterey": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:aef0fade9e8159b572907189bb8dfd828dab94c44e036cdd782c2b3834d218f3",
- "sha256": "aef0fade9e8159b572907189bb8dfd828dab94c44e036cdd782c2b3834d218f3"
+ "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:160aabda5d6497dc72a389dd251becc971e37d4702763b3b45a5c7bbc29f0419",
+ "sha256": "160aabda5d6497dc72a389dd251becc971e37d4702763b3b45a5c7bbc29f0419"
},
- "mojave": {
+ "big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:e24d396d90a8db75738cba4543b678c79ef720a96bf2f93688bd2f35fef66d3a",
- "sha256": "e24d396d90a8db75738cba4543b678c79ef720a96bf2f93688bd2f35fef66d3a"
+ "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:51a2646e51a7a304848efa7cca17312c4a3acc5e28ef664037d0675c5c9a1e83",
+ "sha256": "51a2646e51a7a304848efa7cca17312c4a3acc5e28ef664037d0675c5c9a1e83"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/boost/blobs/sha256:26a83186402f3625806df9d7f6e41a1188d726d7f21ee5ccbfb3310e763d1ebc",
+ "sha256": "26a83186402f3625806df9d7f6e41a1188d726d7f21ee5ccbfb3310e763d1ebc"
}
}
}
},
"cmake": {
- "version": "3.20.1",
+ "version": "3.26.0",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:3c39c077ead3f8ccc94727c275ac16af5f75a088844df034d10b34ad85dfb8bf",
+ "sha256": "3c39c077ead3f8ccc94727c275ac16af5f75a088844df034d10b34ad85dfb8bf"
+ },
+ "arm64_monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:6fb143b21a378921ee86e61d0cf77584e42ead38076f92ea1ebb57dcefb6b85d",
+ "sha256": "6fb143b21a378921ee86e61d0cf77584e42ead38076f92ea1ebb57dcefb6b85d"
+ },
"arm64_big_sur": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:b94fa9c13065ce31259621e1ac1ff8f46c0a6ee606a5944f2562ed86c7fcf2a6",
- "sha256": "b94fa9c13065ce31259621e1ac1ff8f46c0a6ee606a5944f2562ed86c7fcf2a6"
+ "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:714058b6941002393dcadcefb13f5b16ae094724c734977cc6a2dcf2db5484ae",
+ "sha256": "714058b6941002393dcadcefb13f5b16ae094724c734977cc6a2dcf2db5484ae"
},
- "big_sur": {
+ "ventura": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:c8b975b0911f9125065459e9b55da2c43fc58485446ec35d8294d2db2ad77972",
- "sha256": "c8b975b0911f9125065459e9b55da2c43fc58485446ec35d8294d2db2ad77972"
+ "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:96a930fa2836355c767057f336521113f419f51c2444ec0cb095a6776170997e",
+ "sha256": "96a930fa2836355c767057f336521113f419f51c2444ec0cb095a6776170997e"
},
- "catalina": {
+ "monterey": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:1875ab07ed5843cdc06368ae851ec1232a72bb679f70f816e549acfe5fff6c31",
- "sha256": "1875ab07ed5843cdc06368ae851ec1232a72bb679f70f816e549acfe5fff6c31"
+ "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:ca050ee8541df0df30c3b06bcce0b8be0a37fc16dcfc83fe2c29dd6bf13b8643",
+ "sha256": "ca050ee8541df0df30c3b06bcce0b8be0a37fc16dcfc83fe2c29dd6bf13b8643"
},
- "mojave": {
+ "big_sur": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:0af0a3d97a83dcdece0c5a8ba867d6b199b928f1c4e0a325eef785af6b8f2f1e",
- "sha256": "0af0a3d97a83dcdece0c5a8ba867d6b199b928f1c4e0a325eef785af6b8f2f1e"
+ "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:5175a6fee503ce7cd67fd6d23ea589995ac1d0eb8114756315a106b8261affda",
+ "sha256": "5175a6fee503ce7cd67fd6d23ea589995ac1d0eb8114756315a106b8261affda"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/cmake/blobs/sha256:a71c04366f7b5fd26d49bd683ae3a2cab717967085fd60ffa8bc8c802a9f9c48",
+ "sha256": "a71c04366f7b5fd26d49bd683ae3a2cab717967085fd60ffa8bc8c802a9f9c48"
}
}
}
},
"gmp": {
- "version": "6.2.1",
+ "version": "6.2.1_1",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:2436cd120e5678d67c24020a50cbbf7c0220e7ecaac63981335872b9d666bcad",
+ "sha256": "2436cd120e5678d67c24020a50cbbf7c0220e7ecaac63981335872b9d666bcad"
+ },
+ "arm64_monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:a43a2ae4c44d90626b835a968a32327c8b8bbf754ec1d2590f8ac656c71dace9",
+ "sha256": "a43a2ae4c44d90626b835a968a32327c8b8bbf754ec1d2590f8ac656c71dace9"
+ },
"arm64_big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:ff4ad8d068ba4c14d146abb454991b6c4f246796ec2538593dc5f04ca7593eec",
- "sha256": "ff4ad8d068ba4c14d146abb454991b6c4f246796ec2538593dc5f04ca7593eec"
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:491220f1ff2c662b96295d931a80702523eeaee681d7305fb02b561e527dcbb8",
+ "sha256": "491220f1ff2c662b96295d931a80702523eeaee681d7305fb02b561e527dcbb8"
+ },
+ "ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:4c6488dfd53b8287702827a4e6d50569926417f2cd08613d37720de54b6afe0c",
+ "sha256": "4c6488dfd53b8287702827a4e6d50569926417f2cd08613d37720de54b6afe0c"
+ },
+ "monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:dddc6d8c871c92f6e5fb1249c28768aa2b4b47c38836a69cf787a639cf5eee73",
+ "sha256": "dddc6d8c871c92f6e5fb1249c28768aa2b4b47c38836a69cf787a639cf5eee73"
},
"big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:6a44705536f25c4b9f8547d44d129ae3b3657755039966ad2b86b821e187c32c",
- "sha256": "6a44705536f25c4b9f8547d44d129ae3b3657755039966ad2b86b821e187c32c"
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:e566452815d2ff5dc66da160bd1cd3d9cf02a17a07284cf0bac46496133383ae",
+ "sha256": "e566452815d2ff5dc66da160bd1cd3d9cf02a17a07284cf0bac46496133383ae"
},
"catalina": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:35e9f82d80708ae8dea2d6b0646dcd86d692321b96effaa76b7fad4d6cffa5be",
- "sha256": "35e9f82d80708ae8dea2d6b0646dcd86d692321b96effaa76b7fad4d6cffa5be"
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:5ee7a460668864c28e541db15420e1480c3d31c5f216797a453a5310106fbc97",
+ "sha256": "5ee7a460668864c28e541db15420e1480c3d31c5f216797a453a5310106fbc97"
},
"mojave": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:00fb998dc2abbd09ee9f2ad733ae1adc185924fb01be8814e69a57ef750b1a32",
- "sha256": "00fb998dc2abbd09ee9f2ad733ae1adc185924fb01be8814e69a57ef750b1a32"
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:b9d7d36c8d263be0e02e17d435350546f9f7008eb21b6e86bf42f719efcba85e",
+ "sha256": "b9d7d36c8d263be0e02e17d435350546f9f7008eb21b6e86bf42f719efcba85e"
},
- "high_sierra": {
- "cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:54191ce7fa888df64b9c52870531ac0ce2e8cbd40a7c4cdec74cb2c4a421af97",
- "sha256": "54191ce7fa888df64b9c52870531ac0ce2e8cbd40a7c4cdec74cb2c4a421af97"
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gmp/blobs/sha256:786ae29f0c0b06ea86e42bd9c6ac2c49bd5757da037dead7053e8bd612c4cf8c",
+ "sha256": "786ae29f0c0b06ea86e42bd9c6ac2c49bd5757da037dead7053e8bd612c4cf8c"
}
}
}
@@ -132,11 +207,31 @@
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:51ff39d1b008d1f03d8dfc9d42ed483d64fea632b31f4ccf3dc15ddb2de09794",
+ "sha256": "51ff39d1b008d1f03d8dfc9d42ed483d64fea632b31f4ccf3dc15ddb2de09794"
+ },
+ "arm64_monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:0958a773e875dfbab2e70e80cd10a0406eed6f92352ae432b44f4bf74dcce35e",
+ "sha256": "0958a773e875dfbab2e70e80cd10a0406eed6f92352ae432b44f4bf74dcce35e"
+ },
"arm64_big_sur": {
"cellar": ":any_skip_relocation",
"url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:c90e7baee17d21e0cb594db676912e108f7df68b71509e15d37edfadcd6b12e9",
"sha256": "c90e7baee17d21e0cb594db676912e108f7df68b71509e15d37edfadcd6b12e9"
},
+ "ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:4578068decc9e78f130aff8e714d99a45a7154a51ce5a0e0ec4e40c31dd686bc",
+ "sha256": "4578068decc9e78f130aff8e714d99a45a7154a51ce5a0e0ec4e40c31dd686bc"
+ },
+ "monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:1a3e9eb276bb35ecb33bcdc50b689f1f7cebe1d014566754c5faa85e72251789",
+ "sha256": "1a3e9eb276bb35ecb33bcdc50b689f1f7cebe1d014566754c5faa85e72251789"
+ },
"big_sur": {
"cellar": ":any_skip_relocation",
"url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:4c18141474072f9fac171680e75c77fa22af016d1cda998a052792980d9ce4f9",
@@ -166,98 +261,153 @@
"cellar": ":any_skip_relocation",
"url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:bd67af8b9c24fa785a2da2a1d3475305593dbc183331aed657313e4066de3259",
"sha256": "bd67af8b9c24fa785a2da2a1d3475305593dbc183331aed657313e4066de3259"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gpatch/blobs/sha256:f49b09a0cf8b312de84a07f7dee7029a0965277baa080f5e4eb57c1457539325",
+ "sha256": "f49b09a0cf8b312de84a07f7dee7029a0965277baa080f5e4eb57c1457539325"
}
}
}
},
"jemalloc": {
- "version": "5.2.1_1",
+ "version": "5.3.0",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:33e0c3fbe56642e081018a9674df734d34afdc35af7d03f5dd2b484a804555e3",
+ "sha256": "33e0c3fbe56642e081018a9674df734d34afdc35af7d03f5dd2b484a804555e3"
+ },
+ "arm64_monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:b7ef9abad498e6eb53fb476fde4396fc9ab99a23092ea14bcf576548e198f9bd",
+ "sha256": "b7ef9abad498e6eb53fb476fde4396fc9ab99a23092ea14bcf576548e198f9bd"
+ },
"arm64_big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:724ab5947e53f571b9fed9e776a1ba22b1d71fe27ce5775553d70e990ef9dc63",
- "sha256": "724ab5947e53f571b9fed9e776a1ba22b1d71fe27ce5775553d70e990ef9dc63"
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:b24e4a9413b347397a10ebc9a7a2d309d88c0f9479c1cdebe6c302acba9a43a9",
+ "sha256": "b24e4a9413b347397a10ebc9a7a2d309d88c0f9479c1cdebe6c302acba9a43a9"
},
- "big_sur": {
+ "ventura": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:7797788be2da677a8343ac6199e2f180c2e6b627c0b9abc9da133fbc34e86678",
- "sha256": "7797788be2da677a8343ac6199e2f180c2e6b627c0b9abc9da133fbc34e86678"
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:66b5f3a4c4ad9f7801e6ad2e76d1586e7b57e2cc64b24c2684dd1c2af8bc82f3",
+ "sha256": "66b5f3a4c4ad9f7801e6ad2e76d1586e7b57e2cc64b24c2684dd1c2af8bc82f3"
},
- "catalina": {
+ "monterey": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:b1b211e5bead798c236d478dd74310a97a7b59470f607b608c07222648b08bf5",
- "sha256": "b1b211e5bead798c236d478dd74310a97a7b59470f607b608c07222648b08bf5"
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:27ae29c02d718c38ee5f623c3ef08ad3530a6fd3595d16d2ddadd6552bf32c12",
+ "sha256": "27ae29c02d718c38ee5f623c3ef08ad3530a6fd3595d16d2ddadd6552bf32c12"
},
- "mojave": {
+ "big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:d3f6f85e74b08c8c97448e289734df484f884af35cd10ce9d9db43cf721fbf94",
- "sha256": "d3f6f85e74b08c8c97448e289734df484f884af35cd10ce9d9db43cf721fbf94"
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:72aef17aa140b457400c4f2b74d0473bf1160616c3df7cb8604ac2bf734afea5",
+ "sha256": "72aef17aa140b457400c4f2b74d0473bf1160616c3df7cb8604ac2bf734afea5"
},
- "high_sierra": {
+ "catalina": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:8080c98844153da08346431fe0a0592f6f718cb7a17525f9ffb909c395bc0b6d",
- "sha256": "8080c98844153da08346431fe0a0592f6f718cb7a17525f9ffb909c395bc0b6d"
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:3f5cf334d16ab432bf210c7e171510d0edcd834f939b57bddfd428af5ed248ae",
+ "sha256": "3f5cf334d16ab432bf210c7e171510d0edcd834f939b57bddfd428af5ed248ae"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/jemalloc/blobs/sha256:240b20cc078b21d90c32bd34447952b9b464958b1858ae109f168558993f9278",
+ "sha256": "240b20cc078b21d90c32bd34447952b9b464958b1858ae109f168558993f9278"
}
}
}
},
"libffi": {
- "version": "3.3_3",
+ "version": "3.4.4",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:66d9dcb218283c43250b04e507b7b96f0cf18fb1017fcaf811729324d11127f7",
+ "sha256": "66d9dcb218283c43250b04e507b7b96f0cf18fb1017fcaf811729324d11127f7"
+ },
+ "arm64_monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:e7ea0921a053dc81e818c3893887e819ed26c0e231fd306e05e905b51b9ea902",
+ "sha256": "e7ea0921a053dc81e818c3893887e819ed26c0e231fd306e05e905b51b9ea902"
+ },
"arm64_big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:10a6d66c264f9a23d1162e535fe49f27c23f6ef452b4701ed7110f06aaf1e01d",
- "sha256": "10a6d66c264f9a23d1162e535fe49f27c23f6ef452b4701ed7110f06aaf1e01d"
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:8d44b24963c114512934de23cc776a6190f5bcb65db8e6cc65e1b60122571747",
+ "sha256": "8d44b24963c114512934de23cc776a6190f5bcb65db8e6cc65e1b60122571747"
+ },
+ "ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:a86ed7eb1b02a3d44cd6e75977c910466357a1715743f89be94416d000577133",
+ "sha256": "a86ed7eb1b02a3d44cd6e75977c910466357a1715743f89be94416d000577133"
+ },
+ "monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:9dd80c4c3d4451cc3216dbf1129a2bddec474aa9266b6bb5c603e0a6cce7605b",
+ "sha256": "9dd80c4c3d4451cc3216dbf1129a2bddec474aa9266b6bb5c603e0a6cce7605b"
},
"big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:8a7a02cffb368dfdeaeb1176a7a7bcc6402371aee0a30bb001aff3452a4202c6",
- "sha256": "8a7a02cffb368dfdeaeb1176a7a7bcc6402371aee0a30bb001aff3452a4202c6"
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:b5c4e2054802f97a68b8f32d9ff2c6782f9a37223cd0a3b3d2175ecf04740a4f",
+ "sha256": "b5c4e2054802f97a68b8f32d9ff2c6782f9a37223cd0a3b3d2175ecf04740a4f"
},
"catalina": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:66caa8a807684ce5d5173ffc4db1eaa7167eabd634335a2ce3b8ba667efe2686",
- "sha256": "66caa8a807684ce5d5173ffc4db1eaa7167eabd634335a2ce3b8ba667efe2686"
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:1f53646211da139b423eb38f923bc38da1de86b7a68bfc2df5351098fe3c67e3",
+ "sha256": "1f53646211da139b423eb38f923bc38da1de86b7a68bfc2df5351098fe3c67e3"
},
- "mojave": {
- "cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:1205c19a1d51940726534923db0e1c291b001a3ea541d0694afccad7968343a3",
- "sha256": "1205c19a1d51940726534923db0e1c291b001a3ea541d0694afccad7968343a3"
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/libffi/blobs/sha256:dcc9412995b5e319f64796a77b1eb8e684f1d1b6b5d7ac824f434ada692e4ff8",
+ "sha256": "dcc9412995b5e319f64796a77b1eb8e684f1d1b6b5d7ac824f434ada692e4ff8"
}
}
}
},
"libomp": {
- "version": "12.0.0",
+ "version": "15.0.7",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:8c5c7b912a075e598fb7ae10f2999853343b2662061d92040b1a584cbb3ba7d2",
+ "sha256": "8c5c7b912a075e598fb7ae10f2999853343b2662061d92040b1a584cbb3ba7d2"
+ },
+ "arm64_monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:1b1aad07e8677744cdaa264419fade98bd1a852894c77d01985053a96b7d1c7d",
+ "sha256": "1b1aad07e8677744cdaa264419fade98bd1a852894c77d01985053a96b7d1c7d"
+ },
"arm64_big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:2d2befd8f1ab88eac44e71bf05b4b03172e4b3352cc21d994898874905efadbe",
- "sha256": "2d2befd8f1ab88eac44e71bf05b4b03172e4b3352cc21d994898874905efadbe"
+ "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:00e04fbe9783ad7751eaa6d2edda92dfbff85131777255a74e364f3217a7a2df",
+ "sha256": "00e04fbe9783ad7751eaa6d2edda92dfbff85131777255a74e364f3217a7a2df"
},
- "big_sur": {
+ "ventura": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:fe1e5c0fa8ff667deb348e64e695ac355a43da34c020fa983e081ea67cb5f56c",
- "sha256": "fe1e5c0fa8ff667deb348e64e695ac355a43da34c020fa983e081ea67cb5f56c"
+ "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:762c461db6af3cf78983b1eb58aee62699652b96237abf79469c8ac034b2156b",
+ "sha256": "762c461db6af3cf78983b1eb58aee62699652b96237abf79469c8ac034b2156b"
},
- "catalina": {
+ "monterey": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:33818af9e5fa26153645f63dab95d060fea69757570910d2f86d56eff29a5cf6",
- "sha256": "33818af9e5fa26153645f63dab95d060fea69757570910d2f86d56eff29a5cf6"
+ "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:0b944a6bbe8955e7900882b94f1b0b09030d5791191dc5b0c8b3d5d0895f4b12",
+ "sha256": "0b944a6bbe8955e7900882b94f1b0b09030d5791191dc5b0c8b3d5d0895f4b12"
},
- "mojave": {
+ "big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:e6ccdea1356c28931543f73ebcc3fa5693056f40a5b04150fd54908fac17109e",
- "sha256": "e6ccdea1356c28931543f73ebcc3fa5693056f40a5b04150fd54908fac17109e"
+ "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:f92e5b31f86c22c0fe875b50e050c19a89993b36106a9ad2737230ae2cb68069",
+ "sha256": "f92e5b31f86c22c0fe875b50e050c19a89993b36106a9ad2737230ae2cb68069"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/libomp/blobs/sha256:d2a16a906c029e8405a11924837417ad1008d41bb1877399f494cb872a179f01",
+ "sha256": "d2a16a906c029e8405a11924837417ad1008d41bb1877399f494cb872a179f01"
}
}
}
@@ -268,11 +418,31 @@
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:9a473cc4469e5f641ff79fac0331c7b86ac22778becd1155a2395e52346116d8",
+ "sha256": "9a473cc4469e5f641ff79fac0331c7b86ac22778becd1155a2395e52346116d8"
+ },
+ "arm64_monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:493ce4faacb1fba817e73213cde331a68f73531d89260200726cc17c1ca00797",
+ "sha256": "493ce4faacb1fba817e73213cde331a68f73531d89260200726cc17c1ca00797"
+ },
"arm64_big_sur": {
"cellar": ":any",
"url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:ab7029c599665005a9c9ec9e72c74bf4d543fd7a995d9af9cfe9e6c10de79177",
"sha256": "ab7029c599665005a9c9ec9e72c74bf4d543fd7a995d9af9cfe9e6c10de79177"
},
+ "ventura": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:bbb929946689440afc6bb47effbc3e8d70db86e86c381d8ba99c1befc07e5602",
+ "sha256": "bbb929946689440afc6bb47effbc3e8d70db86e86c381d8ba99c1befc07e5602"
+ },
+ "monterey": {
+ "cellar": ":any",
+ "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:522ac3f26c646f3d276b0c997e1a2771559d4766362d28f16ca1a9585bc20206",
+ "sha256": "522ac3f26c646f3d276b0c997e1a2771559d4766362d28f16ca1a9585bc20206"
+ },
"big_sur": {
"cellar": ":any",
"url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:5afc5678e30a174c1e46f1e905124f2619e6d9815ac776836090c0bff85631d6",
@@ -292,35 +462,55 @@
"cellar": ":any",
"url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:fc972755eb60f4221d7b32e58fc0f94e99b913fefefc84c4c76dc4bca1c5c445",
"sha256": "fc972755eb60f4221d7b32e58fc0f94e99b913fefefc84c4c76dc4bca1c5c445"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/libsodium/blobs/sha256:1ab2c66fc8ae6c1245b49c9bd7a32853c1b348afe7086d4c2d3baf5ea30bbac9",
+ "sha256": "1ab2c66fc8ae6c1245b49c9bd7a32853c1b348afe7086d4c2d3baf5ea30bbac9"
}
}
}
},
"opam": {
- "version": "2.0.8",
+ "version": "2.1.4",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:4203dd8ed7d01e2e27c226f41cde68f797433b39cea3b32d5f265205aad3c0d9",
+ "sha256": "4203dd8ed7d01e2e27c226f41cde68f797433b39cea3b32d5f265205aad3c0d9"
+ },
+ "arm64_monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:442fda0ec64b42667e5299217e1053057fed3c0c2f84685302fa8f1fb4fa72c0",
+ "sha256": "442fda0ec64b42667e5299217e1053057fed3c0c2f84685302fa8f1fb4fa72c0"
+ },
"arm64_big_sur": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:83fedf7b107a1cc3ea02a3782e3d830feeec7b8482a8e015707af65c0bb94ac9",
- "sha256": "83fedf7b107a1cc3ea02a3782e3d830feeec7b8482a8e015707af65c0bb94ac9"
+ "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:6462d0f11704126247331049f1e737ae459b8bb11459534a673caf2a4b834938",
+ "sha256": "6462d0f11704126247331049f1e737ae459b8bb11459534a673caf2a4b834938"
},
- "big_sur": {
+ "ventura": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:d34e0dcbfa4302960a8f813d4e06c113e24beff31d2fbf8e55e470c5b51ecc0b",
- "sha256": "d34e0dcbfa4302960a8f813d4e06c113e24beff31d2fbf8e55e470c5b51ecc0b"
+ "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:a392de4983f5be70c57469250d82bb81e08ec32f88fec9a755b678ac285b8898",
+ "sha256": "a392de4983f5be70c57469250d82bb81e08ec32f88fec9a755b678ac285b8898"
},
- "catalina": {
+ "monterey": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:882bf7f9d3f94fbbc2d5f08019456f533e0a71fd58c0a02650aa5781faefca9a",
- "sha256": "882bf7f9d3f94fbbc2d5f08019456f533e0a71fd58c0a02650aa5781faefca9a"
+ "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:507ad56c58cd33a903932870720154be8a4bac7a53dbf26cbc54ab1e0d200d87",
+ "sha256": "507ad56c58cd33a903932870720154be8a4bac7a53dbf26cbc54ab1e0d200d87"
},
- "mojave": {
+ "big_sur": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:b7d269a8eacb55dfa391b361711cace261aff40941137d015f1f2fa0a7c8c0e3",
+ "sha256": "b7d269a8eacb55dfa391b361711cace261aff40941137d015f1f2fa0a7c8c0e3"
+ },
+ "x86_64_linux": {
"cellar": ":any_skip_relocation",
- "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:e091ed13ebfa241890e0489cdc2645d66c9c189f618466cf8f7576751b381726",
- "sha256": "e091ed13ebfa241890e0489cdc2645d66c9c189f618466cf8f7576751b381726"
+ "url": "https://ghcr.io/v2/homebrew/core/opam/blobs/sha256:c2212e56b77c1c3c591ced93249ea9cd12f2a6eeebda161569b1c013938fb2b3",
+ "sha256": "c2212e56b77c1c3c591ced93249ea9cd12f2a6eeebda161569b1c013938fb2b3"
}
}
}
@@ -331,224 +521,319 @@
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:3ff612c5e44b945c8c0cc6df7d3edb407ca67cddad9c89f9ab99ced494b7a8c2",
+ "sha256": "3ff612c5e44b945c8c0cc6df7d3edb407ca67cddad9c89f9ab99ced494b7a8c2"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:2af9bceb60b70a259f236f1d46d2bb24c4d0a4af8cd63d974dde4d76313711e0",
+ "sha256": "2af9bceb60b70a259f236f1d46d2bb24c4d0a4af8cd63d974dde4d76313711e0"
+ },
"arm64_big_sur": {
- "cellar": ":any_skip_relocation",
+ "cellar": "/opt/homebrew/Cellar",
"url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:ffd4491f62201d14b7eca6beff954a2ab265351589cd5b3b79b8bbb414485574",
"sha256": "ffd4491f62201d14b7eca6beff954a2ab265351589cd5b3b79b8bbb414485574"
},
+ "ventura": {
+ "cellar": "/usr/local/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:c44b1544815518726d280d92d6f6df09bd45e41ad20fd43424725c1c20760be8",
+ "sha256": "c44b1544815518726d280d92d6f6df09bd45e41ad20fd43424725c1c20760be8"
+ },
+ "monterey": {
+ "cellar": "/usr/local/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:a6ba80711f98b65d8a2bf2c9278540860415e9b5e545da338a4d94f39d119285",
+ "sha256": "a6ba80711f98b65d8a2bf2c9278540860415e9b5e545da338a4d94f39d119285"
+ },
"big_sur": {
- "cellar": ":any_skip_relocation",
+ "cellar": "/usr/local/Cellar",
"url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:0040b6ebe07f60549800b211343fd5fb3cf83c866d9f62e40f5fb2f38b71e161",
"sha256": "0040b6ebe07f60549800b211343fd5fb3cf83c866d9f62e40f5fb2f38b71e161"
},
"catalina": {
- "cellar": ":any_skip_relocation",
+ "cellar": "/usr/local/Cellar",
"url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:80f141e695f73bd058fd82e9f539dc67471666ff6800c5e280b5af7d3050f435",
"sha256": "80f141e695f73bd058fd82e9f539dc67471666ff6800c5e280b5af7d3050f435"
},
"mojave": {
- "cellar": ":any_skip_relocation",
+ "cellar": "/usr/local/Cellar",
"url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:0d14b797dba0e0ab595c9afba8ab7ef9c901b60b4f806b36580ef95ebb370232",
"sha256": "0d14b797dba0e0ab595c9afba8ab7ef9c901b60b4f806b36580ef95ebb370232"
},
"high_sierra": {
- "cellar": ":any_skip_relocation",
+ "cellar": "/usr/local/Cellar",
"url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:8c6160305abd948b8cf3e0d5c6bb0df192fa765bbb9535dda0b573cb60abbe52",
"sha256": "8c6160305abd948b8cf3e0d5c6bb0df192fa765bbb9535dda0b573cb60abbe52"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/pkg-config/blobs/sha256:3d9b8bf9b7b4bd08086be1104e3e18afb1c437dfaca03e6e7df8f2710b9c1c1a",
+ "sha256": "3d9b8bf9b7b4bd08086be1104e3e18afb1c437dfaca03e6e7df8f2710b9c1c1a"
}
}
}
},
"openssl@1.1": {
- "version": "1.1.1k",
+ "version": "1.1.1t",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:e1e08ddd93298ef8776b202e4b2f86fc519bf27a72f7cfb082b69ff2868a0175",
+ "sha256": "e1e08ddd93298ef8776b202e4b2f86fc519bf27a72f7cfb082b69ff2868a0175"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:524ec08838d2826793e26b2ed084efdefec931e1aaa6dea01455aa77409b86c8",
+ "sha256": "524ec08838d2826793e26b2ed084efdefec931e1aaa6dea01455aa77409b86c8"
+ },
"arm64_big_sur": {
"cellar": "/opt/homebrew/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:0a75e0f116c0653bc7a2b422e5dc500e7e51557303aa4fca9c1a28786189c1da",
- "sha256": "0a75e0f116c0653bc7a2b422e5dc500e7e51557303aa4fca9c1a28786189c1da"
+ "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:f80836e1ee1be8f531665451699061dcb02c7e4d10da90330c83d47ee2af88e5",
+ "sha256": "f80836e1ee1be8f531665451699061dcb02c7e4d10da90330c83d47ee2af88e5"
},
- "big_sur": {
+ "ventura": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:17d94c51ddfa8364baed5f3a754063e1ca75f807194f68d0b976619cf4e69c1a",
- "sha256": "17d94c51ddfa8364baed5f3a754063e1ca75f807194f68d0b976619cf4e69c1a"
+ "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:97676d1a616421e472c46fc7930fa4a9ced514cabc1d66ae0fb8597be09ac802",
+ "sha256": "97676d1a616421e472c46fc7930fa4a9ced514cabc1d66ae0fb8597be09ac802"
},
- "catalina": {
+ "monterey": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:cb610ecdda346011031b890d7b7c6e1942d7fc08cf083b74f148ec7ffed8c7e1",
- "sha256": "cb610ecdda346011031b890d7b7c6e1942d7fc08cf083b74f148ec7ffed8c7e1"
+ "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:43c00851b8447bd5d1fba3e8140b74ca3d4a5b19343e64ec50bafae376f95454",
+ "sha256": "43c00851b8447bd5d1fba3e8140b74ca3d4a5b19343e64ec50bafae376f95454"
},
- "mojave": {
+ "big_sur": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:7928c80c309c6ece50b1c0d968a1e54011088cc896d26aa511249978a246bd50",
- "sha256": "7928c80c309c6ece50b1c0d968a1e54011088cc896d26aa511249978a246bd50"
+ "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:c357ccf7ece01905099a0cde58a2bbfb14141edb3aafed7d20391ed6bf726381",
+ "sha256": "c357ccf7ece01905099a0cde58a2bbfb14141edb3aafed7d20391ed6bf726381"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/openssl/1.1/blobs/sha256:8844b2e735dd6e8bc1395eda1a123c136f90cb8985fcec6a7ae6815b5aad971b",
+ "sha256": "8844b2e735dd6e8bc1395eda1a123c136f90cb8985fcec6a7ae6815b5aad971b"
}
}
}
},
"python@3.8": {
- "version": "3.8.9",
+ "version": "3.8.16",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:1f466c82b6a1c351b5f991cec4ef8a67434428f45c1444436200f47bb2f0c85b",
+ "sha256": "1f466c82b6a1c351b5f991cec4ef8a67434428f45c1444436200f47bb2f0c85b"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:3bc726770581d74e306c96b59113e1d9c9628d7cdcd7a179e455f2351fa05ed6",
+ "sha256": "3bc726770581d74e306c96b59113e1d9c9628d7cdcd7a179e455f2351fa05ed6"
+ },
"arm64_big_sur": {
"cellar": "/opt/homebrew/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:e0aa205ed6ff34c99c3659490ccbc280c070dc04ac6a8d04960b36ff9076dd2e",
- "sha256": "e0aa205ed6ff34c99c3659490ccbc280c070dc04ac6a8d04960b36ff9076dd2e"
+ "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:f5cd1b15c99ea84472064379445bffebdbbb95d7a900b3329e5bf18c3053aaa8",
+ "sha256": "f5cd1b15c99ea84472064379445bffebdbbb95d7a900b3329e5bf18c3053aaa8"
},
- "big_sur": {
+ "ventura": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:6111e285226a59c3c3b0f684de2a810deb1b5b5b68e81fdafcb11f0a0b0f6606",
- "sha256": "6111e285226a59c3c3b0f684de2a810deb1b5b5b68e81fdafcb11f0a0b0f6606"
+ "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:7edc200046d2c86bd21340a4da9770eb00f9d08ebf4a2be8e1406a012953ee3e",
+ "sha256": "7edc200046d2c86bd21340a4da9770eb00f9d08ebf4a2be8e1406a012953ee3e"
},
- "catalina": {
+ "monterey": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:65a3d5fa32b16df0886c7390e992f4948b51ce56d10e57bd05895e5795efe0fd",
- "sha256": "65a3d5fa32b16df0886c7390e992f4948b51ce56d10e57bd05895e5795efe0fd"
+ "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:8dec63a5d442ad9c3d124ee3a58f805e1e914d5013bb09d3608c4ed0d789aca0",
+ "sha256": "8dec63a5d442ad9c3d124ee3a58f805e1e914d5013bb09d3608c4ed0d789aca0"
},
- "mojave": {
+ "big_sur": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:5d408f56ab185c3e7644e6ac3fe063cc367aa14810050cd2a9297332de97f5a9",
- "sha256": "5d408f56ab185c3e7644e6ac3fe063cc367aa14810050cd2a9297332de97f5a9"
+ "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:c237c8d7d53954f52d3093090d1802b00cc3191f0c17d6848e8d5ee22bc032d6",
+ "sha256": "c237c8d7d53954f52d3093090d1802b00cc3191f0c17d6848e8d5ee22bc032d6"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/python/3.8/blobs/sha256:03296288039955cdfcaa96066df0d5faf68565e0a8681c112a859dbbcd972957",
+ "sha256": "03296288039955cdfcaa96066df0d5faf68565e0a8681c112a859dbbcd972957"
}
}
}
},
"zlib": {
- "version": "1.2.11",
+ "version": "1.2.13",
"bottle": {
- "rebuild": 0,
+ "rebuild": 1,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
- "arm64_big_sur": {
- "cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:b480ed6baf10880f61b5a3097fb0921d44466857e1dde53a09e2ae4e378b1a8c",
- "sha256": "b480ed6baf10880f61b5a3097fb0921d44466857e1dde53a09e2ae4e378b1a8c"
- },
- "big_sur": {
+ "arm64_ventura": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:b95aa332dfc7c6dfb5e86fd30068f78e2cf87ee0232e5bef0adddae8215f543d",
- "sha256": "b95aa332dfc7c6dfb5e86fd30068f78e2cf87ee0232e5bef0adddae8215f543d"
+ "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:565286ede6cc691fb781b96a76235d714159bf47c7af2cadbca01bffa92bd785",
+ "sha256": "565286ede6cc691fb781b96a76235d714159bf47c7af2cadbca01bffa92bd785"
},
- "catalina": {
+ "arm64_monterey": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:8ec66cf6faa310712767efc3022fdd16568a79234439f64bf579acb628f893bc",
- "sha256": "8ec66cf6faa310712767efc3022fdd16568a79234439f64bf579acb628f893bc"
+ "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:71825106a1d3cc348f145e58a0f2580f7394c6e747455041551517bb0958b9a6",
+ "sha256": "71825106a1d3cc348f145e58a0f2580f7394c6e747455041551517bb0958b9a6"
},
- "mojave": {
+ "arm64_big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:245a43a59c57f83848e7382974bb80a46eac1d53bcaefb1bdebd1f85107d4169",
- "sha256": "245a43a59c57f83848e7382974bb80a46eac1d53bcaefb1bdebd1f85107d4169"
+ "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:5dfa4fd7fb89f0aff96b98965da0af7e01ef6c3b8f4a90f7b2b135e2f757783f",
+ "sha256": "5dfa4fd7fb89f0aff96b98965da0af7e01ef6c3b8f4a90f7b2b135e2f757783f"
},
- "high_sierra": {
+ "ventura": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:30548658b43cf66979f2756680fbb32d3c19c967e478ceea22d07f536b22bbce",
- "sha256": "30548658b43cf66979f2756680fbb32d3c19c967e478ceea22d07f536b22bbce"
+ "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:39899e784ac736887dd6b5a08740c0a625bcb5da06fa473dede99c67b7fcbccc",
+ "sha256": "39899e784ac736887dd6b5a08740c0a625bcb5da06fa473dede99c67b7fcbccc"
},
- "sierra": {
+ "monterey": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:f822b4dbab4a15b889316b89248c7b4d15d6af9dc460bf209b9425b0accb7fa3",
- "sha256": "f822b4dbab4a15b889316b89248c7b4d15d6af9dc460bf209b9425b0accb7fa3"
+ "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:ceee8b2e24b0c8e7fbb72d63f7844a0cdf4677771e94c46153190ba11be0f48c",
+ "sha256": "ceee8b2e24b0c8e7fbb72d63f7844a0cdf4677771e94c46153190ba11be0f48c"
},
- "el_capitan": {
+ "big_sur": {
"cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:3f912f6f1ce6c586128ebde29756c883b89409e652ca7aa9a29a773c2d4d0915",
- "sha256": "3f912f6f1ce6c586128ebde29756c883b89409e652ca7aa9a29a773c2d4d0915"
+ "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:c7e4e0fed83c7515f658f802604e2b6a0be47f1020d4ddfd2025aa748641fe00",
+ "sha256": "c7e4e0fed83c7515f658f802604e2b6a0be47f1020d4ddfd2025aa748641fe00"
},
- "yosemite": {
- "cellar": ":any",
- "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:5b969eb38b90a3e31869586df9d62e59d359212b16c6a270aee690dd67caa491",
- "sha256": "5b969eb38b90a3e31869586df9d62e59d359212b16c6a270aee690dd67caa491"
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/zlib/blobs/sha256:087e022c50655b9a7cdfd980bcff0764ce0f53f02724d4a9cbb7ba3b68b863a9",
+ "sha256": "087e022c50655b9a7cdfd980bcff0764ce0f53f02724d4a9cbb7ba3b68b863a9"
}
}
}
},
"libpq": {
- "version": "13.2",
+ "version": "15.2",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:c070425023520a337b84ace4ab2735577b00055bc7e4870c6993b6e6ca93a750",
+ "sha256": "c070425023520a337b84ace4ab2735577b00055bc7e4870c6993b6e6ca93a750"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:15e14f40369631580b69778d0a9c92b951f3e969ae40cae9c0b5fadbd8509a26",
+ "sha256": "15e14f40369631580b69778d0a9c92b951f3e969ae40cae9c0b5fadbd8509a26"
+ },
"arm64_big_sur": {
"cellar": "/opt/homebrew/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:be102bcef1030289e73fe3643c9fd575471df27f4b958e1155abb7a76f21107c",
- "sha256": "be102bcef1030289e73fe3643c9fd575471df27f4b958e1155abb7a76f21107c"
+ "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:f58a19b8834600e6b42595f40c1295dc25d8246c695a798df99b55b189709472",
+ "sha256": "f58a19b8834600e6b42595f40c1295dc25d8246c695a798df99b55b189709472"
},
- "big_sur": {
+ "ventura": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:eae0a60decded85f7b0af6c880f81d746fc0f0e285eba091b75763e63da946ca",
- "sha256": "eae0a60decded85f7b0af6c880f81d746fc0f0e285eba091b75763e63da946ca"
+ "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:1c588ee96000d09510522991025d15d49ed34b004eb6d4b6b2ad17dbae5956cc",
+ "sha256": "1c588ee96000d09510522991025d15d49ed34b004eb6d4b6b2ad17dbae5956cc"
},
- "catalina": {
+ "monterey": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:9bf464e2cd8c0c8b07ba1ed8e203427103921ba051fb0db4965c880b0d085339",
- "sha256": "9bf464e2cd8c0c8b07ba1ed8e203427103921ba051fb0db4965c880b0d085339"
+ "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:ca68207e33c0ff6a394a85d2ed7fa0c07aa4fe6f80e21acd321e7ffbe2f214bb",
+ "sha256": "ca68207e33c0ff6a394a85d2ed7fa0c07aa4fe6f80e21acd321e7ffbe2f214bb"
},
- "mojave": {
+ "big_sur": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:51f2ac5acb1e614e6bc005fb2e975040bf72937f4ac1c70edcaeec3a0d396621",
- "sha256": "51f2ac5acb1e614e6bc005fb2e975040bf72937f4ac1c70edcaeec3a0d396621"
+ "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:66552a11b4f11fc93128ff292487d3c4508ae7d06c909db74131f619b16e9fbe",
+ "sha256": "66552a11b4f11fc93128ff292487d3c4508ae7d06c909db74131f619b16e9fbe"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/libpq/blobs/sha256:d13f0d4a667199a5427cba37a5af212ca9676daed78054c1730f0b75426679ee",
+ "sha256": "d13f0d4a667199a5427cba37a5af212ca9676daed78054c1730f0b75426679ee"
}
}
}
},
"postgresql": {
- "version": "13.2_1",
+ "version": "14.7",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd",
+ "sha256": "762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4",
+ "sha256": "0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4"
+ },
"arm64_big_sur": {
"cellar": "/opt/homebrew/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:299babccbbf29b9769ab402aca01c4a0c4bc173a19a928e09fe1edabe7461c88",
- "sha256": "299babccbbf29b9769ab402aca01c4a0c4bc173a19a928e09fe1edabe7461c88"
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9",
+ "sha256": "97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9"
},
- "big_sur": {
+ "ventura": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:67a547842ae49911d301d490e70b5fff1ee27a65cea403abeff3a25d1806e8d6",
- "sha256": "67a547842ae49911d301d490e70b5fff1ee27a65cea403abeff3a25d1806e8d6"
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1",
+ "sha256": "adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1"
},
- "catalina": {
+ "monterey": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:02af915cc2b5291c5a15b59a74dff255e918e7a6af34dbef53cf6ad264627628",
- "sha256": "02af915cc2b5291c5a15b59a74dff255e918e7a6af34dbef53cf6ad264627628"
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911",
+ "sha256": "d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911"
},
- "mojave": {
+ "big_sur": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/postgresql/blobs/sha256:37f0b76c0f034d8a6837805eb27da3787c39cf895516a193ad298ea96f68e98a",
- "sha256": "37f0b76c0f034d8a6837805eb27da3787c39cf895516a193ad298ea96f68e98a"
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a",
+ "sha256": "af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac",
+ "sha256": "6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac"
}
}
}
},
"go": {
- "version": "1.16.3",
+ "version": "1.20.2",
"bottle": {
"rebuild": 0,
"root_url": "https://ghcr.io/v2/homebrew/core",
"files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd",
+ "sha256": "3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd",
+ "sha256": "3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd"
+ },
"arm64_big_sur": {
"cellar": "/opt/homebrew/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:e7c1efdd09e951eb46d01a3200b01e7fa55ce285b75470051be7fef34f4233ce",
- "sha256": "e7c1efdd09e951eb46d01a3200b01e7fa55ce285b75470051be7fef34f4233ce"
+ "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd",
+ "sha256": "3517ed5d96d5a40cd4c44a35f7799a239b82d9855a6799c6f60193768f9825cd"
},
- "big_sur": {
+ "ventura": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea37f33fd27369612a3e4e6db6adc46db0e8bdf6fac1332bf51bafaa66d43969",
- "sha256": "ea37f33fd27369612a3e4e6db6adc46db0e8bdf6fac1332bf51bafaa66d43969"
+ "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a",
+ "sha256": "ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a"
},
- "catalina": {
+ "monterey": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:69c28f5e60612801c66e51e93d32068f822b245ab83246cb6cb374572eb59e15",
- "sha256": "69c28f5e60612801c66e51e93d32068f822b245ab83246cb6cb374572eb59e15"
+ "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a",
+ "sha256": "ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a"
},
- "mojave": {
+ "big_sur": {
"cellar": "/usr/local/Cellar",
- "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:bf1e90ed1680b8ee1acb49f2f99426c8a8ac3e49efd63c7f3b41e57e7214dd19",
- "sha256": "bf1e90ed1680b8ee1acb49f2f99426c8a8ac3e49efd63c7f3b41e57e7214dd19"
+ "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a",
+ "sha256": "ea6a2d446679fd9bc460425f3fce2e1c4a27504c6e6ad2cd9c8f7380fc75988a"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/go/blobs/sha256:dabdff435af1ef8289dbfe4313cb190f5c61cb46b3b845b37d79beaf38c2434b",
+ "sha256": "dabdff435af1ef8289dbfe4313cb190f5c61cb46b3b845b37d79beaf38c2434b"
}
}
}
@@ -581,6 +866,143 @@
}
}
}
+ },
+ "gnu-sed": {
+ "version": "4.9",
+ "bottle": {
+ "rebuild": 0,
+ "root_url": "https://ghcr.io/v2/homebrew/core",
+ "files": {
+ "arm64_ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:5abaf39c16d02125db97d14cd36a96cf1a20a87821199cb38a55134fd4e0aaef",
+ "sha256": "5abaf39c16d02125db97d14cd36a96cf1a20a87821199cb38a55134fd4e0aaef"
+ },
+ "arm64_monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:20ae3f853a32e7f7f0f340e8c751ab7350888a655bfe7c5c20e5746c61a24fd7",
+ "sha256": "20ae3f853a32e7f7f0f340e8c751ab7350888a655bfe7c5c20e5746c61a24fd7"
+ },
+ "arm64_big_sur": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:d7c89842a90d03dbb497bc1ded17b7d732fe20eaf69613fd4abb48820ab80895",
+ "sha256": "d7c89842a90d03dbb497bc1ded17b7d732fe20eaf69613fd4abb48820ab80895"
+ },
+ "ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:a1ac59a9a6fa20c6c904e047df3ee4d0b4e57c0a5df3821b17b8cd82bcc67b5a",
+ "sha256": "a1ac59a9a6fa20c6c904e047df3ee4d0b4e57c0a5df3821b17b8cd82bcc67b5a"
+ },
+ "monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:f5e2460ad86516b2517f1e77d672a4fd6ad30b158c470cccbb3b6464f228674d",
+ "sha256": "f5e2460ad86516b2517f1e77d672a4fd6ad30b158c470cccbb3b6464f228674d"
+ },
+ "big_sur": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:c1c63d995d132a82fadc80b470eecfe816cb86c8cd716f01de5f003bc1199fcc",
+ "sha256": "c1c63d995d132a82fadc80b470eecfe816cb86c8cd716f01de5f003bc1199fcc"
+ },
+ "catalina": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:fb5ee7317d987d9ac7f2ee357736a9bc594c88b5fbbca4f6a65046f1c2898c44",
+ "sha256": "fb5ee7317d987d9ac7f2ee357736a9bc594c88b5fbbca4f6a65046f1c2898c44"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/gnu-sed/blobs/sha256:8abd5b48de6b706c1ce7c2f7b8775420f63078ba294bd5ad801e458776228bbc",
+ "sha256": "8abd5b48de6b706c1ce7c2f7b8775420f63078ba294bd5ad801e458776228bbc"
+ }
+ }
+ }
+ },
+ "postgresql@14": {
+ "version": "14.7",
+ "bottle": {
+ "rebuild": 0,
+ "root_url": "https://ghcr.io/v2/homebrew/core",
+ "files": {
+ "arm64_ventura": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd",
+ "sha256": "762067b573bf672b638b6354e1bed5fb675a7d3bb26ec0d6eac1bb1e24e427dd"
+ },
+ "arm64_monterey": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4",
+ "sha256": "0ade8371ec5d58225e90982d5e75a0cada9625eee44d903e3f809d847203d1d4"
+ },
+ "arm64_big_sur": {
+ "cellar": "/opt/homebrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9",
+ "sha256": "97a4a71a9373419b604ecde9de3d6480fc40a4648e56b050cf5d26e6edccd2c9"
+ },
+ "ventura": {
+ "cellar": "/usr/local/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1",
+ "sha256": "adfc715bdc8204a91dee0d20bf5cf04b6ec152f6105b7ae5b4cf006841c19cd1"
+ },
+ "monterey": {
+ "cellar": "/usr/local/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911",
+ "sha256": "d46d6770f2069a51b6b20310f46c26490672ec99b4c292b836fdc7ea4bbe4911"
+ },
+ "big_sur": {
+ "cellar": "/usr/local/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a",
+ "sha256": "af5b8ba17a1f9946396b130edd741088c0c7c7322c23891580ba3d3f0b2c026a"
+ },
+ "x86_64_linux": {
+ "cellar": "/home/linuxbrew/.linuxbrew/Cellar",
+ "url": "https://ghcr.io/v2/homebrew/core/postgresql/14/blobs/sha256:6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac",
+ "sha256": "6853d14ffd29a1f80dafc76d88583b769f272567cb39f6a9a6c717b73d0c89ac"
+ }
+ }
+ }
+ },
+ "goenv": {
+ "version": "2.0.6",
+ "bottle": {
+ "rebuild": 0,
+ "root_url": "https://ghcr.io/v2/homebrew/core",
+ "files": {
+ "arm64_ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4",
+ "sha256": "b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4"
+ },
+ "arm64_monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4",
+ "sha256": "b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4"
+ },
+ "arm64_big_sur": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4",
+ "sha256": "b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4"
+ },
+ "ventura": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9",
+ "sha256": "82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9"
+ },
+ "monterey": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9",
+ "sha256": "82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9"
+ },
+ "big_sur": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9",
+ "sha256": "82a9a7d404efa1f809605b3f5cc4ad99c55b0819a7088d00c8294effee7de3a9"
+ },
+ "x86_64_linux": {
+ "cellar": ":any_skip_relocation",
+ "url": "https://ghcr.io/v2/homebrew/core/goenv/blobs/sha256:b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4",
+ "sha256": "b0e5d50704c497080a8b7ba300d12e5086ac2e248a69aa0b25fe2148a8ae1cd4"
+ }
+ }
+ }
}
}
},
@@ -593,6 +1015,22 @@
"CLT": "12.4.0.0.1.1610135815",
"Xcode": "12.0",
"macOS": "10.15.7"
+ },
+ "big_sur": {
+ "HOMEBREW_VERSION": "4.0.6-126-g1a72b86",
+ "HOMEBREW_PREFIX": "/usr/local",
+ "Homebrew/homebrew-core": "api",
+ "CLT": "13.2.0.0.1.1638488800",
+ "Xcode": "13.2.1",
+ "macOS": "11.7.4"
+ },
+ "ventura": {
+ "HOMEBREW_VERSION": "4.0.6-147-gb3684e5",
+ "HOMEBREW_PREFIX": "/opt/homebrew",
+ "Homebrew/homebrew-core": "api",
+ "CLT": "14.2.0.0.1.1668646533",
+ "Xcode": "14.2",
+ "macOS": "13.2.1"
}
}
}
diff --git a/scripts/archive/split_precomputed_log.sh b/scripts/archive/split_precomputed_log.sh
new file mode 100755
index 00000000000..e447e63cdae
--- /dev/null
+++ b/scripts/archive/split_precomputed_log.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+if [[ $# -lt 1 ]]; then
+ echo "Usage: $0 precomputed-log-file [output-folder]"
+ exit 1
+fi
+
+
+
+ARCHIVE_URI=${ARCHIVE_URI:-postgres://postgres@localhost:5432/archive}
+PRECOMPUTED_LOG_FILE=$1
+
+while IFS= read -r line; do
+ LEDGER_HASH=$(echo $line | jq -r '.protocol_state.body.blockchain_state.staged_ledger_hash.non_snark.ledger_hash')
+ FILE_NAME=$(psql $ARCHIVE_URI -t -c "SELECT 'mainnet-' || height || '-' ||state_hash || '.json' FROM blocks WHERE ledger_hash = '$LEDGER_HASH'")
+ echo $line > $FILE_NAME
+done < $PRECOMPUTED_LOG_FILE
+
+
diff --git a/scripts/generate-community-keys.sh b/scripts/generate-community-keys.sh
new file mode 100755
index 00000000000..9e8543957dc
--- /dev/null
+++ b/scripts/generate-community-keys.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+set -u
+
+# This script depends on the mina daemon package, zip, and pwgen
+# to generate unique passwords for each key, and output a zip file containing:
+# the password along with the public and private keypairs
+# For convenience, the script finally zips together all of the individual zip files,
+# plus a txt file containing just the public keys
+# Set the prefix to the node name (like "community", "seed" or "block-producer")
+# and set count as the number of keys of this type to generate (e.g. "./generate-community-keys.sh bp 5" produces 5 keys, "bp-1" through "bp-5")
+PREFIX=$1
+COUNT=$2
+
+mkdir "${PREFIX}"
+cd "${PREFIX}"
+
+for i in $(seq 1 ${COUNT}); do
+
+ NODE="${PREFIX}-${i}"
+ PASS="${NODE}-password.txt"
+
+ mkdir "./${NODE}"
+
+ export MINA_PRIVKEY_PASS=$(pwgen --no-vowels --secure --ambiguous 64 1)
+ echo "${MINA_PRIVKEY_PASS}" > "${NODE}/${PASS}"
+
+ KEY="${NODE}-key"
+ PUB="${NODE}-key.pub"
+ ZIP="${NODE}.zip"
+
+ echo "Generating key for ${NODE}"
+ mina advanced generate-keypair --privkey-path "${NODE}/${KEY}" # 2> /dev/null
+
+ echo "Copying public key for use in ledgers:"
+ cp "${NODE}/${PUB}" .
+
+ echo "Generating zip file ${ZIP}"
+ zip -r "${ZIP}" "${NODE}"
+
+ echo "Cleaning up ${NODE} directory"
+ rm -rf ${NODE}
+done
+
+echo "Combining .pub files into one ${PREFIX}-keys.txt and cleaning up"
+cat ${PREFIX}-*.pub > ${PREFIX}-keys.txt
+cp ${PREFIX}-keys.txt ../
+rm -rf ${PREFIX}-*.pub
+
+cd ..
+echo "All keys generated successfully! Combining into one zip file"
+zip -r "${PREFIX}.zip" "${PREFIX}"
+
+rm -rf "${PREFIX}"
diff --git a/scripts/mina-local-network/mina-local-network.sh b/scripts/mina-local-network/mina-local-network.sh
old mode 100644
new mode 100755
index 93ae1860c01..890869d19fa
--- a/scripts/mina-local-network/mina-local-network.sh
+++ b/scripts/mina-local-network/mina-local-network.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
-# set -x
+#set -x
# Exit script when commands fail
set -e
@@ -34,6 +34,7 @@ ZKAPP_TRANSACTIONS=false
RESET=false
UPDATE_GENESIS_TIMESTAMP=false
PROOF_LEVEL="full"
+LOG_PRECOMPUTED_BLOCKS=false
SNARK_WORKER_FEE=0.001
TRANSACTION_FREQUENCY=10 # in seconds
@@ -116,6 +117,8 @@ help() {
echo " | Default: ${TRANSACTION_FREQUENCY}"
echo "-sf |--snark-worker-fee <#> | SNARK Worker fee"
echo " | Default: ${SNARK_WORKER_FEE}"
+ echo "-lp |--log-precomputed-blocks | Log precomputed blocks"
+ echo " | Default: ${LOG_PRECOMPUTED_BLOCKS}"
echo "-pl |--proof-level | Proof level (currently consumed by SNARK Workers only)"
echo " | Default: ${PROOF_LEVEL}"
echo "-r |--reset | Whether to reset the Mina Local Network storage file-system (presence of argument)"
@@ -170,6 +173,8 @@ exec-daemon() {
-log-json \
-log-level ${LOG_LEVEL} \
-file-log-level ${FILE_LOG_LEVEL} \
+ -precomputed-blocks-file ${FOLDER}/precomputed_blocks.log \
+ -log-precomputed-blocks ${LOG_PRECOMPUTED_BLOCKS} \
$@
}
@@ -337,6 +342,7 @@ while [[ "$#" -gt 0 ]]; do
SNARK_WORKER_FEE="${2}"
shift
;;
+ -lp | --log-precomputed-blocks) LOG_PRECOMPUTED_BLOCKS=true ;;
-pl | --proof-level)
PROOF_LEVEL="${2}"
shift
diff --git a/scripts/version-linter.py b/scripts/version-linter.py
index 8b334f8f213..4d19c9be0ce 100755
--- a/scripts/version-linter.py
+++ b/scripts/version-linter.py
@@ -2,6 +2,30 @@
# version-linter.py -- makes sure serializations of versioned types don't change
+"""
+For the PR branch, PR base branch, and release branch, download the
+type shapes file from Google storage There should be a type shape file
+available for every commit in a PR branch.
+
+For each branch, store the type shape information in a Python dictionary, truncating
+the shapes at a maximum depth.
+
+For each type, compare the type shapes of each branch. If the shapes don't match, print an
+error message. The exact comparison rules are given in RFC 0047 (with some embellishments
+mentioned below).
+
+The maximum depth should be set high enough so that all differences are caught
+(no false negatives).
+
+There may be some false positives, where a difference is reported for
+type t1 due to a change to a type t2 contained in t1. The
+difference will always also be reported for t2 directly. The maximum
+depth should be set low enough to minimize such false positives.
+
+There are some special rules for the types associated with signed commands and zkApp commands.
+See `check_command_types` below.
+"""
+
import subprocess
import os
import io
diff --git a/src/app/archive/create_schema.sql b/src/app/archive/create_schema.sql
index c3c232465db..c6004db6d11 100644
--- a/src/app/archive/create_schema.sql
+++ b/src/app/archive/create_schema.sql
@@ -135,7 +135,7 @@ CREATE TABLE protocol_versions
, transaction int NOT NULL
, network int NOT NULL
, patch int NOT NULL
-, UNIQUE (transaction,network)
+, UNIQUE (transaction,network,patch)
);
CREATE TYPE chain_status_type AS ENUM ('canonical', 'orphaned', 'pending');
diff --git a/src/app/archive/lib/load_data.ml b/src/app/archive/lib/load_data.ml
index 0dc3c4bb1bc..70fc28e66f2 100644
--- a/src/app/archive/lib/load_data.ml
+++ b/src/app/archive/lib/load_data.ml
@@ -1,5 +1,9 @@
(* load_data.ml -- load archive db data to "native" OCaml data *)
+(* these functions are used by the replayer and `extract_blocks` to load particular pieces
+ of archive db data
+*)
+
open Core_kernel
open Async
open Mina_base
diff --git a/src/app/archive/lib/processor.ml b/src/app/archive/lib/processor.ml
index a0322fd7bbb..bb1b346640f 100644
--- a/src/app/archive/lib/processor.ml
+++ b/src/app/archive/lib/processor.ml
@@ -1,5 +1,23 @@
(* processor.ml -- database processing for archive node *)
+(* For each table in the archive database schema, a
+ corresponding module contains code to read from and write to
+ that table. The module defines a type `t`, a record with fields
+   corresponding to columns in the table; typically, the `id` column
+   does not have an associated field.
+
+ The more recently written modules use the Mina_caqti library to
+ construct the SQL for those queries. For consistency and
+ simplicity, the older modules should probably be refactored to use
+ Mina_caqti.
+
+ Module `Account_identifiers` is a good example of how Mina_caqti
+ can be used.
+
+ After these table-related modules, there are functions related to
+ running the archive process and archive-related apps.
+*)
+
module Archive_rpc = Rpc
open Async
open Core
@@ -51,6 +69,7 @@ module Public_key = struct
public_key
end
+(* Unlike other modules here, `Token_owners` does not correspond with a database table *)
module Token_owners = struct
(* hash table of token owners, updated for each block *)
let owner_tbl : Account_id.t Token_id.Table.t = Token_id.Table.create ()
@@ -3755,6 +3774,7 @@ let add_block_aux ?(retries = 3) ~logger ~pool ~add_block ~hash
in
retry ~f:add ~logger ~error_str:"add_block_aux" retries
+(* used by `archive_blocks` app *)
let add_block_aux_precomputed ~constraint_constants ~logger ?retries ~pool
~delete_older_than block =
add_block_aux ~logger ?retries ~pool ~delete_older_than
@@ -3765,6 +3785,7 @@ let add_block_aux_precomputed ~constraint_constants ~logger ?retries ~pool
~accounts_created:block.Precomputed.accounts_created
~tokens_used:block.Precomputed.tokens_used block
+(* used by `archive_blocks` app *)
let add_block_aux_extensional ~logger ?retries ~pool ~delete_older_than block =
add_block_aux ~logger ?retries ~pool ~delete_older_than
~add_block:Block.add_from_extensional
@@ -3773,6 +3794,7 @@ let add_block_aux_extensional ~logger ?retries ~pool ~delete_older_than block =
~accounts_created:block.Extensional.Block.accounts_created
~tokens_used:block.Extensional.Block.tokens_used block
+(* receive blocks from a daemon, write them to the database *)
let run pool reader ~constraint_constants ~logger ~delete_older_than :
unit Deferred.t =
Strict_pipe.Reader.iter reader ~f:(function
@@ -3799,6 +3821,7 @@ let run pool reader ~constraint_constants ~logger ~delete_older_than :
| Transition_frontier _ ->
Deferred.unit )
+(* [add_genesis_accounts] is called when starting the archive process *)
let add_genesis_accounts ~logger ~(runtime_config_opt : Runtime_config.t option)
pool =
match runtime_config_opt with
@@ -3976,6 +3999,7 @@ let create_metrics_server ~logger ~metrics_server_port ~missing_blocks_width
in
go ()
+(* for running the archive process *)
let setup_server ~metrics_server_port ~constraint_constants ~logger
~postgres_address ~server_port ~delete_older_than ~runtime_config_opt
~missing_blocks_width =
diff --git a/src/app/archive_blocks/README.md b/src/app/archive_blocks/README.md
new file mode 100644
index 00000000000..718e7a48467
--- /dev/null
+++ b/src/app/archive_blocks/README.md
@@ -0,0 +1,20 @@
+archive_blocks
+==============
+
+The `archive_blocks` app adds blocks in either "precomputed" or
+"extensional" format to the archive database.
+
+Precomputed blocks are stored in the bucket `mina_network_block_data`
+on Google Cloud Storage. Blocks are named NETWORK-HEIGHT-STATEHASH.json.
+Example: mainnet-100000-3NKLvMCimUjX1zjjiC3XPMT34D1bVQGzkKW58XDwFJgQ5wDQ9Tki.json.
+
+Extensional blocks are extracted from other archive databases using
+the `extract_blocks` app.
+
+As many blocks as are available can be added at a time, but all blocks
+must be in the same format.
+
+Except for blocks from the original mainnet, both precomputed and
+extensional blocks have a version in their JSON representation. That
+version must match the corresponding OCaml type in the code when this
+app was built.
diff --git a/src/app/archive_blocks/archive_blocks.ml b/src/app/archive_blocks/archive_blocks.ml
index caa4203939e..7851ca04518 100644
--- a/src/app/archive_blocks/archive_blocks.ml
+++ b/src/app/archive_blocks/archive_blocks.ml
@@ -1,4 +1,4 @@
-(* archive_blocks.ml -- archive precomputed or extensional blocks to Postgresql *)
+(* archive_blocks.ml *)
open Core_kernel
open Async
diff --git a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml
index be21243b53e..038551dbb15 100644
--- a/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml
+++ b/src/app/cli/src/cli_entrypoint/mina_cli_entrypoint.ml
@@ -73,9 +73,12 @@ let setup_daemon logger =
flag "--block-producer-key" ~aliases:[ "block-producer-key" ]
~doc:
(sprintf
- "KEYFILE Private key file for the block producer. You cannot \
+ "DEPRECATED: Use environment variable `MINA_BP_PRIVKEY` instead. \
+ Private key file for the block producer. Providing this flag or \
+ the environment variable will enable block production. You cannot \
provide both `block-producer-key` and `block-producer-pubkey`. \
- (default: don't produce blocks). %s"
+ (default: use environment variable `MINA_BP_PRIVKEY`, if provided, \
+ or else don't produce any blocks) %s"
receiver_key_warning )
(optional string)
and block_production_pubkey =
@@ -85,8 +88,8 @@ let setup_daemon logger =
(sprintf
"PUBLICKEY Public key for the associated private key that is being \
tracked by this daemon. You cannot provide both \
- `block-producer-key` and `block-producer-pubkey`. (default: don't \
- produce blocks). %s"
+ `block-producer-key` (or `MINA_BP_PRIVKEY`) and \
+ `block-producer-pubkey`. (default: don't produce blocks) %s"
receiver_key_warning )
(optional public_key_compressed)
and block_production_password =
@@ -953,21 +956,39 @@ let setup_daemon logger =
Unix.putenv ~key:Secrets.Keypair.env ~data:password )
block_production_password ;
let%bind block_production_keypair =
- match (block_production_key, block_production_pubkey) with
- | Some _, Some _ ->
+ match
+ ( block_production_key
+ , block_production_pubkey
+ , Sys.getenv "MINA_BP_PRIVKEY" )
+ with
+ | Some _, Some _, _ ->
Mina_user_error.raise
"You cannot provide both `block-producer-key` and \
`block_production_pubkey`"
- | None, None ->
+ | None, Some _, Some _ ->
+ Mina_user_error.raise
+ "You cannot provide both `MINA_BP_PRIVKEY` and \
+ `block_production_pubkey`"
+ | None, None, None ->
Deferred.return None
- | Some sk_file, _ ->
+ | None, None, Some base58_privkey ->
+ let kp =
+ Private_key.of_base58_check_exn base58_privkey
+ |> Keypair.of_private_key_exn
+ in
+ Deferred.return (Some kp)
+ (* CLI argument takes precedence over env variable *)
+ | Some sk_file, None, (Some _ | None) ->
+ [%log warn]
+ "`block-producer-key` is deprecated. Please set \
+ `MINA_BP_PRIVKEY` environment variable instead." ;
let%map kp =
Secrets.Keypair.Terminal_stdin.read_exn
~should_prompt_user:false ~which:"block producer keypair"
sk_file
in
Some kp
- | _, Some tracked_pubkey ->
+ | None, Some tracked_pubkey, None ->
let%map kp =
Secrets.Wallets.get_tracked_keypair ~logger
~which:"block producer keypair"
diff --git a/src/app/cli/src/init/client.ml b/src/app/cli/src/init/client.ml
index d63b616495e..5d034aa6d67 100644
--- a/src/app/cli/src/init/client.ml
+++ b/src/app/cli/src/init/client.ml
@@ -1034,13 +1034,13 @@ let pending_snark_work =
(Array.map
~f:(fun bundle ->
Array.map bundle.workBundle ~f:(fun w ->
- let f = w.fee_excess in
+ let fee_excess_left = w.fee_excess.feeExcessLeft in
{ Cli_lib.Graphql_types.Pending_snark_work.Work
.work_id = w.work_id
; fee_excess =
Currency.Amount.Signed.of_fee
- (to_signed_fee_exn f.sign
- (Currency.Amount.to_fee f.fee_magnitude) )
+ (to_signed_fee_exn fee_excess_left.sign
+ fee_excess_left.feeMagnitude )
; supply_increase = w.supply_increase
; source_first_pass_ledger_hash =
w.source_first_pass_ledger_hash
@@ -1512,7 +1512,7 @@ let create_account =
in
let pk_string =
Public_key.Compressed.to_base58_check
- response.createAccount.public_key
+ response.createAccount.account.public_key
in
printf "\n😄 Added new account!\nPublic key: %s\n" pk_string ) )
@@ -1529,7 +1529,7 @@ let create_hd_account =
in
let pk_string =
Public_key.Compressed.to_base58_check
- response.createHDAccount.public_key
+ response.createHDAccount.account.public_key
in
printf "\n😄 created HD account with HD-index %s!\nPublic key: %s\n"
(Mina_numbers.Hd_index.to_string hd_index)
@@ -1563,7 +1563,7 @@ let unlock_account =
in
let pk_string =
Public_key.Compressed.to_base58_check
- response.unlockAccount.public_key
+ response.unlockAccount.account.public_key
in
printf "\n🔓 Unlocked account!\nPublic key: %s\n" pk_string
| Error e ->
diff --git a/src/app/cli/src/init/graphql_queries.ml b/src/app/cli/src/init/graphql_queries.ml
index 50e0a9da10c..f9534625159 100644
--- a/src/app/cli/src/init/graphql_queries.ml
+++ b/src/app/cli/src/init/graphql_queries.ml
@@ -45,7 +45,7 @@ module Create_account =
{|
mutation ($password: String!) @encoders(module: "Encoders"){
createAccount(input: {password: $password}) {
- public_key: publicKey
+ account: account { public_key : publicKey }
}
}
|}]
@@ -55,7 +55,7 @@ module Create_hd_account =
{|
mutation ($hd_index: UInt32!) @encoders(module: "Encoders"){
createHDAccount(input: {index: $hd_index}) {
- public_key: publicKey
+ account : account { public_key: publicKey }
}
}
|}]
@@ -65,7 +65,7 @@ module Unlock_account =
{|
mutation ($password: String!, $public_key: PublicKey!) @encoders(module: "Encoders"){
unlockAccount(input: {password: $password, publicKey: $public_key }) {
- public_key: publicKey
+ account: account { public_key: publicKey }
}
}
|}]
@@ -109,10 +109,18 @@ query pendingSnarkWork {
source_second_pass_ledger_hash: sourceSecondPassLedgerHash
target_second_pass_ledger_hash: targetSecondPassLedgerHash
fee_excess: feeExcess {
- sign
- fee_magnitude: feeMagnitude
+ feeTokenLeft
+ feeExcessLeft {
+ sign
+ feeMagnitude
+ }
+ feeTokenRight
+ feeExcessRight {
+ sign
+ feeMagnitude
+ }
}
- supply_increase: supplyIncrease
+ supply_increase : supplyIncrease
work_id: workId
}
}
@@ -207,10 +215,10 @@ module Pooled_user_commands =
query user_commands($public_key: PublicKey) @encoders(module: "Encoders"){
pooledUserCommands(publicKey: $public_key) @bsRecord {
id
- isDelegation
+ kind
nonce
- from
- to_: to
+ feePayer { public_key: publicKey }
+ receiver { public_key: publicKey }
amount
fee
memo
diff --git a/src/app/extract_blocks/README.md b/src/app/extract_blocks/README.md
new file mode 100644
index 00000000000..bbaecdbb29b
--- /dev/null
+++ b/src/app/extract_blocks/README.md
@@ -0,0 +1,12 @@
+extract_blocks
+==============
+
+The `extract_blocks` app pulls out individual blocks from an archive
+database in "extensional" format. Such blocks can be added to other
+archive databases using the `archive_blocks` app.
+
+Blocks are extracted into files named `<state-hash>.json`.
+
+The app offers the choice to extract all canonical blocks, or a
+subchain specified with starting state hash, or a subchain specified
+with starting and ending state hashes.
diff --git a/src/app/extract_blocks/extract_blocks.ml b/src/app/extract_blocks/extract_blocks.ml
index f4fa0ac2e77..e7f58718cba 100644
--- a/src/app/extract_blocks/extract_blocks.ml
+++ b/src/app/extract_blocks/extract_blocks.ml
@@ -1,4 +1,5 @@
(* extract_blocks.ml -- dump extensional blocks from archive db *)
+
[@@@coverage exclude_file]
open Core_kernel
diff --git a/src/app/heap_usage/values.ml b/src/app/heap_usage/values.ml
index 5c36b5e8528..386a1b200d2 100644
--- a/src/app/heap_usage/values.ml
+++ b/src/app/heap_usage/values.ml
@@ -242,7 +242,7 @@ let scan_state_merge_node :
let sok_msg : Mina_base.Sok_message.t =
{ fee = Currency.Fee.zero; prover = sample_pk_compressed }
in
- let proof = Mina_base.Proof.transaction_dummy in
+ let proof = Lazy.force Mina_base.Proof.transaction_dummy in
let statement =
let without_sok =
Quickcheck.random_value ~seed:(`Deterministic "no sok left")
@@ -258,7 +258,7 @@ let scan_state_merge_node :
{ fee = Currency.Fee.zero; prover = sample_pk_compressed }
in
(* so the left, right proofs differ, don't want sharing *)
- let proof = Mina_base.Proof.blockchain_dummy in
+ let proof = Lazy.force Mina_base.Proof.blockchain_dummy in
let statement =
let without_sok =
Quickcheck.random_value ~seed:(`Deterministic "no sok right")
diff --git a/src/app/missing_blocks_auditor/README.md b/src/app/missing_blocks_auditor/README.md
new file mode 100644
index 00000000000..e5db4ee64a3
--- /dev/null
+++ b/src/app/missing_blocks_auditor/README.md
@@ -0,0 +1,10 @@
+missing_blocks_auditor
+======================
+
+The `missing_blocks_auditor` app looks for blocks without parent
+blocks in an archive database.
+
+The app also looks for blocks marked as pending that are lower (have a
+lesser height) than the highest (most recent) canonical block. There
+can be such blocks if blocks are added when there are missing blocks
+in the database.
diff --git a/src/app/missing_blocks_auditor/missing_blocks_auditor.ml b/src/app/missing_blocks_auditor/missing_blocks_auditor.ml
index 5bce995ea28..e6fd538f1b5 100644
--- a/src/app/missing_blocks_auditor/missing_blocks_auditor.ml
+++ b/src/app/missing_blocks_auditor/missing_blocks_auditor.ml
@@ -1,4 +1,4 @@
-(* missing_blocks_auditor.ml -- report missing blocks from an archive db *)
+(* missing_blocks_auditor.ml *)
open Core_kernel
open Async
diff --git a/src/app/print_blockchain_snark_vk/.ocamlformat b/src/app/print_blockchain_snark_vk/.ocamlformat
new file mode 120000
index 00000000000..8e0a87983aa
--- /dev/null
+++ b/src/app/print_blockchain_snark_vk/.ocamlformat
@@ -0,0 +1 @@
+../../.ocamlformat
\ No newline at end of file
diff --git a/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json b/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json
new file mode 100644
index 00000000000..c964bad4454
--- /dev/null
+++ b/src/app/print_blockchain_snark_vk/blockchain_snark_vk.json
@@ -0,0 +1 @@
+{"commitments":{"sigma_comm":[["0x1A8B2FCF1A5D4F6D1902517B6BF31A4288186F5AA647804402E14E4DC63ABE1F","0x3224B1C182312B364F659896357A40D2EF566959E6C11AD0A347EA80D82CDBF6"],["0x22CFAD2E118F91BE8E133664585947E25A282017E007700D67F0D72999639BCC","0x31AA847B558743C3CAD651B085327F0B67E1C1BFEA2FF472976589ADD952E6D4"],["0x02FB6661EE11262AFC7C8B0389A9B1E4ED18263C3F2468AAF035A07ECC61C9EB","0x30705D77AB82D6BAAC8E23640BBAB40F7E7FB53826CD5011575CAE52C6D0517D"],["0x0FD615E77CF392BE8285848DF60D4F87350ABCD65200FC4932E309133D145D05","0x3E75EA0E3810A97D8F1D1F8A6B0F9AE5D0F68AE34A4D0EFC29F00AB3BF6D480E"],["0x20C89F37CF8E43831F077DFF9AB9A676907BF9A0F64E418A643667808AB72DAB","0x389F98A86437D8A7F667ABB9B0B0DEACBD7E420B583B795363EBCECDBF1C187B"],["0x2F5C56AA39D6FD87055E7CA6F5FA61E94FCF17336DA4476E328D2CC184F93D47","0x089B80235867482E24087360E161AC76A5249D826CFAC51AB537093D86EAA632"],["0x20216D064B7466C42626F66F3F141997E6F29D17CC6EBA8FF91E1E03D31FE555","0x0082380869AFA6A961738C93BCEEA3042F3ECEA143A585B565A2BF6BD78A1224"]],"coefficients_comm":[["0x2113503AE01039434E72D555369C460EE5C45260396DD4782CE0BE81B19F2F6C","0x2F72056BEC498916F4176C6410A31801F81CDE4D427F1C15566C018393751E2B"],["0x08060FD15238AE40E726A04EA5C1AE41D8A1D267C9AF7355A8D6C70607139922","0x13D36CAD9C78FD0AF514AB7ECAC433592389AE84283024B1D4C85D2E8C952B28"],["0x3AE007735587246066F71A1F09801C359E393CBF49DBDFEDD01E61AA88ECAF0F","0x15882EBC62C0E9BC93A14F2CB2ECE43402063B50C09CE735E56D51447689D2C6"],["0x3DCBFD7F258F598AEFB560A1B3BCE632B4AC20534FEFF3B35C82082FE708B236","0x254668B50AB44E074902E0DB981FDF045FC614C1E61EBDDA27C06DF9131FC71A"],["0x364F1609C5A72CC0A6F5966400AE1A5AA4B7684805B46D0EE09001F7618DD614","0x0F3E45A3DEC2B73BEC5E91ED78BB750B87C02A2339E5A40C2D51FAA0EE1D5B7E"],["0x2CD387D1747E5594DF7FBAE6B40C7A674875F6F9FBB4E3632FBAFD49D41E67A6","0x33C14453CA5F229C77B5259999798D42B36BE08F68C09F969937C2C13FE1C34D"],["0x3E32E6702761E653043644E9DC5D75FE7A351B764CD90D72DD1176DB1A541121","0x0454C684E407E066394B31CFCCBAF22844B7E17D162EFE86003096
0A38AD2B2E"],["0x1AE2CE814A97742F41565FEB5881767A2BCF55981A3C8ED33BAE069CBE652FCA","0x14DC3F97387117CCA4884F28DCC82C9CF1B7101623B481FD3D1588F568B3E26B"],["0x0A10FA40BB6C61E8185412EE2BAE86C51D19EA7276593DFA7FA3FABB0345521F","0x3A8ACF73B5EF4E52ED5DC96404A60C69A067B84FE77211C5407A48437BD5CF89"],["0x21B2C2D62891139A91056B9D298DA8713B7ADA51963B864A5A2376771A1AA766","0x1AC7782A588865397F219624D70D11A109283E82B9CD50FFEE23753461E755FE"],["0x2763E7A5B2C387147A0381B50D5C504A5012B7F7CA474C2B4A351011B9BBD036","0x13DEA6F4AEBDC606331746A886756C7EA266A52F60B45DE5544A04BFDB277455"],["0x32596E43A053571EE478A3106CABFE9ECB29437F78A978409B4DDE33FE637103","0x3D76AF5EE3EFF37E666087AC2827A8BD0D9690BF440FF24434DA3E6AFF7A2AF4"],["0x1D73FE7224F38A37B2C69E22FA6750BABAED17B0F9998877B33E488D9063CE8E","0x3E24CEADB1BDA92A0DBDA0F90DF3B8FBD7C6F7ABCC039E9D41AB6916A799F645"],["0x2FDF5D887BC70465AFAC06B7A43632732B5AF0B933EA045D35E99D532BD44CAF","0x211A76FD7B76DF3E693CAA0BBB5C178D5DDE827AB6A902AF04AB39D8040E29DF"],["0x0D29BA887D54D915CFB321573463A3CAF5C354848A51FFD6D4FFC0E0B4464D39","0x232829C5C662E87CD033AFB3E84E86EC342F4942AC9D75123A21172EE06CF567"]],"generic_comm":["0x363662743B4E693E18C0CF5FB651DF1205BB27FABCD93ADF54ECD973B21B921B","0x116FBA051A4A146C88BCB0F2B56309429CD63514EEEFBE6EA0B39927E72BB20C"],"psm_comm":["0x0A8B3EF5670C6367C721EDAA40AF511C18B1602A4732FEA01124D5D949304324","0x1DBE04516C4A33CDFBBD1F54F413B9F21B5D41B6CD668B249879A2688693E51B"],"complete_add_comm":["0x1E859218F11F787CE75C06FD5303457CBD307BDEEB693CC66A235CB85B314D4B","0x228167E190903072E8F34BD7AF61A0C02DE0BC3D54FF8760A2BCBFDD6A880688"],"mul_comm":["0x3EC97D3A8CD405A92B31B67184817925B99B1527065A28677AEAAEC37CC7B9C5","0x3844006206FF29A55DBB44A3D06E46610639E24E960B4BC32A663EEC4D04C689"],"emul_comm":["0x114772020FAF5E6660D7D75B666B7121829027A866A8214B42899E824D820CB9","0x01F7FC015E2F0C5E02E34F0FD6FBA0FCE01E40EA183F0F6F7C197553524A96B9"],"endomul_scalar_comm":["0x04C30A9B6412594ECD5EEFCA20D4B759BBC52B08868E01F74FDC82B557A76ADD","0x01
9413D8112950CB93D20BA29452DC281FFE1A692706C34BD148E331F844D244"]},"index":{"domain":{"log_size_of_group":14,"group_gen":"0x1E5587687024253BB079B38D9C5371594958E496C605D3BD898B34D068AFBEE7"},"max_poly_size":32768,"public":40,"prev_challenges":2,"srs":null,"evals":{"sigma_comm":[{"unshifted":[["Finite",["0x1A8B2FCF1A5D4F6D1902517B6BF31A4288186F5AA647804402E14E4DC63ABE1F","0x3224B1C182312B364F659896357A40D2EF566959E6C11AD0A347EA80D82CDBF6"]]],"shifted":null},{"unshifted":[["Finite",["0x22CFAD2E118F91BE8E133664585947E25A282017E007700D67F0D72999639BCC","0x31AA847B558743C3CAD651B085327F0B67E1C1BFEA2FF472976589ADD952E6D4"]]],"shifted":null},{"unshifted":[["Finite",["0x02FB6661EE11262AFC7C8B0389A9B1E4ED18263C3F2468AAF035A07ECC61C9EB","0x30705D77AB82D6BAAC8E23640BBAB40F7E7FB53826CD5011575CAE52C6D0517D"]]],"shifted":null},{"unshifted":[["Finite",["0x0FD615E77CF392BE8285848DF60D4F87350ABCD65200FC4932E309133D145D05","0x3E75EA0E3810A97D8F1D1F8A6B0F9AE5D0F68AE34A4D0EFC29F00AB3BF6D480E"]]],"shifted":null},{"unshifted":[["Finite",["0x20C89F37CF8E43831F077DFF9AB9A676907BF9A0F64E418A643667808AB72DAB","0x389F98A86437D8A7F667ABB9B0B0DEACBD7E420B583B795363EBCECDBF1C187B"]]],"shifted":null},{"unshifted":[["Finite",["0x2F5C56AA39D6FD87055E7CA6F5FA61E94FCF17336DA4476E328D2CC184F93D47","0x089B80235867482E24087360E161AC76A5249D826CFAC51AB537093D86EAA632"]]],"shifted":null},{"unshifted":[["Finite",["0x20216D064B7466C42626F66F3F141997E6F29D17CC6EBA8FF91E1E03D31FE555","0x0082380869AFA6A961738C93BCEEA3042F3ECEA143A585B565A2BF6BD78A1224"]]],"shifted":null}],"coefficients_comm":[{"unshifted":[["Finite",["0x2113503AE01039434E72D555369C460EE5C45260396DD4782CE0BE81B19F2F6C","0x2F72056BEC498916F4176C6410A31801F81CDE4D427F1C15566C018393751E2B"]]],"shifted":null},{"unshifted":[["Finite",["0x08060FD15238AE40E726A04EA5C1AE41D8A1D267C9AF7355A8D6C70607139922","0x13D36CAD9C78FD0AF514AB7ECAC433592389AE84283024B1D4C85D2E8C952B28"]]],"shifted":null},{"unshifted":[["Finite",["0x3AE007735587246066F71A1F09801C359E
393CBF49DBDFEDD01E61AA88ECAF0F","0x15882EBC62C0E9BC93A14F2CB2ECE43402063B50C09CE735E56D51447689D2C6"]]],"shifted":null},{"unshifted":[["Finite",["0x3DCBFD7F258F598AEFB560A1B3BCE632B4AC20534FEFF3B35C82082FE708B236","0x254668B50AB44E074902E0DB981FDF045FC614C1E61EBDDA27C06DF9131FC71A"]]],"shifted":null},{"unshifted":[["Finite",["0x364F1609C5A72CC0A6F5966400AE1A5AA4B7684805B46D0EE09001F7618DD614","0x0F3E45A3DEC2B73BEC5E91ED78BB750B87C02A2339E5A40C2D51FAA0EE1D5B7E"]]],"shifted":null},{"unshifted":[["Finite",["0x2CD387D1747E5594DF7FBAE6B40C7A674875F6F9FBB4E3632FBAFD49D41E67A6","0x33C14453CA5F229C77B5259999798D42B36BE08F68C09F969937C2C13FE1C34D"]]],"shifted":null},{"unshifted":[["Finite",["0x3E32E6702761E653043644E9DC5D75FE7A351B764CD90D72DD1176DB1A541121","0x0454C684E407E066394B31CFCCBAF22844B7E17D162EFE860030960A38AD2B2E"]]],"shifted":null},{"unshifted":[["Finite",["0x1AE2CE814A97742F41565FEB5881767A2BCF55981A3C8ED33BAE069CBE652FCA","0x14DC3F97387117CCA4884F28DCC82C9CF1B7101623B481FD3D1588F568B3E26B"]]],"shifted":null},{"unshifted":[["Finite",["0x0A10FA40BB6C61E8185412EE2BAE86C51D19EA7276593DFA7FA3FABB0345521F","0x3A8ACF73B5EF4E52ED5DC96404A60C69A067B84FE77211C5407A48437BD5CF89"]]],"shifted":null},{"unshifted":[["Finite",["0x21B2C2D62891139A91056B9D298DA8713B7ADA51963B864A5A2376771A1AA766","0x1AC7782A588865397F219624D70D11A109283E82B9CD50FFEE23753461E755FE"]]],"shifted":null},{"unshifted":[["Finite",["0x2763E7A5B2C387147A0381B50D5C504A5012B7F7CA474C2B4A351011B9BBD036","0x13DEA6F4AEBDC606331746A886756C7EA266A52F60B45DE5544A04BFDB277455"]]],"shifted":null},{"unshifted":[["Finite",["0x32596E43A053571EE478A3106CABFE9ECB29437F78A978409B4DDE33FE637103","0x3D76AF5EE3EFF37E666087AC2827A8BD0D9690BF440FF24434DA3E6AFF7A2AF4"]]],"shifted":null},{"unshifted":[["Finite",["0x1D73FE7224F38A37B2C69E22FA6750BABAED17B0F9998877B33E488D9063CE8E","0x3E24CEADB1BDA92A0DBDA0F90DF3B8FBD7C6F7ABCC039E9D41AB6916A799F645"]]],"shifted":null},{"unshifted":[["Finite",["0x2FDF5D887BC70465AFAC06B7A4363273
2B5AF0B933EA045D35E99D532BD44CAF","0x211A76FD7B76DF3E693CAA0BBB5C178D5DDE827AB6A902AF04AB39D8040E29DF"]]],"shifted":null},{"unshifted":[["Finite",["0x0D29BA887D54D915CFB321573463A3CAF5C354848A51FFD6D4FFC0E0B4464D39","0x232829C5C662E87CD033AFB3E84E86EC342F4942AC9D75123A21172EE06CF567"]]],"shifted":null}],"generic_comm":{"unshifted":[["Finite",["0x363662743B4E693E18C0CF5FB651DF1205BB27FABCD93ADF54ECD973B21B921B","0x116FBA051A4A146C88BCB0F2B56309429CD63514EEEFBE6EA0B39927E72BB20C"]]],"shifted":null},"psm_comm":{"unshifted":[["Finite",["0x0A8B3EF5670C6367C721EDAA40AF511C18B1602A4732FEA01124D5D949304324","0x1DBE04516C4A33CDFBBD1F54F413B9F21B5D41B6CD668B249879A2688693E51B"]]],"shifted":null},"complete_add_comm":{"unshifted":[["Finite",["0x1E859218F11F787CE75C06FD5303457CBD307BDEEB693CC66A235CB85B314D4B","0x228167E190903072E8F34BD7AF61A0C02DE0BC3D54FF8760A2BCBFDD6A880688"]]],"shifted":null},"mul_comm":{"unshifted":[["Finite",["0x3EC97D3A8CD405A92B31B67184817925B99B1527065A28677AEAAEC37CC7B9C5","0x3844006206FF29A55DBB44A3D06E46610639E24E960B4BC32A663EEC4D04C689"]]],"shifted":null},"emul_comm":{"unshifted":[["Finite",["0x114772020FAF5E6660D7D75B666B7121829027A866A8214B42899E824D820CB9","0x01F7FC015E2F0C5E02E34F0FD6FBA0FCE01E40EA183F0F6F7C197553524A96B9"]]],"shifted":null},"endomul_scalar_comm":{"unshifted":[["Finite",["0x04C30A9B6412594ECD5EEFCA20D4B759BBC52B08868E01F74FDC82B557A76ADD","0x019413D8112950CB93D20BA29452DC281FFE1A692706C34BD148E331F844D244"]]],"shifted":null},"xor_comm":null,"range_check0_comm":null,"range_check1_comm":null,"foreign_field_add_comm":null,"foreign_field_mul_comm":null,"rot_comm":null},"shifts":["0x0000000000000000000000000000000000000000000000000000000000000001","0x00B9CDC8FD0BD4B27E2A74AF7AEBD5734D52D75BDF85EBF1CAD03413E914A2E3","0x007CF68160D84012626E0046A932AD12E68B3394D6E2A001A537FFB40D3527C6","0x0077D45AECB939AE97A3952B48189964AA209609F19BE4A4B89F339A33440F6D","0x0077C7E54505D4771F6AF1FED2195500481EF1F3C0397B0AC819E678BD2309B4","0x00B3AF68ECC
6AE7A4727F0708EDF4736BE1C99281FA380846E42264C62407484","0x00381CA4536FC0ED935D50A74A87136F1A0675B618898DBCE67E564AB20174A1"],"lookup_index":null,"zk_rows":3},"data":{"constraints":16384}}
\ No newline at end of file
diff --git a/src/app/print_blockchain_snark_vk/dune b/src/app/print_blockchain_snark_vk/dune
new file mode 100644
index 00000000000..3b1137fd543
--- /dev/null
+++ b/src/app/print_blockchain_snark_vk/dune
@@ -0,0 +1,18 @@
+(executable
+ (name print_blockchain_snark_vk)
+ (libraries
+ blockchain_snark)
+ (instrumentation (backend bisect_ppx))
+ (preprocess (pps ppx_version)))
+
+(rule
+ (deps print_blockchain_snark_vk.exe)
+ (targets blockchain_snark_vk.json.corrected)
+ (action
+ (with-stdout-to %{targets}
+ (run %{deps}))))
+
+(rule
+ (alias runtest)
+ (action
+ (diff blockchain_snark_vk.json blockchain_snark_vk.json.corrected)))
diff --git a/src/app/print_blockchain_snark_vk/dune-project b/src/app/print_blockchain_snark_vk/dune-project
new file mode 100644
index 00000000000..7b17fb2d308
--- /dev/null
+++ b/src/app/print_blockchain_snark_vk/dune-project
@@ -0,0 +1 @@
+(lang dune 3.3)
diff --git a/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml b/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml
new file mode 100644
index 00000000000..c111eacabd3
--- /dev/null
+++ b/src/app/print_blockchain_snark_vk/print_blockchain_snark_vk.ml
@@ -0,0 +1,41 @@
+open Core_kernel
+
+module Config = struct
+ let constraint_constants = Genesis_constants.Constraint_constants.compiled
+
+ let proof_level = Genesis_constants.Proof_level.Full
+end
+
+let () = Format.eprintf "Generating transaction snark circuit..@."
+
+let before = Time.now ()
+
+module Transaction_snark_instance = Transaction_snark.Make (Config)
+
+let after = Time.now ()
+
+let () =
+ Format.eprintf "Generated transaction snark circuit in %s.@."
+ (Time.Span.to_string_hum (Time.diff after before))
+
+let () = Format.eprintf "Generating blockchain snark circuit..@."
+
+let before = Time.now ()
+
+module Blockchain_snark_instance =
+Blockchain_snark.Blockchain_snark_state.Make (struct
+ let tag = Transaction_snark_instance.tag
+
+ include Config
+end)
+
+let after = Time.now ()
+
+let () =
+ Format.eprintf "Generated blockchain snark circuit in %s.@."
+ (Time.Span.to_string_hum (Time.diff after before))
+
+let () =
+ Lazy.force Blockchain_snark_instance.Proof.verification_key
+ |> Pickles.Verification_key.to_yojson |> Yojson.Safe.to_string
+ |> Format.print_string
diff --git a/src/app/replayer/replayer.ml b/src/app/replayer/replayer.ml
index a47e9eaeca4..8afad11dbd2 100644
--- a/src/app/replayer/replayer.ml
+++ b/src/app/replayer/replayer.ml
@@ -530,7 +530,7 @@ let zkapp_command_to_transaction ~logger ~pool (cmd : Sql.Zkapp_command.t) :
let (authorization : Control.t) =
match body.authorization_kind with
| Proof _ ->
- Proof Proof.transaction_dummy
+ Proof (Lazy.force Proof.transaction_dummy)
| Signature ->
Signature Signature.dummy
| None_given ->
diff --git a/src/app/test_executive/zkapps.ml b/src/app/test_executive/zkapps.ml
index 753c48225dd..a983ba3b11f 100644
--- a/src/app/test_executive/zkapps.ml
+++ b/src/app/test_executive/zkapps.ml
@@ -383,7 +383,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct
| Proof _ ->
{ other_p with
authorization =
- Control.Proof Mina_base.Proof.blockchain_dummy
+ Control.Proof
+ (Lazy.force Mina_base.Proof.blockchain_dummy)
}
| _ ->
other_p )
@@ -764,6 +765,8 @@ module Make (Inputs : Intf.Test.Inputs_intf) = struct
(Network.Node.get_ingress_uri node)
zkapp_command_insufficient_fee "Insufficient fee" )
in
+ let%bind () = wait_for t (Wait_condition.blocks_to_be_produced 1) in
+ let%bind () = Malleable_error.lift (after (Time.Span.of_sec 30.0)) in
(* Won't be accepted until the previous transactions are applied *)
let%bind () =
section_hard "Send a zkApp transaction to update all fields"
diff --git a/src/dune-project b/src/dune-project
index aeaeae71671..7ecace1391f 100644
--- a/src/dune-project
+++ b/src/dune-project
@@ -60,6 +60,7 @@
(package (name graphql_wrapper))
(package (name hash_prefixes))
(package (name hash_prefix_states))
+(package (name hash_prefix_create))
(package (name heap_usage))
(package (name hex))
(package (name immutable_array))
@@ -146,13 +147,13 @@
(package (name pipe_lib))
(package (name pokolog))
(package (name ppx_annot))
-(package (name ppx_dhall_type))
(package (name ppx_mina))
(package (name ppx_register_event))
(package (name ppx_representatives))
(package (name ppx_to_enum))
(package (name ppx_util))
(package (name ppx_version))
+(package (name ppx_version.runtime))
(package (name precomputed_values))
(package (name promise))
(package (name proof_carrying_data))
diff --git a/src/internal_tracing.opam b/src/internal_tracing.opam
new file mode 100644
index 00000000000..7be19e3d612
--- /dev/null
+++ b/src/internal_tracing.opam
@@ -0,0 +1,5 @@
+opam-version: "2.0"
+version: "0.1"
+build: [
+ ["dune" "build" "--only" "src" "--root" "." "-j" jobs "@install"]
+]
diff --git a/src/lib/base58_check/base58_check.ml b/src/lib/base58_check/base58_check.ml
index 14c3faecb0c..d09f28a689d 100644
--- a/src/lib/base58_check/base58_check.ml
+++ b/src/lib/base58_check/base58_check.ml
@@ -104,53 +104,3 @@ struct
end
module Version_bytes = Version_bytes
-
-let%test_module "base58check tests" =
- ( module struct
- module Base58_check = Make (struct
- let description = "Base58check tests"
-
- let version_byte = '\x53'
- end)
-
- open Base58_check
-
- let test_roundtrip payload =
- let encoded = encode payload in
- let payload' = decode_exn encoded in
- String.equal payload payload'
-
- let%test "empty_string" = test_roundtrip ""
-
- let%test "nonempty_string" =
- test_roundtrip "Somewhere, over the rainbow, way up high"
-
- let%test "longer_string" =
- test_roundtrip
- "Someday, I wish upon a star, wake up where the clouds are far behind \
- me, where trouble melts like lemon drops, High above the chimney top, \
- that's where you'll find me"
-
- let%test "invalid checksum" =
- try
- let encoded = encode "Bluer than velvet were her eyes" in
- let bytes = Bytes.of_string encoded in
- let len = Bytes.length bytes in
- let last_ch = Bytes.get bytes (len - 1) in
- (* change last byte to invalidate checksum *)
- let new_last_ch =
- if Char.equal last_ch '\xFF' then '\x00'
- else Char.of_int_exn (Char.to_int last_ch + 1)
- in
- Bytes.set bytes (len - 1) new_last_ch ;
- let encoded_bad_checksum = Bytes.to_string bytes in
- let _payload = decode_exn encoded_bad_checksum in
- false
- with Invalid_base58_checksum _ -> true
-
- let%test "invalid length" =
- try
- let _payload = decode_exn "abcd" in
- false
- with Invalid_base58_check_length _ -> true
- end )
diff --git a/src/lib/base58_check/tests/dune b/src/lib/base58_check/tests/dune
new file mode 100644
index 00000000000..46019aaed4a
--- /dev/null
+++ b/src/lib/base58_check/tests/dune
@@ -0,0 +1,3 @@
+(tests
+ (names test_base58_check)
+ (libraries core_kernel base58_check alcotest))
diff --git a/src/lib/base58_check/tests/test_base58_check.ml b/src/lib/base58_check/tests/test_base58_check.ml
new file mode 100644
index 00000000000..735c9fa0e86
--- /dev/null
+++ b/src/lib/base58_check/tests/test_base58_check.ml
@@ -0,0 +1,75 @@
+module M = Base58_check.Make (struct
+ let description = "Base58check tests"
+
+ let version_byte = '\x53'
+end)
+
+open M
+
+let helper_test_roundtrip payload =
+ let encoded = encode payload in
+ let payload' = decode_exn encoded in
+ assert (String.equal payload payload')
+
+let test_roundtrip_empty_string () = helper_test_roundtrip ""
+
+let test_roundtrip_nonempty_string () =
+ helper_test_roundtrip "Somewhere, over the rainbow, way up high"
+
+let test_roundtrip_longer_string () =
+ helper_test_roundtrip
+ "Someday, I wish upon a star, wake up where the clouds are far behind me, \
+ where trouble melts like lemon drops, High above the chimney top, that's \
+ where you'll find me"
+
+let test_invalid_checksum () =
+ try
+ let encoded = encode "Bluer than velvet were her eyes" in
+ let bytes = Bytes.of_string encoded in
+ let len = Bytes.length bytes in
+ let last_ch = Bytes.get bytes (len - 1) in
+ (* change last byte to invalidate checksum *)
+ let new_last_ch =
+ if Char.equal last_ch '\xFF' then '\x00'
+ else Core_kernel.Char.of_int_exn (Core_kernel.Char.to_int last_ch + 1)
+ in
+ Bytes.set bytes (len - 1) new_last_ch ;
+ let encoded_bad_checksum = Bytes.to_string bytes in
+ let _payload = decode_exn encoded_bad_checksum in
+ assert false
+ with Base58_check.Invalid_base58_checksum _ -> assert true
+
+let test_invalid_length () =
+ try
+ let _payload = decode_exn "abcd" in
+ assert false
+ with Base58_check.Invalid_base58_check_length _ -> assert true
+
+let test_vectors () =
+ let vectors =
+ [ ("", "AR3b7Dr")
+ ; ("vectors", "2aML9fKacueS1p5W3")
+ ; ("test", "24cUQZMy5c7Mj")
+ ]
+ in
+ assert (
+ List.for_all
+ (fun (inp, exp_output) ->
+ let output = M.encode inp in
+ String.equal output exp_output )
+ vectors )
+
+let () =
+ let open Alcotest in
+ run "Base58_check"
+ [ ( "test_roundtrip"
+ , [ test_case "empty string" `Quick test_roundtrip_empty_string
+ ; test_case "non empty string" `Quick test_roundtrip_nonempty_string
+ ; test_case "longer string" `Quick test_roundtrip_longer_string
+ ] )
+ ; ( "negative tests"
+ , [ test_case "invalid checksym" `Quick test_invalid_checksum
+ ; test_case "invalid length" `Quick test_invalid_length
+ ] )
+ ; ("test vectors", [ test_case "vectors" `Quick test_vectors ])
+ ]
diff --git a/src/lib/consensus/proof_of_stake.ml b/src/lib/consensus/proof_of_stake.ml
index a9ef8cf0755..b67cd45c0d6 100644
--- a/src/lib/consensus/proof_of_stake.ml
+++ b/src/lib/consensus/proof_of_stake.ml
@@ -842,9 +842,9 @@ module Make_str (A : Wire_types.Concrete) = struct
| _ ->
respond
(Provide
- (Snarky_backendless.Request.Handler.run handlers
- [ "Ledger Handler"; "Pending Coinbase Handler" ]
- request ) )
+ (Option.value_exn ~message:"unhandled request"
+ (Snarky_backendless.Request.Handler.run handlers request) )
+ )
end
let check ~context:(module Context : CONTEXT)
@@ -2579,9 +2579,9 @@ module Make_str (A : Wire_types.Concrete) = struct
| _ ->
respond
(Provide
- (Snarky_backendless.Request.Handler.run handlers
- [ "Ledger Handler"; "Pending Coinbase Handler" ]
- request ) )
+ (Option.value_exn ~message:"unhandled request"
+ (Snarky_backendless.Request.Handler.run handlers request) )
+ )
let ledger_depth { ledger; _ } = ledger.depth
end
diff --git a/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml b/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml
index f4fe273b8b5..84d79803e2c 100644
--- a/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml
+++ b/src/lib/crypto/kimchi_backend/common/dlog_plonk_based_keypair.ml
@@ -53,13 +53,30 @@ module type Inputs_intf = sig
val set_prev_challenges : t -> int -> unit
- val finalize_and_get_gates : t -> Gate_vector.t
+ val finalize_and_get_gates :
+ t
+ -> Gate_vector.t
+ * Scalar_field.t Kimchi_types.lookup_table array
+ * Scalar_field.t Kimchi_types.runtime_table_cfg array
end
module Index : sig
type t
- val create : Gate_vector.t -> int -> int -> Urs.t -> t
+ (** [create
+ gates
+ nb_public
+ runtime_tables_cfg
+ nb_prev_challanges
+ srs] *)
+ val create :
+ Gate_vector.t
+ -> int
+ -> Scalar_field.t Kimchi_types.lookup_table array
+ -> Scalar_field.t Kimchi_types.runtime_table_cfg array
+ -> int
+ -> Urs.t
+ -> t
end
module Curve : sig
@@ -156,7 +173,9 @@ module Make (Inputs : Inputs_intf) = struct
(set_urs_info, load)
let create ~prev_challenges cs =
- let gates = Inputs.Constraint_system.finalize_and_get_gates cs in
+ let gates, fixed_lookup_tables, runtime_table_cfgs =
+ Inputs.Constraint_system.finalize_and_get_gates cs
+ in
let public_input_size =
Inputs.Constraint_system.get_primary_input_size cs
in
@@ -170,7 +189,8 @@ module Make (Inputs : Inputs_intf) = struct
prev_challenges'
in
let index =
- Inputs.Index.create gates public_input_size prev_challenges (load_urs ())
+ Inputs.Index.create gates public_input_size fixed_lookup_tables
+ runtime_table_cfgs prev_challenges (load_urs ())
in
{ index; cs }
@@ -203,4 +223,51 @@ module Make (Inputs : Inputs_intf) = struct
; emul_comm = g t.evals.emul_comm
; endomul_scalar_comm = g t.evals.endomul_scalar_comm
}
+
+ let full_vk_commitments (t : Inputs.Verifier_index.t) :
+ ( Inputs.Curve.Affine.t array
+ , Inputs.Curve.Affine.t array option )
+ Pickles_types.Plonk_verification_key_evals.Step.t =
+ let g c : Inputs.Curve.Affine.t array =
+ match Inputs.Poly_comm.of_backend_without_degree_bound c with
+ | `Without_degree_bound x ->
+ x
+ | `With_degree_bound _ ->
+ assert false
+ in
+ let lookup f =
+ let open Option.Let_syntax in
+ let%bind l = t.lookup_index in
+ f l >>| g
+ in
+ { sigma_comm =
+ Pickles_types.Vector.init Pickles_types.Plonk_types.Permuts.n
+ ~f:(fun i -> g t.evals.sigma_comm.(i))
+ ; coefficients_comm =
+ Pickles_types.Vector.init Pickles_types.Plonk_types.Columns.n
+ ~f:(fun i -> g t.evals.coefficients_comm.(i))
+ ; generic_comm = g t.evals.generic_comm
+ ; psm_comm = g t.evals.psm_comm
+ ; complete_add_comm = g t.evals.complete_add_comm
+ ; mul_comm = g t.evals.mul_comm
+ ; emul_comm = g t.evals.emul_comm
+ ; endomul_scalar_comm = g t.evals.endomul_scalar_comm
+ ; xor_comm = Option.map ~f:g t.evals.xor_comm
+ ; range_check0_comm = Option.map ~f:g t.evals.range_check0_comm
+ ; range_check1_comm = Option.map ~f:g t.evals.range_check1_comm
+ ; foreign_field_add_comm = Option.map ~f:g t.evals.foreign_field_add_comm
+ ; foreign_field_mul_comm = Option.map ~f:g t.evals.foreign_field_mul_comm
+ ; rot_comm = Option.map ~f:g t.evals.rot_comm
+ ; lookup_table_comm =
+ Pickles_types.Vector.init
+ Pickles_types.Plonk_types.Lookup_sorted_minus_1.n ~f:(fun i ->
+ lookup (fun l -> Option.try_with (fun () -> l.lookup_table.(i))) )
+ ; lookup_table_ids = lookup (fun l -> l.table_ids)
+ ; runtime_tables_selector = lookup (fun l -> l.runtime_tables_selector)
+ ; lookup_selector_lookup = lookup (fun l -> l.lookup_selectors.lookup)
+ ; lookup_selector_xor = lookup (fun l -> l.lookup_selectors.xor)
+ ; lookup_selector_range_check =
+ lookup (fun l -> l.lookup_selectors.range_check)
+ ; lookup_selector_ffmul = lookup (fun l -> l.lookup_selectors.ffmul)
+ }
end
diff --git a/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli b/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli
index ed045783443..ffb3b49e495 100644
--- a/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli
+++ b/src/lib/crypto/kimchi_backend/common/kimchi_backend_common.mli
@@ -11,6 +11,20 @@ module Plonk_constraint_system : sig
val get_public_input_size : ('a, 'b) t -> int Core_kernel.Set_once.t
+ (** Return the size of all the fixed lookup tables concatenated, without the
+ built-in XOR and RangeCheck tables *)
+ val get_concatenated_fixed_lookup_table_size : ('a, 'b) t -> int
+
+ (** Return the size of all the runtime lookup tables concatenated *)
+ val get_concatenated_runtime_lookup_table_size : ('a, 'b) t -> int
+
+ (** Finalize the fixed lookup tables. The function can not be called twice *)
+ val finalize_fixed_lookup_tables : _ t -> unit
+
+ (** Finalize the runtime lookup table configurations. The function can not be
+ called twice. *)
+ val finalize_runtime_lookup_tables : _ t -> unit
+
val get_rows_len : ('a, 'b) t -> int
end
diff --git a/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml b/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml
index ad7141591e2..9c8cadca30f 100644
--- a/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml
+++ b/src/lib/crypto/kimchi_backend/common/plonk_constraint_system.ml
@@ -306,6 +306,8 @@ module Plonk_constraint = struct
; bound_crumb7 : 'v
; (* Coefficients *) two_to_rot : 'f (* Rotation scalar 2^rot *)
}
+ | AddFixedLookupTable of { id : int32; data : 'f array array }
+ | AddRuntimeTableCfg of { id : int32; first_column : 'f array }
| Raw of
{ kind : Kimchi_gate_type.t; values : 'v array; coeffs : 'f array }
[@@deriving sexp]
@@ -619,6 +621,12 @@ module Plonk_constraint = struct
; bound_crumb7 = f bound_crumb7
; (* Coefficients *) two_to_rot
}
+ | AddFixedLookupTable { id; data } ->
+ (* TODO: see a possible better API -
+ https://github.com/MinaProtocol/mina/issues/13984 *)
+ AddFixedLookupTable { id; data }
+ | AddRuntimeTableCfg { id; first_column } ->
+ AddRuntimeTableCfg { id; first_column }
| Raw { kind; values; coeffs } ->
Raw { kind; values = Array.map ~f values; coeffs }
@@ -700,6 +708,14 @@ type ('f, 'rust_gates) circuit =
and a list of gates that corresponds to the circuit.
*)
+type 'f fixed_lookup_tables =
+ | Unfinalized_fixed_lookup_tables_rev of 'f Kimchi_types.lookup_table list
+ | Compiled_fixed_lookup_tables of 'f Kimchi_types.lookup_table array
+
+type 'f runtime_tables_cfg =
+ | Unfinalized_runtime_tables_cfg_rev of 'f Kimchi_types.runtime_table_cfg list
+ | Compiled_runtime_tables_cfg of 'f Kimchi_types.runtime_table_cfg array
+
(** The constraint system. *)
type ('f, 'rust_gates) t =
{ (* Map of cells that share the same value (enforced by to the permutation). *)
@@ -713,6 +729,13 @@ type ('f, 'rust_gates) t =
The finalized tag contains the digest of the circuit.
*)
mutable gates : ('f, 'rust_gates) circuit
+ (* Witnesses values corresponding to each runtime lookups *)
+ ; mutable runtime_lookups_rev : (V.t * (V.t * V.t)) list
+ (* The user-provided lookup tables associated with this circuit. *)
+ ; mutable fixed_lookup_tables : 'f fixed_lookup_tables
+ (* The user-provided runtime table configurations associated with this
+ circuit. *)
+ ; mutable runtime_tables_cfg : 'f runtime_tables_cfg
; (* The row to use the next time we add a constraint. *)
mutable next_row : int
; (* The size of the public input (which fills the first rows of our constraint system. *)
@@ -750,6 +773,48 @@ let get_prev_challenges sys = sys.prev_challenges
let set_prev_challenges sys challenges =
Core_kernel.Set_once.set_exn sys.prev_challenges [%here] challenges
+let get_concatenated_fixed_lookup_table_size sys =
+ match sys.fixed_lookup_tables with
+ | Unfinalized_fixed_lookup_tables_rev _ ->
+ failwith
+ "Cannot get the fixed lookup tables before finalizing the constraint \
+ system"
+ | Compiled_fixed_lookup_tables flts ->
+ let get_table_size (flt : _ Kimchi_types.lookup_table) =
+ if Array.length flt.data = 0 then 0
+ else Array.length (Array.get flt.data 0)
+ in
+ Array.fold_left (fun acc flt -> acc + get_table_size flt) 0 flts
+
+let get_concatenated_runtime_lookup_table_size sys =
+ match sys.runtime_tables_cfg with
+ | Unfinalized_runtime_tables_cfg_rev _ ->
+ failwith
+ "Cannot get the runtime table configurations before finalizing the \
+ constraint system"
+ | Compiled_runtime_tables_cfg rt_cfgs ->
+ Array.fold_left
+ (fun acc (rt_cfg : _ Kimchi_types.runtime_table_cfg) ->
+ acc + Array.length rt_cfg.first_column )
+ 0 rt_cfgs
+
+let finalize_fixed_lookup_tables sys =
+ match sys.fixed_lookup_tables with
+ | Unfinalized_fixed_lookup_tables_rev fixed_lt_rev ->
+ sys.fixed_lookup_tables <-
+ Compiled_fixed_lookup_tables
+ (Core_kernel.Array.of_list_rev fixed_lt_rev)
+ | Compiled_fixed_lookup_tables _ ->
+ failwith "Fixed lookup tables have already been finalized"
+
+let finalize_runtime_lookup_tables sys =
+ match sys.runtime_tables_cfg with
+ | Unfinalized_runtime_tables_cfg_rev rt_cfgs_rev ->
+ sys.runtime_tables_cfg <-
+ Compiled_runtime_tables_cfg (Core_kernel.Array.of_list_rev rt_cfgs_rev)
+ | Compiled_runtime_tables_cfg _ ->
+ failwith "Runtime table configurations have already been finalized"
+
(* TODO: shouldn't that Make create something bounded by a signature? As we know what a back end should be? Check where this is used *)
(* TODO: glossary of terms in this file (terms, reducing, feeding) + module doc *)
@@ -789,6 +854,17 @@ module Make
val next_row : t -> int
+ val get_concatenated_fixed_lookup_table_size : t -> int
+
+ val get_concatenated_runtime_lookup_table_size : t -> int
+
+ (** Finalize the fixed lookup tables. The function can not be called twice *)
+ val finalize_fixed_lookup_tables : t -> unit
+
+ (** Finalize the runtime lookup table configurations. The function can not be
+ called twice. *)
+ val finalize_runtime_lookup_tables : t -> unit
+
val add_constraint :
?label:string
-> t
@@ -797,11 +873,18 @@ module Make
Snarky_backendless.Constraint.basic
-> unit
- val compute_witness : t -> (int -> Fp.t) -> Fp.t array array
+ val compute_witness :
+ t
+ -> (int -> Fp.t)
+ -> Fp.t array array * Fp.t Kimchi_types.runtime_table array
val finalize : t -> unit
- val finalize_and_get_gates : t -> Gates.t
+ val finalize_and_get_gates :
+ t
+ -> Gates.t
+ * Fp.t Kimchi_types.lookup_table array
+ * Fp.t Kimchi_types.runtime_table_cfg array
val num_constraints : t -> int
@@ -812,6 +895,17 @@ end = struct
open Core_kernel
open Pickles_types
+ (* Used by compute_witness to build the runtime tables from the Lookup
+ constraint *)
+ module MapRuntimeTable = struct
+ module T = struct
+ type t = int32 * Fp.t [@@deriving hash, sexp, compare]
+ end
+
+ include T
+ include Core_kernel.Hashable.Make (T)
+ end
+
type nonrec t = (Fp.t, Gates.t) t
(** Converts the set of permutations (equivalence_classes) to
@@ -852,7 +946,7 @@ end = struct
and a function that converts the indexed secret inputs to their concrete values.
*)
let compute_witness (sys : t) (external_values : int -> Fp.t) :
- Fp.t array array =
+ Fp.t array array * Fp.t Kimchi_types.runtime_table array =
let internal_values : Fp.t Internal_var.Table.t =
Internal_var.Table.create ()
in
@@ -899,8 +993,61 @@ end = struct
let value = compute lc in
res.(col_idx).(row_idx) <- value ;
Hashtbl.set internal_values ~key:var ~data:value ) ) ;
+
+ let map_runtime_tables = MapRuntimeTable.Table.create () in
+ let runtime_tables : Fp.t Kimchi_types.runtime_table array =
+ match sys.runtime_tables_cfg with
+ | Unfinalized_runtime_tables_cfg_rev _ ->
+ failwith
+ "Attempted to generate a witness for an unfinalized constraint \
+ system"
+ | Compiled_runtime_tables_cfg cfgs ->
+ Array.mapi cfgs ~f:(fun rt_idx { Kimchi_types.id; first_column } ->
+ let data =
+ Array.mapi first_column ~f:(fun i v ->
+ ignore
+ (* `add` leaves the value unchanged if the index has been
+ already used. Therefore, it keeps the first value.
+ This handles the case that the first column has
+ duplicated index values.
+ *)
+ @@ MapRuntimeTable.Table.add map_runtime_tables ~key:(id, v)
+ ~data:(i, rt_idx) ;
+ (* default padding value for lookup *)
+ Fp.zero )
+ in
+ let rt : Fp.t Kimchi_types.runtime_table = { id; data } in
+ rt )
+ in
+
+ (* Fill in the used entries of the runtime lookup tables. *)
+ List.iter (List.rev sys.runtime_lookups_rev) ~f:(fun (id, (idx, v)) ->
+ let compute_value x = compute ([ (Fp.one, x) ], None) in
+ let vid = compute_value id in
+ let vidx = compute_value idx in
+ let vv = compute_value v in
+        (* FIXME: we should have an int32 here. We are not sure the ID will be an
+           int32. We should enforce that.
+ See https://github.com/MinaProtocol/mina/issues/13955
+ *)
+ let id_int32 = Int32.of_string @@ Fp.to_string vid in
+        (* Using find allows handling lookups into fixed lookup tables.
+           As the map has been built from the runtime table configurations,
+ except in the case that a runtime table and a fixed table shares the
+ same ID, the lookups in fixed lookup tables will return None.
+ See https://github.com/MinaProtocol/mina/issues/14016
+ *)
+ let v =
+ MapRuntimeTable.Table.find map_runtime_tables (id_int32, vidx)
+ in
+ if Option.is_some v then
+ let i, rt_idx = Option.value_exn v in
+ let rt = runtime_tables.(rt_idx) in
+ (* Important note: we do not check if the value has been set before.
+ Therefore, it will always use the latest value *)
+ rt.data.(i) <- vv ) ;
(* Return the witness. *)
- res
+ (res, runtime_tables)
let union_find sys v =
Hashtbl.find_or_add sys.union_finds v ~default:(fun () ->
@@ -919,6 +1066,9 @@ end = struct
; prev_challenges = Set_once.create ()
; internal_vars = Internal_var.Table.create ()
; gates = Unfinalized_rev [] (* Gates.create () *)
+ ; runtime_lookups_rev = []
+ ; fixed_lookup_tables = Unfinalized_fixed_lookup_tables_rev []
+ ; runtime_tables_cfg = Unfinalized_runtime_tables_cfg_rev []
; rows_rev = []
; next_row = 0
; equivalence_classes = V.Table.create ()
@@ -954,6 +1104,16 @@ end = struct
let next_row (sys : t) = sys.next_row
+ let get_concatenated_fixed_lookup_table_size (sys : t) =
+ get_concatenated_fixed_lookup_table_size sys
+
+ let get_concatenated_runtime_lookup_table_size (sys : t) =
+ get_concatenated_runtime_lookup_table_size sys
+
+ let finalize_fixed_lookup_tables = finalize_fixed_lookup_tables
+
+ let finalize_runtime_lookup_tables = finalize_runtime_lookup_tables
+
(** Adds {row; col} to the system's wiring under a specific key.
A key is an external or internal variable.
The row must be given relative to the start of the circuit
@@ -995,14 +1155,29 @@ end = struct
*)
let rec finalize_and_get_gates sys =
match sys with
- | { gates = Compiled (_, gates); _ } ->
- gates
+ | { gates = Compiled (_, gates)
+ ; fixed_lookup_tables = Compiled_fixed_lookup_tables fixed_lookup_tables
+ ; runtime_tables_cfg = Compiled_runtime_tables_cfg runtime_tables_cfg
+ ; _
+ } ->
+ (gates, fixed_lookup_tables, runtime_tables_cfg)
+ (* Finalizing lookup tables and runtime table cfgs first *)
+ | { fixed_lookup_tables = Unfinalized_fixed_lookup_tables_rev _; _ } ->
+ finalize_fixed_lookup_tables sys ;
+ finalize_and_get_gates sys
+ | { runtime_tables_cfg = Unfinalized_runtime_tables_cfg_rev _; _ } ->
+ finalize_runtime_lookup_tables sys ;
+ finalize_and_get_gates sys
| { pending_generic_gate = Some (l, r, o, coeffs); _ } ->
(* Finalize any pending generic constraint first. *)
add_row sys [| l; r; o |] Generic coeffs ;
sys.pending_generic_gate <- None ;
finalize_and_get_gates sys
- | { gates = Unfinalized_rev gates_rev; _ } ->
+ | { gates = Unfinalized_rev gates_rev
+ ; fixed_lookup_tables = Compiled_fixed_lookup_tables fixed_lookup_tables
+ ; runtime_tables_cfg = Compiled_runtime_tables_cfg runtime_tables_cfg
+ ; _
+ } ->
let rust_gates = Gates.create () in
(* Create rows for public input. *)
@@ -1075,15 +1250,24 @@ end = struct
sys.gates <- Compiled (md5_digest, rust_gates) ;
(* return the gates *)
- rust_gates
+ (rust_gates, fixed_lookup_tables, runtime_tables_cfg)
(** Calls [finalize_and_get_gates] and ignores the result. *)
- let finalize t = ignore (finalize_and_get_gates t : Gates.t)
+ let finalize t =
+ ignore
+ ( finalize_and_get_gates t
+ : Gates.t
+ * Fp.t Kimchi_types.lookup_table array
+ * Fp.t Kimchi_types.runtime_table_cfg array )
- let num_constraints sys = finalize_and_get_gates sys |> Gates.len
+ let num_constraints sys =
+ let gates, _, _ = finalize_and_get_gates sys in
+ Gates.len gates
let to_json (sys : t) : string =
- let gates = finalize_and_get_gates sys in
+ (* TODO: add lookup tables and runtime table cfgs *)
+ (* https://github.com/MinaProtocol/mina/issues/13886 *)
+ let gates, _, _ = finalize_and_get_gates sys in
let public_input_size = Set_once.get_exn sys.public_input_size [%here] in
Gates.to_json public_input_size gates
@@ -1188,8 +1372,8 @@ end = struct
let reduce_lincom sys (x : Fp.t Snarky_backendless.Cvar.t) =
let constant, terms =
Fp.(
- Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul ~zero:(of_int 0)
- ~equal ~one:(of_int 1))
+ Snarky_backendless.Cvar.to_constant_and_terms ~add ~mul ~zero ~equal
+ ~one)
x
in
let terms = accumulate_terms terms in
@@ -1686,16 +1870,37 @@ end = struct
(Fn.compose add_endoscale_scalar_round
(Endoscale_scalar_round.map ~f:reduce_to_v) )
| Plonk_constraint.T (Lookup { w0; w1; w2; w3; w4; w5; w6 }) ->
+ (* table ID *)
+ let red_w0 = reduce_to_v w0 in
+ (* idx1 *)
+ let red_w1 = reduce_to_v w1 in
+ (* v1 *)
+ let red_w2 = reduce_to_v w2 in
+ (* idx2 *)
+ let red_w3 = reduce_to_v w3 in
+ (* v2 *)
+ let red_w4 = reduce_to_v w4 in
+ (* idx3 *)
+ let red_w5 = reduce_to_v w5 in
+ (* v3 *)
+ let red_w6 = reduce_to_v w6 in
let vars =
- [| Some (reduce_to_v w0)
- ; Some (reduce_to_v w1)
- ; Some (reduce_to_v w2)
- ; Some (reduce_to_v w3)
- ; Some (reduce_to_v w4)
- ; Some (reduce_to_v w5)
- ; Some (reduce_to_v w6)
+ [| Some red_w0
+ ; Some red_w1
+ ; Some red_w2
+ ; Some red_w3
+ ; Some red_w4
+ ; Some red_w5
+ ; Some red_w6
|]
in
+ let lookup1 = (red_w0, (red_w1, red_w2)) in
+ let lookup2 = (red_w0, (red_w3, red_w4)) in
+ let lookup3 = (red_w0, (red_w5, red_w6)) in
+      (* Record the three lookups in order; if the user looks up the same
+         index multiple times, the last value will be used *)
+ sys.runtime_lookups_rev <-
+ lookup3 :: lookup2 :: lookup1 :: sys.runtime_lookups_rev ;
add_row sys vars Lookup [||]
| Plonk_constraint.T
(RangeCheck0
@@ -2064,9 +2269,9 @@ end = struct
//! | 5 | `bound_limb2` | `shifted_limb2` | `excess_limb2` | `word_limb2` |
//! | 6 | `bound_limb3` | `shifted_limb3` | `excess_limb3` | `word_limb3` |
//! | 7 | `bound_crumb0` | `shifted_crumb0` | `excess_crumb0` | `word_crumb0` |
- //! | 8 | `bound_crumb1` | `shifted_crumb1` | `excess_crumb1` | `word_crumb1` |
- //! | 9 | `bound_crumb2` | `shifted_crumb2` | `excess_crumb2` | `word_crumb2` |
- //! | 10 | `bound_crumb3` | `shifted_crumb3` | `excess_crumb3` | `word_crumb3` |
+ //! | 8 | `bound_crumb1` | `shifted_crumb1` | `excess_crumb1` | `word_crumb1` |
+ //! | 9 | `bound_crumb2` | `shifted_crumb2` | `excess_crumb2` | `word_crumb2` |
+ //! | 10 | `bound_crumb3` | `shifted_crumb3` | `excess_crumb3` | `word_crumb3` |
//! | 11 | `bound_crumb4` | `shifted_crumb4` | `excess_crumb4` | `word_crumb4` |
//! | 12 | `bound_crumb5` | `shifted_crumb5` | `excess_crumb5` | `word_crumb5` |
//! | 13 | `bound_crumb6` | `shifted_crumb6` | `excess_crumb6` | `word_crumb6` |
@@ -2091,6 +2296,28 @@ end = struct
|]
in
add_row sys vars_curr Rot64 [| two_to_rot |]
+ | Plonk_constraint.T (AddFixedLookupTable { id; data }) -> (
+ match sys.fixed_lookup_tables with
+ | Unfinalized_fixed_lookup_tables_rev fixed_lookup_tables ->
+ let lt : Fp.t Kimchi_types.lookup_table list =
+ { id; data } :: fixed_lookup_tables
+ in
+ sys.fixed_lookup_tables <- Unfinalized_fixed_lookup_tables_rev lt
+ | Compiled_fixed_lookup_tables _ ->
+ failwith
+ "Trying to add a fixed lookup tables when it has been already \
+ finalized" )
+ | Plonk_constraint.T (AddRuntimeTableCfg { id; first_column }) -> (
+ match sys.runtime_tables_cfg with
+ | Unfinalized_runtime_tables_cfg_rev runtime_tables_cfg ->
+ let rt_cfg : Fp.t Kimchi_types.runtime_table_cfg list =
+ { id; first_column } :: runtime_tables_cfg
+ in
+ sys.runtime_tables_cfg <- Unfinalized_runtime_tables_cfg_rev rt_cfg
+ | Compiled_runtime_tables_cfg _ ->
+ failwith
+ "Trying to add a runtime table configuration it has been \
+ already finalized" )
| Plonk_constraint.T (Raw { kind; values; coeffs }) ->
let values =
Array.init 15 ~f:(fun i ->
diff --git a/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml b/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml
index 89011971fb8..1aa9b766d1c 100644
--- a/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml
+++ b/src/lib/crypto/kimchi_backend/common/plonk_dlog_oracles.ml
@@ -11,20 +11,33 @@ module type Inputs_intf = sig
module Proof : sig
type t
+ type with_public_evals
+
module Challenge_polynomial : T0
module Backend : sig
type t
+
+ type with_public_evals
end
val to_backend :
Challenge_polynomial.t list -> Field.t list -> t -> Backend.t
+
+ val to_backend_with_public_evals :
+ Challenge_polynomial.t list
+ -> Field.t list
+ -> with_public_evals
+ -> Backend.with_public_evals
end
module Backend : sig
type t = Field.t Kimchi_types.oracles
val create : Verifier_index.t -> Proof.Backend.t -> t
+
+ val create_with_public_evals :
+ Verifier_index.t -> Proof.Backend.with_public_evals -> t
end
end
@@ -35,6 +48,11 @@ module Make (Inputs : Inputs_intf) = struct
let pi = Proof.to_backend prev_challenge input pi in
Backend.create vk pi
+ let create_with_public_evals vk prev_challenge input
+ (pi : Proof.with_public_evals) =
+ let pi = Proof.to_backend_with_public_evals prev_challenge input pi in
+ Backend.create_with_public_evals vk pi
+
open Backend
let scalar_challenge t = Scalar_challenge.create t
diff --git a/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml b/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml
index 9a1d0d5a0d1..0eb9994342d 100644
--- a/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml
+++ b/src/lib/crypto/kimchi_backend/common/plonk_dlog_proof.ml
@@ -23,7 +23,7 @@ module type Stable_v1 = sig
module Latest = V1
end
- type t = Stable.V1.t [@@deriving sexp, compare, yojson]
+ type t = Stable.V1.t [@@deriving sexp, compare, yojson, hash, equal]
end
module type Inputs_intf = sig
@@ -90,27 +90,31 @@ module type Inputs_intf = sig
end
module Backend : sig
+ type with_public_evals =
+ (Curve.Affine.Backend.t, Scalar_field.t) Kimchi_types.proof_with_public
+
type t = (Curve.Affine.Backend.t, Scalar_field.t) Kimchi_types.prover_proof
val create :
Index.t
- -> Scalar_field.Vector.t
- -> Scalar_field.Vector.t
- -> Scalar_field.t array
- -> Curve.Affine.Backend.t array
- -> t
+ -> primary:Scalar_field.Vector.t
+ -> auxiliary:Scalar_field.Vector.t
+ -> prev_chals:Scalar_field.t array
+ -> prev_comms:Curve.Affine.Backend.t array
+ -> with_public_evals
val create_async :
Index.t
- -> Scalar_field.Vector.t
- -> Scalar_field.Vector.t
- -> Scalar_field.t array
- -> Curve.Affine.Backend.t array
- -> t Promise.t
+ -> primary:Scalar_field.Vector.t
+ -> auxiliary:Scalar_field.Vector.t
+ -> prev_chals:Scalar_field.t array
+ -> prev_comms:Curve.Affine.Backend.t array
+ -> with_public_evals Promise.t
- val verify : Verifier_index.t -> t -> bool
+ val verify : Verifier_index.t -> with_public_evals -> bool
- val batch_verify : Verifier_index.t array -> t array -> bool Promise.t
+ val batch_verify :
+ Verifier_index.t array -> with_public_evals array -> bool Promise.t
end
end
@@ -179,7 +183,7 @@ module Make (Inputs : Inputs_intf) = struct
let map_creator c ~f ~messages ~openings = f (c ~messages ~openings)
let create ~messages ~openings =
- let open Pickles_types.Plonk_types.Proof in
+ let open Pickles_types.Plonk_types.Proof.Stable.Latest in
{ messages; openings }
end
@@ -196,14 +200,54 @@ module Make (Inputs : Inputs_intf) = struct
end
end]
+ module T = struct
+ type t = (G.Affine.t, Fq.t, Fq.t array) Pickles_types.Plonk_types.Proof.t
+ [@@deriving compare, sexp, yojson, hash, equal]
+
+ let id = "plong_dlog_proof_" ^ Inputs.id
+
+ type 'a creator =
+ messages:G.Affine.t Pickles_types.Plonk_types.Messages.t
+ -> openings:
+ (G.Affine.t, Fq.t, Fq.t array) Pickles_types.Plonk_types.Openings.t
+ -> 'a
+
+ let map_creator c ~f ~messages ~openings = f (c ~messages ~openings)
+
+ let create ~messages ~openings =
+ let open Pickles_types.Plonk_types.Proof in
+ { messages; openings }
+ end
+
+ include T
+
include (
- Stable.Latest :
+ struct
+ include Allocation_functor.Make.Basic (T)
+ include Allocation_functor.Make.Partial.Sexp (T)
+ include Allocation_functor.Make.Partial.Yojson (T)
+ end :
sig
- type t [@@deriving compare, sexp, yojson, hash, equal, bin_io]
- end
- with type t := t )
-
- [%%define_locally Stable.Latest.(create)]
+ include
+ Allocation_functor.Intf.Output.Basic_intf
+ with type t := t
+ and type 'a creator := 'a creator
+
+ include
+ Allocation_functor.Intf.Output.Sexp_intf
+ with type t := t
+ and type 'a creator := 'a creator
+
+ include
+ Allocation_functor.Intf.Output.Yojson_intf
+ with type t := t
+ and type 'a creator := 'a creator
+ end )
+
+ type with_public_evals =
+ { proof : t
+ ; public_evals : (Scalar_field.t array * Scalar_field.t array) option
+ }
let g t f = G.Affine.of_backend (f t)
@@ -212,7 +256,8 @@ module Make (Inputs : Inputs_intf) = struct
Array.iter arr ~f:(fun fe -> Fq.Vector.emplace_back vec fe) ;
vec
- (** Note that this function will panic if any of the points are points at infinity *)
+ (** Note that this function will panic if any of the points are points at
+ infinity *)
let opening_proof_of_backend_exn (t : Opening_proof_backend.t) =
let g (x : G.Affine.Backend.t) : G.Affine.t =
G.Affine.of_backend x |> Pickles_types.Or_infinity.finite_exn
@@ -286,13 +331,12 @@ module Make (Inputs : Inputs_intf) = struct
; foreign_field_mul_lookup_selector
}
+ let evals_to_tuple ({ zeta; zeta_omega } : _ Kimchi_types.point_evaluations) =
+ (zeta, zeta_omega)
+
let of_backend (t : Backend.t) : t =
let proof = opening_proof_of_backend_exn t.proof in
let evals =
- let evals_to_tuple
- ({ zeta; zeta_omega } : _ Kimchi_types.point_evaluations) =
- (zeta, zeta_omega)
- in
Plonk_types.Evals.map ~f:evals_to_tuple (eval_of_backend t.evals)
in
let wo x : Inputs.Curve.Affine.t array =
@@ -313,13 +357,30 @@ module Make (Inputs : Inputs_intf) = struct
; lookup =
Option.map t.commitments.lookup
~f:(fun l : _ Pickles_types.Plonk_types.Messages.Lookup.t ->
- { sorted = Array.map ~f:wo l.sorted
+ { sorted =
+ Vector.init
+ Pickles_types.Plonk_types.Lookup_sorted_minus_1.n
+ ~f:(fun i -> wo l.sorted.(i))
+ ; sorted_5th_column =
+ (* TODO: This is ugly and error-prone *)
+ Option.try_with (fun () ->
+ wo
+ l.sorted.(Nat.to_int
+ Pickles_types.Plonk_types
+ .Lookup_sorted_minus_1
+ .n) )
; aggreg = wo l.aggreg
; runtime = Option.map ~f:wo l.runtime
} )
}
~openings:{ proof; evals; ft_eval1 = t.ft_eval1 }
+ let of_backend_with_public_evals (t : Backend.with_public_evals) :
+ with_public_evals =
+ { proof = of_backend t.proof
+ ; public_evals = Option.map ~f:evals_to_tuple t.public_evals
+ }
+
let eval_to_backend
{ Pickles_types.Plonk_types.Evals.w
; coefficients
@@ -379,6 +440,9 @@ module Make (Inputs : Inputs_intf) = struct
(v : t) =
Array.init (V.length v) ~f:(V.get v)
+ let evals_of_tuple (zeta, zeta_omega) : _ Kimchi_types.point_evaluations =
+ { zeta; zeta_omega }
+
let to_backend' (chal_polys : Challenge_polynomial.t list) primary_input
({ messages = { w_comm; z_comm; t_comm; lookup }
; openings =
@@ -391,16 +455,16 @@ module Make (Inputs : Inputs_intf) = struct
let g x = G.Affine.to_backend (Pickles_types.Or_infinity.Finite x) in
let pcwo t = Poly_comm.to_backend (`Without_degree_bound t) in
let lr = Array.map lr ~f:(fun (x, y) -> (g x, g y)) in
- let evals_of_tuple (zeta, zeta_omega) : _ Kimchi_types.point_evaluations =
- { zeta; zeta_omega }
- in
{ commitments =
{ w_comm = tuple15_of_vec (Pickles_types.Vector.map ~f:pcwo w_comm)
; z_comm = pcwo z_comm
; t_comm = pcwo t_comm
; lookup =
Option.map lookup ~f:(fun t : _ Kimchi_types.lookup_commitments ->
- { sorted = Array.map ~f:pcwo t.sorted
+ { sorted =
+ Array.map ~f:pcwo
+ (Array.append (Vector.to_array t.sorted)
+ (Option.to_array t.sorted_5th_column) )
; aggreg = pcwo t.aggreg
; runtime = Option.map ~f:pcwo t.runtime
} )
@@ -429,6 +493,16 @@ module Make (Inputs : Inputs_intf) = struct
let to_backend chal_polys primary_input t =
to_backend' chal_polys (List.to_array primary_input) t
+ let to_backend_with_public_evals' (chal_polys : Challenge_polynomial.t list)
+ primary_input ({ proof; public_evals } : with_public_evals) :
+ Backend.with_public_evals =
+ { proof = to_backend' chal_polys primary_input proof
+ ; public_evals = Option.map ~f:evals_of_tuple public_evals
+ }
+
+ let to_backend_with_public_evals chal_polys primary_input t =
+ to_backend_with_public_evals' chal_polys (List.to_array primary_input) t
+
let create ?message pk ~primary ~auxiliary =
let chal_polys =
match (message : message option) with Some s -> s | None -> []
@@ -443,8 +517,11 @@ module Make (Inputs : Inputs_intf) = struct
~f:(fun { Challenge_polynomial.commitment; _ } ->
G.Affine.to_backend (Finite commitment) )
in
- let res = Backend.create pk primary auxiliary challenges commitments in
- of_backend res
+ let res =
+ Backend.create pk ~primary ~auxiliary ~prev_chals:challenges
+ ~prev_comms:commitments
+ in
+ of_backend_with_public_evals res
let create_async ?message pk ~primary ~auxiliary =
let chal_polys =
@@ -461,17 +538,22 @@ module Make (Inputs : Inputs_intf) = struct
G.Affine.to_backend (Finite commitment) )
in
let%map.Promise res =
- Backend.create_async pk primary auxiliary challenges commitments
+ Backend.create_async pk ~primary ~auxiliary ~prev_chals:challenges
+ ~prev_comms:commitments
in
- of_backend res
+ of_backend_with_public_evals res
let batch_verify' (conv : 'a -> Fq.t array)
- (ts : (Verifier_index.t * t * 'a * message option) list) =
+ (ts : (Verifier_index.t * with_public_evals * 'a * message option) list) =
let logger = Internal_tracing_context_logger.get () in
[%log internal] "Batch_verify_backend_convert_inputs" ;
let vks_and_v =
Array.of_list_map ts ~f:(fun (vk, t, xs, m) ->
- let p = to_backend' (Option.value ~default:[] m) (conv xs) t in
+ let p =
+ to_backend_with_public_evals'
+ (Option.value ~default:[] m)
+ (conv xs) t
+ in
(vk, p) )
in
[%log internal] "Batch_verify_backend_convert_inputs_done" ;
@@ -488,7 +570,7 @@ module Make (Inputs : Inputs_intf) = struct
let verify ?message t vk xs : bool =
Backend.verify vk
- (to_backend'
+ (to_backend_with_public_evals'
(Option.value ~default:[] message)
(vec_to_array (module Scalar_field.Vector) xs)
t )
diff --git a/src/lib/crypto/kimchi_backend/common/poly_comm.ml b/src/lib/crypto/kimchi_backend/common/poly_comm.ml
index 326dbbafce6..c394040811f 100644
--- a/src/lib/crypto/kimchi_backend/common/poly_comm.ml
+++ b/src/lib/crypto/kimchi_backend/common/poly_comm.ml
@@ -128,7 +128,9 @@ module Make (Inputs : Inputs_intf) = struct
`Without_degree_bound
(Array.map unshifted ~f:(function
| Infinity ->
- assert false
+ failwith
+ "Pickles cannot handle point at infinity. Commitments must \
+ be representable in affine coordinates"
| Finite (x, y) ->
(x, y) ) )
| _ ->
diff --git a/src/lib/crypto/kimchi_backend/common/tests/dune b/src/lib/crypto/kimchi_backend/common/tests/dune
new file mode 100644
index 00000000000..befeaadf26c
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/common/tests/dune
@@ -0,0 +1,38 @@
+(tests
+ (names test_lookup_table_constraint_kind)
+ (libraries
+ ;; opam libraries
+ alcotest
+ bignum.bigint
+ core_kernel
+ base
+ digestif
+ ppx_inline_test.config
+ zarith
+ ;; local libraries
+ kimchi_bindings
+ kimchi_types
+ pasta_bindings
+ kimchi_backend.pasta
+ kimchi_backend.pasta.basic
+ kimchi_backend.gadgets_test_runner
+ kimchi_backend.pasta.constraint_system
+ bitstring_lib
+ snarky.intf
+ snarky.backendless
+ snarky_group_map
+ sponge
+ kimchi_backend
+ mina_version
+ base58_check
+ codable
+ random_oracle_input
+ snarky_log
+ group_map
+ snarky_curve
+ key_cache
+ snark_keys_header
+ tuple_lib
+ promise
+ kimchi_backend.common
+ ppx_version.runtime))
diff --git a/src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.ml b/src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.ml
new file mode 100644
index 00000000000..19919d38c87
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.ml
@@ -0,0 +1,492 @@
+(** Testing
+ -------
+ Component: Kimchi_backend_common
+ Subject: Testing computation of the witness and the tracking of fixed and
+ runtime lookup tables
+ Invocation: dune exec \
+ src/lib/crypto/kimchi_backend/common/tests/test_lookup_table_constraint_kind.exe
+*)
+
+(* Keeping the test low-level for learning purposes *)
+
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+module Tick = Kimchi_backend.Pasta.Vesta_based_plonk
+module Impl = Snarky_backendless.Snark.Run.Make (Tick)
+
+let add_constraint c = Impl.assert_ { basic = T c; annotation = None }
+
+(* Verify finalize_and_get_gates *)
+let test_finalize_and_get_gates_with_lookup_tables () =
+ let cs = Tick.R1CS_constraint_system.create () in
+ let xor_table =
+ [| [| Tick.Field.zero; Tick.Field.zero; Tick.Field.zero |]
+ ; [| Tick.Field.zero; Tick.Field.one; Tick.Field.one |]
+ ; [| Tick.Field.one; Tick.Field.zero; Tick.Field.one |]
+ ; [| Tick.Field.one; Tick.Field.one; Tick.Field.one |]
+ |]
+ in
+ let and_table =
+ [| [| Tick.Field.zero; Tick.Field.zero; Tick.Field.zero |]
+ ; [| Tick.Field.zero; Tick.Field.one; Tick.Field.zero |]
+ ; [| Tick.Field.one; Tick.Field.zero; Tick.Field.zero |]
+ ; [| Tick.Field.one; Tick.Field.one; Tick.Field.one |]
+ |]
+ in
+ let () =
+ Tick.R1CS_constraint_system.(
+ add_constraint cs (T (AddFixedLookupTable { id = 1l; data = xor_table })))
+ in
+ let () =
+ Tick.R1CS_constraint_system.(
+ add_constraint cs (T (AddFixedLookupTable { id = 2l; data = and_table })))
+ in
+ let () = Tick.R1CS_constraint_system.set_primary_input_size cs 1 in
+ let _gates, lts, _rt =
+ Tick.R1CS_constraint_system.finalize_and_get_gates cs
+ in
+ assert (lts.(0).id = 1l) ;
+ assert (lts.(1).id = 2l) ;
+ assert (Array.length lts = 2)
+
+let test_finalize_and_get_gates_with_runtime_table_cfg () =
+ let cs = Tick.R1CS_constraint_system.create () in
+
+ let indexed_runtime_table_cfg = Array.init 4 Tick.Field.of_int in
+
+ let () =
+ Tick.R1CS_constraint_system.(
+ add_constraint cs
+ (T
+ (AddRuntimeTableCfg
+ { id = 1l; first_column = indexed_runtime_table_cfg } ) ))
+ in
+ let () = Tick.R1CS_constraint_system.set_primary_input_size cs 1 in
+ let _aux = Tick.R1CS_constraint_system.set_auxiliary_input_size cs 1 in
+ let _gates, _lt, rt = Tick.R1CS_constraint_system.finalize_and_get_gates cs in
+ assert (rt.(0).id = 1l) ;
+ assert (Array.length rt = 1)
+
+let test_compute_witness_with_lookup_to_the_same_idx_twice () =
+ (* See the comment in compute_witness when populating the runtime tables. The
+ function does not check that the runtime table has already been set at a
+ certain position, and it overwrites the previously set value *)
+ let table_id = 0 in
+ let table_size = 10 in
+ let first_column = Array.init table_size Tick.Field.of_int in
+ let repeated_idx = 0 in
+ let other_idx = 1 in
+ let fv2 = Tick.Field.random () in
+ let fv3 = Tick.Field.random () in
+ let external_values =
+ Tick.Field.
+ [| of_int table_id
+ ; of_int repeated_idx
+ ; random ()
+ ; of_int repeated_idx
+ ; fv2
+ ; of_int other_idx
+ ; fv3
+ |]
+ in
+ let cs =
+ Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit
+ (fun () () ->
+ let vtable_id =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0))
+ in
+ let vidx1 =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1))
+ in
+ let vv1 =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2))
+ in
+ let vidx2 =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(3))
+ in
+ let vv2 =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(4))
+ in
+ let vidx3 =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(5))
+ in
+ let vv3 =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(6))
+ in
+ add_constraint
+ (AddRuntimeTableCfg { id = Int32.of_int table_id; first_column }) ;
+ add_constraint
+ (Lookup
+ { w0 = vtable_id
+ ; w1 = vidx1
+ ; w2 = vv1
+ ; w3 = vidx2
+ ; w4 = vv2
+ ; w5 = vidx3
+ ; w6 = vv3
+ } ) )
+ in
+ let _ = Tick.R1CS_constraint_system.finalize cs in
+ let _witnesses, runtime_tables =
+ Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values)
+ in
+ (* checking only one table has been created *)
+ assert (Array.length runtime_tables = 1) ;
+ let rt = runtime_tables.(0) in
+ (* Second value is chosen *)
+ assert (Tick.Field.equal rt.data.(repeated_idx) fv2) ;
+ assert (Tick.Field.equal rt.data.(other_idx) fv3)
+
+let test_compute_witness_returns_correctly_filled_runtime_tables_one_lookup () =
+ (* We have one table with ID 0, indexed from 0 to n, and we will fill with
+ some values using the constraint RuntimeLookup.
+ We start with one lookup
+ *)
+ let n = 10 in
+ let first_column = Array.init n Tick.Field.of_int in
+ let table_id = 0 in
+ let idx = Random.int n in
+ let v = Tick.Field.random () in
+ let external_values = Tick.Field.[| of_int table_id; of_int idx; v |] in
+ let cs =
+ Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit
+ (fun () () ->
+ let vtable_id =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0))
+ in
+ let vidx =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1))
+ in
+ let vv =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2))
+ in
+ (* Config *)
+ add_constraint
+ (AddRuntimeTableCfg { id = Int32.of_int table_id; first_column }) ;
+ add_constraint
+ (Lookup
+ { w0 = vtable_id
+ ; w1 = vidx
+ ; w2 = vv
+ ; w3 = vidx
+ ; w4 = vv
+ ; w5 = vidx
+ ; w6 = vv
+ } ) )
+ in
+ let _ = Tick.R1CS_constraint_system.finalize cs in
+ let _witnesses, runtime_tables =
+ Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values)
+ in
+ (* checking only one table has been created *)
+ assert (Array.length runtime_tables = 1) ;
+ let rt = runtime_tables.(0) in
+ (* with the correct ID *)
+ assert (Int32.(equal rt.id (of_int table_id))) ;
+ let exp_rt = Array.init n (fun i -> if i = idx then v else Tick.Field.zero) in
+ assert (Array.for_all2 Tick.Field.equal rt.data exp_rt)
+
+let test_compute_witness_returns_correctly_filled_runtime_tables_multiple_lookup
+ () =
+ (* We have one table with ID 0, indexed from 0 to n, and we will fill with
+ some values using the constraint RuntimeLookup.
+     This test performs multiple lookups.
+ *)
+ let n = 10 in
+ let first_column = Array.init n Tick.Field.of_int in
+ let table_id = 0 in
+ let exp_rt_data = Array.init n (fun _ -> Tick.Field.zero) in
+ (* nb of lookups *)
+ let m = Random.int n in
+ let external_values = Array.init (1 + (m * 2)) (fun _ -> Tick.Field.zero) in
+ let cs =
+ Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit
+ (fun () () ->
+ let vtable_id =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0))
+ in
+ (* Config *)
+ add_constraint
+ (AddRuntimeTableCfg { id = Int32.of_int table_id; first_column }) ;
+ ignore
+ @@ List.init m (fun i ->
+ let j = (2 * i) + 1 in
+ let idx = Random.int n in
+ let v = Tick.Field.random () in
+ external_values.(j) <- Tick.Field.of_int idx ;
+ external_values.(j + 1) <- v ;
+ exp_rt_data.(idx) <- v ;
+ let vidx =
+ Impl.exists Impl.Field.typ ~compute:(fun () ->
+ external_values.(j) )
+ in
+ let vv =
+ Impl.exists Impl.Field.typ ~compute:(fun () ->
+ external_values.(j + 1) )
+ in
+ add_constraint
+ (Lookup
+ { w0 = vtable_id
+ ; w1 = vidx
+ ; w2 = vv
+ ; w3 = vidx
+ ; w4 = vv
+ ; w5 = vidx
+ ; w6 = vv
+ } ) ) )
+ in
+ let _ = Tick.R1CS_constraint_system.finalize cs in
+ let _witnesses, runtime_tables =
+ Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values)
+ in
+ (* checking only one table has been created *)
+ assert (Array.length runtime_tables = 1) ;
+ let rt = runtime_tables.(0) in
+ (* with the correct ID *)
+ assert (Int32.(equal rt.id (of_int table_id))) ;
+ assert (Array.for_all2 Tick.Field.equal rt.data exp_rt_data)
+
+(* Checking that lookups within a lookup table works correctly with the Lookup
+ constraint in the case of the fixed lookup table does not share its ID with a
+ runtime table *)
+let test_compute_witness_with_fixed_lookup_table_and_runtime_table () =
+ let n = 10 in
+ (* Fixed table *)
+ let fixed_lt_id = 2 in
+ let indexes = Array.init n Tick.Field.of_int in
+ let fixed_lt_values = Array.init n (fun _ -> Tick.Field.random ()) in
+ let data = [| indexes; fixed_lt_values |] in
+ (* Lookup info for fixed lookup *)
+ let fixed_lookup_idx = 0 in
+ let fixed_lookup_v = fixed_lt_values.(fixed_lookup_idx) in
+ (* rt *)
+ let rt_cfg_id = 3 in
+ let first_column = Array.init n Tick.Field.of_int in
+ let rt_idx = 1 in
+ let rt_v = Tick.Field.random () in
+ let external_values =
+ [| Tick.Field.of_int fixed_lt_id
+ ; Tick.Field.of_int rt_cfg_id
+ ; Tick.Field.of_int fixed_lookup_idx
+ ; fixed_lookup_v
+ ; Tick.Field.of_int rt_idx
+ ; rt_v
+ |]
+ in
+ let cs =
+ Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit
+ (fun () () ->
+ (* Add the fixed lookup table to the cs *)
+ add_constraint
+ (AddFixedLookupTable { id = Int32.of_int fixed_lt_id; data }) ;
+ let vfixed_lt_id =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0))
+ in
+
+ (* Runtime table cfg *)
+ let vrt_cfg_id =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1))
+ in
+ (* Config *)
+ add_constraint
+ (AddRuntimeTableCfg { id = Int32.of_int rt_cfg_id; first_column }) ;
+ (* Lookup into fixed lookup table *)
+ let vfixed_lookup_idx =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2))
+ in
+ let vfixed_lookup_v =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(3))
+ in
+ add_constraint
+ (Lookup
+ { w0 = vfixed_lt_id
+ ; w1 = vfixed_lookup_idx
+ ; w2 = vfixed_lookup_v
+ ; w3 = vfixed_lookup_idx
+ ; w4 = vfixed_lookup_v
+ ; w5 = vfixed_lookup_idx
+ ; w6 = vfixed_lookup_v
+ } ) ;
+ (* Lookup into runtime table *)
+ let vrt_idx =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(4))
+ in
+ let vrt_v =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(5))
+ in
+ add_constraint
+ (Lookup
+ { w0 = vrt_cfg_id
+ ; w1 = vrt_idx
+ ; w2 = vrt_v
+ ; w3 = vrt_idx
+ ; w4 = vrt_v
+ ; w5 = vrt_idx
+ ; w6 = vrt_v
+ } ) )
+ in
+
+ let _ = Tick.R1CS_constraint_system.finalize cs in
+ let _witnesses, runtime_tables =
+ Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values)
+ in
+ (* checking only one table has been created *)
+ assert (Array.length runtime_tables = 1) ;
+ let rt = runtime_tables.(0) in
+ (* with the correct ID *)
+ assert (Int32.(equal rt.id (of_int rt_cfg_id))) ;
+ assert (Tick.Field.equal rt.data.(rt_idx) rt_v)
+
+(* Checking that lookups within a lookup table work correctly with the Lookup
+   constraint in the case where the fixed lookup table shares its ID with a
+   runtime table. *)
+let test_compute_witness_with_fixed_lookup_table_and_runtime_table_sharing_ids
+ () =
+ let n = 10 in
+ (* Fixed table *)
+ let fixed_lt_id = 2 in
+ let rt_cfg_id = fixed_lt_id in
+ let indexes = Array.init n Tick.Field.of_int in
+ let fixed_lt_values = Array.init n (fun _ -> Tick.Field.random ()) in
+ let data = [| indexes; fixed_lt_values |] in
+ (* Lookup into fixed lookup table *)
+ let fixed_lookup_idx = Random.int n in
+ let fixed_lookup_v = fixed_lt_values.(fixed_lookup_idx) in
+ let rt_idx = n + Random.int n in
+ let rt_v = Tick.Field.random () in
+ let external_values =
+ [| Tick.Field.of_int fixed_lt_id
+ ; Tick.Field.of_int rt_cfg_id
+ ; Tick.Field.of_int fixed_lookup_idx
+ ; fixed_lookup_v
+ ; Tick.Field.of_int rt_idx
+ ; rt_v
+ |]
+ in
+ (* Extend the lookup table *)
+ let first_column = Array.init n (fun i -> Tick.Field.of_int (n + i)) in
+ let cs =
+ Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit
+ (fun () () ->
+ (* Add the fixed lookup table to the cs *)
+ add_constraint
+ (AddFixedLookupTable { id = Int32.of_int fixed_lt_id; data }) ;
+
+ let vfixed_lt_id =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(0))
+ in
+ let vrt_cfg_id =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(1))
+ in
+ (* Config *)
+ add_constraint
+ (AddRuntimeTableCfg { id = Int32.of_int rt_cfg_id; first_column }) ;
+ let vfixed_lookup_idx =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(2))
+ in
+ let vfixed_lookup_v =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(3))
+ in
+ add_constraint
+ (Lookup
+ { w0 = vfixed_lt_id
+ ; w1 = vfixed_lookup_idx
+ ; w2 = vfixed_lookup_v
+ ; w3 = vfixed_lookup_idx
+ ; w4 = vfixed_lookup_v
+ ; w5 = vfixed_lookup_idx
+ ; w6 = vfixed_lookup_v
+ } ) ;
+ (* Lookup into runtime table *)
+ let vrt_idx =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(4))
+ in
+ let vrt_v =
+ Impl.exists Impl.Field.typ ~compute:(fun () -> external_values.(5))
+ in
+ add_constraint
+ (Lookup
+ { w0 = vrt_cfg_id
+ ; w1 = vrt_idx
+ ; w2 = vrt_v
+ ; w3 = vrt_idx
+ ; w4 = vrt_v
+ ; w5 = vrt_idx
+ ; w6 = vrt_v
+ } ) )
+ in
+ let _ = Tick.R1CS_constraint_system.finalize cs in
+ let _witnesses, runtime_tables =
+ Tick.R1CS_constraint_system.compute_witness cs (Array.get external_values)
+ in
+ (* checking only one table has been created *)
+ assert (Array.length runtime_tables = 1) ;
+ let rt = runtime_tables.(0) in
+ (* with the correct ID *)
+ assert (Int32.(equal rt.id (of_int rt_cfg_id))) ;
+ assert (Tick.Field.equal rt.data.(rt_idx - n) rt_v)
+
+let test_cannot_finalize_twice_the_fixed_lookup_tables () =
+ let module Tick = Kimchi_backend.Pasta.Vesta_based_plonk in
+ let size = 1 + Random.int 100 in
+ let indexes = Array.init size Tick.Field.of_int in
+ let values = Array.init size (fun _ -> Tick.Field.random ()) in
+ let cs = Tick.R1CS_constraint_system.create () in
+ let () =
+ Tick.R1CS_constraint_system.(
+ add_constraint cs
+ (T (AddFixedLookupTable { id = 1l; data = [| indexes; values |] })))
+ in
+ let () = Tick.R1CS_constraint_system.finalize_fixed_lookup_tables cs in
+ Alcotest.check_raises "Finalize a second time the fixed lookup tables"
+ (Failure "Fixed lookup tables have already been finalized") (fun () ->
+ Tick.R1CS_constraint_system.finalize_fixed_lookup_tables cs )
+
+let test_cannot_finalize_twice_the_runtime_table_cfgs () =
+ let module Tick = Kimchi_backend.Pasta.Vesta_based_plonk in
+ let size = 1 + Random.int 100 in
+ let first_column = Array.init size Tick.Field.of_int in
+ let cs = Tick.R1CS_constraint_system.create () in
+ let () =
+ Tick.R1CS_constraint_system.(
+ add_constraint cs (T (AddRuntimeTableCfg { id = 1l; first_column })))
+ in
+ let () = Tick.R1CS_constraint_system.finalize_runtime_lookup_tables cs in
+ Alcotest.check_raises
+ "Runtime table configurations have already been finalized"
+ (Failure "Runtime table configurations have already been finalized")
+ (fun () -> Tick.R1CS_constraint_system.finalize_runtime_lookup_tables cs)
+
+let () =
+ let open Alcotest in
+ run "Test constraint construction"
+ [ ( "Lookup tables"
+ , [ test_case "Add one fixed table" `Quick
+ test_finalize_and_get_gates_with_lookup_tables
+ ; test_case "Add one runtime table cfg" `Quick
+ test_finalize_and_get_gates_with_runtime_table_cfg
+ ; test_case "Compute witness with one runtime table lookup" `Quick
+ test_compute_witness_returns_correctly_filled_runtime_tables_one_lookup
+ ; test_case "Compute witness with multiple runtime table lookup" `Quick
+ test_compute_witness_returns_correctly_filled_runtime_tables_multiple_lookup
+ ; test_case
+ "Compute witness with runtime lookup at same index with\n\
+ \ different values" `Quick
+ test_compute_witness_with_lookup_to_the_same_idx_twice
+ ; test_case
+ "Compute witness with lookups within a runtime table and a fixed \
+ lookup table, not sharing the same ID"
+ `Quick
+ test_compute_witness_with_fixed_lookup_table_and_runtime_table
+ ; test_case
+ "Compute witness with lookups within a runtime table and a fixed \
+ lookup table, sharing the table ID"
+ `Quick
+ test_compute_witness_with_fixed_lookup_table_and_runtime_table_sharing_ids
+ ; test_case "Check that fixed lookup tables cannot be finalized twice"
+ `Quick test_cannot_finalize_twice_the_fixed_lookup_tables
+ ; test_case
+ "Check that runtime table configurations cannot be finalized twice"
+ `Quick test_cannot_finalize_twice_the_runtime_table_cfgs
+ ] )
+ ]
diff --git a/src/lib/crypto/kimchi_backend/gadgets/affine.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/affine.ml.disabled
new file mode 100644
index 00000000000..ac548772a65
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/affine.ml.disabled
@@ -0,0 +1,156 @@
+open Core_kernel
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+(* Affine representation of an elliptic curve point over a foreign field *)
+
+let tests_enabled = true
+
+type bignum_point = Bignum_bigint.t * Bignum_bigint.t
+
+let two_to_4limb = Bignum_bigint.(Common.two_to_3limb * Common.two_to_limb)
+
+type 'field t =
+ 'field Foreign_field.Element.Standard.t
+ * 'field Foreign_field.Element.Standard.t
+
+let of_coordinates a = a
+
+let of_bignum_bigint_coordinates (type field)
+ (module Circuit : Snark_intf.Run with type field = field)
+ (point : bignum_point) : field t =
+ let x, y = point in
+ of_coordinates
+ ( Foreign_field.Element.Standard.of_bignum_bigint (module Circuit) x
+ , Foreign_field.Element.Standard.of_bignum_bigint (module Circuit) y )
+
+let const_of_bignum_bigint_coordinates (type field)
+ (module Circuit : Snark_intf.Run with type field = field)
+ (point : bignum_point) : field t =
+ let x, y = point in
+ of_coordinates
+ ( Foreign_field.Element.Standard.const_of_bignum_bigint (module Circuit) x
+ , Foreign_field.Element.Standard.const_of_bignum_bigint (module Circuit) y
+ )
+
+let to_coordinates a = a
+
+let to_string_as_prover (type field)
+ (module Circuit : Snark_intf.Run with type field = field) a : string =
+ let x, y = to_coordinates a in
+ sprintf "(%s, %s)"
+ (Foreign_field.Element.Standard.to_string_as_prover (module Circuit) x)
+ (Foreign_field.Element.Standard.to_string_as_prover (module Circuit) y)
+
+let x a =
+ let x_element, _ = to_coordinates a in
+ x_element
+
+let y a =
+ let _, y_element = to_coordinates a in
+ y_element
+
+let equal_as_prover (type field)
+ (module Circuit : Snark_intf.Run with type field = field) (left : field t)
+ (right : field t) : bool =
+ let left_x, left_y = to_coordinates left in
+ let right_x, right_y = to_coordinates right in
+ Foreign_field.Element.Standard.(
+ equal_as_prover (module Circuit) left_x right_x
+ && equal_as_prover (module Circuit) left_y right_y)
+
+(* Create constraints to assert equivalence between two affine points *)
+let assert_equal (type field)
+ (module Circuit : Snark_intf.Run with type field = field) (left : field t)
+ (right : field t) : unit =
+ let left_x, left_y = to_coordinates left in
+ let right_x, right_y = to_coordinates right in
+ Foreign_field.Element.Standard.(
+ assert_equal (module Circuit) left_x right_x ;
+ assert_equal (module Circuit) left_y right_y)
+
+let check_here_const_of_bignum_bigint_coordinates (type field)
+ (module Circuit : Snark_intf.Run with type field = field)
+ (point : bignum_point) : field t =
+ let const_point = const_of_bignum_bigint_coordinates (module Circuit) point in
+ let var_point = of_bignum_bigint_coordinates (module Circuit) point in
+ assert_equal (module Circuit) const_point var_point ;
+ const_point
+
+let const_zero (type field)
+ (module Circuit : Snark_intf.Run with type field = field) : field t =
+ of_coordinates
+ Foreign_field.Element.Standard.
+ ( const_of_bignum_bigint (module Circuit) Bignum_bigint.zero
+ , const_of_bignum_bigint (module Circuit) Bignum_bigint.zero )
+
+(* Uses 6 * 1.5 (Generics per Field) = 9 rows per Affine.if_ *)
+let if_ (type field) (module Circuit : Snark_intf.Run with type field = field)
+ (b : Circuit.Boolean.var) ~(then_ : field t) ~(else_ : field t) : field t =
+ let then_x, then_y = to_coordinates then_ in
+ let else_x, else_y = to_coordinates else_ in
+ of_coordinates
+ Foreign_field.Element.Standard.
+ ( if_ (module Circuit) b ~then_:then_x ~else_:else_x
+ , if_ (module Circuit) b ~then_:then_y ~else_:else_y )
+
+(****************)
+(* Affine tests *)
+(****************)
+
+let%test_unit "affine" =
+ if tests_enabled then
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+ (* Check Affine methods *)
+ let _cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof (fun () ->
+ let pt_a =
+ of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ ( Bignum_bigint.of_string
+ "15038058761817109681921033191530858996191372456511467769172810422323500124150"
+ , Bignum_bigint.of_string
+ "64223534476670136480328171927326822445460557333044467340973794755877726909525"
+ )
+ in
+ Foreign_field.result_row (module Runner.Impl) @@ fst pt_a ;
+ Foreign_field.result_row (module Runner.Impl) @@ snd pt_a ;
+ let pt_b =
+ of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ ( Bignum_bigint.of_string
+ "99660522603236469231535770150980484469424456619444894985600600952621144670700"
+ , Bignum_bigint.of_string
+ "8901505138963553768122761105087501646863888139548342861255965172357387323186"
+ )
+ in
+ Foreign_field.result_row (module Runner.Impl) @@ fst pt_b ;
+ Foreign_field.result_row (module Runner.Impl) @@ snd pt_b ;
+ let bit =
+ Runner.Impl.(exists Boolean.typ_unchecked ~compute:(fun () -> true))
+ in
+
+ let pt_c = if_ (module Runner.Impl) bit ~then_:pt_a ~else_:pt_b in
+ Foreign_field.result_row (module Runner.Impl) (fst pt_c) ;
+ Foreign_field.result_row (module Runner.Impl) (snd pt_c) ;
+
+ assert_equal (module Runner.Impl) pt_c pt_a ;
+
+ let bit2 =
+ Runner.Impl.(
+ exists Boolean.typ_unchecked ~compute:(fun () -> false))
+ in
+
+ let pt_d = if_ (module Runner.Impl) bit2 ~then_:pt_a ~else_:pt_b in
+ Foreign_field.result_row (module Runner.Impl) (fst pt_d) ;
+ Foreign_field.result_row (module Runner.Impl) (snd pt_d) ;
+
+ assert_equal (module Runner.Impl) pt_d pt_b ;
+
+ () )
+ in
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/bitwise.ml b/src/lib/crypto/kimchi_backend/gadgets/bitwise.ml
new file mode 100644
index 00000000000..84695a0634a
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/bitwise.ml
@@ -0,0 +1,918 @@
+open Core_kernel
+
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+
+let tests_enabled = true
+
+(* Auxiliary functions *)
+
+(* returns a field containing the all one word of length bits *)
+let all_ones_field (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (length : int) : f =
+ Common.bignum_bigint_to_field (module Circuit)
+ @@ Bignum_bigint.(pow (of_int 2) (of_int length) - one)
+
+let fits_in_bits_as_prover (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (word : Circuit.Field.t) (length : int) =
+ let open Common in
+ assert (
+ Bignum_bigint.(
+ field_to_bignum_bigint
+ (module Circuit)
+ (cvar_field_to_field_as_prover (module Circuit) word)
+ < pow (of_int 2) (of_int length)) )
+
+(* ROT64 *)
+
+(* Side of rotation *)
+type rot_mode = Left | Right
+
+(* Performs the 64bit rotation and returns rotated word, excess, and shifted *)
+let rot_aux (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(check64 = false) (word : Circuit.Field.t) (bits : int) (mode : rot_mode) :
+ Circuit.Field.t * Circuit.Field.t * Circuit.Field.t =
+ let open Circuit in
+ (* Check that the rotation bits is smaller than 64 *)
+ assert (bits < 64) ;
+ (* Check that the rotation bits is non-negative *)
+ assert (bits >= 0) ;
+
+ (* Check that the input word has at most 64 bits *)
+ as_prover (fun () ->
+ fits_in_bits_as_prover (module Circuit) word 64 ;
+ () ) ;
+
+ (* Compute actual length depending on whether the rotation mode is Left or Right *)
+ let rot_bits = match mode with Left -> bits | Right -> 64 - bits in
+
+ (* Auxiliary Bignum_bigint values *)
+ let big_2_pow_64 = Bignum_bigint.(pow (of_int 2) (of_int 64)) in
+ let big_2_pow_rot = Bignum_bigint.(pow (of_int 2) (of_int rot_bits)) in
+
+ (* Compute the rotated word *)
+ let rotated, excess, shifted, bound =
+ exists (Typ.array ~length:4 Field.typ) ~compute:(fun () ->
+ (* Assert that word is at most 64 bits*)
+ let word_big =
+ Common.(
+ field_to_bignum_bigint
+ (module Circuit)
+ (cvar_field_to_field_as_prover (module Circuit) word))
+ in
+ assert (Bignum_bigint.(word_big < big_2_pow_64)) ;
+
+ (* Obtain rotated output, excess, and shifted for the equation
+ word * 2^rot = excess * 2^64 + shifted *)
+ let excess_big, shifted_big =
+ Common.bignum_bigint_div_rem
+ Bignum_bigint.(word_big * big_2_pow_rot)
+ big_2_pow_64
+ in
+
+ (* Compute rotated value as
+ rotated = excess + shifted *)
+ let rotated_big = Bignum_bigint.(shifted_big + excess_big) in
+
+ (* Compute bound that is the right input of FFAdd equation *)
+ let bound_big =
+ Bignum_bigint.(excess_big + big_2_pow_64 - big_2_pow_rot)
+ in
+
+ (* Convert back to field *)
+ let shifted =
+ Common.bignum_bigint_to_field (module Circuit) shifted_big
+ in
+ let excess =
+ Common.bignum_bigint_to_field (module Circuit) excess_big
+ in
+ let rotated =
+ Common.bignum_bigint_to_field (module Circuit) rotated_big
+ in
+ let bound = Common.bignum_bigint_to_field (module Circuit) bound_big in
+
+ [| rotated; excess; shifted; bound |] )
+ |> Common.tuple4_of_array
+ in
+
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+
+ (* Current row *)
+ with_label "rot64_gate" (fun () ->
+ (* Set up Rot64 gate *)
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (Rot64
+ { word
+ ; rotated
+ ; excess
+ ; bound_limb0 = of_bits bound 52 64
+ ; bound_limb1 = of_bits bound 40 52
+ ; bound_limb2 = of_bits bound 28 40
+ ; bound_limb3 = of_bits bound 16 28
+ ; bound_crumb0 = of_bits bound 14 16
+ ; bound_crumb1 = of_bits bound 12 14
+ ; bound_crumb2 = of_bits bound 10 12
+ ; bound_crumb3 = of_bits bound 8 10
+ ; bound_crumb4 = of_bits bound 6 8
+ ; bound_crumb5 = of_bits bound 4 6
+ ; bound_crumb6 = of_bits bound 2 4
+ ; bound_crumb7 = of_bits bound 0 2
+ ; two_to_rot =
+ Common.bignum_bigint_to_field
+ (module Circuit)
+ big_2_pow_rot
+ } )
+ } ) ;
+
+ (* Next row *)
+ Range_check.bits64 (module Circuit) shifted ;
+
+ (* Following row *)
+ Range_check.bits64 (module Circuit) excess ;
+
+ if check64 then Range_check.bits64 (module Circuit) word ;
+
+ (rotated, excess, shifted)
+
+(* 64-bit Rotation of rot_bits to the `mode` side
+ * Inputs
+ * - check: whether to check the input word is at most 64 bits (default is false)
+ * - word of maximum 64 bits to be rotated
+ * - rot_bits: number of bits to be rotated
+ * - mode: Left or Right
+ * Output: rotated word
+ *)
+let rot64 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(check64 : bool = false) (word : Circuit.Field.t) (rot_bits : int)
+ (mode : rot_mode) : Circuit.Field.t =
+ let rotated, _excess, _shifted =
+ rot_aux (module Circuit) ~check64 word rot_bits mode
+ in
+
+ rotated
+
+(* 64-bit bitwise logical shift of bits to the left side
+ * Inputs
+ * - check64: whether to check the input word is at most 64 bits (default is false)
+ * - word of maximum 64 bits to be shifted
+ * - bits: number of bits to be shifted
+ * Output: left shifted word (with bits 0s at the least significant positions)
+ *)
+let lsl64 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(check64 : bool = false) (word : Circuit.Field.t) (bits : int) :
+ Circuit.Field.t =
+ let _rotated, _excess, shifted =
+ rot_aux (module Circuit) ~check64 word bits Left
+ in
+
+ shifted
+
+(* 64-bit bitwise logical shift of bits to the right side
+ * Inputs
+ * - check64: whether to check the input word is at most 64 bits (default is false)
+ * - word of maximum 64 bits to be shifted
+ * - bits: number of bits to be shifted
+ * Output: right shifted word (with bits 0s at the most significant positions)
+*)
+let lsr64 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(check64 : bool = false) (word : Circuit.Field.t) (bits : int) :
+ Circuit.Field.t =
+ let _rotated, excess, _shifted =
+ rot_aux (module Circuit) ~check64 word bits Right
+ in
+
+ excess
+
+(* XOR *)
+
+(* Boolean Xor of length bits
+ * input1 and input2 are the inputs to the Xor gate
+ * length is the number of bits to Xor
+ * len_xor is the number of bits of the lookup table (default is 4)
+ *)
+let bxor (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(len_xor = 4) (input1 : Circuit.Field.t) (input2 : Circuit.Field.t)
+ (length : int) : Circuit.Field.t =
+ (* Auxiliary function to compute the next variable for the chain of Xors *)
+ let as_prover_next_var (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (curr_var : Circuit.Field.t) (var0 : Circuit.Field.t)
+ (var1 : Circuit.Field.t) (var2 : Circuit.Field.t) (var3 : Circuit.Field.t)
+ (len_xor : int) : Circuit.Field.t =
+ let open Circuit in
+ let two_pow_len =
+ Common.bignum_bigint_to_field
+ (module Circuit)
+ Bignum_bigint.(pow (of_int 2) (of_int len_xor))
+ in
+ let two_pow_2len = Field.Constant.(two_pow_len * two_pow_len) in
+ let two_pow_3len = Field.Constant.(two_pow_2len * two_pow_len) in
+ let two_pow_4len = Field.Constant.(two_pow_3len * two_pow_len) in
+ let next_var =
+ exists Field.typ ~compute:(fun () ->
+ let curr_field =
+ Common.cvar_field_to_field_as_prover (module Circuit) curr_var
+ in
+ let field0 =
+ Common.cvar_field_to_field_as_prover (module Circuit) var0
+ in
+ let field1 =
+ Common.cvar_field_to_field_as_prover (module Circuit) var1
+ in
+ let field2 =
+ Common.cvar_field_to_field_as_prover (module Circuit) var2
+ in
+ let field3 =
+ Common.cvar_field_to_field_as_prover (module Circuit) var3
+ in
+ Field.Constant.(
+ ( curr_field - field0 - (field1 * two_pow_len)
+ - (field2 * two_pow_2len) - (field3 * two_pow_3len) )
+ / two_pow_4len) )
+ in
+ next_var
+ in
+
+ (* Recursively builds Xor
+ * input1 and input2 are the inputs to the Xor gate as bits
+ * output is the output of the Xor gate as bits
+ * length is the number of remaining bits to Xor
+ * len_xor is the number of bits of the lookup table (default is 4)
+ *)
+ let rec bxor_rec (in1 : Circuit.Field.t) (in2 : Circuit.Field.t)
+ (out : Circuit.Field.t) (length : int) (len_xor : int) =
+ let open Circuit in
+ (* If inputs are zero and length is zero, add the zero check *)
+ if length = 0 then (
+ with_label "xor_zero_check" (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (Raw
+ { kind = Zero
+ ; values = [| in1; in2; out |]
+ ; coeffs = [||]
+ } )
+ } ) ;
+ Field.Assert.equal Field.zero in1 ;
+ Field.Assert.equal Field.zero in2 ;
+ Field.Assert.equal Field.zero out ;
+ () )
+ else
+ (* Define shorthand helper *)
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+
+ (* Nibble offsets *)
+ let first = len_xor in
+ let second = first + len_xor in
+ let third = second + len_xor in
+ let fourth = third + len_xor in
+
+ let in1_0 = of_bits in1 0 first in
+ let in1_1 = of_bits in1 first second in
+ let in1_2 = of_bits in1 second third in
+ let in1_3 = of_bits in1 third fourth in
+ let in2_0 = of_bits in2 0 first in
+ let in2_1 = of_bits in2 first second in
+ let in2_2 = of_bits in2 second third in
+ let in2_3 = of_bits in2 third fourth in
+ let out_0 = of_bits out 0 first in
+ let out_1 = of_bits out first second in
+ let out_2 = of_bits out second third in
+ let out_3 = of_bits out third fourth in
+
+ (* If length is more than 0, add the Xor gate *)
+ with_label "xor_gate" (fun () ->
+ (* Set up Xor gate *)
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (Xor
+ { in1
+ ; in2
+ ; out
+ ; in1_0
+ ; in1_1
+ ; in1_2
+ ; in1_3
+ ; in2_0
+ ; in2_1
+ ; in2_2
+ ; in2_3
+ ; out_0
+ ; out_1
+ ; out_2
+ ; out_3
+ } )
+ } ) ;
+
+ let next_in1 =
+ as_prover_next_var (module Circuit) in1 in1_0 in1_1 in1_2 in1_3 len_xor
+ in
+ let next_in2 =
+ as_prover_next_var (module Circuit) in2 in2_0 in2_1 in2_2 in2_3 len_xor
+ in
+ let next_out =
+ as_prover_next_var (module Circuit) out out_0 out_1 out_2 out_3 len_xor
+ in
+
+ (* Next length is 4*n less bits *)
+ let next_length = length - (4 * len_xor) in
+
+ (* Recursively call xor on the next nibble *)
+ bxor_rec next_in1 next_in2 next_out next_length len_xor ;
+ ()
+ in
+
+ let open Circuit in
+ let open Common in
+ (* Check that the length is positive *)
+ assert (length > 0 && len_xor > 0) ;
+ (* Check that the length fits in the field *)
+ assert (length <= Field.size_in_bits) ;
+
+ (* Initialize array of 255 bools all set to false *)
+ let input1_array = Array.create ~len:Field.size_in_bits false in
+ let input2_array = Array.create ~len:Field.size_in_bits false in
+
+ (* Sanity checks about lengths of inputs using bignum *)
+ as_prover (fun () ->
+ (* Read inputs, Convert to field type *)
+ let input1_field =
+ cvar_field_to_field_as_prover (module Circuit) input1
+ in
+ let input2_field =
+ cvar_field_to_field_as_prover (module Circuit) input2
+ in
+
+ (* Check real lengths are at most the desired length *)
+ fits_in_bits_as_prover (module Circuit) input1 length ;
+ fits_in_bits_as_prover (module Circuit) input2 length ;
+
+ (* Convert inputs field elements to list of bits of length 255 *)
+ let input1_bits = Field.Constant.unpack @@ input1_field in
+ let input2_bits = Field.Constant.unpack @@ input2_field in
+
+ (* Convert list of bits to arrays *)
+ let input1_bits_array = List.to_array @@ input1_bits in
+ let input2_bits_array = List.to_array @@ input2_bits in
+
+ (* Iterate over 255 positions to update value of arrays *)
+ for i = 0 to Field.size_in_bits - 1 do
+ input1_array.(i) <- input1_bits_array.(i) ;
+ input2_array.(i) <- input2_bits_array.(i)
+ done ;
+
+ () ) ;
+
+ let output_xor =
+ exists Field.typ ~compute:(fun () ->
+ (* Sanity checks about lengths of inputs using bignum *)
+ (* Check real lengths are at most the desired length *)
+ fits_in_bits_as_prover (module Circuit) input1 length ;
+ fits_in_bits_as_prover (module Circuit) input2 length ;
+
+ let input1_field =
+ cvar_field_to_field_as_prover (module Circuit) input1
+ in
+ let input2_field =
+ cvar_field_to_field_as_prover (module Circuit) input2
+ in
+
+ (* Convert inputs field elements to list of bits of length 255 *)
+ let input1_bits = Field.Constant.unpack @@ input1_field in
+ let input2_bits = Field.Constant.unpack @@ input2_field in
+
+ (* Xor list of bits to obtain output of the xor *)
+ let output_bits =
+ List.map2_exn input1_bits input2_bits ~f:(fun b1 b2 ->
+ Bool.(not (equal b1 b2)) )
+ in
+
+ (* Convert list of output bits to field element *)
+ Field.Constant.project output_bits )
+ in
+
+ (* Obtain pad length until the length is a multiple of 4*n for n-bit length lookup table *)
+ let pad_length =
+ if length mod (4 * len_xor) <> 0 then
+ length + (4 * len_xor) - (length mod (4 * len_xor))
+ else length
+ in
+
+ (* Recursively build Xor gadget *)
+ bxor_rec input1 input2 output_xor pad_length len_xor ;
+
+ (* Convert back to field *)
+ output_xor
+
+(* Boolean Xor of 16 bits
+ * This is a special case of Xor for 16 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to Xor together, of maximum 16 bits each.
+ * Returns the Xor of the two words.
+ *)
+let bxor16 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) : Circuit.Field.t =
+ bxor (module Circuit) input1 input2 16 ~len_xor:4
+
+(* Boolean Xor of 64 bits
+ * This is a special case of Xor for 64 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to Xor together, of maximum 64 bits each.
+ * Returns the Xor of the two words.
+ *)
+let bxor64 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) : Circuit.Field.t =
+ bxor (module Circuit) input1 input2 64 ~len_xor:4
+
+(* AND *)
+
+(* Boolean And of length bits
+ * input1 and input2 are the two inputs to AND
+ * length is the number of bits to AND
+ * len_xor is the number of bits of the inputs of the Xor lookup table (default is 4)
+ *)
+let band (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(len_xor = 4) (input1 : Circuit.Field.t) (input2 : Circuit.Field.t)
+ (length : int) : Circuit.Field.t =
+ let open Circuit in
+ (* Recursively build And gadget with leading Xors and a final Generic gate *)
+ (* It will also check the correct lengths of the inputs, no need to do it again *)
+ let xor_output = bxor (module Circuit) input1 input2 length ~len_xor in
+
+ let and_output =
+ exists Field.typ ~compute:(fun () ->
+ Common.cvar_field_bits_combine_as_prover
+ (module Circuit)
+ input1 input2
+ (fun b1 b2 -> b1 && b2) )
+ in
+
+ (* Compute sum of a + b and constrain in the circuit *)
+ let sum = Generic.add (module Circuit) input1 input2 in
+ let neg_one = Field.Constant.(negate one) in
+ let neg_two = Field.Constant.(neg_one + neg_one) in
+
+ (* Constrain AND as 2 * and = sum - xor *)
+ with_label "and_equation" (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (Basic
+ { l = (Field.Constant.one, sum)
+ ; r = (neg_one, xor_output)
+ ; o = (neg_two, and_output)
+ ; m = Field.Constant.zero
+ ; c = Field.Constant.zero
+ } )
+ } ) ;
+
+ and_output
+
+(* Boolean And of 64 bits
+ * This is a special case of And for 64 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to And together, of maximum 64 bits each.
+ * Returns the And of the two words.
+ *)
+let band64 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input1 : Circuit.Field.t) (input2 : Circuit.Field.t) : Circuit.Field.t =
+ band (module Circuit) input1 input2 64
+
+(* NOT *)
+
+(* Boolean Not of length bits for checked length (uses Xor gadgets inside to constrain the length)
+ * - input of word to negate
+ * - length of word to negate
+ * - len_xor is the length of the Xor lookup table to use beneath (default 4)
+ * Note that the length needs to be less than the bit length of the field.
+ *)
+let bnot_checked (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(len_xor = 4) (input : Circuit.Field.t) (length : int) : Circuit.Field.t =
+ let open Circuit in
+ (* Check it is not 255 or else 2^255-1 will not fit in Pallas *)
+ assert (length < Circuit.Field.size_in_bits) ;
+
+ let all_ones_f = all_ones_field (module Circuit) length in
+ let all_ones_var = exists Field.typ ~compute:(fun () -> all_ones_f) in
+
+ (* Negating is equivalent to XORing with all one word *)
+ let out_not = bxor (module Circuit) input all_ones_var length ~len_xor in
+
+ (* Doing this afterwards or else it can break chainability with Xor16's and Zero *)
+ Field.Assert.equal (Field.constant all_ones_f) all_ones_var ;
+
+ out_not
+
+(* Negates a word of 64 bits with checked length of 64 bits.
+ * This means that the bound in length is constrained in the circuit. *)
+let bnot64_checked (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input : Circuit.Field.t) : Circuit.Field.t =
+ bnot_checked (module Circuit) input 64
+
+(* Boolean Not of length bits for unchecked length (uses Generic subtractions inside)
+ * - input of word to negate
+ * - length of word to negate
+ * (Note that this can negate two words per row, but its inputs need to be a copy of another
+ * variable with a correct length in order to make sure that the length is correct)
+ *)
+let bnot_unchecked (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input : Circuit.Field.t) (length : int) : Circuit.Field.t =
+ let open Circuit in
+ (* Check it is not 255 or else 2^255-1 will not fit in Pallas *)
+ assert (length < Circuit.Field.size_in_bits) ;
+ assert (length > 0) ;
+
+ (* Check that the input word has at most length bits.
+ In the checked version this is done in the Xor *)
+ as_prover (fun () ->
+ fits_in_bits_as_prover (module Circuit) input length ;
+ () ) ;
+
+ let all_ones_f = all_ones_field (module Circuit) length in
+ let all_ones_var = exists Field.typ ~compute:(fun () -> all_ones_f) in
+ Field.Assert.equal all_ones_var (Field.constant all_ones_f) ;
+
+ (* Negating is equivalent to subtracting with all one word *)
+ (* [2^len - 1] - input = not (input) *)
+ Generic.sub (module Circuit) all_ones_var input
+
+(* Negates a word of 64 bits, but its length goes unconstrained in the circuit
+ (unless it is copied from a checked length value) *)
+let bnot64_unchecked (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input : Circuit.Field.t) : Circuit.Field.t =
+ bnot_unchecked (module Circuit) input 64
+
+(**************)
+(* UNIT TESTS *)
+(**************)
+
+let%test_unit "bitwise rotation gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test ROT gadget
+ * Input operands and expected output: word len mode rotated
+ * Returns unit if constraints are satisfied, error otherwise.
+ *)
+ let test_rot ?cs word length mode result =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and output *)
+ let word =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string word )
+ in
+ let result =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string result )
+ in
+ (* Use the rot gate gadget *)
+ let output_rot = rot64 (module Runner.Impl) word length mode in
+ Field.Assert.equal output_rot result
+ (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *) )
+ in
+ cs
+ in
+ (* Positive tests *)
+ let _cs = test_rot "0" 0 Left "0" in
+ let _cs = test_rot "0" 32 Right "0" in
+ let _cs = test_rot "1" 1 Left "2" in
+ let _cs = test_rot "1" 63 Left "9223372036854775808" in
+ let cs = test_rot "256" 4 Right "16" in
+ (* 0x5A5A5A5A5A5A5A5A is 0xA5A5A5A5A5A5A5A5 both when rotate 4 bits Left or Right*)
+ let _cs =
+ test_rot ~cs "6510615555426900570" 4 Right "11936128518282651045"
+ in
+ let _cs = test_rot "6510615555426900570" 4 Left "11936128518282651045" in
+ let cs = test_rot "1234567890" 32 Right "5302428712241725440" in
+ let _cs = test_rot ~cs "2651214356120862720" 32 Right "617283945" in
+ let _cs = test_rot ~cs "1153202983878524928" 32 Right "268500993" in
+
+    (* Negative tests *)
+ assert (Common.is_error (fun () -> test_rot "0" 1 Left "1")) ;
+ assert (Common.is_error (fun () -> test_rot "1" 64 Left "1")) ;
+ assert (Common.is_error (fun () -> test_rot ~cs "0" 0 Left "0")) ) ;
+ ()
+
+let%test_unit "bitwise shift gadgets" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test LSL and LSR gadgets
+ * Input operands and expected output: word len mode shifted
+ * Returns unit if constraints are satisfied, error otherwise.
+ *)
+ let test_shift ?cs word length mode result =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and output *)
+ let word =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string word )
+ in
+ let result =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_string result )
+ in
+ (* Use the xor gate gadget *)
+ let output_shift =
+ match mode with
+ | Left ->
+ lsl64 (module Runner.Impl) word length
+ | Right ->
+ lsr64 (module Runner.Impl) word length
+ in
+ Field.Assert.equal output_shift result )
+ in
+ cs
+ in
+ (* Positive tests *)
+ let cs1l = test_shift "0" 1 Left "0" in
+ let cs1r = test_shift "0" 1 Right "0" in
+ let _cs = test_shift ~cs:cs1l "1" 1 Left "2" in
+ let _cs = test_shift ~cs:cs1r "1" 1 Right "0" in
+ let _cs = test_shift "256" 4 Right "16" in
+ let _cs = test_shift "256" 20 Right "0" in
+ let _cs = test_shift "6510615555426900570" 16 Right "99344109427290" in
+ (* All 1's word *)
+ let cs_allones =
+ test_shift "18446744073709551615" 15 Left "18446744073709518848"
+ in
+ (* Random value ADCC7E30EDCAC126 -> ADCC7E30 -> EDCAC12600000000*)
+ let _cs = test_shift "12523523412423524646" 32 Right "2915860016" in
+ let _cs =
+ test_shift "12523523412423524646" 32 Left "17134720101237391360"
+ in
+
+    (* Negative tests *)
+ assert (Common.is_error (fun () -> test_shift "0" 1 Left "1")) ;
+ assert (Common.is_error (fun () -> test_shift "1" 64 Left "1")) ;
+ assert (Common.is_error (fun () -> test_shift ~cs:cs_allones "0" 0 Left "0"))
+ ) ;
+ ()
+
+let%test_unit "bitwise xor gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test XOR gadget
+ * Inputs operands and expected output: left_input xor right_input
+ * Returns true if constraints are satisfied, false otherwise.
+ *)
+ let test_xor ?cs left_input right_input output_xor length =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and output *)
+ let left_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) left_input )
+ in
+ let right_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) right_input )
+ in
+ let output_xor =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) output_xor )
+ in
+ (* Use the xor gate gadget *)
+ let result =
+ bxor (module Runner.Impl) left_input right_input length
+ in
+
+ (* Check that the result is equal to the expected output *)
+ Field.Assert.equal output_xor result )
+ in
+ cs
+ in
+
+ (* Positive tests *)
+ let cs16 = test_xor "1" "0" "1" 16 in
+ let _cs = test_xor ~cs:cs16 "0" "1" "1" 16 in
+ let _cs = test_xor ~cs:cs16 "2" "1" "3" 16 in
+ let _cs = test_xor ~cs:cs16 "a8ca" "ddd5" "751f" 16 in
+ let _cs = test_xor ~cs:cs16 "0" "0" "0" 8 in
+ let _cs = test_xor ~cs:cs16 "0" "0" "0" 1 in
+ let _cs = test_xor ~cs:cs16 "1" "0" "1" 1 in
+ let _cs = test_xor ~cs:cs16 "0" "0" "0" 4 in
+ let _cs = test_xor ~cs:cs16 "1" "1" "0" 4 in
+ let cs32 = test_xor "bb5c6" "edded" "5682b" 20 in
+ let cs64 =
+ test_xor "5a5a5a5a5a5a5a5a" "a5a5a5a5a5a5a5a5" "ffffffffffffffff" 64
+ in
+ let _cs =
+ test_xor ~cs:cs64 "f1f1f1f1f1f1f1f1" "0f0f0f0f0f0f0f0f" "fefefefefefefefe"
+ 64
+ in
+ let _cs =
+ test_xor ~cs:cs64 "cad1f05900fcad2f" "deadbeef010301db" "147c4eb601ffacf4"
+ 64
+ in
+
+    (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing right CS with bad witness *)
+ test_xor ~cs:cs32 "ed1ed1" "ed1ed1" "010101" 20 ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing wrong CS with right witness *)
+ test_xor ~cs:cs32 "1" "1" "0" 16 ) ) ;
+
+ assert (Common.is_error (fun () -> test_xor ~cs:cs16 "1" "0" "1" 0)) ;
+ assert (Common.is_error (fun () -> test_xor ~cs:cs16 "1" "0" "0" 1)) ;
+ assert (Common.is_error (fun () -> test_xor ~cs:cs16 "1111" "2222" "0" 16)) ;
+ assert (Common.is_error (fun () -> test_xor "0" "0" "0" 256)) ;
+ assert (Common.is_error (fun () -> test_xor "0" "0" "0" (-4))) ;
+ assert (
+ Common.is_error (fun () -> test_xor ~cs:cs32 "bb5c6" "edded" "ed1ed1" 20) )
+ ) ;
+ ()
+
+let%test_unit "bitwise and gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+ (* Helper to test AND gadget
+ * Inputs operands and expected output: left_input and right_input = output
+ * Returns true if constraints are satisfied, false otherwise.
+ *)
+ let test_and ?cs left_input right_input output_and length =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and outputs *)
+ let left_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) left_input )
+ in
+ let right_input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) right_input )
+ in
+ let output_and =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) output_and )
+ in
+ (* Use the and gate gadget *)
+ let result =
+ band (module Runner.Impl) left_input right_input length
+ in
+ Field.Assert.equal output_and result )
+ in
+ cs
+ in
+
+ (* Positive tests *)
+ let cs = test_and "0" "0" "0" 16 in
+ let _cs = test_and ~cs "457" "8ae" "6" 16 in
+ let _cs = test_and ~cs "a8ca" "ddd5" "88c0" 16 in
+ let _cs = test_and "0" "0" "0" 8 in
+ let cs = test_and "1" "1" "1" 1 in
+ let _cs = test_and ~cs "1" "0" "0" 1 in
+ let _cs = test_and ~cs "0" "1" "0" 1 in
+ let _cs = test_and ~cs "0" "0" "0" 1 in
+ let _cs = test_and "f" "f" "f" 4 in
+ let _cs = test_and "bb5c6" "edded" "a95c4" 20 in
+ let cs = test_and "5a5a5a5a5a5a5a5a" "a5a5a5a5a5a5a5a5" "0" 64 in
+ let cs =
+ test_and ~cs "385e243cb60654fd" "010fde9342c0d700" "e041002005400" 64
+ in
+    (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing right CS with wrong witness *) test_and ~cs "1" "1" "0" 20 ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing wrong CS with right witness *) test_and ~cs "1" "1" "1" 1 ) ) ;
+ assert (Common.is_error (fun () -> test_and "1" "1" "0" 1)) ;
+ assert (Common.is_error (fun () -> test_and "ff" "ff" "ff" 7)) ;
+ assert (Common.is_error (fun () -> test_and "1" "1" "1" (-1))) ) ;
+ ()
+
+let%test_unit "bitwise not gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+ (* Helper to test NOT gadget with both checked and unchecked length procedures
+ * Input and expected output and desired length : not(input) = output
+ * Returns true if constraints are satisfied, false otherwise.
+ *)
+ let test_not ?cs input output length =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for input and output *)
+ let input =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) input )
+ in
+
+ let output =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) output )
+ in
+
+ (* Use the not gate gadget *)
+ let result_checked =
+ bnot_checked (module Runner.Impl) input length
+ in
+ let result_unchecked =
+ bnot_unchecked (module Runner.Impl) input length
+ in
+ Field.Assert.equal output result_checked ;
+ Field.Assert.equal output result_unchecked )
+ in
+ cs
+ in
+
+ (* Positive tests *)
+ let _cs = test_not "0" "1" 1 in
+ let _cs = test_not "0" "f" 4 in
+ let _cs = test_not "0" "ff" 8 in
+ let _cs = test_not "0" "7ff" 11 in
+ let cs16 = test_not "0" "ffff" 16 in
+ let _cs = test_not ~cs:cs16 "a8ca" "5735" 16 in
+ let _cs = test_not "bb5c6" "44a39" 20 in
+ let cs64 = test_not "a5a5a5a5a5a5a5a5" "5a5a5a5a5a5a5a5a" 64 in
+ let _cs = test_not ~cs:cs64 "5a5a5a5a5a5a5a5a" "a5a5a5a5a5a5a5a5" 64 in
+ let _cs = test_not ~cs:cs64 "7b3f28d7496d75f0" "84c0d728b6928a0f" 64 in
+ let _cs = test_not ~cs:cs64 "ffffffffffffffff" "0" 64 in
+ let _cs = test_not ~cs:cs64 "00000fffffffffff" "fffff00000000000" 64 in
+ let _cs = test_not ~cs:cs64 "fffffffffffff000" "fff" 64 in
+ let _cs = test_not ~cs:cs64 "0" "ffffffffffffffff" 64 in
+ let _cs = test_not ~cs:cs64 "0" "ffffffffffffffff" 64 in
+ let _cs =
+ test_not
+ "3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" "0"
+ 254
+ in
+
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing right CS with bad witness *)
+ test_not ~cs:cs64 "0" "ff" 64 ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ (* Reusing wrong CS with right witness *)
+ test_not ~cs:cs16 "1" "0" 1 ) ) ;
+ assert (Common.is_error (fun () -> test_not "0" "0" 1)) ;
+ assert (Common.is_error (fun () -> test_not "ff" "0" 4)) ;
+ assert (
+ Common.is_error (fun () ->
+ test_not
+ "7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
+ "0" 255 ) ) ) ;
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/bitwise.mli b/src/lib/crypto/kimchi_backend/gadgets/bitwise.mli
new file mode 100644
index 00000000000..2df3a65dedf
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/bitwise.mli
@@ -0,0 +1,143 @@
+(* Side of rotation *)
+type rot_mode = Left | Right
+
+(** 64-bit rotation of rot_bits to the `mode` side
+ * @param check64 whether to check the input word is at most 64 bits (default is false)
+ * @param word word of maximum 64 bits to be rotated
+ * @param bits number of bits to be rotated
+ * @param mode Left or Right
+ * Returns rotated word
+ *)
+val rot64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?check64:bool (* false *)
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> rot_mode
+ -> 'f Snarky_backendless.Cvar.t
+
+(** 64-bit bitwise logical shift left of bits to the `mode` side
+ * Inputs
+ * @param check64 whether to check the input word is at most 64 bits (default is false)
+ * @param word word of maximum 64 bits to be shifted
+ * @param bits number of bits to be shifted
+ * Output: left shifted word (with bits 0s at the least significant positions)
+ *)
+val lsl64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?check64:bool (* false *)
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** 64-bit bitwise logical shift of bits to the right side
+ * Inputs
+ * @param check64 whether to check the input word is at most 64 bits (default is false)
+ * @param word word of maximum 64 bits to be shifted
+ * @param bits number of bits to be shifted
+ * Output: right shifted word (with bits 0s at the most significant positions)
+ *)
+val lsr64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?check64:bool (* false *)
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Xor of length bits
+ * input1 and input2 are the inputs to the Xor gate
+ * length is the number of bits to Xor
+ * len_xor is the number of bits of the lookup table (default is 4)
+ *)
+val bxor :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?len_xor:int
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Xor of 16 bits
+ * This is a special case of Xor for 16 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to Xor together, of maximum 16 bits each.
+ * Returns the Xor of the two words.
+*)
+val bxor16 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Xor of 64 bits
+ * This is a special case of Xor for 64 bits for Xor lookup table of 4 bits of inputs. Receives two input words to Xor together, of maximum 64 bits each.
+ * Returns the Xor of the two words.
+*)
+val bxor64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean And of length bits
+ * input1 and input2 are the two inputs to AND
+ * length is the number of bits to AND
+ * len_xor is the number of bits of the inputs of the Xor lookup table (default is 4)
+*)
+val band :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?len_xor:int
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean And of 64 bits
+ * This is a special case of And for 64 bits for Xor lookup table of 4 bits of inputs.
+ * Receives two input words to And together, of maximum 64 bits each.
+ * Returns the And of the two words.
+ *)
+val band64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Not of length bits for checked length (uses Xor gadgets inside to constrain the length)
+ * - input of word to negate
+ * - length of word to negate
+ * - len_xor is the length of the Xor lookup table to use beneath (default 4)
+ * Note that the length needs to be less than the bit length of the field.
+ *)
+val bnot_checked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?len_xor:int
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Negates a word of 64 bits with checked length of 64 bits.
+ * This means that the bound in length is constrained in the circuit. *)
+val bnot64_checked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Boolean Not of length bits for unchecked length (uses Generic subtractions inside)
+ * - input of word to negate
+ * - length of word to negate
+ * (Note that this can negate two words per row, but its inputs need to be a copy of another
+ variable with a correct length in order to make sure that the length is correct )
+ * Note that the length needs to be less than the bit length of the field.
+ *)
+val bnot_unchecked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> int
+ -> 'f Snarky_backendless.Cvar.t
+
+(** Negates a word of 64 bits, but its length goes unconstrained in the circuit
+ (unless it is copied from a checked length value) *)
+val bnot64_unchecked :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t
+ -> 'f Snarky_backendless.Cvar.t
diff --git a/src/lib/crypto/kimchi_backend/gadgets/common.ml b/src/lib/crypto/kimchi_backend/gadgets/common.ml
new file mode 100644
index 00000000000..d1ec54db093
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/common.ml
@@ -0,0 +1,473 @@
+(* Common gadget helpers *)
+
+open Core_kernel
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+
+let tests_enabled = true
+
+let tuple3_of_array array =
+ match array with [| a1; a2; a3 |] -> (a1, a2, a3) | _ -> assert false
+
+let tuple4_of_array array =
+ match array with
+ | [| a1; a2; a3; a4 |] ->
+ (a1, a2, a3, a4)
+ | _ ->
+ assert false
+
+(* Foreign field element limb size *)
+let limb_bits = 88
+
+(* Foreign field element limb size 2^L where L=88 *)
+let two_to_limb = Bignum_bigint.(pow (of_int 2) (of_int limb_bits))
+
+(* 2^3L *)
+let two_to_3limb = Bignum_bigint.(pow two_to_limb (of_int 3))
+
+(* Length of Bignum_bigint.t in bits *)
+let bignum_bigint_bit_length (bigint : Bignum_bigint.t) : int =
+ if Bignum_bigint.(equal bigint zero) then 1
+ else Z.log2 (Bignum_bigint.to_zarith_bigint bigint) + 1
+
+(* Conventions used in this interface
+ * 1. Functions prefixed with "as_prover_" only happen during proving
+ * and not during circuit creation
+ * * These functions are called twice (once during creation of
+ * the circuit and once during proving). Inside the definition
+ * of these functions, whatever resides within the exists is not executed
+ * during circuit creation, though there could be some
+ * code outside the exists (such as error checking code) that is
+ * run during the creation of the circuit.
+ * * The value returned by exists depends on what mode it is called in
+ * * In circuit generation mode it allocates a cvar without any backing memory
+ * * In proof generation mode it allocates a cvar with backing memory to store
+ * the values associated with the cvar. The prover can then access these
+ * with As_prover.read.
+ * 2. Functions suffixed with "_as_prover" can only be called outside
+ * the circuit. Specifically, this means within an exists, within
+ * an as_prover or in an "as_prover_" prefixed function)
+ *)
+
+(* Convert cvar field element (i.e. Field.t) to field *)
+let cvar_field_to_field_as_prover (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (field_element : Circuit.Field.t) : f =
+ Circuit.As_prover.read Circuit.Field.typ field_element
+
+(* Convert cvar bool element (i.e. Boolean.t) to field *)
+let cvar_bool_to_bool_as_prover (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (b : Circuit.Boolean.var) : bool =
+ Circuit.As_prover.read Circuit.Boolean.typ b
+
+(* Combines bits of two cvars with a given boolean function and returns the resulting field element *)
+let cvar_field_bits_combine_as_prover (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input1 : Circuit.Field.t) (input2 : Circuit.Field.t)
+ (bfun : bool -> bool -> bool) : f =
+ let open Circuit in
+ let list1 =
+ Field.Constant.unpack
+ @@ cvar_field_to_field_as_prover (module Circuit)
+ @@ input1
+ in
+ let list2 =
+ Field.Constant.unpack
+ @@ cvar_field_to_field_as_prover (module Circuit)
+ @@ input2
+ in
+ Field.Constant.project @@ List.map2_exn list1 list2 ~f:bfun
+
+(* field_bits_le_to_field - Create a field element from contiguous bits of another
+ *
+ * Inputs:
+ * field_element: source field element
+ * start: zero-indexed starting bit offset
+ * stop: zero-indexed stopping bit index (or -1 to denote the last bit)
+ *
+ * Output:
+ * New field element created from bits [start, stop) of field_element input,
+ * placed into the lowest possible bit position, like so
+ *
+ * start stop
+ * \ /
+ * [......xxx.....] field_element
+ * [xxx...........] output
+ * lsb msb *)
+let field_bits_le_to_field (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (field_element : f) (start : int) (stop : int) : f =
+ let open Circuit in
+ (* Check range is valid *)
+ if stop <> -1 && stop <= start then
+ invalid_arg "stop offset must be greater than start offset" ;
+
+ (* Create field element *)
+ let bits = Field.Constant.unpack field_element in
+ if stop > List.length bits then
+ invalid_arg "stop must be less than bit-length" ;
+
+ let stop = if stop = -1 then List.length bits else stop in
+ (* Convert bits range (boolean list) to field element *)
+ Field.Constant.project @@ List.slice bits start stop
+
+(* Create cvar field element from contiguous bits of another
+ See field_bits_le_to_field for more information *)
+let as_prover_cvar_field_bits_le_to_cvar_field (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (field_element : Circuit.Field.t) (start : int) (stop : int) :
+ Circuit.Field.t =
+ let open Circuit in
+ (* Check range is valid - for exception handling we need to repeat this check
+ * so it happens outside exists *)
+ if stop <> -1 && stop <= start then
+ invalid_arg "stop offset must be greater than start offset" ;
+ exists Field.typ ~compute:(fun () ->
+ field_bits_le_to_field
+ (module Circuit)
+ (cvar_field_to_field_as_prover (module Circuit) field_element)
+ start stop )
+
+(* Create field element from base10 string *)
+let field_of_base10 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (base10 : string) =
+ let open Circuit in
+ Field.Constant.of_string base10
+
+(* Create cvar field element from base10 string *)
+let as_prover_cvar_field_of_base10 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (base10 : string) =
+ let open Circuit in
+ exists Field.typ ~compute:(fun () -> field_of_base10 (module Circuit) base10)
+
+(* Convert field element to bigint *)
+let field_to_bignum_bigint (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (field_element : f) : Bignum_bigint.t =
+ (* Bigint doesn't have bigint operators defined for it, so we must use Bignum_bigint *)
+ Circuit.Bigint.(to_bignum_bigint (of_field field_element))
+
+(* Convert bigint to field element *)
+let bignum_bigint_to_field (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (bigint : Bignum_bigint.t) : f =
+ Circuit.Bigint.(to_field (of_bignum_bigint bigint))
+
+(* Returns (quotient, remainder) such that numerator = quotient * denominator + remainder
+ * where quotient, remainder \in [0, denominator) *)
+let bignum_bigint_div_rem (numerator : Bignum_bigint.t)
+ (denominator : Bignum_bigint.t) : Bignum_bigint.t * Bignum_bigint.t =
+ let quotient = Bignum_bigint.(numerator / denominator) in
+ let remainder = Bignum_bigint.(numerator - (denominator * quotient)) in
+ (quotient, remainder)
+
+(* Bignum_bigint to bytes *)
+let bignum_bigint_unpack_bytes (bignum : Bignum_bigint.t) : string =
+ Z.to_bits @@ Bignum_bigint.to_zarith_bigint bignum
+
+(* Bignum_bigint to bool list *)
+let bignum_bigint_unpack ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ bool list =
+ (* Helper to remove trailing false values *)
+ let remove_trailing_false_values (lst : bool list) =
+ let rev = List.rev lst in
+ let rec remove_leading_false_rec lst =
+ match lst with
+ | [] ->
+ []
+ | hd :: tl ->
+ if hd then hd :: tl else remove_leading_false_rec tl
+ in
+ List.rev @@ remove_leading_false_rec rev
+ in
+
+ (* Convert Bignum_bigint to bitstring *)
+ let bytestr = bignum_bigint_unpack_bytes bignum in
+ (* Convert bytestring to list of bool *)
+ let bits =
+ List.init
+ (8 * String.length bytestr)
+ ~f:(fun i ->
+ let c = Char.to_int bytestr.[i / 8] in
+ let j = i mod 8 in
+ if Int.((c lsr j) land 1 = 1) then true else false )
+ in
+ if remove_trailing then remove_trailing_false_values bits else bits
+
+let bignum_bigint_unpack_as (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t)
+ (typ : (Circuit.Boolean.var, bool) Circuit.Typ.t) : Circuit.Boolean.var list
+ =
+ let open Circuit in
+ exists
+ (Typ.list ~length:(bignum_bigint_bit_length bignum) typ)
+ ~compute:(fun () -> bignum_bigint_unpack ~remove_trailing bignum)
+
+let bignum_bigint_unpack_as_vars (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ Circuit.Boolean.var list =
+ bignum_bigint_unpack_as
+ (module Circuit)
+ ~remove_trailing bignum Circuit.Boolean.typ
+
+let bignum_bigint_unpack_as_unchecked_vars (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ Circuit.Boolean.var list =
+ bignum_bigint_unpack_as
+ (module Circuit)
+ ~remove_trailing bignum Circuit.Boolean.typ_unchecked
+
+(* Bignum_bigint to constants Boolean.var list (without creating boolean constraints) *)
+let bignum_bigint_unpack_as_unchecked_consts (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(remove_trailing = false) (bignum : Bignum_bigint.t) :
+ Circuit.Boolean.var list =
+ let open Circuit in
+ List.map
+ (bignum_bigint_unpack ~remove_trailing bignum)
+ ~f:Boolean.var_of_value
+
+(* Bignum_bigint to hex *)
+let bignum_bigint_to_hex (bignum : Bignum_bigint.t) : string =
+ Z.format "%x" @@ Bignum_bigint.to_zarith_bigint bignum
+
+(* Create Bignum_bigint.t from binary string *)
+let bignum_bigint_of_bin (bin : string) : Bignum_bigint.t =
+ Bignum_bigint.of_zarith_bigint @@ Z.of_bits bin
+
+(* Bignum_bigint.t of hex *)
+let bignum_bigint_of_hex (hex : string) : Bignum_bigint.t =
+ Bignum_bigint.of_zarith_bigint @@ Z.of_string_base 16 hex
+
+(* Convert cvar field element (i.e. Field.t) to Bignum_bigint.t *)
+let cvar_field_to_bignum_bigint_as_prover (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (field_element : Circuit.Field.t) : Bignum_bigint.t =
+ let open Circuit in
+ field_to_bignum_bigint (module Circuit)
+ @@ As_prover.read Field.typ field_element
+
+(* Compute modular square root using Tonelli-Shanks algorithm
+ * See https://en.wikipedia.org/wiki/Tonelli%E2%80%93Shanks_algorithm
+ *)
+let bignum_bigint_sqrt_mod (x : Bignum_bigint.t) (modulus : Bignum_bigint.t) :
+ Bignum_bigint.t =
+ let open Z in
+ let x = Bignum_bigint.to_zarith_bigint x in
+ let modulus = Bignum_bigint.to_zarith_bigint modulus in
+
+ (* Useful helpers and shorthands *)
+ let two = of_int 2 in
+ let mod_minus_1 = pred modulus in
+ let pow_mod base exp = powm base exp modulus in
+
+ (* Euler's criterion *)
+ let legendre x = pow_mod x (mod_minus_1 / two) in
+
+ if not (equal (legendre x) one) then
+ (* t = 0: x is quadratic residue iff x^{(modulus - 1)/2} == 1 *)
+ Bignum_bigint.zero
+ else
+ (* Solve: modulus - 1 = Q * 2^S for S *)
+ let s = of_int @@ trailing_zeros mod_minus_1 in
+ if equal s one then
+ (* Q = (modulus - 1)/2 and r = x^{(Q + 1)/2} *)
+ Bignum_bigint.of_zarith_bigint
+ @@ pow_mod x (((mod_minus_1 / two) + one) / two)
+ else
+ (* Solve: modulus - 1 = Q * 2^S for Q by shifting away zeros *)
+ let q = mod_minus_1 asr to_int s in
+
+ (* Search for z in Z/pZ which is a quadratic non-residue *)
+ let z =
+ let rec find_non_square z =
+ if equal (legendre z) mod_minus_1 then z
+ else find_non_square @@ (z + one)
+ in
+ find_non_square two
+ in
+
+ (* Solving loop *)
+ let rec loop m c t r =
+ if equal t one then r
+ else
+ (* Use repeated squaring to find the least 0 < i < M s.t. t^{2^i} = 1 *)
+ let rec find_least_i n i =
+ if equal n one || geq i m then i
+ else find_least_i (n * n mod modulus) (i + one)
+ in
+ let i = find_least_i t zero in
+ (* i = m can only happen in the first iteration, and implies
+ that t is a *primitive* root of unity and therefore not a square
+ (t is a root of unity by construction, t = n^Q)
+ *)
+ if equal i m then zero
+ else
+ (* b <- c^{2^{M - i - 1}} *)
+ let b = pow_mod c (pow_mod two (m - i - one)) in
+ (* M <- i *)
+ let m = i in
+ (* c <- b^2 *)
+ let c = b * b mod modulus in
+ (* t <- tb^2 *)
+ let t = t * c mod modulus in
+ (* R <- Rb *)
+ let r = r * b mod modulus in
+
+ (* Recurse *)
+ loop m c t r
+ in
+
+ (* M <- S *)
+ let m = s in
+ (* c <- Z^Q *)
+ let c = pow_mod z q in
+ (* R <- n^{(Q + 1)/2} *)
+ let r = pow_mod x ((q + one) / two) in
+ (* t <- x^Q *)
+ let t = pow_mod x q in
+
+ Bignum_bigint.of_zarith_bigint @@ loop m c t r
+
+(* Compute square root of Bignum_bigint value x *)
+let bignum_bigint_sqrt (x : Bignum_bigint.t) : Bignum_bigint.t =
+ Bignum_bigint.of_zarith_bigint @@ Z.sqrt @@ Bignum_bigint.to_zarith_bigint x
+
+(* Compute the inverse of Bignum_bigint value x with modulus *)
+let bignum_bigint_inverse (x : Bignum_bigint.t) (modulus : Bignum_bigint.t) :
+ Bignum_bigint.t =
+ let x = Bignum_bigint.to_zarith_bigint x in
+ let modulus = Bignum_bigint.to_zarith_bigint modulus in
+ Bignum_bigint.of_zarith_bigint @@ Z.invert x modulus
+
+(* Field to hex *)
+let field_to_hex (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (field_element : f) : string =
+ bignum_bigint_to_hex @@ field_to_bignum_bigint (module Circuit) field_element
+
+(* Field of hex *)
+let field_of_hex (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (hex : string) : f =
+ bignum_bigint_to_field (module Circuit) @@ bignum_bigint_of_hex hex
+
+(* List of field elements for each byte of hexadecimal input *)
+let field_bytes_of_hex (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (hex : string) : f list =
+ let chars = String.to_list hex in
+ let list_pairs = List.groupi chars ~break:(fun i _ _ -> i mod 2 = 0) in
+ let list_bytes =
+ List.map list_pairs ~f:(fun byte ->
+ let hex_i = String.of_char_list byte in
+ field_of_hex (module Circuit) hex_i )
+ in
+ list_bytes
+
+(* List of field elements of at most 1 byte to a Bignum_bigint *)
+let cvar_field_bytes_to_bignum_bigint_as_prover (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (bytestring : Circuit.Field.t list) : Bignum_bigint.t =
+ List.fold bytestring ~init:Bignum_bigint.zero ~f:(fun acc x ->
+ Bignum_bigint.(
+ (acc * of_int 2)
+ + cvar_field_to_bignum_bigint_as_prover (module Circuit) x) )
+
+(* Negative test helper *)
+let is_error (func : unit -> _) = Result.is_error (Or_error.try_with func)
+
+(* Two to the power of n as a field element *)
+let two_pow (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (n : int) =
+ bignum_bigint_to_field
+ (module Circuit)
+ Bignum_bigint.(pow (of_int 2) (of_int n))
+
+(*********)
+(* Tests *)
+(*********)
+
+let%test_unit "helper field_bits_le_to_field" =
+ ( if tests_enabled then
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ let _cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof (fun () ->
+ let open Runner.Impl in
+ let of_bits =
+ as_prover_cvar_field_bits_le_to_cvar_field (module Runner.Impl)
+ in
+ let of_base10 = as_prover_cvar_field_of_base10 (module Runner.Impl) in
+
+ (* Test value *)
+ let field_element =
+ of_base10
+ "25138500177533925254565157548260087092526215225485178888176592492127995051965"
+ in
+
+ (* Test extracting all bits as field element *)
+ Field.Assert.equal (of_bits field_element 0 (-1)) field_element ;
+
+ (* Test extracting 1st bit as field element *)
+ Field.Assert.equal (of_bits field_element 0 1) (of_base10 "1") ;
+
+ (* Test extracting last bit as field element *)
+ Field.Assert.equal (of_bits field_element 254 255) (of_base10 "0") ;
+
+ (* Test extracting first 12 bits as field element *)
+ Field.Assert.equal (of_bits field_element 0 12) (of_base10 "4029") ;
+
+ (* Test extracting third 16 bits as field element *)
+ Field.Assert.equal (of_bits field_element 32 48) (of_base10 "15384") ;
+
+ (* Test extracting 1st 4 bits as field element *)
+ Field.Assert.equal (of_bits field_element 0 4) (of_base10 "13") ;
+
+ (* Test extracting 5th 4 bits as field element *)
+ Field.Assert.equal (of_bits field_element 20 24) (of_base10 "1") ;
+
+ (* Test extracting first 88 bits as field element *)
+ Field.Assert.equal
+ (of_bits field_element 0 88)
+ (of_base10 "155123280218940970272309181") ;
+
+ (* Test extracting second 88 bits as field element *)
+ Field.Assert.equal
+ (of_bits field_element 88 176)
+ (of_base10 "293068737190883252403551981") ;
+
+ (* Test extracting last crumb as field element *)
+ Field.Assert.equal (of_bits field_element 254 255) (of_base10 "0") ;
+
+ (* Test extracting 2nd to last crumb as field element *)
+ Field.Assert.equal (of_bits field_element 252 254) (of_base10 "3") ;
+
+ (* Test extracting 3rd to last crumb as field element *)
+ Field.Assert.equal (of_bits field_element 250 252) (of_base10 "1") ;
+
+          (* Assert little-endian order *)
+ Field.Assert.equal
+ (of_bits (of_base10 "18446744073709551616" (* 2^64 *)) 64 65)
+ (of_base10 "1") ;
+
+ (* Test invalid range is denied *)
+ assert (is_error (fun () -> of_bits field_element 2 2)) ;
+ assert (is_error (fun () -> of_bits field_element 2 1)) ;
+
+ (* Padding *)
+ Boolean.Assert.is_true (Field.equal field_element field_element) )
+ in
+ () ) ;
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/curve_params.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/curve_params.ml.disabled
new file mode 100644
index 00000000000..b482f6e3b48
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/curve_params.ml.disabled
@@ -0,0 +1,210 @@
+(* Elliptic curve public constants *)
+
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+type 'typ ia_points = { acc : 'typ; neg_acc : 'typ }
+
+(* Out of circuit representation of Elliptic curve *)
+type t =
+ { modulus : Bignum_bigint.t (* Elliptic curve base field modulus *)
+ ; order : Bignum_bigint.t (* Elliptic curve group order *)
+ ; a : Bignum_bigint.t (* Elliptic curve a parameter *)
+ ; b : Bignum_bigint.t (* Elliptic curve b parameter *)
+ ; gen : Affine.bignum_point (* Elliptic curve generator point *)
+ ; mutable ia : Affine.bignum_point ia_points
+ (* Initial accumulator point (and its negation) *)
+ }
+
+let ia_of_points (type typ) (acc : typ * typ) (neg_acc : typ * typ) :
+ (typ * typ) ia_points =
+ { acc; neg_acc }
+
+let ia_of_strings ((acc_x, acc_y) : string * string)
+ ((neg_acc_x, neg_acc_y) : string * string) =
+ { acc = (Bignum_bigint.of_string acc_x, Bignum_bigint.of_string acc_y)
+ ; neg_acc =
+ (Bignum_bigint.of_string neg_acc_x, Bignum_bigint.of_string neg_acc_y)
+ }
+
+let ia_to_circuit_constants (type field)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = field)
+ (ia : Affine.bignum_point ia_points) : field Affine.t ia_points =
+ { acc = Affine.of_bignum_bigint_coordinates (module Circuit) ia.acc
+ ; neg_acc = Affine.of_bignum_bigint_coordinates (module Circuit) ia.neg_acc
+ }
+
+(* Default, empty curve parameters *)
+let default =
+ { modulus = Bignum_bigint.zero
+ ; order = Bignum_bigint.zero
+ ; a = Bignum_bigint.zero
+ ; b = Bignum_bigint.zero
+ ; gen = (Bignum_bigint.zero, Bignum_bigint.one)
+ ; ia =
+ { acc = (Bignum_bigint.zero, Bignum_bigint.zero)
+ ; neg_acc = (Bignum_bigint.zero, Bignum_bigint.zero)
+ }
+ }
+
+(* In circuit representation of Elliptic curve (public constants) *)
+module InCircuit = struct
+ type parent_t = t
+
+ type 'field t =
+ { bignum : parent_t
+ ; modulus : 'field Foreign_field.standard_limbs
+ ; order : 'field Foreign_field.standard_limbs
+ ; order_bit_length : int
+ ; order_bit_length_const : 'field Snarky_backendless.Cvar.t
+ ; order_minus_one : 'field Foreign_field.Element.Standard.t
+ ; order_minus_one_bits :
+ 'field Snarky_backendless.Cvar.t Snark_intf.Boolean0.t list
+ ; a : 'field Foreign_field.Element.Standard.t
+ ; b : 'field Foreign_field.Element.Standard.t
+ ; gen : 'field Affine.t
+ ; doubles : 'field Affine.t array
+ ; ia : 'field Affine.t ia_points
+ }
+end
+
+let compute_slope_bignum (curve : t) (left : Affine.bignum_point)
+ (right : Affine.bignum_point) : Bignum_bigint.t =
+ let left_x, left_y = left in
+ let right_x, right_y = right in
+
+ let open Bignum_bigint in
+ if equal left_x right_x && equal left_y right_y then
+ (* Compute slope using 1st derivative of sqrt(x^3 + a * x + b)
+ * s' = (3 * Px^2 + a) / (2 * Py)
+ *)
+ let numerator =
+ let point_x_squared = pow left_x (of_int 2) % curve.modulus in
+ let point_3x_squared = of_int 3 * point_x_squared % curve.modulus in
+
+ (point_3x_squared + curve.a) % curve.modulus
+ in
+ let denominator = of_int 2 * left_y % curve.modulus in
+
+ (* Compute inverse of denominator *)
+ let denominator_inv =
+ Common.bignum_bigint_inverse denominator curve.modulus
+ in
+ numerator * denominator_inv % curve.modulus
+ else
+ (* Computes s = (Ry - Ly)/(Rx - Lx) *)
+ let delta_y = (right_y - left_y) % curve.modulus in
+ let delta_x = (right_x - left_x) % curve.modulus in
+
+ (* Compute delta_x inverse *)
+ let delta_x_inv = Common.bignum_bigint_inverse delta_x curve.modulus in
+
+ delta_y * delta_x_inv % curve.modulus
+
+let double_bignum_point (curve : t) ?slope (point : Affine.bignum_point) :
+ Affine.bignum_point =
+ let open Bignum_bigint in
+ let slope =
+ match slope with
+ | Some slope ->
+ slope
+ | None ->
+ compute_slope_bignum curve point point
+ in
+ let slope_squared = (pow slope @@ of_int 2) % curve.modulus in
+
+ let point_x, point_y = point in
+
+ (* Compute result's x-coordinate: x = s^2 - 2 * Px *)
+ let result_x =
+ let point_x2 = of_int 2 * point_x % curve.modulus in
+ (slope_squared - point_x2) % curve.modulus
+ in
+
+ (* Compute result's y-coordinate: y = s * (Px - x) - Py *)
+ let result_y =
+ let x_diff = (point_x - result_x) % curve.modulus in
+ let x_diff_s = slope * x_diff % curve.modulus in
+ (x_diff_s - point_y) % curve.modulus
+ in
+
+ (result_x, result_y)
+
+let to_circuit_constants (type field)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = field)
+ ?(use_precomputed_gen_doubles = true) (curve : t) : field InCircuit.t =
+ let open Circuit in
+ (* Need to know native field size before we can check if it fits *)
+ Foreign_field.check_modulus_bignum_bigint (module Circuit) curve.modulus ;
+ Foreign_field.check_modulus_bignum_bigint (module Circuit) curve.order ;
+ let order_bit_length = Common.bignum_bigint_bit_length curve.order in
+ let order_minus_one =
+ Bignum_bigint.(if curve.order > zero then curve.order - one else zero)
+ in
+ InCircuit.
+ { bignum = curve
+ ; modulus =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ curve.modulus
+ ; order =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ curve.order
+ ; order_bit_length
+ ; order_bit_length_const =
+ (let const_len = Field.(constant @@ Constant.of_int order_bit_length) in
+ let var_len =
+ exists Field.typ ~compute:(fun () ->
+ Circuit.Field.Constant.of_int order_bit_length )
+ in
+ Field.Assert.equal const_len var_len ;
+ const_len )
+ ; order_minus_one =
+ Foreign_field.Element.Standard.check_here_const_of_bignum_bigint
+ (module Circuit)
+ order_minus_one
+ ; order_minus_one_bits =
+ Common.bignum_bigint_unpack_as_unchecked_consts
+ (module Circuit)
+ order_minus_one
+ ; a =
+ Foreign_field.Element.Standard.check_here_const_of_bignum_bigint
+ (module Circuit)
+ curve.a
+ ; b =
+ Foreign_field.Element.Standard.check_here_const_of_bignum_bigint
+ (module Circuit)
+ curve.b
+ ; gen =
+ Affine.check_here_const_of_bignum_bigint_coordinates
+ (module Circuit)
+ curve.gen
+ ; doubles =
+ ( if use_precomputed_gen_doubles then (
+ (* Precompute 2^i * curve.gen, 0 <= i < curve.order_bit_length *)
+ let doubles =
+ Array.init order_bit_length (fun _i ->
+ Affine.const_zero (module Circuit) )
+ in
+ let point = ref curve.gen in
+ for i = 0 to order_bit_length - 1 do
+ point := double_bignum_point curve !point ;
+ doubles.(i) <-
+ Affine.check_here_const_of_bignum_bigint_coordinates
+ (module Circuit)
+ !point
+ done ;
+ doubles )
+ else [||] )
+ ; ia =
+ { acc =
+ Affine.check_here_const_of_bignum_bigint_coordinates
+ (module Circuit)
+ curve.ia.acc
+ ; neg_acc =
+ Affine.check_here_const_of_bignum_bigint_coordinates
+ (module Circuit)
+ curve.ia.neg_acc
+ }
+ }
diff --git a/src/lib/crypto/kimchi_backend/gadgets/dune b/src/lib/crypto/kimchi_backend/gadgets/dune
index a8864d6dfa2..92d6d4bfcee 100644
--- a/src/lib/crypto/kimchi_backend/gadgets/dune
+++ b/src/lib/crypto/kimchi_backend/gadgets/dune
@@ -6,11 +6,14 @@
(preprocess (pps ppx_version ppx_jane))
(libraries
;; opam libraries
+ bignum.bigint
core_kernel
+ digestif
ppx_inline_test.config
+ zarith
;; local libraries
kimchi_backend.common
kimchi_backend.pasta
kimchi_gadgets_test_runner
- snarky.backendless
-))
+ mina_stdlib
+ snarky.backendless))
diff --git a/src/lib/crypto/kimchi_backend/gadgets/ec_group.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/ec_group.ml.disabled
new file mode 100644
index 00000000000..a1ec37040a1
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/ec_group.ml.disabled
@@ -0,0 +1,4005 @@
+open Core_kernel
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+let basic_tests_enabled = true
+
+let scalar_mul_tests_enabled = true
+
+(* Array to tuple helper *)
+let tuple9_of_array array =
+ match array with
+ | [| a1; a2; a3; a4; a5; a6; a7; a8; a9 |] ->
+ (a1, a2, a3, a4, a5, a6, a7, a8, a9)
+ | _ ->
+ assert false
+
+(* Helper to check if point is on elliptic curve: y^2 = x^3 + a * x + b *)
+let is_on_curve_bignum_point (curve : Curve_params.t)
+ (point : Affine.bignum_point) : bool =
+ let x, y = point in
+ Bignum_bigint.(
+ zero
+ = (pow y (of_int 2) - (pow x (of_int 3) + (curve.a * x) + curve.b))
+ % curve.modulus)
+
+(* Gadget for (partial) elliptic curve group addition over foreign field
+ *
+ * Given input points L and R, constrains that
+ * s = (Ry - Ly)/(Rx - Lx) mod f
+ * x = s^2 - Lx - Rx mod f
+ * y = s * (Rx - x) - Ry mod f
+ *
+ * where f is the foreign field modulus.
+ * See p. 348 of "Introduction to Modern Cryptography" by Katz and Lindell
+ *
+ * Preconditions and limitations:
+ * L != R
+ * Lx != Rx (no invertibility)
+ * L and R are not O (the point at infinity)
+ *
+ * External checks: (not counting inputs and output)
+ * Bound checks: 6
+ * Multi-range-checks: 3
+ * Compact-range-checks: 3
+ * Total range-checks: 12
+ *
+ * Rows: (not counting inputs/outputs and constants)
+ * Group addition: 13
+ * Bound additions: 12
+ * Multi-range-checks: 48
+ * Total: 73
+ *
+ * Supported group axioms:
+ * Closure
+ * Associativity
+ *
+ * Note: We elide the Identity property because it is costly in circuit
+ * and we don't need it for our application. By doing this we also
+ * lose Invertibility, which we also don't need for our goals.
+ *)
+let add (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (external_checks : f Foreign_field.External_checks.t)
+ (curve : f Curve_params.InCircuit.t) (left_input : f Affine.t)
+ (right_input : f Affine.t) : f Affine.t =
+ let open Circuit in
+ (* TODO: Remove sanity checks if this API is not public facing *)
+ as_prover (fun () ->
+ (* Sanity check that two points are not equal *)
+ assert (
+ not (Affine.equal_as_prover (module Circuit) left_input right_input) ) ;
+ (* Sanity check that both points are not infinity *)
+ assert (
+ not
+ (Affine.equal_as_prover
+ (module Circuit)
+ left_input
+ (Affine.const_zero (module Circuit)) ) ) ;
+ assert (
+ not
+ (Affine.equal_as_prover
+ (module Circuit)
+ right_input
+ (Affine.const_zero (module Circuit)) ) ) ) ;
+
+ (* Unpack coordinates *)
+ let left_x, left_y = Affine.to_coordinates left_input in
+ let right_x, right_y = Affine.to_coordinates right_input in
+
+ (* TODO: Remove sanity checks if this API is not public facing *)
+ (* Sanity check that x-coordinates are not equal (i.e. we don't support Invertibility) *)
+ as_prover (fun () ->
+ assert (
+ not
+ (Foreign_field.Element.Standard.equal_as_prover
+ (module Circuit)
+ left_x right_x ) ) ) ;
+
+ (* Compute witness values *)
+ let ( slope0
+ , slope1
+ , slope2
+ , result_x0
+ , result_x1
+ , result_x2
+ , result_y0
+ , result_y1
+ , result_y2 ) =
+ exists (Typ.array ~length:9 Field.typ) ~compute:(fun () ->
+ let left_x =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ left_x
+ in
+ let left_y =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ left_y
+ in
+ let right_x =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ right_x
+ in
+ let right_y =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ right_y
+ in
+
+ (* Compute slope and slope squared *)
+ let slope =
+ Curve_params.compute_slope_bignum curve.bignum (left_x, left_y)
+ (right_x, right_y)
+ in
+
+ let slope_squared =
+ Bignum_bigint.((pow slope @@ of_int 2) % curve.bignum.modulus)
+ in
+
+ (* Compute result's x-coordinate: x = s^2 - Lx - Rx *)
+ let result_x =
+ Bignum_bigint.(
+ let slope_squared_x =
+ (slope_squared - left_x) % curve.bignum.modulus
+ in
+ (slope_squared_x - right_x) % curve.bignum.modulus)
+ in
+
+ (* Compute result's y-coordinate: y = s * (Rx - x) - Ry *)
+ let result_y =
+ Bignum_bigint.(
+ let x_diff = (right_x - result_x) % curve.bignum.modulus in
+ let x_diff_s = slope * x_diff % curve.bignum.modulus in
+ (x_diff_s - right_y) % curve.bignum.modulus)
+ in
+
+ (* Convert from Bignums to field elements *)
+ let slope0, slope1, slope2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ slope
+ in
+ let result_x0, result_x1, result_x2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_x
+ in
+ let result_y0, result_y1, result_y2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_y
+ in
+
+ (* Return and convert back to Cvars *)
+ [| slope0
+ ; slope1
+ ; slope2
+ ; result_x0
+ ; result_x1
+ ; result_x2
+ ; result_y0
+ ; result_y1
+ ; result_y2
+ |] )
+ |> tuple9_of_array
+ in
+
+ (* Convert slope and result into foreign field elements *)
+ let slope =
+ Foreign_field.Element.Standard.of_limbs (slope0, slope1, slope2)
+ in
+ let result_x =
+ Foreign_field.Element.Standard.of_limbs (result_x0, result_x1, result_x2)
+ in
+ let result_y =
+ Foreign_field.Element.Standard.of_limbs (result_y0, result_y1, result_y2)
+ in
+
+ (* C1: Constrain computation of slope squared *)
+ let slope_squared =
+ (* s * s = s^2 *)
+ Foreign_field.mul (module Circuit) external_checks slope slope curve.modulus
+ in
+ (* Bounds 1: Left input (slope) bound check below.
+ * Right input (slope) equal to left input (already checked)
+ * Result (s^2) bound check already tracked by Foreign_field.mul.
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ (slope0, slope1, slope2) ;
+
+ (*
+ * Constrain result x-coordinate computation: x = s^2 - Lx - Rx with length 2 chain
+ *)
+
+ (* C2: Constrain s^2 - x = sΔx *)
+ let slope_squared_minus_x =
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false slope_squared result_x curve.modulus
+ in
+
+ (* Bounds 2: Left input (s^2) bound check covered by (Bounds 1).
+ * Right input (x) bound check value is gadget output (checked by caller).
+ * Result is chained (no bound check required).
+ *)
+
+ (* C3: Constrain sΔx - Lx = Rx *)
+ let expected_right_x =
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false slope_squared_minus_x left_x curve.modulus
+ in
+
+ (* Bounds 3: Left input (sΔx) is chained (no bound check required).
+ * Right input (Lx) is gadget input (checked by caller).
+ * Result is (Rx) gadget input (checked by caller)
+ *)
+
+ (* Copy expected_right_x to right_x *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ expected_right_x right_x ;
+
+ (* Continue the chain to length 4 by computing (Rx - x) * s (used later) *)
+
+ (* C4: Constrain Rx - x = RxΔ *)
+ let right_delta =
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false expected_right_x result_x curve.modulus
+ in
+ (* Bounds 4: Left input (Rx) is chained (no bound check required).
+ * Right input (x) is gadget output (checked by caller).
+ * Addition chain result (right_delta) bound check added below.
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs right_delta ;
+
+ (* C5: RxΔ * s = RxΔs *)
+ let right_delta_s =
+ Foreign_field.mul
+ (module Circuit)
+ external_checks right_delta slope curve.modulus
+ in
+
+ (* Bounds 5: Left input (right_delta) already covered by (Bounds 4)
+ * Right input (slope) already covered by (Bounds 1).
+ * Result bound check already tracked by Foreign_field.mul.
+ *)
+
+ (*
+ * Constrain slope computation: s = (Ry - Ly)/(Rx - Lx)
+ * with (Rx - Lx) * s + Ly = Ry
+ *)
+
+ (* C6: Rx - Lx = Δx *)
+ let delta_x =
+ Foreign_field.sub (module Circuit) ~full:false right_x left_x curve.modulus
+ in
+ (* Bounds 6: Inputs (Rx and Lx) are gadget inputs (checked by caller).
+ * Addition chain result (delta_x) bound check below.
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs delta_x ;
+
+ (* C7: Δx * s = Δxs *)
+ let delta_x_s =
+ Foreign_field.mul
+ (module Circuit)
+ external_checks delta_x slope curve.modulus
+ in
+
+ (* Bounds 7: Left input (delta_x) already covered by (Bounds 6)
+ * Right input (slope) already covered by (Bounds 1).
+ * Result bound check tracked by Foreign_field.mul.
+ *)
+
+ (*
+ * Finish constraining slope in new chain (above mul ended chain)
+ *)
+
+ (* C8: Δxs + Ly = Ry *)
+ let expected_right_y =
+ Foreign_field.add
+ (module Circuit)
+ ~full:false delta_x_s left_y curve.modulus
+ in
+
+ (* Bounds 8: Left input (delta_x_s) check is tracked by (Bounds 7).
+ * Right input bound check value is gadget input (checked by caller).
+ * Result is chained (no check required)
+ *)
+
+ (* Copy expected_right_y to right_y *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ expected_right_y right_y ;
+
+ (*
+ * Constrain result y-coordinate computation: y = (Rx - x) * s - Ry
+ * with Ry + y = (Rx - x) * s
+ *)
+
+ (* C9: Ry + y = RxΔs *)
+ let expected_right_delta_s =
+ Foreign_field.add ~full:false
+ (module Circuit)
+ expected_right_y result_y curve.modulus
+ in
+ (* Result row *)
+ Foreign_field.result_row
+ (module Circuit)
+ ~label:"Ec_group.add_expected_right_delta_s" expected_right_delta_s ;
+ (* Bounds 9: Left input (Ry) check is chained (no check required).
+ * Right input (y) check value is gadget output (checked by caller).
+ * Addition chain result (expected_right_delta_s) check already covered by (Bounds 5).
+ *)
+ (* Copy expected_right_delta_s to right_delta_s *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ expected_right_delta_s right_delta_s ;
+
+ (* Return result point *)
+ Affine.of_coordinates (result_x, result_y)
+
+(* Gadget for (partial) elliptic curve group doubling over foreign field
+ *
+ * Given input point P, constrains that
+ * s' = 3 * Px^2 / (2 * Py) mod f
+ * x = s'^2 - 2 * Px mod f
+ * y = s' * (Px - x) - Py mod f
+ *
+ * where f is the foreign field modulus.
+ * See p. 348 of "Introduction to Modern Cryptography" by Katz and Lindell
+ *
+ * Preconditions and limitations:
+ * P is not O (the point at infinity)
+ *
+ * External checks: (not counting inputs and output)
+ * Bound checks: 8 (+1 when a != 0)
+ * Multi-range-checks: 4
+ * Compact-range-checks: 4
+ * Total range-checks: 16
+ *
+ * Rows: (not counting inputs/outputs and constants)
+ * Group double: 16 (+2 when a != 0)
+ * Bound additions: 16
+ * Multi-range-checks: 64
+ * Total: 96
+ *
+ * Note: See group addition notes (above) about group properties supported by this implementation
+ *)
+let double (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (external_checks : f Foreign_field.External_checks.t)
+ (curve : f Curve_params.InCircuit.t) (point : f Affine.t) : f Affine.t =
+ let open Circuit in
+ (* TODO: Remove sanity checks if this API is not public facing *)
+ as_prover (fun () ->
+ (* Sanity check that point is not infinity *)
+ assert (
+ not
+ (Affine.equal_as_prover
+ (module Circuit)
+ point
+ (Affine.const_zero (module Circuit)) ) ) ) ;
+
+ (* Unpack coordinates *)
+ let point_x, point_y = Affine.to_coordinates point in
+
+ (* Compute witness values *)
+ let ( slope0
+ , slope1
+ , slope2
+ , result_x0
+ , result_x1
+ , result_x2
+ , result_y0
+ , result_y1
+ , result_y2 ) =
+ exists (Typ.array ~length:9 Field.typ) ~compute:(fun () ->
+ let point =
+ ( Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ point_x
+ , Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ point_y )
+ in
+
+ (* Compute slope *)
+ let slope =
+ Curve_params.compute_slope_bignum curve.bignum point point
+ in
+
+ (* Compute result point *)
+ let result_x, result_y =
+ Curve_params.double_bignum_point curve.bignum ~slope point
+ in
+
+ (* Convert from Bignums to field elements *)
+ let slope0, slope1, slope2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ slope
+ in
+ let result_x0, result_x1, result_x2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_x
+ in
+ let result_y0, result_y1, result_y2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ result_y
+ in
+
+ (* Return and convert back to Cvars *)
+ [| slope0
+ ; slope1
+ ; slope2
+ ; result_x0
+ ; result_x1
+ ; result_x2
+ ; result_y0
+ ; result_y1
+ ; result_y2
+ |] )
+ |> tuple9_of_array
+ in
+
+ (* Convert slope and result into foreign field elements *)
+ let slope =
+ Foreign_field.Element.Standard.of_limbs (slope0, slope1, slope2)
+ in
+ let result_x =
+ Foreign_field.Element.Standard.of_limbs (result_x0, result_x1, result_x2)
+ in
+ let result_y =
+ Foreign_field.Element.Standard.of_limbs (result_y0, result_y1, result_y2)
+ in
+
+ (* C1: Constrain computation of slope squared *)
+ let slope_squared =
+ (* s * s = s^2 *)
+ Foreign_field.mul (module Circuit) external_checks slope slope curve.modulus
+ in
+ (* Bounds 1: Left input (slope) checked below.
+ * Right input (slope) is equal to left input (no check required).
+ * Result (slope_squared) check already tracked by Foreign_field.mul.
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ (slope0, slope1, slope2) ;
+
+ (* C2: Constrain result x-coordinate computation: x = s^2 - 2 * Px with length 2 chain
+ * with s^2 - x = 2 * Px
+ *)
+ let point_x2 =
+ (* s^2 - x = 2Px *)
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false slope_squared result_x curve.modulus
+ in
+
+ (* Bounds 2: Left input (s^2) check covered by (Bounds 1).
+ * Right input (x) check value is gadget output (checked by caller).
+ * Result (2Px) chained (no check required).
+ *)
+
+ (* C3: 2Px - Px = Px *)
+ let expected_point_x =
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false point_x2 point_x curve.modulus
+ in
+ (* Bounds 3: Left input (2Px) is chained (no check required).
+ * Right input (Px) is gadget input (checked by caller).
+ * Result (Px) chained (no check required).
+ *)
+ (* Copy expected_point_x to point_x *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ expected_point_x point_x ;
+
+ (*
+ * Continue the chain to length 4 by computing (Px - x) * s (used later)
+ *)
+
+ (* C4: Px - x = Δx *)
+ let delta_x =
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false expected_point_x result_x curve.modulus
+ in
+ (* Bounds 4: Left input (Px) is chained (no check required).
+ * Right input (x) check value is gadget output (checked by caller).
+ * Addition chain result (delta_x) bound check added below.
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs delta_x ;
+
+ (* C5: Δx * s = Δxs *)
+ let delta_xs =
+ Foreign_field.mul
+ (module Circuit)
+ external_checks delta_x slope curve.modulus
+ in
+
+ (* Bounds 5: Left input (delta_x) check already covered by (Bounds 4).
+ * Right input (slope) already covered by (Bounds 1).
+ * Result (delta_xs) bound check already tracked by Foreign_field.mul.
+ *)
+
+ (*
+ * Constrain rest of y = s' * (Px - x) - Py and part of slope computation
+ * s = (3 * Px^2 + a)/(2 * Py) in length 3 chain
+ *)
+
+ (* C6: Δxs - y = Py *)
+ let expected_point_y =
+ Foreign_field.sub
+ (module Circuit)
+ ~full:false delta_xs result_y curve.modulus
+ in
+ (* Bounds 6: Left input (delta_xs) checked by (Bound 5).
+ * Right input is gadget output (checked by caller).
+ * Addition result (Py) is chained (no check required).
+ *)
+ (* Copy expected_point_y to point_y *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ expected_point_y point_y ;
+
+ (* C7: Py + Py = 2Py *)
+ let point_y2 =
+ Foreign_field.add (module Circuit) ~full:false point_y point_y curve.modulus
+ in
+
+ (* Bounds 7: Left input (Py) is gadget input (checked by caller).
+ * Right input (Py) is gadget input (checked by caller).
+ * Addition result (2Py) chained (no check required).
+ *)
+
+ (* C8: 2Py * s = 2Pys *)
+ let point_y2s =
+ Foreign_field.mul
+ (module Circuit)
+ external_checks point_y2 slope curve.modulus
+ in
+ (* Bounds 8: Left input (point_y2) bound check added below.
+ * Right input (slope) already checked by (Bound 1).
+ * Result (2Pys) bound check already tracked by Foreign_field.mul.
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs point_y2 ;
+
+ (*
+ * Constrain rest of slope computation s = (3 * Px^2 + a)/(2 * Py)
+ *)
+
+ (* C9: 2Px + Px = 3Px *)
+ let point_x3 =
+ Foreign_field.add
+ (module Circuit)
+ ~full:false point_x2 point_x curve.modulus
+ in
+ (* Bounds 9: Left input (point_x2) bound check added below.
+ * Right input (Px) is gadget input (checked by caller).
+ * Result (3Px) is chained (no check required).
+ *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs point_x2 ;
+
+ (* Check if the elliptic curve a parameter requires more constraints
+ * to be added in order to add final a (e.g. 3Px^2 + a where a != 0).
+ *)
+ ( if Bignum_bigint.(curve.bignum.a = zero) then (
+ (* C10a: 3Px * Px = 3Px^2 *)
+ let point_x3_squared =
+ Foreign_field.mul
+ (module Circuit)
+ external_checks ~bound_check_result:false point_x3 point_x curve.modulus
+ in
+
+ (* Bounds 10a: Left input (point_x3) bound check added below.
+ * Right input (Px) is gadget input (checked by caller).
+ * Result (3Px^2) bound check already covered by (Bounds 8) since
+ * point_x3_squared is equal to point_y2s.
+ *)
+
+ (* Add point_x3 bound check (Bounds 10a) *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs point_x3 ;
+
+ (* Copy point_x3_squared to point_y2s *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ point_x3_squared point_y2s )
+ else
+ (* C10b: 3Px * Px = 3Px^2 *)
+ let point_x3_squared =
+ Foreign_field.mul
+ (module Circuit)
+ external_checks point_x3 point_x curve.modulus
+ in
+
+ (* Bounds 10b: Left input (point_x3) bound check added below.
+ * Right input (Px) is gadget input (checked by caller).
+ * Result (3Px^2) bound check already covered by Foreign_field.mul.
+ *)
+
+ (* Add point_x3 bound check (Bounds 10b) *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs point_x3 ;
+
+ (* Add curve constant a and constrain rest slope computation
+ * with s = (3 * Px^2 + a)/(2 * Py)
+ *)
+
+ (* C11: 3Px^2 + a = 3Px^2a *)
+ let point_x3_squared_plus_a =
+ Foreign_field.add
+ (module Circuit)
+ ~full:false point_x3_squared curve.a curve.modulus
+ in
+ (* Bounds 11: Left input (point_x3_squared) already tracked by (Bounds 10b).
+ * Right input (curve.a) is public constant.
+ * Result (3Px^2a) bound check already covered by (Bound 8) since
+ * point_x3_squared_plus_a = point_y2s.
+ *)
+ (* Result row *)
+ Foreign_field.result_row
+ (module Circuit)
+ ~label:"Ec_group.double_point_x3_squared_plus_a" point_x3_squared_plus_a ;
+
+ (* Copy point_x3_squared_plus_a to point_y2s *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ point_x3_squared_plus_a point_y2s ) ;
+
+ (* Return result point *)
+ Affine.of_coordinates (result_x, result_y)
+
+(* Gadget for elliptic curve group negation
+ *
+ * Note: this gadget does not create a Zero row for the negated result.
+ * If not already present in witness the caller is responsible for placing
+ * the negated result somewhere (e.g. in a Zero row or elsewhere).
+ *)
+let negate (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (curve : f Curve_params.InCircuit.t) (point : f Affine.t) : f Affine.t =
+ let x, y = Affine.to_coordinates point in
+ (* Zero constant foreign field element *)
+ let zero =
+ Foreign_field.Element.Standard.of_bignum_bigint
+ (module Circuit)
+ Bignum_bigint.zero
+ in
+ (* C1: Constrain computation of the negated point *)
+ let neg_y =
+ (* neg_y = 0 - y *)
+ Foreign_field.sub (module Circuit) ~full:false zero y curve.modulus
+ in
+
+ (* Bounds 1: Left input is public constant
+ * Right input parameter (checked by caller)
+ * Result bound is part of output (checked by caller)
+ *)
+ Affine.of_coordinates (x, neg_y)
+
+(* Select initial EC scalar mul accumulator value ia using trustless nothing-up-my-sleeve deterministic algorithm
+ *
+ * Simple hash-to-curve algorithm
+ *
+ * Trustlessly select an elliptic curve point for which no one knows the discrete logarithm!
+ *)
+let compute_ia_points ?(point : Affine.bignum_point option)
+ (curve : Curve_params.t) : Affine.bignum_point Curve_params.ia_points =
+ (* Hash generator point to get candidate x-coordinate *)
+ let open Digestif.SHA256 in
+ let ctx = init () in
+
+ let start_point =
+ match point with Some point -> point | None -> curve.gen
+ in
+
+ assert (is_on_curve_bignum_point curve start_point) ;
+
+ (* Hash to (possible) elliptic curve point function *)
+ let hash_to_curve_point ctx (point : Affine.bignum_point ref) =
+ (* Hash curve point *)
+ let x, y = !point in
+ let ctx = feed_string ctx @@ Common.bignum_bigint_unpack_bytes x in
+ let ctx = feed_string ctx @@ Common.bignum_bigint_unpack_bytes y in
+ let bytes = get ctx |> to_raw_string in
+
+ (* Initialize x-coordinate from hash output *)
+ let x = Bignum_bigint.(Common.bignum_bigint_of_bin bytes % curve.modulus) in
+
+ (* Compute y-coordinate: y = sqrt(x^3 + a * x + b) *)
+ let x3 = Bignum_bigint.(pow x (of_int 3) % curve.modulus) in
+ let ax = Bignum_bigint.(curve.a * x % curve.modulus) in
+ let x3ax = Bignum_bigint.((x3 + ax) % curve.modulus) in
+ let y2 = Bignum_bigint.((x3ax + curve.b) % curve.modulus) in
+ let y = Common.bignum_bigint_sqrt_mod y2 curve.modulus in
+
+ (* Sanity check *)
+ ( if Bignum_bigint.(not (equal y zero)) then
+ let y2_computed = Bignum_bigint.(y * y % curve.modulus) in
+ assert (Bignum_bigint.(y2_computed = y2)) ) ;
+
+ (* Return possibly valid curve point *)
+ (x, y)
+ in
+
+ (* Deterministically search for valid curve point *)
+ let candidate_point = ref (hash_to_curve_point ctx (ref start_point)) in
+
+ while not (is_on_curve_bignum_point curve !candidate_point) do
+ candidate_point := hash_to_curve_point ctx candidate_point
+ done ;
+
+ (* We have a valid curve point! *)
+ let point = !candidate_point in
+
+ (* Compute negated point (i.e. with other y-root) *)
+ let neg_point =
+ let x, y = point in
+ let neg_y = Bignum_bigint.(neg y % curve.modulus) in
+ (x, neg_y)
+ in
+
+ Curve_params.ia_of_points point neg_point
+
+(* Gadget to constrain that a point is on the elliptic curve specified by
+ * y^2 = x^3 + ax + b mod p
+ * where a, b are the curve parameters and p is the base field modulus (curve.modulus)
+ *
+ * External checks: (not counting inputs and output)
+ * Bound checks: 3 (+1 when a != 0 and +1 when b != 0)
+ * Multi-range-checks: 3
+ * Compact-range-checks: 3
+ * Total range-checks: 9
+ *
+ * Rows: (not counting inputs/outputs and constants)
+ * Curve check: 8 (+1 when a != 0 and +2 when b != 0)
+ * Bound additions: 6
+ * Multi-range-checks: 36
+ * Total: 50
+ *
+ * Constants:
+ * Curve constants: 10 (for 256-bit curve; one-time cost per circuit)
+ * Pre-computing doubles: 767 (for 256-bit curve; one-time cost per circuit)
+ *)
+let is_on_curve (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (external_checks : f Foreign_field.External_checks.t)
+ (curve : f Curve_params.InCircuit.t) (point : f Affine.t) =
+ let x, y = Affine.to_coordinates point in
+
+ (* C1: x^2 = x * x *)
+ let x_squared =
+ Foreign_field.mul (module Circuit) external_checks x x curve.modulus
+ in
+
+ (* Bounds 1: Left and right inputs are gadget input (checked by caller).
+ * Result bound check already tracked by Foreign_field.mul
+ *)
+
+ (* C2: Optionally constrain addition of curve parameter a *)
+ let x_squared_a =
+ if not Bignum_bigint.(curve.bignum.a = zero) then (
+ (* x^2 + a *)
+ let x_squared_a =
+ Foreign_field.add
+ (module Circuit)
+ ~full:false x_squared curve.a curve.modulus
+ in
+ (* Bounds 2: Left input already checked by (Bounds 1)
+ * Right input public parameter (no check necessary)
+ * Result bound check below
+ *)
+ (* Add x_squared_a bound check *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs x_squared_a ;
+ x_squared_a )
+ else x_squared
+ in
+
+ (* C3: x^3 + ax = (x^2 + a) * x *)
+ let x_cubed_ax =
+ Foreign_field.mul
+ (module Circuit)
+ external_checks x_squared_a x curve.modulus
+ in
+
+ (* Bounds 3: Left input already checked by (Bounds 2) or (Bounds 1)
+ * Right input is gadget input (checked by caller).
+ * Result bound check already tracked by Foreign_field.mul
+ *)
+
+ (* C4: Optionally constrain addition of curve parameter b *)
+ let x_cubed_ax_b =
+ if not Bignum_bigint.(curve.bignum.b = zero) then (
+ (* (x^2 + a) * x + b *)
+ let x_cubed_ax_b =
+ Foreign_field.add
+ (module Circuit)
+ ~full:false x_cubed_ax curve.b curve.modulus
+ in
+ (* Result row *)
+ Foreign_field.result_row
+ (module Circuit)
+ ~label:"Ec_group.is_on_curve_x_cubed_ax_b" x_cubed_ax_b ;
+
+ (* Bounds 4: Left input already checked by (Bounds 3)
+ * Right input public parameter (no check necessary)
+ * Result bound check below
+ *)
+
+ (* Add x_cubed_ax_b bound check *)
+ Foreign_field.External_checks.append_bound_check external_checks
+ @@ Foreign_field.Element.Standard.to_limbs x_cubed_ax_b ;
+
+ x_cubed_ax_b )
+ else x_cubed_ax
+ in
+
+ (* C5: y^2 = y * y *)
+ let y_squared =
+ Foreign_field.mul (module Circuit) external_checks y y curve.modulus
+ in
+
+ (* Bounds 5: Left and right inputs are gadget input (checked by caller)
+ * Result bound check already tracked by Foreign_field.mul
+ *)
+
+ (* Copy y_squared to x_cubed_ax_b *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ y_squared x_cubed_ax_b ;
+ ()
+
+(* Gadget to constrain that initial accumulator (ia) point is on elliptic curve and the computation of its negation.
+ * Note: The value of the ia itself is a deterministically generated public constant (this computation is not checked),
+ * so using this gadget is only required in some situations.
+ *)
+let check_ia (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f Foreign_field.External_checks.t)
+    (curve : f Curve_params.InCircuit.t) (ia : f Affine.t Curve_params.ia_points)
+    =
+  (* C1: Check that initial accumulator point is on curve *)
+  is_on_curve (module Circuit) external_checks curve ia.acc ;
+
+  (* C2: Constrain computation of the negated initial accumulator point *)
+  let neg_init_acc = negate (module Circuit) curve ia.acc in
+  (* Result row *)
+  Foreign_field.result_row
+    (module Circuit)
+    ~label:"Ec_group.check_ia_neg_init_y"
+  @@ Affine.y neg_init_acc ;
+
+  (* Bounds 1: Input is public constant
+   *           Result is part of input (checked by caller)
+   *)
+
+  (* C3: Copy computed_neg_init_acc to ia.neg_acc *)
+  Affine.assert_equal (module Circuit) neg_init_acc ia.neg_acc ;
+
+  (* P is on curve <=> -P is on curve, thus we do not need to check
+   * ia.neg_acc is on curve *)
+  ()
+
+(* Gadget for elliptic curve group scalar multiplication over foreign field
+ *
+ * Given input point P and scalar field element s, computes and constrains that
+ *   Q = s0 * P + ... + sz * 2^z * P
+ *
+ * where s0, s1, ..., sz is the binary expansion of s, (+) is group addition
+ * and the terms P, 2 * P, ... 2^z * P are obtained with group doubling.
+ *
+ * Inputs:
+ *   external_checks := Context to track required external checks
+ *   curve           := Elliptic curve parameters
+ *   scalar          := Boolean array of scalar bits
+ *   point           := Affine point to scale
+ *
+ * Preconditions and limitations:
+ *    P is not O (the point at infinity)
+ *    P's coordinates are bounds checked
+ *    P is on the curve
+ *    s is not zero
+ *    ia point is randomly selected and constrained to be on the curve
+ *    ia negated point computation is constrained
+ *    ia coordinates are bounds checked
+ *
+ * External checks: (per crumb, not counting inputs and output)
+ *   Bound checks:         42 (+1 when a != 0)
+ *   Multi-range-checks:   17
+ *   Compact-range-checks: 17
+ *   Total range-checks:   76
+ *
+ * Rows: (per crumb, not counting inputs/outputs and constants)
+ *   Scalar multiplication: ~84 (+2 when a != 0)
+ *   Bound additions:        84
+ *   Multi-range-checks:    308
+ *   Total:                 476
+ *
+ * Constants:
+ *   Curve constants: 10 (for 256-bit curve; one-time cost per circuit)
+ *   Pre-computing doubles: 767 (for 256-bit curve; one-time cost per circuit)
+ *)
+let scalar_mul (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f Foreign_field.External_checks.t)
+    (curve : f Curve_params.InCircuit.t) ?(doubles : f Affine.t array option)
+    (scalar : Circuit.Boolean.var list) (point : f Affine.t) : f Affine.t =
+  (* Double-and-add algorithm
+   * Only used for signature verification, so simple algorithm suffices.
+   *
+   *   A = O; B = P
+   *   for i in 0..z
+   *       if si == 1
+   *           A = group_add(A, B)
+   *       B = group_double(B)
+   *   return A
+   *
+   * Optimization:
+   *
+   *   To avoid expensive in-circuit conditional checks for point at infinity,
+   *   we employ a randomized strategy that avoids adding the identity element
+   *   or the same point to itself.  The strategy works as follows.
+   *
+   *   Since the prover knows the points that it will add and double during
+   *   scaling, the prover could select an initial accumulator point I such that
+   *   the double-and-add algorithm never adds the identity element, same point
+   *   or negated point to itself whilst scaling.
+   *
+   *   The algorithm above is modified to initialize the accumulator to I and
+   *   then (group) subtract I after scaling to compute the final result point.
+   *
+   *   A = I; B = P
+   *   for i in 0..z
+   *       if si == 1
+   *           A = group_add(A, B)
+   *       B = group_double(B)
+   *   return A + -I
+   *
+   *   The prover MUST additionally constrain that
+   *     1) point I is on the curve
+   *     2) I' = -I
+   *
+   * Simplification:
+   *
+   *   Uniformly and randomly select initial accumulator point I, instead of using
+   *   the complicated deterministic process.
+   *
+   *   For a z-bit scalar, there are z unique B points.  Each point also has its
+   *   negative, which we cannot add to itself.  Therefore, in total there are
+   *   2z points that we do not want to select as our initial point nor compute
+   *   as an intermediate A point during scaling.  The probability we select or
+   *   compute one of these points is approx 2z^2/n, where n is the order of the
+   *   elliptic curve group.
+   *
+   *   The probability of selecting a bad point is negligible for our applications
+   *   where z is very small (e.g. 256) and n is very large (e.g. 2^256).  Thus,
+   *   we can simply randomly select the initial accumulator I and the
+   *   double-and-add algorithm will succeed with overwhelming probability.
+   *)
+  let acc, _base =
+    List.foldi scalar ~init:(curve.ia.acc, point) (* (acc, base) *)
+      ~f:(fun i (acc, base) bit ->
+        (* Add: sum = acc + base *)
+        let sum = add (module Circuit) external_checks curve acc base in
+        (* Bounds 1:
+         *   Left input is previous result, so already checked.
+         *   Right input is checked by previous doubling check.
+         *   Initial acc and base are gadget inputs (checked by caller).
+         *   Result bounds check below.
+         *)
+        Foreign_field.External_checks.append_bound_check external_checks
+        @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x sum ;
+        Foreign_field.External_checks.append_bound_check external_checks
+        @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y sum ;
+
+        (* Group double: double_base = base + base *)
+        let double_base =
+          match doubles with
+          | None ->
+              let double_base =
+                double (module Circuit) external_checks curve base
+              in
+              (* Bounds 2:
+               *   Input is previous result, so already checked.
+               *   Initial base is gadget input (checked by caller).
+               *   Result bounds check below.
+               *)
+              Foreign_field.External_checks.append_bound_check external_checks
+              @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x double_base ;
+              Foreign_field.External_checks.append_bound_check external_checks
+              @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y double_base ;
+              double_base
+          | Some doubles ->
+              (* When the base point is public (e.g. the secp256k1 generator) we can
+               * improve performance by having them as precomputed public parameters *)
+              doubles.(i)
+        in
+
+        (* Group add conditionally: select sum when bit is set, else keep acc *)
+        let acc = Affine.if_ (module Circuit) bit ~then_:sum ~else_:acc in
+
+        (acc, double_base) )
+  in
+
+  (* Subtract init_point from accumulator for final result *)
+  add (module Circuit) external_checks curve acc curve.ia.neg_acc
+
+(* Gadget to check point is in the subgroup
+ *   nP = O
+ * where n is the elliptic curve group order and O is the point at infinity
+ *)
+let check_subgroup (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f Foreign_field.External_checks.t)
+    (curve : f Curve_params.InCircuit.t) ?(doubles : f Affine.t array option)
+    (point : f Affine.t) =
+  (* Subgroup check: nP = O
+   * We don't support identity element, so instead we check
+   *   ((n - 1) + 1)P = O
+   *   (n - 1)P = -P
+   *)
+
+  (* C1: Compute (n - 1)P *)
+  let n_minus_one_point =
+    scalar_mul
+      (module Circuit)
+      external_checks curve ?doubles curve.order_minus_one_bits point
+  in
+  (* Bounds 1: Left input is public constant (no bounds check required)
+   *           Right input is gadget input (checked by caller)
+   *           Result bound check below
+   *)
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x n_minus_one_point ;
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y n_minus_one_point ;
+
+  (* C2: Compute -P *)
+  let minus_point = negate (module Circuit) curve point in
+  (* Result row *)
+  Foreign_field.result_row (module Circuit) ~label:"minus_point_y"
+  @@ Affine.y minus_point ;
+  (* Bounds 2: Input is gadget input (checked by caller)
+   *           Result bound check below
+   *           Only the y-coordinate needs a bound check here since negation
+   *           leaves the x-coordinate unchanged (already checked by caller)
+   *)
+  Foreign_field.External_checks.append_bound_check external_checks
+  @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y minus_point ;
+
+  (* C3: Assert (n - 1)P = -P *)
+  Affine.assert_equal (module Circuit) n_minus_one_point minus_point
+
+(***************)
+(* Group tests *)
+(***************)
+
+let%test_unit "Ec_group.add" =
+  if basic_tests_enabled then (
+    let open Kimchi_gadgets_test_runner in
+    (* Initialize the SRS cache. *)
+    let () =
+      try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+    in
+
+    (* Test group add *)
+    let test_add ?cs (curve : Curve_params.t) (left_input : Affine.bignum_point)
+        (right_input : Affine.bignum_point)
+        (expected_result : Affine.bignum_point) =
+      (* Generate and verify proof *)
+      let cs, _proof_keypair, _proof =
+        Runner.generate_and_verify_proof ?cs (fun () ->
+            let open Runner.Impl in
+            (* Prepare test public inputs *)
+            let curve =
+              Curve_params.to_circuit_constants (module Runner.Impl) curve
+            in
+            let left_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                left_input
+            in
+            let right_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                right_input
+            in
+            let expected_result =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                expected_result
+            in
+
+            (* Create external checks context for tracking extra constraints
+               that are required for soundness (unused in this simple test) *)
+            let unused_external_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+
+            (* L + R = S *)
+            let result =
+              add
+                (module Runner.Impl)
+                unused_external_checks curve left_input right_input
+            in
+
+            (* Check for expected quantity of external checks *)
+            assert (
+              Mina_stdlib.List.Length.equal unused_external_checks.bounds 6 ) ;
+            assert (
+              Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+                3 ) ;
+            assert (
+              Mina_stdlib.List.Length.equal
+                unused_external_checks.compact_multi_ranges 3 ) ;
+
+            (* Check output matches expected result *)
+            as_prover (fun () ->
+                assert (
+                  Affine.equal_as_prover
+                    (module Runner.Impl)
+                    result expected_result ) ) ;
+            () )
+      in
+
+      cs
+    in
+
+    (* Tests for random points *)
+    let fake_curve5 =
+      Curve_params.{ default with modulus = Bignum_bigint.of_int 5 }
+    in
+    let _cs =
+      test_add fake_curve5
+        (Bignum_bigint.of_int 4, Bignum_bigint.one) (* left_input *)
+        (Bignum_bigint.of_int 0, Bignum_bigint.of_int 3) (* right_input *)
+        (Bignum_bigint.of_int 0, Bignum_bigint.of_int 2)
+      (* expected result *)
+    in
+    let _cs =
+      test_add fake_curve5
+        (Bignum_bigint.of_int 2, Bignum_bigint.of_int 3) (* left_input *)
+        (Bignum_bigint.of_int 1, Bignum_bigint.of_int 0) (* right_input *)
+        (Bignum_bigint.of_int 1, Bignum_bigint.of_int 0)
+      (* expected result *)
+    in
+
+    (* Constraint system reuse tests *)
+    let fake_curve13 =
+      Curve_params.{ default with modulus = Bignum_bigint.of_int 13 }
+    in
+    let cs =
+      test_add fake_curve13
+        (Bignum_bigint.of_int 3, Bignum_bigint.of_int 8) (* left_input *)
+        (Bignum_bigint.of_int 5, Bignum_bigint.of_int 11) (* right_input *)
+        (Bignum_bigint.of_int 4, Bignum_bigint.of_int 10)
+      (* expected result *)
+    in
+    let _cs =
+      test_add ~cs fake_curve13
+        (Bignum_bigint.of_int 10, Bignum_bigint.of_int 4) (* left_input *)
+        (Bignum_bigint.of_int 12, Bignum_bigint.of_int 7) (* right_input *)
+        (Bignum_bigint.of_int 3, Bignum_bigint.of_int 0)
+      (* expected result *)
+    in
+    let _cs =
+      test_add ~cs fake_curve13
+        (Bignum_bigint.of_int 8, Bignum_bigint.of_int 6) (* left_input *)
+        (Bignum_bigint.of_int 2, Bignum_bigint.of_int 1) (* right_input *)
+        (Bignum_bigint.of_int 12, Bignum_bigint.of_int 8)
+      (* expected result *)
+    in
+
+    (* Negative tests *)
+    let fake_curve9 =
+      Curve_params.{ default with modulus = Bignum_bigint.of_int 9 }
+    in
+    assert (
+      Common.is_error (fun () ->
+          (* Wrong constraint system (changed modulus) *)
+          test_add ~cs fake_curve9
+            (Bignum_bigint.of_int 8, Bignum_bigint.of_int 6) (* left_input *)
+            (Bignum_bigint.of_int 2, Bignum_bigint.of_int 1) (* right_input *)
+            (Bignum_bigint.of_int 12, Bignum_bigint.of_int 8)
+          (* expected result *) ) ) ;
+    assert (
+      Common.is_error (fun () ->
+          (* Wrong answer (right modulus) *)
+          test_add ~cs fake_curve13
+            (Bignum_bigint.of_int 8, Bignum_bigint.of_int 6) (* left_input *)
+            (Bignum_bigint.of_int 2, Bignum_bigint.of_int 1) (* right_input *)
+            (Bignum_bigint.of_int 12, Bignum_bigint.of_int 9)
+          (* expected result *) ) ) ;
+
+    (* Tests with secp256k1 curve points *)
+    let random_point1 =
+      ( Bignum_bigint.of_string
+          "11498799051185379176527662983290644419148625795866197242742376646044820710107"
+      , Bignum_bigint.of_string
+          "87365548140897354715632623292744880448736648603030553868546115582681395400362"
+      )
+    in
+    let expected_result1 =
+      ( Bignum_bigint.of_string
+          "29271032301589161601163082898984274448470999636237808164579416118817375265766"
+      , Bignum_bigint.of_string
+          "70576057075545750224511488165986665682391544714639291167940534165970533739040"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params Secp256k1.params.gen) ;
+    assert (is_on_curve_bignum_point Secp256k1.params random_point1) ;
+    assert (is_on_curve_bignum_point Secp256k1.params expected_result1) ;
+
+    let _cs =
+      test_add Secp256k1.params random_point1 Secp256k1.params.gen
+        expected_result1
+    in
+
+    let random_point2 =
+      ( Bignum_bigint.of_string
+          "112776793647017636286801498409683698782792816810143189200772003475655331235512"
+      , Bignum_bigint.of_string
+          "37154006933110560524528936279434506593302537023736551486562363002969014272200"
+      )
+    in
+    let expected_result2 =
+      ( Bignum_bigint.of_string
+          "80919512080552099332189419005806362073658070117780992417768444957631350640350"
+      , Bignum_bigint.of_string
+          "4839884697531819803579082430572588557482298603278351225895977263486959680227"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params random_point2) ;
+    assert (is_on_curve_bignum_point Secp256k1.params expected_result2) ;
+
+    let _cs =
+      test_add Secp256k1.params expected_result1 (* left_input *)
+        random_point2 (* right_input *)
+        expected_result2
+      (* expected result *)
+    in
+
+    let random_point3 =
+      ( Bignum_bigint.of_string
+          "36425953153418322223243576029807183106978427220826420108023201968296177476778"
+      , Bignum_bigint.of_string
+          "24007339127999344540320969916238304309192480878642453507169699691156248304362"
+      )
+    in
+    let random_point4 =
+      ( Bignum_bigint.of_string
+          "21639969699195480792170626687481368104641445608975892798617312168630290254356"
+      , Bignum_bigint.of_string
+          "30444719434143548339668041811488444063562085329168372025420048436035175999301"
+      )
+    in
+    let expected_result3 =
+      ( Bignum_bigint.of_string
+          "113188224115387667795245114738521133409188389625511152470086031332181459812059"
+      , Bignum_bigint.of_string
+          "82989616646064102138003387261138741187755389122561858439662322580504431694519"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params random_point3) ;
+    assert (is_on_curve_bignum_point Secp256k1.params random_point4) ;
+    assert (is_on_curve_bignum_point Secp256k1.params expected_result3) ;
+
+    let _cs =
+      test_add Secp256k1.params random_point3 (* left_input *)
+        random_point4 (* right_input *)
+        expected_result3
+      (* expected result *)
+    in
+
+    (* Constraint system reuse tests *)
+    let pt1 =
+      ( Bignum_bigint.of_string
+          "75669526378790147634671888414445173066514756807031971924620136884638031442759"
+      , Bignum_bigint.of_string
+          "21417425897684876536576718477824646351185804513111016365368704154638046645765"
+      )
+    in
+    let pt2 =
+      ( Bignum_bigint.of_string
+          "14155322613096941824503892607495280579903778637099750589312382650686697414735"
+      , Bignum_bigint.of_string
+          "6513771125762614571725090849784101711151222857564970563886992272283710338112"
+      )
+    in
+    let expected_pt =
+      ( Bignum_bigint.of_string
+          "11234404138675683238798732023399338183955476104311735089175934636931978267582"
+      , Bignum_bigint.of_string
+          "2483077095355421104741807026372550508534866555013063406887316930008225336894"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params pt1) ;
+    assert (is_on_curve_bignum_point Secp256k1.params pt2) ;
+    assert (is_on_curve_bignum_point Secp256k1.params expected_pt) ;
+
+    let cs = test_add Secp256k1.params pt1 pt2 expected_pt in
+
+    let pt1 =
+      ( Bignum_bigint.of_string
+          "97313026812541560473771297589757921196424145769025529099070592800256734650744"
+      , Bignum_bigint.of_string
+          "38700860102018844310665941222140210385381782344695476706452234109902874948789"
+      )
+    in
+    let pt2 =
+      ( Bignum_bigint.of_string
+          "82416105962835331584090450180444085592428397648594295814088133554696721893017"
+      , Bignum_bigint.of_string
+          "72361514636959418409520767179749571220723219394228755075988292395103362307597"
+      )
+    in
+    let expected_pt =
+      ( Bignum_bigint.of_string
+          "63066162743654726673830060769616154872212462240062945169518526070045923596428"
+      , Bignum_bigint.of_string
+          "54808797958010370431464079583774910620962703868682659560981623451275441505706"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params pt1) ;
+    assert (is_on_curve_bignum_point Secp256k1.params pt2) ;
+    assert (is_on_curve_bignum_point Secp256k1.params expected_pt) ;
+
+    let _cs = test_add ~cs Secp256k1.params pt1 pt2 expected_pt in
+
+    let expected2 =
+      ( Bignum_bigint.of_string
+          "23989387498834566531803335539224216637656125335573670100510541031866883369583"
+      , Bignum_bigint.of_string
+          "8780199033752628541949962988447578555155504633890539264032735153636423550500"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params expected2) ;
+
+    let _cs = test_add ~cs Secp256k1.params expected_pt pt1 expected2 in
+
+    (* Negative tests *)
+    assert (
+      Common.is_error (fun () ->
+          (* Wrong constraint system (changed modulus) *)
+          test_add ~cs fake_curve9 expected_pt pt1 expected2 ) ) ;
+
+    assert (
+      Common.is_error (fun () ->
+          (* Wrong result *)
+          test_add ~cs Secp256k1.params expected_pt pt1 expected_pt ) ) ;
+
+    (* Test with some real Ethereum curve points *)
+
+    (* Curve point from pubkey of sender of 1st Ethereum transaction
+     * https://etherscan.io/tx/0x5c504ed432cb51138bcf09aa5e8a410dd4a1e204ef84bfed1be16dfba1b22060
+     *)
+    let first_eth_tx_pubkey =
+      ( Bignum_bigint.of_string
+          "25074680562105920500390488848505179172301959433246133200656053822731415560379"
+      , Bignum_bigint.of_string
+          "40207352835024964935479287038185466710938760823387493786206830664631160762596"
+      )
+    in
+    (* Vb pubkey curve point
+     * https://etherscan.io/address/0xab5801a7d398351b8be11c439e05c5b3259aec9b
+     *)
+    let vitalik_eth_pubkey =
+      ( Bignum_bigint.of_string
+          "49781623198970027997721070672560275063607048368575198229673025608762959476014"
+      , Bignum_bigint.of_string
+          "44999051047832679156664607491606359183507784636787036192076848057884504239143"
+      )
+    in
+    let expected_result =
+      ( Bignum_bigint.of_string
+          "5673019186984644139884227978304592898127494693953507135947623290000290975721"
+      , Bignum_bigint.of_string
+          "63149760798259320533576297417560108418144118481056410815317549443093209180466"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params first_eth_tx_pubkey) ;
+    assert (is_on_curve_bignum_point Secp256k1.params vitalik_eth_pubkey) ;
+    assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+    let _cs =
+      test_add ~cs Secp256k1.params first_eth_tx_pubkey vitalik_eth_pubkey
+        expected_result
+    in
+
+    () )
+
+let%test_unit "Ec_group.add_chained" =
+  if basic_tests_enabled then (
+    let open Kimchi_gadgets_test_runner in
+    (* Initialize the SRS cache. *)
+    let () =
+      try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+    in
+
+    (* Test chained group add.
+     * When chain_left is true the first sum is fed as the LEFT operand of the
+     * second addition; otherwise it is fed as the RIGHT operand. *)
+    let test_add_chained ?cs ?(chain_left = true) (curve : Curve_params.t)
+        (left_input : Affine.bignum_point) (right_input : Affine.bignum_point)
+        (input2 : Affine.bignum_point) (expected_result : Affine.bignum_point) =
+      (* Generate and verify proof *)
+      let cs, _proof_keypair, _proof =
+        Runner.generate_and_verify_proof ?cs (fun () ->
+            let open Runner.Impl in
+            (* Prepare test public inputs *)
+            let curve =
+              Curve_params.to_circuit_constants (module Runner.Impl) curve
+            in
+            let left_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                left_input
+            in
+            let right_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                right_input
+            in
+            let input2 =
+              Affine.of_bignum_bigint_coordinates (module Runner.Impl) input2
+            in
+            let expected_result =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                expected_result
+            in
+
+            (* Create external checks context for tracking extra constraints
+             * that are required for soundness (unused in this test) *)
+            let unused_external_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+
+            (* L + R = S *)
+            let result1 =
+              add
+                (module Runner.Impl)
+                unused_external_checks curve left_input right_input
+            in
+
+            let result2 =
+              if chain_left then
+                (* S + T = U *)
+                (* Chain result to left input *)
+                add
+                  (module Runner.Impl)
+                  unused_external_checks curve result1 input2
+              else
+                (* Chain result to right input *)
+                (* T + S = U *)
+                add
+                  (module Runner.Impl)
+                  unused_external_checks curve input2 result1
+            in
+
+            (* Check for expected quantity of external checks *)
+            assert (
+              Mina_stdlib.List.Length.equal unused_external_checks.bounds 12 ) ;
+            assert (
+              Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+                6 ) ;
+            assert (
+              Mina_stdlib.List.Length.equal
+                unused_external_checks.compact_multi_ranges 6 ) ;
+
+            (* Check output matches expected result *)
+            as_prover (fun () ->
+                assert (
+                  Affine.equal_as_prover
+                    (module Runner.Impl)
+                    result2 expected_result ) ) ;
+            () )
+      in
+
+      cs
+    in
+
+    (* Group add chaining test *)
+    let pt1 =
+      ( Bignum_bigint.of_string
+          "22078445491128279362564324454450148838521766213873448035670368771866784776689"
+      , Bignum_bigint.of_string
+          "59164395213226911607629035235242369632135709209315776938135875644072412604417"
+      )
+    in
+    let pt2 =
+      ( Bignum_bigint.of_string
+          "43363091675487122074415344565583111028231348930161176231597524718735106294021"
+      , Bignum_bigint.of_string
+          "111622036424234525038201689158418296167019583124308154759441266557529051647503"
+      )
+    in
+    let pt3 =
+      ( Bignum_bigint.of_string
+          "27095120504150867682043281371962577090258298278269412698577541627879567814209"
+      , Bignum_bigint.of_string
+          "43319029043781297382854244012410471023426320563005937780035785457494374919933"
+      )
+    in
+    let expected =
+      ( Bignum_bigint.of_string
+          "94445004776077869359279503733865512156009118507534561304362934747962270973982"
+      , Bignum_bigint.of_string
+          "5771544338553827547535594828872899427364500537732448576560233867747655654290"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params pt1) ;
+    assert (is_on_curve_bignum_point Secp256k1.params pt2) ;
+    assert (is_on_curve_bignum_point Secp256k1.params pt3) ;
+
+    (* Correct wiring for left chaining
+     * Result r1 = pt1 + pt2 and left operand of r2 = r1 + pt3
+     *
+     *     ,--------------------------------------------,
+     * x0: `-> (2, 3) -> (4, 3) -> (20, 3) -> (16, 3) ->`
+     *          r1x0      r1x0      Lx0        Lx0
+     *
+     *     ,--------------------------------------------,
+     * x1: `-> (2, 4) -> (16, 4) -> (20, 4) -> (4, 4) ->`
+     *          r1x1      Lx1        Lx1       r1x1
+     *
+     *     ,--------------------------------------------,
+     * x2: `-> (2, 5) -> (20, 5) -> (4, 5) -> (16, 5) ->`
+     *          r1x2      Lx2       r1x2       Lx2
+     *
+     *     ,------------------------,
+     * y0: `-> (11, 3) -> (23, 3) ->`
+     *          r1y0       Ly0
+     *
+     *     ,------------------------,
+     * y1: `-> (11, 4) -> (23, 4) ->`
+     *          r1y1       Ly1
+     *
+     *     ,------------------------,
+     * y2: `-> (11, 5) -> (23, 5) ->`
+     *          r1y2       Ly2
+     *)
+    let _cs = test_add_chained Secp256k1.params pt1 pt2 pt3 expected in
+
+    (* Correct wiring for right chaining
+     * Result r1 = pt1 + pt2 and right operand of r2 = pt3 + r1
+     *
+     *     ,-------------------------------------------,
+     * x0: `-> (2, 3) -> (17, 0) -> (4, 3) -> (20, 0) /
+     *          r1x0      Rx0       r1x0      Rx0
+     *
+     *     ,-------------------------------------------,
+     * x1: `-> (2, 4) -> (17, 1) -> (20, 1) -> (4, 4) /
+     *          r1x1      Rx1       Rx1        r1x1
+     *
+     *     ,-------------------------------------------,
+     * x2: `-> (2, 5) -> (4, 5) -> (17, 2) -> (20, 2) /
+     *          r1x2     r1x2      Rx2        Rx2
+     *
+     *     ,------------------------,
+     * y0: `-> (11, 3) -> (24, 0) ->`
+     *          r1y0       Ry0
+     *
+     *     ,------------------------,
+     * y1: `-> (11, 4) -> (24, 1) ->`
+     *          r1y1       Ry1
+     *
+     *     ,------------------------,
+     * y2: `-> (11, 5) -> (24, 2) ->`
+     *          r1y2       Ry2
+     *)
+    let _cs =
+      test_add_chained ~chain_left:false Secp256k1.params pt1 (* left_input *)
+        pt2 (* right_input *)
+        pt3 (* input2 *)
+        expected
+      (* expected result *)
+    in
+    () )
+
+let%test_unit "Ec_group.add_full" =
+  if basic_tests_enabled then (
+    let open Kimchi_gadgets_test_runner in
+    (* Initialize the SRS cache. *)
+    let () =
+      try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+    in
+
+    (* Test full group add (with bounds checks) *)
+    let test_add_full ?cs (curve : Curve_params.t)
+        (left_input : Affine.bignum_point) (right_input : Affine.bignum_point)
+        (expected_result : Affine.bignum_point) =
+      (* Generate and verify proof *)
+      let cs, _proof_keypair, _proof =
+        Runner.generate_and_verify_proof ?cs (fun () ->
+            let open Runner.Impl in
+            (* Prepare test public inputs *)
+            let curve =
+              Curve_params.to_circuit_constants (module Runner.Impl) curve
+            in
+            let left_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                left_input
+            in
+            let right_input =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                right_input
+            in
+            let expected_result =
+              Affine.of_bignum_bigint_coordinates
+                (module Runner.Impl)
+                expected_result
+            in
+
+            (* Create external checks context for tracking extra constraints
+               that are required for soundness *)
+            let external_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+
+            (* L + R = S *)
+            let result =
+              add
+                (module Runner.Impl)
+                external_checks curve left_input right_input
+            in
+
+            (* Add left_input to external checks *)
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.x left_input) ;
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.y left_input) ;
+
+            (* Add right_input to external checks *)
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.x right_input) ;
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.y right_input) ;
+
+            (* Add result to external checks *)
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.x result) ;
+            Foreign_field.(
+              External_checks.append_bound_check external_checks
+              @@ Element.Standard.to_limbs @@ Affine.y result) ;
+
+            (* Check output matches expected result *)
+            as_prover (fun () ->
+                assert (
+                  Affine.equal_as_prover
+                    (module Runner.Impl)
+                    result expected_result ) ) ;
+
+            (*
+             * Perform external checks
+             *)
+            assert (Mina_stdlib.List.Length.equal external_checks.bounds 12) ;
+            assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 3) ;
+            assert (
+              Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges
+                3 ) ;
+            (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *)
+            Foreign_field.constrain_external_checks
+              (module Runner.Impl)
+              external_checks curve.modulus ;
+
+            () )
+      in
+      cs
+    in
+
+    (* Full tests *)
+    let pt1 =
+      ( Bignum_bigint.of_string
+          "108106717441068942935036481412556424456551432537879152449804306833272168535105"
+      , Bignum_bigint.of_string
+          "76460339884983741488305111710326981694475523676336423409829095132008854584808"
+      )
+    in
+    let pt2 =
+      ( Bignum_bigint.of_string
+          "6918332104414828558125020939363051148342349799951368824506926403525772818971"
+      , Bignum_bigint.of_string
+          "112511987857588994657806651103271803396616867673371823390960630078201657435176"
+      )
+    in
+    let expected =
+      ( Bignum_bigint.of_string
+          "87351883076573600335277375022118065102135008483181597654369109297980597321941"
+      , Bignum_bigint.of_string
+          "42323967499650833993389664859011147254281400152806022789809987122536303627261"
+      )
+    in
+
+    assert (is_on_curve_bignum_point Secp256k1.params pt1) ;
+    assert (is_on_curve_bignum_point Secp256k1.params pt2) ;
+    assert (is_on_curve_bignum_point Secp256k1.params expected) ;
+
+    let _cs =
+      test_add_full Secp256k1.params pt1 (* left_input *)
+        pt2 (* right_input *)
+        expected
+      (* expected result *)
+    in
+
+    () )
+
+let%test_unit "Ec_group.double" =
+ if basic_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test group double *)
+ let test_double ?cs (curve : Curve_params.t) (point : Affine.bignum_point)
+ (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* P + P = D *)
+ let result =
+ double (module Runner.Impl) unused_external_checks curve point
+ in
+
+ (* Check for expected quantity of external checks *)
+ if Bignum_bigint.(curve.bignum.a = zero) then
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 8 )
+ else
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 9 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+ 4 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal
+ unused_external_checks.compact_multi_ranges 4 ) ;
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ result expected_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (* Test with elliptic curve y^2 = x^3 + 2 * x + 5 mod 13 *)
+ let _cs =
+ let fake_curve1 =
+ Curve_params.
+ { default with
+ modulus = Bignum_bigint.of_int 13
+ ; a = Bignum_bigint.of_int 2
+ ; b = Bignum_bigint.of_int 5
+ }
+ in
+ let point = (Bignum_bigint.of_int 2, Bignum_bigint.of_int 2) in
+ let expected_result = (Bignum_bigint.of_int 5, Bignum_bigint.of_int 7) in
+ assert (is_on_curve_bignum_point fake_curve1 point) ;
+ assert (is_on_curve_bignum_point fake_curve1 expected_result) ;
+ test_double fake_curve1 point expected_result
+ in
+
+ (* Test with elliptic curve y^2 = x^3 + 5 mod 13 *)
+ let _cs =
+ let fake_curve2 =
+ Curve_params.
+ { default with
+ modulus = Bignum_bigint.of_int 13
+ ; b = Bignum_bigint.of_int 5
+ }
+ in
+ let point = (Bignum_bigint.of_int 4, Bignum_bigint.of_int 2) in
+ let expected_result = (Bignum_bigint.of_int 6, Bignum_bigint.of_int 0) in
+ assert (is_on_curve_bignum_point fake_curve2 point) ;
+ assert (is_on_curve_bignum_point fake_curve2 expected_result) ;
+ test_double fake_curve2 point expected_result
+ in
+
+ (* Test with elliptic curve y^2 = x^3 + 7 mod 13 *)
+ let fake_curve0 =
+ Curve_params.
+ { default with
+ modulus = Bignum_bigint.of_int 13
+ ; b = Bignum_bigint.of_int 7
+ }
+ in
+ let cs0 =
+ let point = (Bignum_bigint.of_int 7, Bignum_bigint.of_int 8) in
+ let expected_result = (Bignum_bigint.of_int 8, Bignum_bigint.of_int 8) in
+ assert (is_on_curve_bignum_point fake_curve0 point) ;
+ assert (is_on_curve_bignum_point fake_curve0 expected_result) ;
+ let cs = test_double fake_curve0 point expected_result in
+ let _cs = test_double fake_curve0 point expected_result in
+ cs
+ in
+
+ (* Test with elliptic curve y^2 = x^3 + 17 * x mod 7879 *)
+ let fake_curve17 =
+ Curve_params.
+ { default with
+ modulus = Bignum_bigint.of_int 7879
+ ; a = Bignum_bigint.of_int 17
+ }
+ in
+ let cs17 =
+ let point = (Bignum_bigint.of_int 7331, Bignum_bigint.of_int 888) in
+ let expected_result =
+ (Bignum_bigint.of_int 2754, Bignum_bigint.of_int 3623)
+ in
+ assert (is_on_curve_bignum_point fake_curve17 point) ;
+ assert (is_on_curve_bignum_point fake_curve17 expected_result) ;
+ test_double fake_curve17 point expected_result
+ in
+
+ (* Constraint system reuse tests *)
+ let _cs =
+ let point = (Bignum_bigint.of_int 8, Bignum_bigint.of_int 8) in
+ let expected_result = (Bignum_bigint.of_int 11, Bignum_bigint.of_int 8) in
+ assert (is_on_curve_bignum_point fake_curve0 point) ;
+ assert (is_on_curve_bignum_point fake_curve0 expected_result) ;
+ test_double ~cs:cs0 fake_curve0 point expected_result
+ in
+
+ let _cs =
+ let point = (Bignum_bigint.of_int 1729, Bignum_bigint.of_int 4830) in
+ let expected_result =
+ (Bignum_bigint.of_int 6020, Bignum_bigint.of_int 5832)
+ in
+ assert (is_on_curve_bignum_point fake_curve17 point) ;
+ assert (is_on_curve_bignum_point fake_curve17 expected_result) ;
+ let _cs = test_double ~cs:cs17 fake_curve17 point expected_result in
+
+ (* Negative test *)
+ assert (
+ Common.is_error (fun () ->
+ (* Wrong constraint system *)
+ test_double ~cs:cs0 fake_curve17 point expected_result ) ) ;
+ _cs
+ in
+
+ (* Tests with secp256k1 curve points *)
+ let point =
+ ( Bignum_bigint.of_string
+ "107002484780363838095534061209472738804517997328105554367794569298664989358181"
+ , Bignum_bigint.of_string
+ "92879551684948148252506282887871578114014191438980334462241462418477012406178"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "74712964529040634650603708923084871318006229334056222485473734005356559517441"
+ , Bignum_bigint.of_string
+ "115267803285637743262834568062293432343366237647730050692079006689357117890542"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs = test_double Secp256k1.params point expected_result in
+
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "89565891926547004231252920425935692360644145829622209833684329913297188986597"
+ , Bignum_bigint.of_string
+ "12158399299693830322967808612713398636155367887041628176798871954788371653930"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params Secp256k1.params.gen) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs =
+ test_double Secp256k1.params Secp256k1.params.gen expected_result
+ in
+
+ let point =
+ ( Bignum_bigint.of_string
+ "72340565915695963948758748585975158634181237057659908187426872555266933736285"
+ , Bignum_bigint.of_string
+ "26612022505003328753510360357395054342310218908477055087761596777225815854353"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "108904232316543774780790055701972437888102004393747607639914151522482739421637"
+ , Bignum_bigint.of_string
+ "12361022197403188621809379658301822420116828257004558379520642349031207949605"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs = test_double Secp256k1.params point expected_result in
+
+ let point =
+ ( Bignum_bigint.of_string
+ "108904232316543774780790055701972437888102004393747607639914151522482739421637"
+ , Bignum_bigint.of_string
+ "12361022197403188621809379658301822420116828257004558379520642349031207949605"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "6412514063090203022225668498768852033918664033020116827066881895897922497918"
+ , Bignum_bigint.of_string
+ "46730676600197705465960490527225757352559615957463874893868944815778370642915"
+ )
+ in
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let cs = test_double Secp256k1.params point expected_result in
+
+    (* CS reuse again *)
+ let point =
+ ( Bignum_bigint.of_string
+ "3994127195658013268703905225007935609302368792888634855477505418126918261961"
+ , Bignum_bigint.of_string
+ "25535899907968670181603106060653290873698485840006655398881908734054954693109"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "85505889528097925687832670439248941652336655858213625210338216314923495678594"
+ , Bignum_bigint.of_string
+ "49191910521103183437466384378802260055879125327516949990516385020354020159575"
+ )
+ in
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs = test_double ~cs Secp256k1.params point expected_result in
+
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ (* Wrong constraint system *)
+ test_double ~cs:cs0 Secp256k1.params point expected_result ) ) ;
+
+ assert (
+ Common.is_error (fun () ->
+ (* Wrong answer *)
+ let wrong_result =
+ ( Bignum_bigint.of_string
+ "6412514063090203022225668498768852033918664033020116827066881895897922497918"
+ , Bignum_bigint.of_string
+ "46730676600197705465960490527225757352559615957463874893868944815778370642914"
+ )
+ in
+ test_double Secp256k1.params point wrong_result ) ) ;
+
+ () )
+
+let%test_unit "Ec_group.double_chained" =
+ if basic_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test group double chaining *)
+ let test_double_chained ?cs (curve : Curve_params.t)
+ (point : Affine.bignum_point) (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ let result =
+ double (module Runner.Impl) unused_external_checks curve point
+ in
+ let result =
+ double (module Runner.Impl) unused_external_checks curve result
+ in
+
+ (* Check for expected quantity of external checks *)
+ if Bignum_bigint.(curve.bignum.a = zero) then
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 16 )
+ else
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 18 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+ 8 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal
+ unused_external_checks.compact_multi_ranges 8 ) ;
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ result expected_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ let _cs =
+ let fake_curve0 =
+ Curve_params.
+ { default with
+ modulus = Bignum_bigint.of_int 7879
+ ; a = Bignum_bigint.of_int 17
+ }
+ in
+ let point = (Bignum_bigint.of_int 1729, Bignum_bigint.of_int 4830) in
+ let expected_result =
+ (Bignum_bigint.of_int 355, Bignum_bigint.of_int 3132)
+ in
+ assert (is_on_curve_bignum_point fake_curve0 point) ;
+ assert (is_on_curve_bignum_point fake_curve0 expected_result) ;
+ test_double_chained fake_curve0 point expected_result
+ in
+
+ let point =
+ ( Bignum_bigint.of_string
+ "42044065574201065781794313442437176970676726666507255383911343977315911214824"
+ , Bignum_bigint.of_string
+ "31965905005059593108764147692698952070443290622957461138987132030153087962524"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "25296422933760701668354080561191268087967569090553018544803607419093394376171"
+ , Bignum_bigint.of_string
+ "8046470730121032635013615006105175410553103561598164661406103935504325838485"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs = test_double_chained Secp256k1.params point expected_result in
+ () )
+
+let%test_unit "Ec_group.double_full" =
+ if basic_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test group double (full circuit with external checks) *)
+ let test_double_full ?cs (curve : Curve_params.t)
+ (point : Affine.bignum_point) (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness *)
+ let external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* P + P = D *)
+ let result =
+ double (module Runner.Impl) external_checks curve point
+ in
+
+ (* Add input point to external checks *)
+ Foreign_field.(
+ External_checks.append_bound_check external_checks
+ @@ Element.Standard.to_limbs @@ Affine.x point) ;
+ Foreign_field.(
+ External_checks.append_bound_check external_checks
+ @@ Element.Standard.to_limbs @@ Affine.y point) ;
+
+ (* Add result to external checks *)
+ Foreign_field.(
+ External_checks.append_bound_check external_checks
+ @@ Element.Standard.to_limbs @@ Affine.x result) ;
+ Foreign_field.(
+ External_checks.append_bound_check external_checks
+ @@ Element.Standard.to_limbs @@ Affine.y result) ;
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ result expected_result ) ) ;
+
+ (*
+ * Perform external checks
+ *)
+
+ (* Sanity checks *)
+ if Bignum_bigint.(curve.bignum.a = zero) then
+ assert (Mina_stdlib.List.Length.equal external_checks.bounds 12)
+ else assert (Mina_stdlib.List.Length.equal external_checks.bounds 13) ;
+ assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 4) ;
+ assert (
+ Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges
+ 4 ) ;
+
+ (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *)
+ Foreign_field.constrain_external_checks
+ (module Runner.Impl)
+ external_checks curve.modulus ;
+
+ () )
+ in
+
+ cs
+ in
+
+ let point =
+ ( Bignum_bigint.of_string
+ "422320656143453469357911138554881092132771509739438645920469442837105323580"
+ , Bignum_bigint.of_string
+ "99573693339481125202377937570343422789783140695684047090890158240546390265715"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "111592986473580724183094323045895279290564238712238558254671818420787861656338"
+ , Bignum_bigint.of_string
+ "21999887286188040786039896471521925680577344653927821650184541049020329991940"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs = test_double_full Secp256k1.params point expected_result in
+
+ let point =
+ ( Bignum_bigint.of_string
+ "35572202113406269203741773940276421270986156279943921117631530910348880407195"
+ , Bignum_bigint.of_string
+ "77949858788528057664678921426007070786227653051729292366956150514299227362888"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "77054343462981168852324254689119448477035493875004605555517034503407691682302"
+ , Bignum_bigint.of_string
+ "71816304404296379298724767646016383731405297016881176644824032740912066853658"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs = test_double_full Secp256k1.params point expected_result in
+
+ () )
+
+let%test_unit "Ec_group.ops_mixed" =
+ if basic_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+    (* Test mix of group operations (e.g. that things are wired correctly) *)
+ let test_group_ops_mixed ?cs (curve : Curve_params.t)
+ (left_input : Affine.bignum_point) (right_input : Affine.bignum_point)
+ (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let left_input =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ left_input
+ in
+ let right_input =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ right_input
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* R + L = S *)
+ let sum =
+ add
+ (module Runner.Impl)
+ unused_external_checks curve left_input right_input
+ in
+
+ (* S + S = D *)
+ let double =
+ double (module Runner.Impl) unused_external_checks curve sum
+ in
+
+ (* Check for expected quantity of external checks *)
+ if Bignum_bigint.(curve.bignum.a = zero) then
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 14 )
+ else
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 15 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+ 7 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal
+ unused_external_checks.compact_multi_ranges 7 ) ;
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ double expected_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ let _cs =
+ let fake_curve =
+ Curve_params.
+ { default with
+ modulus = Bignum_bigint.of_int 7879
+ ; a = Bignum_bigint.of_int 17
+ }
+ in
+ let point1 = (Bignum_bigint.of_int 1729, Bignum_bigint.of_int 4830) in
+ let point2 = (Bignum_bigint.of_int 993, Bignum_bigint.of_int 622) in
+ let expected_result =
+ (Bignum_bigint.of_int 6762, Bignum_bigint.of_int 4635)
+ in
+ assert (is_on_curve_bignum_point fake_curve point1) ;
+ assert (is_on_curve_bignum_point fake_curve point2) ;
+ assert (is_on_curve_bignum_point fake_curve expected_result) ;
+
+ test_group_ops_mixed fake_curve point1 point2 expected_result
+ in
+
+ let point1 =
+ ( Bignum_bigint.of_string
+ "37404488720929062958906788322651728322575666040491554170565829193307192693651"
+ , Bignum_bigint.of_string
+ "9656313713772632982161856264262799630428732532087082991934556488549329780427"
+ )
+ in
+ let point2 =
+ ( Bignum_bigint.of_string
+ "31293985021118266786561893156019691372812643656725598796588178883202613100468"
+ , Bignum_bigint.of_string
+ "62519749065576060946018142578164411421793328932510041279923944104940749401503"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "43046886127279816590953923378970473409794361644471707353489087385548452456295"
+ , Bignum_bigint.of_string
+ "67554760054687646408788973635096250584575090419180209042279187069048864087921"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point1) ;
+ assert (is_on_curve_bignum_point Secp256k1.params point2) ;
+ assert (is_on_curve_bignum_point Secp256k1.params expected_result) ;
+
+ let _cs =
+ test_group_ops_mixed Secp256k1.params point1 point2 expected_result
+ in
+ () )
+
+let%test_unit "Ec_group.properties" =
+ if basic_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test group properties *)
+ let test_group_properties ?cs (curve : Curve_params.t)
+ (point_a : Affine.bignum_point) (point_b : Affine.bignum_point)
+ (point_c : Affine.bignum_point)
+ (expected_commutative_result : Affine.bignum_point)
+ (expected_associative_result : Affine.bignum_point)
+ (expected_distributive_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let point_a =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point_a
+ in
+ let point_b =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point_b
+ in
+ let point_c =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point_c
+ in
+ let expected_commutative_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_commutative_result
+ in
+ let expected_associative_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_associative_result
+ in
+ let expected_distributive_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_distributive_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (*
+ * Commutative property tests
+ *
+ * A + B = B + A
+ *)
+ let a_plus_b =
+ (* A + B *)
+ add
+ (module Runner.Impl)
+ unused_external_checks curve point_a point_b
+ in
+
+ let b_plus_a =
+ (* B + A *)
+ add
+ (module Runner.Impl)
+ unused_external_checks curve point_b point_a
+ in
+
+            (* TODO: add equality wiring *)
+ (* Assert A + B = B + A *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover (module Runner.Impl) a_plus_b b_plus_a ) ) ;
+
+ (* Assert A + B = expected_commutative_result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ a_plus_b expected_commutative_result ) ) ;
+
+ (*
+ * Associativity property tests
+ *
+ * (A + B) + C = A + (B + C)
+ *)
+ let b_plus_c =
+ (* B + C *)
+ add
+ (module Runner.Impl)
+ unused_external_checks curve point_b point_c
+ in
+
+ let a_plus_b_plus_c =
+ (* (A + B) + C *)
+ add
+ (module Runner.Impl)
+ unused_external_checks curve a_plus_b point_c
+ in
+
+ let b_plus_c_plus_a =
+ (* A + (B + C) *)
+ add
+ (module Runner.Impl)
+ unused_external_checks curve point_a b_plus_c
+ in
+
+ (* Assert (A + B) + C = A + (B + C) *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ a_plus_b_plus_c b_plus_c_plus_a ;
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ a_plus_b_plus_c b_plus_c_plus_a ) ) ;
+
+            (* Assert (A + B) + C = expected_associative_result *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ a_plus_b_plus_c expected_associative_result ;
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ a_plus_b_plus_c expected_associative_result ) ) ;
+
+ (*
+ * Distributive property tests
+ *
+ * 2 * (A + B) = 2 * A + 2 * B
+ *)
+ let double_of_sum =
+ (* 2 * (A + B) *)
+ double (module Runner.Impl) unused_external_checks curve a_plus_b
+ in
+
+ let double_a =
+ (* 2 * A *)
+ double (module Runner.Impl) unused_external_checks curve point_a
+ in
+
+ let double_b =
+ (* 2 * B *)
+ double (module Runner.Impl) unused_external_checks curve point_b
+ in
+
+ let sum_of_doubles =
+ (* 2 * A + 2 * B *)
+ add
+ (module Runner.Impl)
+ unused_external_checks curve double_a double_b
+ in
+
+ (* Assert 2 * (A + B) = 2 * A + 2 * B *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ double_of_sum sum_of_doubles ;
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ double_of_sum sum_of_doubles ) ) ;
+
+ (* Assert 2 * (A + B) = expected_distributive_result *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ double_of_sum expected_distributive_result ;
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ double_of_sum expected_distributive_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (* Test with secp256k1 curve *)
+ let point_a =
+ ( Bignum_bigint.of_string
+ "104139740379639537914620141697889522643195068624996157573145175343741564772195"
+ , Bignum_bigint.of_string
+ "24686993868898088086788882517246409097753788695591891584026176923146938009248"
+ )
+ in
+ let point_b =
+ ( Bignum_bigint.of_string
+ "36743784007303620043843440776745227903854397846775577839885696093428264537689"
+ , Bignum_bigint.of_string
+ "37572687997781202307536515813734773072395389211771147301250986255900442183367"
+ )
+ in
+ let point_c =
+ ( Bignum_bigint.of_string
+ "49696436312078070273833592624394555921078337653960324106519507173094660966846"
+ , Bignum_bigint.of_string
+ "8233980127281521579593600770666525234073102501648621450313070670075221490597"
+ )
+ in
+ let expected_commutative_result =
+ (* A + B *)
+ ( Bignum_bigint.of_string
+ "82115184826944281192212047494549730220285137025844635077989275753462094545317"
+ , Bignum_bigint.of_string
+ "65806312870411158102677100909644698935674071740730856487954465264167266803940"
+ )
+ in
+ let expected_associative_result =
+ (* A + B + C *)
+ ( Bignum_bigint.of_string
+ "32754193298666340516904674847278729692077935996237244820399615298932008086168"
+ , Bignum_bigint.of_string
+ "98091569220567533408383096211571578494419313923145170353903484742714309353581"
+ )
+ in
+ (* 2* (A + B) *)
+ let expected_distributive_result =
+ ( Bignum_bigint.of_string
+ "92833221040863134022467437260311951512477869225271942781021131905899386232859"
+ , Bignum_bigint.of_string
+ "88875130971526456079808346479572776785614636860343295137331156710761285100759"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point_a) ;
+ assert (is_on_curve_bignum_point Secp256k1.params point_b) ;
+ assert (is_on_curve_bignum_point Secp256k1.params point_c) ;
+ assert (
+ is_on_curve_bignum_point Secp256k1.params expected_commutative_result ) ;
+ assert (
+ is_on_curve_bignum_point Secp256k1.params expected_associative_result ) ;
+ assert (
+ is_on_curve_bignum_point Secp256k1.params expected_distributive_result ) ;
+
+ let _cs =
+ test_group_properties Secp256k1.params point_a point_b point_c
+ expected_commutative_result expected_associative_result
+ expected_distributive_result
+ in
+
+ (*
+ * Test with NIST P-224 curve
+ * y^2 = x^3 -3 * x + 18958286285566608000408668544493926415504680968679321075787234672564
+ *)
+ let p224_curve =
+ Curve_params.
+ { default with
+ modulus =
+ Bignum_bigint.of_string
+ "0xffffffffffffffffffffffffffffffff000000000000000000000001"
+ ; a =
+ (* - 3 *)
+ Bignum_bigint.of_string
+ "0xfffffffffffffffffffffffffffffffefffffffffffffffffffffffe"
+ (* Note: p224 a_param < vesta_modulus *)
+ ; b =
+ (* 18958286285566608000408668544493926415504680968679321075787234672564 *)
+ Bignum_bigint.of_string
+ "0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4"
+ }
+ in
+
+ let point_a =
+ ( Bignum_bigint.of_string
+ "20564182195513988720077877094445678909500371329094056390559170498601"
+ , Bignum_bigint.of_string
+ "2677931089606376366731934050370502738338362171950142296573730478996"
+ )
+ in
+ let point_b =
+ ( Bignum_bigint.of_string
+ "15331822097908430690332647239357533892026967275700588538504771910797"
+ , Bignum_bigint.of_string
+ "4049755097518382314285232898392449281690500011901831745754040069555"
+ )
+ in
+ let point_c =
+ ( Bignum_bigint.of_string
+ "25082387259758106010480779115787834869202362152205819097823199674591"
+ , Bignum_bigint.of_string
+ "5836788343546154757468239805956174785568118741436223437725908467573"
+ )
+ in
+ let expected_commutative_result =
+ (* A + B *)
+ ( Bignum_bigint.of_string
+ "7995206472745921825893910722935139765985673196416788824369950333191"
+ , Bignum_bigint.of_string
+ "8265737252928447574971649463676620963677557474048291412774437728538"
+ )
+ in
+ let expected_associative_result =
+ (* A + B + C *)
+ ( Bignum_bigint.of_string
+ "3257699169520051230744895047894307554057883749899622226174209882724"
+ , Bignum_bigint.of_string
+ "7231957109409135332430424812410043083405298563323557216003172539215"
+ )
+ in
+ (* 2 * (A + B) *)
+ let expected_distributive_result =
+ ( Bignum_bigint.of_string
+ "12648120179660537445264809843313333879121180184951710403373354501995"
+ , Bignum_bigint.of_string
+ "130351274476047354152272911484022089680853927680837325730785745821"
+ )
+ in
+ assert (is_on_curve_bignum_point p224_curve point_a) ;
+ assert (is_on_curve_bignum_point p224_curve point_b) ;
+ assert (is_on_curve_bignum_point p224_curve point_c) ;
+ assert (is_on_curve_bignum_point p224_curve expected_commutative_result) ;
+ assert (is_on_curve_bignum_point p224_curve expected_associative_result) ;
+ assert (is_on_curve_bignum_point p224_curve expected_distributive_result) ;
+
+ let _cs =
+ test_group_properties p224_curve point_a point_b point_c
+ expected_commutative_result expected_associative_result
+ expected_distributive_result
+ in
+
+ (*
+ * Test with bn254 curve
+ * y^2 = x^3 + 0 * x + 2
+ *)
+ let bn254_curve =
+ Curve_params.
+ { default with
+ modulus =
+ Bignum_bigint.of_string
+ "16798108731015832284940804142231733909889187121439069848933715426072753864723"
+ ; a = Bignum_bigint.of_int 0
+ ; b = Bignum_bigint.of_int 2
+ }
+ in
+
+ let point_a =
+ ( Bignum_bigint.of_string
+ "7489139758950854827551487063927077939563321761044181276420624792983052878185"
+ , Bignum_bigint.of_string
+ "2141496180075348025061594016907544139242551437114964865155737156269728330559"
+ )
+ in
+ let point_b =
+ ( Bignum_bigint.of_string
+ "9956514278304933003335636627606783773825106169180128855351756770342193930117"
+ , Bignum_bigint.of_string
+ "1762095167736644705377345502398082775379271270251951679097189107067141702434"
+ )
+ in
+ let point_c =
+ ( Bignum_bigint.of_string
+ "15979993511612396332695593711346186397534040520881664680241489873512193259980"
+ , Bignum_bigint.of_string
+ "10163302455117602785156120251106605625181898385895334763785764107729313787391"
+ )
+ in
+ let expected_commutative_result =
+ (* A + B *)
+ ( Bignum_bigint.of_string
+ "13759678784866515747881317697821131633872329198354290325517257690138811932261"
+ , Bignum_bigint.of_string
+ "4040037229868341675068324615541961445935091050207890024311587166409180676332"
+ )
+ in
+ let expected_associative_result =
+ (* A + B + C *)
+ ( Bignum_bigint.of_string
+ "16098676871974911854784905872738346730775870232298829667865365025475731380192"
+ , Bignum_bigint.of_string
+ "12574401007382321193248731381385712204251317924015127170657534965607164101869"
+ )
+ in
+ (* 2 * (A + B) *)
+ let expected_distributive_result =
+ ( Bignum_bigint.of_string
+ "9395314037281443688092936149000099903064729021023078772338895863158377429106"
+ , Bignum_bigint.of_string
+ "14218226539011623427628171089944499674924086623747284955166459983416867234215"
+ )
+ in
+ assert (is_on_curve_bignum_point bn254_curve point_a) ;
+ assert (is_on_curve_bignum_point bn254_curve point_b) ;
+ assert (is_on_curve_bignum_point bn254_curve point_c) ;
+ assert (is_on_curve_bignum_point bn254_curve expected_commutative_result) ;
+ assert (is_on_curve_bignum_point bn254_curve expected_associative_result) ;
+ assert (is_on_curve_bignum_point bn254_curve expected_distributive_result) ;
+
+ let _cs =
+ test_group_properties bn254_curve point_a point_b point_c
+ expected_commutative_result expected_associative_result
+ expected_distributive_result
+ in
+
+ (*
+ * Test with (Pasta) Pallas curve (on Vesta native)
+ * y^2 = x^3 + 5
+ *)
+ let pallas_curve =
+ Curve_params.
+ { default with
+ modulus =
+ Bignum_bigint.of_string
+ "28948022309329048855892746252171976963363056481941560715954676764349967630337"
+ ; a = Bignum_bigint.of_int 0
+ ; b = Bignum_bigint.of_int 5
+ }
+ in
+
+ let point_a =
+ ( Bignum_bigint.of_string
+ "3687554385661875988153708668118568350801595287403286241588941623974773451174"
+ , Bignum_bigint.of_string
+ "4125300560830971348224390975663473429075828688503632065713036496032796088150"
+ )
+ in
+ let point_b =
+ ( Bignum_bigint.of_string
+ "13150688393980970390008393861087383374732464068960495642594966124646063172404"
+ , Bignum_bigint.of_string
+ "2084472543720136255281934655991399553143524556330848293815942786297013884533"
+ )
+ in
+ let point_c =
+ ( Bignum_bigint.of_string
+ "26740989696982304482414554371640280045791606641637898228291292575942109454805"
+ , Bignum_bigint.of_string
+ "14906024627800344780747375705291059367428823794643427263104879621768813059138"
+ )
+ in
+ let expected_commutative_result =
+ (* A + B *)
+ ( Bignum_bigint.of_string
+ "11878681988771676869370724830611253729756170947285460876552168044614948225457"
+ , Bignum_bigint.of_string
+ "14497133356854845193720136968564933709713968802446650329644811738138289288792"
+ )
+ in
+ let expected_associative_result =
+ (* A + B + C *)
+ ( Bignum_bigint.of_string
+ "8988194870545558903676114324437227470798902472195505563098874771184576333284"
+ , Bignum_bigint.of_string
+ "2715074574400479059415686517976976756653616385004805753779147804207672517454"
+ )
+ in
+ (* 2 * (A + B) *)
+ let expected_distributive_result =
+ ( Bignum_bigint.of_string
+ "5858337972845412034234591451268195730728808894992644330419904703508222498795"
+ , Bignum_bigint.of_string
+ "7758708768756582293117808728373210197717986974150537098853332332749930840785"
+ )
+ in
+ assert (is_on_curve_bignum_point pallas_curve point_a) ;
+ assert (is_on_curve_bignum_point pallas_curve point_b) ;
+ assert (is_on_curve_bignum_point pallas_curve point_c) ;
+ assert (is_on_curve_bignum_point pallas_curve expected_commutative_result) ;
+ assert (is_on_curve_bignum_point pallas_curve expected_associative_result) ;
+ assert (is_on_curve_bignum_point pallas_curve expected_distributive_result) ;
+
+ let _cs =
+ test_group_properties pallas_curve point_a point_b point_c
+ expected_commutative_result expected_associative_result
+ expected_distributive_result
+ in
+
+ () )
+
+(*******************************)
+(* Scalar multiplication tests *)
+(*******************************)
+
+let%test_unit "Ec_group.is_on_curve" =
+ if scalar_mul_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test is_on_curve *)
+ let test_is_on_curve ?cs (curve : Curve_params.t)
+ (point : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants
+ (module Runner.Impl)
+ curve ~use_precomputed_gen_doubles:false
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Check point is on elliptic curve *)
+ is_on_curve (module Runner.Impl) unused_external_checks curve point ;
+
+ (* Check for expected quantity of external checks *)
+ let bound_checks_count = ref 3 in
+ if not Bignum_bigint.(curve.bignum.a = zero) then
+ bound_checks_count := !bound_checks_count + 1 ;
+ if not Bignum_bigint.(curve.bignum.b = zero) then
+ bound_checks_count := !bound_checks_count + 1 ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds
+ !bound_checks_count ) ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+ 3 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal
+ unused_external_checks.compact_multi_ranges 3 ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (* Positive tests *)
+ let _cs = test_is_on_curve Secp256k1.params Secp256k1.params.gen in
+
+ let good_pt =
+ ( Bignum_bigint.of_string
+ "18950551679048287927361677965259288422489066940346827203675447914841748996155"
+ , Bignum_bigint.of_string
+ "47337572658241658062145739798014345835092764795141449413289521900680935648400"
+ )
+ in
+ let _cs = test_is_on_curve Secp256k1.params good_pt in
+ let neg_good_pt =
+ let x, y = good_pt in
+ (x, Bignum_bigint.((zero - y) % Secp256k1.params.modulus))
+ in
+ let _cs = test_is_on_curve Secp256k1.params neg_good_pt in
+
+ (* Test with y^2 = x^3 -3 * x + 18958286285566608000408668544493926415504680968679321075787234672564 *)
+ let curve_p224 =
+ Curve_params.
+ { default with
+ modulus =
+ Bignum_bigint.of_string
+ "0xffffffffffffffffffffffffffffffff000000000000000000000001"
+ (* ; order = Bignum_bigint.one *)
+ ; a =
+ Bignum_bigint.of_string
+ "0xfffffffffffffffffffffffffffffffefffffffffffffffffffffffe"
+ ; b =
+ Bignum_bigint.of_string
+ "18958286285566608000408668544493926415504680968679321075787234672564"
+ }
+ in
+
+ let point =
+ ( Bignum_bigint.of_string
+ "20564182195513988720077877094445678909500371329094056390559170498601"
+ , Bignum_bigint.of_string
+ "2677931089606376366731934050370502738338362171950142296573730478996"
+ )
+ in
+ assert (is_on_curve_bignum_point curve_p224 point) ;
+ let _cs = test_is_on_curve curve_p224 point in
+
+ (* Test with elliptic curve y^2 = x^3 + 17 * x mod 7879 *)
+ let curve_c1 =
+ Curve_params.
+ { default with
+ modulus = Bignum_bigint.of_int 7879
+ ; a = Bignum_bigint.of_int 17
+ }
+ in
+ let _cs =
+ let point = (Bignum_bigint.of_int 7331, Bignum_bigint.of_int 888) in
+ assert (is_on_curve_bignum_point curve_c1 point) ;
+ test_is_on_curve curve_c1 point
+ in
+
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ let bad_pt =
+ ( Bignum_bigint.of_string
+ "67973637023329354644729732876692436096994797487488454090437075702698953132769"
+ , Bignum_bigint.of_string
+ "208096131279561713744990959402407452508030289249215221172372441421932322041350"
+ )
+ in
+ test_is_on_curve Secp256k1.params bad_pt ) ) ;
+
+ assert (
+ Common.is_error (fun () ->
+ let bad_pt = (Bignum_bigint.zero, Bignum_bigint.one) in
+ test_is_on_curve Secp256k1.params bad_pt ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ let bad_pt = (Bignum_bigint.one, Bignum_bigint.one) in
+ test_is_on_curve curve_p224 bad_pt ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ let bad_pt = (Bignum_bigint.of_int 2, Bignum_bigint.of_int 77) in
+ test_is_on_curve curve_c1 bad_pt ) ) ;
+ () )
+
+let%test_unit "Ec_group.check_ia" =
+ if scalar_mul_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test check_ia *)
+ let test_check_ia ?cs (curve : Curve_params.t)
+ (ia : Affine.bignum_point Curve_params.ia_points) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let ia =
+ Curve_params.ia_to_circuit_constants (module Runner.Impl) ia
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Check initial accumulator values *)
+ check_ia (module Runner.Impl) unused_external_checks curve ia ;
+
+ (* Check for expected quantity of external checks *)
+ let bounds_checks_count = ref 3 in
+ if not Bignum_bigint.(curve.bignum.a = zero) then
+ bounds_checks_count := !bounds_checks_count + 1 ;
+ if not Bignum_bigint.(curve.bignum.b = zero) then
+ bounds_checks_count := !bounds_checks_count + 1 ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds
+ !bounds_checks_count ) ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+ 3 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal
+ unused_external_checks.compact_multi_ranges 3 ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (*
+ * Positive tests
+ *)
+
+ (* Check secp256k1 initial accumulator (ia) points are correctly computed *)
+ let ia = compute_ia_points Secp256k1.params in
+ assert (Stdlib.(ia = Secp256k1.params.ia)) ;
+ assert (
+ Bignum_bigint.(
+ equal (fst ia.acc) (fst Secp256k1.params.ia.acc)
+ && equal (snd ia.acc) (snd Secp256k1.params.ia.acc)
+ && equal (fst ia.neg_acc) (fst Secp256k1.params.ia.neg_acc)
+ && equal (snd ia.neg_acc) (snd Secp256k1.params.ia.neg_acc)) ) ;
+
+ (* Check secp256k1 ia *)
+ let _cs = test_check_ia Secp256k1.params Secp256k1.params.ia in
+
+ (* Check computation and constraining of another ia *)
+ let some_pt =
+ ( Bignum_bigint.of_string
+ "67973637023329354644729732876692436096994797487488454090437075702698953132769"
+ , Bignum_bigint.of_string
+ "108096131279561713744990959402407452508030289249215221172372441421932322041359"
+ )
+ in
+ let ia = compute_ia_points Secp256k1.params ~point:some_pt in
+ assert (
+ Bignum_bigint.(
+ equal (fst ia.acc)
+ (Bignum_bigint.of_string
+ "77808213848094917079255757522755861813805484598820680171349097575367307923684" )) ) ;
+ assert (
+ Bignum_bigint.(
+ equal (snd ia.acc)
+ (Bignum_bigint.of_string
+ "53863434441850287308371409267019602514253829996603354269738630468061457326859" )) ) ;
+ assert (
+ Bignum_bigint.(
+ equal (fst ia.neg_acc)
+ (Bignum_bigint.of_string
+ "77808213848094917079255757522755861813805484598820680171349097575367307923684" )) ) ;
+ assert (
+ Bignum_bigint.(
+ equal (snd ia.neg_acc)
+ (Bignum_bigint.of_string
+ "61928654795465908115199575741668305339016154669037209769718953539847377344804" )) ) ;
+ let cs = test_check_ia Secp256k1.params ia in
+
+ (* Constraint system reuse *)
+ let some_pt2 =
+ ( Bignum_bigint.of_string
+ "33321203307284859285457570648264200146777100201560799373305582914511875834316"
+ , Bignum_bigint.of_string
+ "7129423920069223884043324693587298420542722670070397102650821528843979421489"
+ )
+ in
+ let another_ia2 = compute_ia_points Secp256k1.params ~point:some_pt2 in
+ let _cs = test_check_ia ~cs Secp256k1.params another_ia2 in
+
+ (*
+ * Negative tests
+ *)
+ assert (
+ Common.is_error (fun () ->
+ (* Bad negated ia *)
+ let neg_init_acc = Secp256k1.params.ia.neg_acc in
+ let bad_neg =
+ (fst neg_init_acc, Bignum_bigint.(snd neg_init_acc + one))
+ in
+ let bad_ia =
+ Curve_params.ia_of_points Secp256k1.params.ia.acc bad_neg
+ in
+ test_check_ia Secp256k1.params bad_ia ) ) ;
+
+ assert (
+ Common.is_error (fun () ->
+ (* init_acc is not on curve, but negative is good *)
+ let bad_pt =
+ ( Bignum_bigint.of_string
+ "73748207725492941843355928046090697797026070566443284126849221438943867210748"
+ , Bignum_bigint.of_string
+ "71805440039692371678177852429904809925653495989672587996663750265844216498843"
+ )
+ in
+ assert (not (is_on_curve_bignum_point Secp256k1.params bad_pt)) ;
+ let neg_bad_pt =
+ let x, y = bad_pt in
+ (x, Bignum_bigint.((zero - y) % Secp256k1.params.modulus))
+ in
+ let bad_ia = Curve_params.ia_of_points bad_pt neg_bad_pt in
+ test_check_ia Secp256k1.params bad_ia ) ) ;
+ () )
+
+let%test_unit "Ec_group.scalar_mul" =
+ if scalar_mul_tests_enabled then
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test elliptic curve scalar multiplication *)
+ let test_scalar_mul ?cs (curve : Curve_params.t) (scalar : Bignum_bigint.t)
+ (point : Affine.bignum_point) (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let scalar_bits =
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true scalar
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Q = sP *)
+ let result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve scalar_bits point
+ in
+
+ (* Check for expected quantity of external checks *)
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ result expected_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (*
+ * EC scalar multiplication tests
+ *)
+
+ (* Multiply by 1 *)
+ let scalar = Bignum_bigint.of_int 1 in
+ let point =
+ ( Bignum_bigint.of_string
+ "67973637023329354644729732876692436096994797487488454090437075702698953132769"
+ , Bignum_bigint.of_string
+ "108096131279561713744990959402407452508030289249215221172372441421932322041359"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point point in
+
+ (* Multiply by 3 *)
+ let scalar = Bignum_bigint.of_int 3 in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "157187898623115017197196263696044455473966365375620096488909462468556488992"
+ , Bignum_bigint.of_string
+ "8815915990003770986701969284580631365087521759318521999314517238992555623924"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ let scalar = Bignum_bigint.of_int 5 in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "51167536897757234729699532493775077246692685149885509345450034909880529264629"
+ , Bignum_bigint.of_string
+ "44029933166959533883508578962900776387952087967919619281016528212534310213626"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ let scalar = Bignum_bigint.of_int 6 in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "37941877700581055232085743160302884615963229784754572200220248617732513837044"
+ , Bignum_bigint.of_string
+ "103619381845871132282285745641400810486981078987965768860988615362483475376768"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ let scalar = Bignum_bigint.of_int 7 in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "98789585776319197684463328274590329296514884375780947918152956981890869725107"
+ , Bignum_bigint.of_string
+ "53439843286771287571705008292825119475125031375071120429905353259479677320421"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ (* Multiply by 391 (9-bits) *)
+ let scalar = Bignum_bigint.of_int 391 in
+ let point =
+ ( Bignum_bigint.of_string
+ "54895644447597143434988379138583445778456903839185254067441861567562618370751"
+ , Bignum_bigint.of_string
+ "104240867874630534073764110268869655023740253909668464291682942589488282068874"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "92358528850649079329920393962087666882076668287684124835881344341719861256355"
+ , Bignum_bigint.of_string
+ "27671880807027823848003850001152132266698242755975705342674616617508656063465"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ (* Multiply by 56081 (16-bits) = 0b1000 1000 1101 1011 *)
+ let scalar = Bignum_bigint.of_int 56081 in
+ let point =
+ ( Bignum_bigint.of_string
+ "49950185608981313523985721024498375953313579282523275566585584189656370223502"
+ , Bignum_bigint.of_string
+ "63146279987886420302806526994276928563454160280333237123111833753346399349172"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "108851670764886172021315090022738025632501895048831561535857748171372817371035"
+ , Bignum_bigint.of_string
+ "39836887958851910836029687008284321008437801650048469660046898576758470452396"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ (* Multiply by full-size secp256k1 scalar (256-bits) *)
+ let scalar =
+ Bignum_bigint.of_string
+ "99539640504241691246180604816121958450675059637016987953058113537095650715171"
+ in
+ let point =
+ ( Bignum_bigint.of_string
+ "68328903637429126750778604407754814031272668830649072423942370967409226150426"
+ , Bignum_bigint.of_string
+ "115181214446139478209347980655067703553667234783111668132659797097404834370543"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "39225021357252528375135552880830100632566425214595783585248505195330577648905"
+ , Bignum_bigint.of_string
+ "29440534631649867975583896121458013539074827830686556074829823458426851891598"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ (* Multiply by another full-size secp256k1 scalar (256-bits) *)
+ let scalar =
+ Bignum_bigint.of_string
+ "35756276706511369289499344520446188493221382068841792677286014237073874389678"
+ in
+ let point =
+ ( Bignum_bigint.of_string
+ "43525911664736252471195991194779124044474905699728523733063794335880455509831"
+ , Bignum_bigint.of_string
+ "55128733880722898542773180558916537797992134106308528712389282845794719232809"
+ )
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "92989598011225532261029933411922200506770253480509168102582704300806548851952"
+ , Bignum_bigint.of_string
+ "91632035281581329897770791332253791028537996389304501325297573948973121537913"
+ )
+ in
+ let _cs = test_scalar_mul Secp256k1.params scalar point expected_result in
+
+ (* Compute secp256k1 pub key from secret key *)
+ let scalar =
+ Bignum_bigint.of_string
+ "88112557240431687619949876834386306142823675858092281192015740375511510392207"
+ in
+ let expected_pubkey =
+ ( Bignum_bigint.of_string
+ "50567548908598322015490923046917426159132337313161362096244889522774999144344"
+ , Bignum_bigint.of_string
+ "35561449820918632865961375836489131575522128704654117756369029278244987778295"
+ )
+ in
+ let cs =
+ test_scalar_mul Secp256k1.params scalar Secp256k1.params.gen
+ expected_pubkey
+ in
+ (* Constraint system reuse *)
+ let scalar =
+ Bignum_bigint.of_string
+ "93102346685989503200550820820601664115283772668359982393657391253613200462560"
+ in
+ let expected_pt =
+ ( Bignum_bigint.of_string
+ "115384145918035657737810677734903949889161796282962842129612290299404313800919"
+ , Bignum_bigint.of_string
+ "86432196125585910060501672565270170370528330974696895998365685616223611168261"
+ )
+ in
+ let _cs =
+ test_scalar_mul ~cs Secp256k1.params scalar Secp256k1.params.gen
+ expected_pt
+ in
+ ()
+
+let%test_unit "Ec_group.scalar_mul_properties" =
+ if scalar_mul_tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test elliptic curve scalar multiplication properties *)
+ let test_scalar_mul_properties ?cs (curve : Curve_params.t)
+ (a_scalar : Bignum_bigint.t) (b_scalar : Bignum_bigint.t)
+ (point : Affine.bignum_point) (a_expected_result : Affine.bignum_point)
+ (b_expected_result : Affine.bignum_point)
+ (a_plus_b_expected : Affine.bignum_point)
+ (a_times_b_expected : Affine.bignum_point)
+ (negation_expected : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants (module Runner.Impl) curve
+ in
+ let a_scalar_bits =
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true a_scalar
+ in
+ let b_scalar_bits =
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true b_scalar
+ in
+ let c_scalar_bits =
+ let c_scalar =
+ Bignum_bigint.((a_scalar + b_scalar) % curve.bignum.order)
+ in
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true c_scalar
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let a_expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ a_expected_result
+ in
+ let b_expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ b_expected_result
+ in
+ let a_plus_b_expected =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ a_plus_b_expected
+ in
+ let a_times_b_expected =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ a_times_b_expected
+ in
+ let negation_expected =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ negation_expected
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (*
+ * Check distributive property with adding scalars: aP + bP = (a + b)P
+ *)
+
+ (* A = aP *)
+ let a_result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve a_scalar_bits point
+ in
+
+ (* B = bP *)
+ let b_result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve b_scalar_bits point
+ in
+
+ (* C = (a + b)P *)
+ let a_plus_b_result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve c_scalar_bits point
+ in
+
+ (* A + B *)
+ let a_result_plus_b_result =
+ add
+ (module Runner.Impl)
+ unused_external_checks curve a_result b_result
+ in
+
+ (* Assert aP = expected A *)
+ Affine.assert_equal (module Runner.Impl) a_result a_expected_result ;
+ (* Assert bP = expected B *)
+ Affine.assert_equal (module Runner.Impl) b_result b_expected_result ;
+ (* Assert (a + b)P = expected *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ a_plus_b_result a_plus_b_expected ;
+ (* Assert A + B = (a + b)P = cP *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ a_result_plus_b_result a_plus_b_result ;
+
+ (*
+ * Check distributive property with multiplying scalars: [a]bP = [b]aP = [a*b]P
+ *)
+
+ (* [a]bP *)
+ let a_b_result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve a_scalar_bits b_result
+ in
+
+ (* [b]aP *)
+ let b_a_result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve b_scalar_bits a_result
+ in
+
+ (* Compute a*b as foreign field multiplication in scalar field *)
+ let ab_scalar_bits =
+ let ab_scalar =
+ Bignum_bigint.(a_scalar * b_scalar % curve.bignum.order)
+ in
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true ab_scalar
+ in
+
+ (* (a * b)P *)
+ let ab_result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve ab_scalar_bits point
+ in
+
+ (* Assert [a]bP = [b]aP *)
+ Affine.assert_equal (module Runner.Impl) a_b_result b_a_result ;
+ (* Assert [b]aP = (a * b)P *)
+ Affine.assert_equal (module Runner.Impl) b_a_result ab_result ;
+ (* Assert (a * b)P = expected *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ ab_result a_times_b_expected ;
+
+ (*
+ * Check scaling computes with negation: [-a]P = -(aP)
+ *)
+
+ (* Compute -a_scalar witness *)
+ let minus_a_scalar_bits =
+ let minus_a_scalar =
+ Bignum_bigint.(-a_scalar % curve.bignum.order)
+ in
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true minus_a_scalar
+ in
+
+ (* [-a]P *)
+ let minus_a_result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve minus_a_scalar_bits point
+ in
+
+ (* -(aP) *)
+ let negated_a_result = negate (module Runner.Impl) curve a_result in
+ (* Result row: need to write negated y-coordinate to row in order to assert_equal on it *)
+ Foreign_field.result_row
+ (module Runner.Impl)
+ ~label:"negation_property_check"
+ @@ Affine.y negated_a_result ;
+
+ (* Assert [-a]P = -(aP) *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ minus_a_result negated_a_result ;
+ (* Assert -(aP) = expected *)
+ Affine.assert_equal
+ (module Runner.Impl)
+ negated_a_result negation_expected ;
+
+ () )
+ in
+
+ cs
+ in
+
+ (*
+ * EC scalar multiplication properties tests
+ *)
+
+ (* Tests with generator *)
+ let a_scalar =
+ Bignum_bigint.of_string
+ "79401928295407367700174300280555320402843131478792245979539416476579739380993"
+ in
+ (* aG *)
+ let a_expected =
+ ( Bignum_bigint.of_string
+ "17125835931983334217694156357722716412757965999176597307946554943053675538785"
+ , Bignum_bigint.of_string
+ "46388026915780724534166509048612278793220290073988306084942872130687658791661"
+ )
+ in
+ let b_scalar =
+ Bignum_bigint.of_string
+ "89091288558408807474211262098870527285408764120538440460973310880924228023627"
+ in
+ (* bG *)
+ let b_expected =
+ ( Bignum_bigint.of_string
+ "79327061200655101960260174492040176163202074463842535225851740487556039447898"
+ , Bignum_bigint.of_string
+ "17719907321698144940791372349744661269763063699265755816142522447977929876765"
+ )
+ in
+ (* (a + b)G *)
+ let a_plus_b_expected =
+ ( Bignum_bigint.of_string
+ "81040990384669475923010997008987195868838198748766130146528604954229008315134"
+ , Bignum_bigint.of_string
+ "34561268318835956667566052477444512933985042899902969559255322703897774718063"
+ )
+ in
+ (* (a * b)G *)
+ let a_times_b_expected =
+ ( Bignum_bigint.of_string
+ "81456477659851325370442471400511783773782655276230587738882014172211964156628"
+ , Bignum_bigint.of_string
+ "95026373302104994624825470484745116441888023752189438912144935562310761663097"
+ )
+ in
+ (* [-a]G *)
+ let negation_expected =
+ ( Bignum_bigint.of_string
+ "17125835931983334217694156357722716412757965999176597307946554943053675538785"
+ , Bignum_bigint.of_string
+ "69404062321535470889404475960075629060049694591652257954514711877221175880002"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params a_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params b_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params a_plus_b_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params a_times_b_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params negation_expected) ;
+
+ let _cs =
+ test_scalar_mul_properties Secp256k1.params a_scalar b_scalar
+ Secp256k1.params.gen a_expected b_expected a_plus_b_expected
+ a_times_b_expected negation_expected
+ in
+
+ (* Tests with another curve point *)
+ let point =
+ ( Bignum_bigint.of_string
+ "33774054739397672981116348681092907963399779523481500939771509974082662984990"
+ , Bignum_bigint.of_string
+ "60414776605185041994402340927179985824709402511452021592188768672640080416757"
+ )
+ in
+ let a_scalar =
+ Bignum_bigint.of_string
+ "101698197574283114939368343806106834988902354006673798485060078476846328099457"
+ in
+ (* aP *)
+ let a_expected =
+ ( Bignum_bigint.of_string
+ "75195284589272297831705973079897644085806639251981864022525558637369799002975"
+ , Bignum_bigint.of_string
+ "21318219854954928210493202207122232794689530644716510309784081397689563830643"
+ )
+ in
+ let b_scalar =
+ Bignum_bigint.of_string
+ "29906750163917842454712060592346612426879165698013462577595179415632189050569"
+ in
+ (* bP *)
+ let b_expected =
+ ( Bignum_bigint.of_string
+ "31338730031552911193929716320599408654845663804319033450328019997834721773857"
+ , Bignum_bigint.of_string
+ "19509931248131549366806268091016515808560677012657535095393179462073374184004"
+ )
+ in
+ (* (a + b)P *)
+ let a_plus_b_expected =
+ ( Bignum_bigint.of_string
+ "3785015531479612950834562670482118046158085046729801327010146109899305257240"
+ , Bignum_bigint.of_string
+ "67252551234352942899384104854542424500400416990163373189382133933498016564076"
+ )
+ in
+ (* (a * b)P *)
+ let a_times_b_expected =
+ ( Bignum_bigint.of_string
+ "104796198157638974641325627725056289938393733264860209068332598339943619687138"
+ , Bignum_bigint.of_string
+ "62474612839119693016992187953610680368302121786246432257338185158014628586401"
+ )
+ in
+ (* [-a]P *)
+ let negation_expected =
+ ( Bignum_bigint.of_string
+ "75195284589272297831705973079897644085806639251981864022525558637369799002975"
+ , Bignum_bigint.of_string
+ "94473869382361267213077782801565675058580454020924053729673502610219270841020"
+ )
+ in
+
+ assert (is_on_curve_bignum_point Secp256k1.params point) ;
+ assert (is_on_curve_bignum_point Secp256k1.params a_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params b_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params a_plus_b_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params a_times_b_expected) ;
+ assert (is_on_curve_bignum_point Secp256k1.params negation_expected) ;
+
+ let _cs =
+ test_scalar_mul_properties Secp256k1.params a_scalar b_scalar point
+ a_expected b_expected a_plus_b_expected a_times_b_expected
+ negation_expected
+ in
+ () )
+
+let%test_unit "Ec_group.scalar_mul_tiny" =
+ if scalar_mul_tests_enabled then
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test elliptic curve scalar multiplication with tiny scalar *)
+ let test_scalar_mul_tiny ?cs (curve : Curve_params.t)
+ (scalar : Bignum_bigint.t) (point : Affine.bignum_point)
+ (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants
+ (module Runner.Impl)
+ curve ~use_precomputed_gen_doubles:false
+ in
+ let scalar_bits =
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true scalar
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Q = sP *)
+ let result =
+ scalar_mul
+ (module Runner.Impl)
+ unused_external_checks curve scalar_bits point
+ in
+
+ (* Check for expected quantity of external checks *)
+ if Bignum_bigint.(curve.bignum.a = zero) then
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 42 )
+ else
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.bounds 43 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_external_checks.multi_ranges
+ 17 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal
+ unused_external_checks.compact_multi_ranges 17 ) ;
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ result expected_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (*
+ * EC scalar multiplication tests
+ *)
+
+ (* Multiply by 2 *)
+ let scalar = Bignum_bigint.of_int 2 in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "89565891926547004231252920425935692360644145829622209833684329913297188986597"
+ , Bignum_bigint.of_string
+ "12158399299693830322967808612713398636155367887041628176798871954788371653930"
+ )
+ in
+ let _cs =
+ test_scalar_mul_tiny Secp256k1.params scalar Secp256k1.params.gen
+ expected_result
+ in
+
+ ()
+
+let%test_unit "Ec_group.scalar_mul_tiny_full" =
+ if scalar_mul_tests_enabled then
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test elliptic curve scalar multiplication with tiny scalar (fully constrained) *)
+ let test_scalar_mul_tiny_full ?cs (curve : Curve_params.t)
+ (scalar : Bignum_bigint.t) (point : Affine.bignum_point)
+ (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants
+ (module Runner.Impl)
+ curve ~use_precomputed_gen_doubles:false
+ in
+ let scalar_bits =
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true scalar
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Q = sP *)
+ let result =
+ scalar_mul
+ (module Runner.Impl)
+ external_checks curve scalar_bits point
+ in
+
+ (*
+ * Perform external checks
+ *)
+
+ (* Sanity checks *)
+ if Bignum_bigint.(curve.bignum.a = zero) then
+ assert (Mina_stdlib.List.Length.equal external_checks.bounds 42)
+ else assert (Mina_stdlib.List.Length.equal external_checks.bounds 43) ;
+ assert (
+ Mina_stdlib.List.Length.equal external_checks.multi_ranges 17 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges
+ 17 ) ;
+
+ (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *)
+ Foreign_field.constrain_external_checks
+ (module Runner.Impl)
+ external_checks curve.modulus ;
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ result expected_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (*
+ * EC scalar multiplication full tiny test
+ *)
+
+ (* Multiply by 2 *)
+ let scalar = Bignum_bigint.of_int 2 in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "89565891926547004231252920425935692360644145829622209833684329913297188986597"
+ , Bignum_bigint.of_string
+ "12158399299693830322967808612713398636155367887041628176798871954788371653930"
+ )
+ in
+ let _cs =
+ test_scalar_mul_tiny_full Secp256k1.params scalar Secp256k1.params.gen
+ expected_result
+ in
+
+ ()
+
+let%test_unit "Ec_group.scalar_mul_full" =
+ if scalar_mul_tests_enabled then
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Test elliptic curve scalar multiplication with scalar (fully constrained)
+ * Rows without external checks: 9,239
+ * Rows with external checks: 51,284
+ *)
+ let test_scalar_mul_full ?cs (curve : Curve_params.t)
+ (scalar : Bignum_bigint.t) (point : Affine.bignum_point)
+ (expected_result : Affine.bignum_point) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test public inputs *)
+ let curve =
+ Curve_params.to_circuit_constants
+ (module Runner.Impl)
+ curve ~use_precomputed_gen_doubles:false
+ in
+ let scalar_bits =
+ Common.bignum_bigint_unpack_as_unchecked_vars
+ (module Runner.Impl)
+ ~remove_trailing:true scalar
+ in
+ let point =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) point
+ in
+ let expected_result =
+ Affine.of_bignum_bigint_coordinates
+ (module Runner.Impl)
+ expected_result
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let external_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Q = sP *)
+ let result =
+ scalar_mul
+ (module Runner.Impl)
+ external_checks curve scalar_bits point
+ in
+
+ (* Perform external checks *)
+ Foreign_field.constrain_external_checks
+ (module Runner.Impl)
+ external_checks curve.modulus ;
+
+ (* Check output matches expected result *)
+ as_prover (fun () ->
+ assert (
+ Affine.equal_as_prover
+ (module Runner.Impl)
+ result expected_result ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (*
+ * EC scalar multiplication full test
+ *)
+ let scalar =
+ Bignum_bigint.of_string
+ "86328453031879654597075713189149610219798626760146420625950995482836591878435"
+ in
+ let expected_result =
+ ( Bignum_bigint.of_string
+ "34471291466947522722859799187843146224770255220707476910295898769840639813138"
+ , Bignum_bigint.of_string
+ "93602351553749687946251059563423164683238306171680072584629082513591162129572"
+ )
+ in
+ let _cs =
+ test_scalar_mul_full Secp256k1.params scalar Secp256k1.params.gen
+ expected_result
+ in
+
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/ecdsa.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/ecdsa.ml.disabled
new file mode 100644
index 00000000000..576db6bda7c
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/ecdsa.ml.disabled
@@ -0,0 +1,1131 @@
+open Core_kernel
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+let tests_enabled = true
+
+(* Array to tuple helper *)
+let tuple6_of_array array =
+ match array with
+ | [| a1; a2; a3; a4; a5; a6 |] ->
+ (a1, a2, a3, a4, a5, a6)
+ | _ ->
+ assert false
+
+(* Gadget to assert signature scalars r,s \in Fn
+ * Must be used when r and s are not public parameters
+ *
+ * Scalar field external checks:
+ * Bound checks: 6
+ * Multi-range-checks: 2
+ * Compact-range-checks: 2
+ * Total range-checks: 10
+ *
+ * Rows: (per crumb, not counting inputs/outputs and constants)
+ * Check: 4
+ * Bound additions: 12
+ * Multi-range-checks: 40
+ * Total: 56
+ *)
+let signature_scalar_check (type f)
+ (module Circuit : Snark_intf.Run with type field = f)
+ (scalar_checks : f Foreign_field.External_checks.t)
+ (curve : f Curve_params.InCircuit.t)
+ (signature :
+ f Foreign_field.Element.Standard.t * f Foreign_field.Element.Standard.t )
+ =
+ let open Circuit in
+  (* Signature r and s *)
+ let r, s = signature in
+
+ (* Compute witness r^-1 and s^-1 needed for not-zero-check *)
+ let r_inv0, r_inv1, r_inv2, s_inv0, s_inv1, s_inv2 =
+ exists (Typ.array ~length:6 Field.typ) ~compute:(fun () ->
+ let curve_order =
+ Foreign_field.field_const_standard_limbs_to_bignum_bigint
+ (module Circuit)
+ curve.order
+ in
+
+ let r =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ r
+ in
+
+ let s =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ s
+ in
+
+ (* Compute r^-1 *)
+ let r_inv = Common.bignum_bigint_inverse r curve_order in
+
+ (* Compute s^-1 *)
+ let s_inv = Common.bignum_bigint_inverse s curve_order in
+
+ (* Convert from Bignums to field elements *)
+ let r_inv0, r_inv1, r_inv2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ r_inv
+ in
+ let s_inv0, s_inv1, s_inv2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ s_inv
+ in
+
+ (* Return and convert back to Cvars *)
+ [| r_inv0; r_inv1; r_inv2; s_inv0; s_inv1; s_inv2 |] )
+ |> tuple6_of_array
+ in
+ let r_inv =
+ Foreign_field.Element.Standard.of_limbs (r_inv0, r_inv1, r_inv2)
+ in
+ let s_inv =
+ Foreign_field.Element.Standard.of_limbs (s_inv0, s_inv1, s_inv2)
+ in
+
+ let one = Foreign_field.Element.Standard.one (module Circuit) in
+
+ (* C1: Constrain that r != 0 *)
+ let computed_one =
+ Foreign_field.mul (module Circuit) scalar_checks r r_inv curve.order
+ in
+ (* Bounds 1: Left input r is bound checked below
+ * Right input r_inv is bound checked below
+ * Result bound check is covered by scalar_checks
+ *)
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs r ;
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs r_inv ;
+ (* Assert r * r^-1 = 1 *)
+ Foreign_field.Element.Standard.assert_equal (module Circuit) computed_one one ;
+
+ (* C2: Constrain that s != 0 *)
+ let computed_one =
+ Foreign_field.mul (module Circuit) scalar_checks s s_inv curve.order
+ in
+ (* Bounds 2: Left input s is bound checked below
+ * Right input s_inv is bound checked below
+ * Result bound check is covered by scalar_checks
+ *)
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs s ;
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs s_inv ;
+ (* Assert s * s^-1 = 1 *)
+ Foreign_field.Element.Standard.assert_equal (module Circuit) computed_one one
+
+(* C3: Assert r \in [0, n)
+ * Already covered by bound check on r (Bounds 1)
+ *)
+(* C4: Assert s \in [0, n)
+ * Already covered by bound check on s (Bounds 2)
+ *)
+
+(* Gadget for constraining ECDSA signature verification in zero-knowledge
+ *
+ * Inputs:
+ * base_checks := Context to track required base field external checks
+ * scalar_checks := Context to track required scalar field external checks
+ * curve := Elliptic curve parameters
+ * pubkey := Public key of signer
+ * doubles := Optional powers of 2^i * pubkey, 0 <= i < n where n is curve.order_bit_length
+ * signature := ECDSA signature (r, s) s.t. r, s \in [1, n)
+ * msg_hash := Message hash s.t. msg_hash \in Fn
+ *
+ * Preconditions:
+ * pubkey is on the curve and not O (use Ec_group.is_on_curve gadget)
+ * pubkey is in the subgroup (nP = O) (use Ec_group.check_subgroup gadget)
+ * pubkey is bounds checked (use multi-range-check gadgets)
+ * r, s \in [1, n) (use signature_scalar_check gadget)
+ * msg_hash \in Fn (use bytes_to_foreign_field_element gadget)
+ *
+ * Public parameters
+ * gen is the correct elliptic curve group generator point
+ * a, b are correct elliptic curve parameters
+ * curve order is the correct elliptic curve group order
+ * curve modulus is the correct elliptic curve base field modulus
+ *     ia point is publicly, deterministically and randomly selected (nothing-up-my-sleeve)
+ * ia on the curve
+ * ia negated point computation is correct
+ * ia coordinates are valid
+ *
+ * Base field external checks: (per crumb, not counting inputs and output)
+ * Bound checks: 100 (+2 when a != 0 and +1 when b != 0)
+ * Multi-range-checks: 40
+ * Compact-range-checks: 40
+ * Total range-checks: 180
+ *
+ * Scalar field external checks: (per crumb, not counting inputs and output)
+ * Bound checks: 5
+ * Multi-range-checks: 3
+ * Compact-range-checks: 3
+ * Total range-checks: 11
+ *
+ * Rows: (per crumb, not counting inputs/outputs and constants)
+ * Verify: ~205 (+5 when a != 0 and +2 when b != 0)
+ * Bound additions: 210
+ * Multi-range-checks: 764
+ * Total: 1179
+ *
+ * Constants:
+ * Curve constants: 10 (for 256-bit curve; one-time cost per circuit)
+ * Pre-computing doubles: 767 (for 256-bit curve; one-time cost per circuit)
+ *
+ *)
+let verify (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (base_checks : f Foreign_field.External_checks.t)
+ (scalar_checks : f Foreign_field.External_checks.t)
+ (curve : f Curve_params.InCircuit.t) (pubkey : f Affine.t)
+ ?(use_precomputed_gen_doubles = true) ?(scalar_mul_bit_length = 0)
+ ?(doubles : f Affine.t array option)
+ (signature :
+ f Foreign_field.Element.Standard.t * f Foreign_field.Element.Standard.t )
+ (msg_hash : f Foreign_field.Element.Standard.t) =
+ let open Circuit in
+  (* Signatures r and s *)
+ let r, s = signature in
+
+ (* Compute witness value u1 and u2 *)
+ let u1_0, u1_1, u1_2, u2_0, u2_1, u2_2 =
+ exists (Typ.array ~length:6 Field.typ) ~compute:(fun () ->
+ let r =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ r
+ in
+
+ let s =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ s
+ in
+
+ let msg_hash =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ msg_hash
+ in
+
+ (* Compute s^-1 *)
+ let s_inv = Common.bignum_bigint_inverse s curve.bignum.order in
+
+ (* Compute u1 = z * s^-1 *)
+ let u1 = Bignum_bigint.(msg_hash * s_inv % curve.bignum.order) in
+
+ (* Compute u2 = r * s^-1 *)
+ let u2 = Bignum_bigint.(r * s_inv % curve.bignum.order) in
+
+ (* Convert from Bignums to field elements *)
+ let u1_0, u1_1, u1_2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ u1
+ in
+ let u2_0, u2_1, u2_2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ u2
+ in
+
+ (* Return and convert back to Cvars *)
+ [| u1_0; u1_1; u1_2; u2_0; u2_1; u2_2 |] )
+ |> tuple6_of_array
+ in
+ let u1 = Foreign_field.Element.Standard.of_limbs (u1_0, u1_1, u1_2) in
+ let u2 = Foreign_field.Element.Standard.of_limbs (u2_0, u2_1, u2_2) in
+
+ (* C1: Constrain s * u1 = z *)
+ let msg_hash_computed =
+ Foreign_field.mul
+ (module Circuit)
+ scalar_checks ~bound_check_result:false s u1 curve.order
+ in
+ (* Bounds 1: Left input s is gadget input (checked externally)
+ * Right input u1 checked below
+ * Result is gadget input (already checked externally).
+ *)
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs u1 ;
+
+ (* Assert s * u1 = z *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ msg_hash_computed msg_hash ;
+
+ (* C2: Constrain s * u2 = r *)
+ let r_computed =
+ Foreign_field.mul
+ (module Circuit)
+ scalar_checks ~bound_check_result:false s u2 curve.order
+ in
+
+ (* Bounds 2: Left input s is gadget input (checked externally)
+ * Right input u2 checked below
+ * Result is gadget input (already checked externally).
+ *)
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs u2 ;
+
+ (* Assert s * u2 = r *)
+ Foreign_field.Element.Standard.assert_equal (module Circuit) r_computed r ;
+
+ (*
+ * Compute R = u1G + u2P
+ *)
+
+ (* Set optional alternative scalar_mul_bit_length *)
+ let scalar_bit_length =
+ if scalar_mul_bit_length > 0 then scalar_mul_bit_length
+ else curve.order_bit_length
+ in
+
+ (* C3: Decompose u1 into bits *)
+ let u1_bits =
+ Foreign_field.Element.Standard.unpack
+ (module Circuit)
+ u1 ~length:scalar_bit_length
+ in
+
+ (* C4: Decompose u2 into bits *)
+ let u2_bits =
+ Foreign_field.Element.Standard.unpack
+ (module Circuit)
+ u2 ~length:scalar_bit_length
+ in
+
+ (* C5: Constrain scalar multiplication u1G *)
+ let curve_doubles =
+ if use_precomputed_gen_doubles then Some curve.doubles else None
+ in
+ let u1_point =
+ Ec_group.scalar_mul
+ (module Circuit)
+ base_checks curve ?doubles:curve_doubles u1_bits curve.gen
+ in
+
+ (* Bounds 5: Generator is gadget input (public parameter)
+ * Initial accumulator is gadget input (checked externally or public parameter)
+ * Result bound check for u1_point below.
+ *)
+ Foreign_field.External_checks.append_bound_check base_checks
+ @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x u1_point ;
+ Foreign_field.External_checks.append_bound_check base_checks
+ @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y u1_point ;
+
+ (* C6: Constrain scalar multiplication u2P *)
+ let u2_point =
+ Ec_group.scalar_mul
+ (module Circuit)
+ base_checks curve ?doubles u2_bits pubkey
+ in
+
+ (* Bounds 6: Pubkey is gadget input (checked externally)
+ * Initial accumulator is gadget input (checked externally or public parameter)
+ * Result bound check for u2_point below.
+ *)
+ Foreign_field.External_checks.append_bound_check base_checks
+ @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x u2_point ;
+ Foreign_field.External_checks.append_bound_check base_checks
+ @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y u2_point ;
+
+ (* C7: R = u1G + u2P *)
+ let result =
+ Ec_group.add (module Circuit) base_checks curve u1_point u2_point
+ in
+
+ (* Bounds 7: Left and right inputs checked by (Bounds 5) and (Bounds 6)
+ * Result bound is bound checked below
+ *)
+ Foreign_field.External_checks.append_bound_check base_checks
+ @@ Foreign_field.Element.Standard.to_limbs @@ Affine.x result ;
+ Foreign_field.External_checks.append_bound_check base_checks
+ @@ Foreign_field.Element.Standard.to_limbs @@ Affine.y result ;
+
+ (* Constrain that r = Rx (mod n), where n is the scalar field modulus
+ *
+ * Note: The scalar field modulus (curve.order) may be greater or smaller than
+ * the base field modulus (curve.modulus)
+ *
+ * curve.order > curve.modulus => Rx = 0 * n + Rx
+ *
+ * curve.order < curve.modulus => Rx = q * n + Rx'
+ *
+ * Thus, to check for congruence we need to compute the modular reduction of Rx and
+ * assert that it equals r.
+ *
+ * Since we may want to target applications where the scalar field is much smaller
+ * than the base field, we cannot make any assumptions about the ratio between
+ * these moduli, so we will constrain Rx = q * n + Rx' using the foreign field
+ * multiplication gadget, rather than just constraining Rx + 0 with our foreign
+ * field addition gadget.
+ *
+ * As we are reducing Rx modulo n, we are performing foreign field arithmetic modulo n.
+ * However, the multiplicand n above is not a valid foreign field element in [0, n - 1].
+ * To be safe we must constrain Rx = q * (n - 1) + q + Rx' modulo n.
+ *)
+
+ (* Compute witness value q and Rx' *)
+ let quotient0, quotient1, quotient2, x_prime0, x_prime1, x_prime2 =
+ exists (Typ.array ~length:6 Field.typ) ~compute:(fun () ->
+ let x =
+ Foreign_field.Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ (Affine.x result)
+ in
+
+ (* Compute q and r of Rx = q * n + r *)
+ let quotient, x_prime =
+ Common.bignum_bigint_div_rem x curve.bignum.order
+ in
+
+ (* Convert from Bignums to field elements *)
+ let quotient0, quotient1, quotient2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ quotient
+ in
+ let x_prime0, x_prime1, x_prime2 =
+ Foreign_field.bignum_bigint_to_field_const_standard_limbs
+ (module Circuit)
+ x_prime
+ in
+
+ (* Return and convert back to Cvars *)
+ [| quotient0; quotient1; quotient2; x_prime0; x_prime1; x_prime2 |] )
+ |> tuple6_of_array
+ in
+
+ (* C8: Constrain q * (n - 1) *)
+ let quotient =
+ Foreign_field.Element.Standard.of_limbs (quotient0, quotient1, quotient2)
+ in
+ let quotient_product =
+ Foreign_field.mul
+ (module Circuit)
+ scalar_checks quotient curve.order_minus_one curve.order
+ in
+
+ (* Bounds 8: Left input q is bound checked below
+ * Right input (n - 1) is a public parameter so not checked
+ * Result bound check is already covered by scalar_checks
+ *)
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs quotient ;
+
+ (* C9: Compute qn = q * (n - 1) + q *)
+ let quotient_times_n =
+ Foreign_field.add
+ (module Circuit)
+ ~full:false quotient_product quotient curve.order
+ in
+
+ (* Bounds 9: Left input q * (n - 1) is covered by (Bounds 8)
+ * Right input q is covered by (Bounds 8)
+ * Result is chained into subsequent addition (no check necessary)
+ *)
+
+ (* C10: Compute Rx = qn + Rx' *)
+ let x_prime =
+ Foreign_field.Element.Standard.of_limbs (x_prime0, x_prime1, x_prime2)
+ in
+ let computed_x =
+ Foreign_field.add
+ (module Circuit)
+ ~full:false quotient_times_n x_prime curve.order
+ in
+ (* Addition chain final result row *)
+ Foreign_field.result_row
+ (module Circuit)
+ ~label:"Ecdsa.verify_computed_x" computed_x ;
+
+ (* Bounds 10: Left input qn is chained input, so not checked
+ * Right input x_prime bounds checked below
+ * Result already bound checked by (Bounds 7)
+ *)
+ Foreign_field.External_checks.append_bound_check scalar_checks
+ @@ Foreign_field.Element.Standard.to_limbs x_prime ;
+
+ (* C11: Check qn + r = Rx *)
+ Foreign_field.Element.Standard.assert_equal
+ (module Circuit)
+ computed_x (Affine.x result) ;
+
+ (* C12: Check that r = Rx' *)
+ Foreign_field.Element.Standard.assert_equal (module Circuit) r x_prime ;
+
+ (* C13: Check result is on curve (also implies result is not infinity) *)
+ Ec_group.is_on_curve (module Circuit) base_checks curve result ;
+
+  (* Bounds 13: Input already bound checked by (Bounds 7) *)
+ ()
+
+(***************)
+(* ECDSA tests *)
+(***************)
+
+let%test_unit "Ecdsa.verify" =
+ if tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Let's test proving ECDSA signature verification in ZK! *)
+ let test_verify ?cs ?(use_precomputed_gen_doubles = true)
+ ?(scalar_mul_bit_length = 0) (curve : Curve_params.t)
+ (pubkey : Affine.bignum_point)
+ (signature : Bignum_bigint.t * Bignum_bigint.t)
+ (msg_hash : Bignum_bigint.t) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ (* Prepare test inputs *)
+ let curve =
+ Curve_params.to_circuit_constants
+ (module Runner.Impl)
+ curve ~use_precomputed_gen_doubles
+ in
+ let pubkey =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey
+ in
+ let signature =
+ ( Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ (fst signature)
+ , Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ (snd signature) )
+ in
+ let msg_hash =
+ Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ msg_hash
+ in
+
+ (* Create external checks contexts for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_base_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+ let unused_scalar_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Subgroup check for pubkey *)
+ Ec_group.check_subgroup
+ (module Runner.Impl)
+ unused_base_checks curve pubkey ;
+
+ (* Check r, s \in [1, n) *)
+ signature_scalar_check
+ (module Runner.Impl)
+ unused_scalar_checks curve signature ;
+
+ (* Verify ECDSA signature *)
+ verify
+ (module Runner.Impl)
+ ~use_precomputed_gen_doubles ~scalar_mul_bit_length
+ unused_base_checks unused_scalar_checks curve pubkey signature
+ msg_hash ;
+
+ () )
+ in
+
+ cs
+ in
+
+ (* Test 1: ECDSA verify test with real Ethereum mainnet signature
+ * Tx: https://etherscan.io/tx/0x0d26b1539304a214a6517b529a027f987cd52e70afd8fdc4244569a93121f144
+ *
+ * Raw tx: 0xf86580850df8475800830186a094353535353535353535353535353535353535353564801ba082de9950cc5aac0dca7210cb4b77320ac9e844717d39b1781e9d941d920a1206a01da497b3c134f50b2fce514d66e20c5e43f9615f097395a5527041d14860a52f
+ * Msg hash: 0x3e91cd8bd233b3df4e4762b329e2922381da770df1b31276ec77d0557be7fcef
+ * Raw pubkey: 0x046e0f66759bb520b026a9c7d61c82e8354025f2703696dcdac679b2f7945a352e637c8f71379941fa22f15a9fae9cb725ae337b16f216f5acdeefbd52a0882c27
+ * Raw signature: 0x82de9950cc5aac0dca7210cb4b77320ac9e844717d39b1781e9d941d920a12061da497b3c134f50b2fce514d66e20c5e43f9615f097395a5527041d14860a52f1b
+ * r := 0x82de9950cc5aac0dca7210cb4b77320ac9e844717d39b1781e9d941d920a1206
+ * s := 0x1da497b3c134f50b2fce514d66e20c5e43f9615f097395a5527041d14860a52f
+ * v := 27
+ *)
+ let eth_pubkey =
+ ( Bignum_bigint.of_string
+ "49781623198970027997721070672560275063607048368575198229673025608762959476014"
+ , Bignum_bigint.of_string
+ "44999051047832679156664607491606359183507784636787036192076848057884504239143"
+ )
+ in
+ let eth_signature =
+ ( (* r *)
+ Bignum_bigint.of_string
+ "59193968509713231970845573191808992654796038550727015999103892005508493218310"
+ , (* s *)
+ Bignum_bigint.of_string
+ "13407882537414256709292360527926092843766608354464979273376653245977131525423"
+ )
+ in
+ let tx_msg_hash =
+ Bignum_bigint.of_string
+ "0x3e91cd8bd233b3df4e4762b329e2922381da770df1b31276ec77d0557be7fcef"
+ in
+
+ assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ;
+
+ let _cs =
+ test_verify Secp256k1.params ~use_precomputed_gen_doubles:true eth_pubkey
+ eth_signature tx_msg_hash
+ in
+
+ (* Negative test *)
+ assert (
+ Common.is_error (fun () ->
+ (* Bad hash *)
+ let bad_tx_msg_hash =
+ Bignum_bigint.of_string
+ "0x3e91cd8bd233b3df4e4762b329e2922381da770df1b31276ec77d0557be7fcee"
+ in
+ test_verify Secp256k1.params eth_pubkey eth_signature bad_tx_msg_hash ) ) ;
+
+ (* Test 2: ECDSA verify test with another real Ethereum mainnet signature
+ * Tx: https://etherscan.io/tx/0x9cec14aadb06b59b2646333f47efe0ee7f21fed48d93806023b8eb205aa3b161
+ *
+ * Raw tx: 0x02f9019c018201338405f5e100850cad3895d8830108949440a50cf069e992aa4536211b23f286ef88752187880b1a2bc2ec500000b90124322bba210000000000000000000000008a001303158670e284950565164933372807cd4800000000000000000000000012d220fbda92a9c8f281ea02871afa70dfde81e90000000000000000000000000000000000000000000000000afd4ea3d29472400000000000000000000000000000000000000000461c9bb5bb1c3429b25544e3f4b7bb67d63f9b432df61df28a9897e26284b370adcd7b558fa286babb0efdeb000000000000000000000000000000000000000000000000001cdd1f19bb8dc0000000000000000000000000000000000000000000000000000000006475ed380000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a8f2573c080a0893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658fa01119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c422
+ * Msg hash: 0xf7c5983cdb051f68aa84444c4b8ecfdbf60548fe3f5f3f2d19cc5d3c096f0b5b
+ * Raw pubkey: 0x04ad53a68c2120f9a81288b1377adbe7477b7cec1b9b5ff57d5e331ee7f9e6c2372f997b48cf3faa91023f77754ef63ec49dcd5a61b681b53cda894616c28422c0
+ * Raw signature: 0x893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658f1119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c4221c
+ * r := 0x893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658f
+ * s := 0x1119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c422
+ * v := 0
+ *)
+ let eth_pubkey =
+ Ethereum.pubkey_hex_to_point
+ "0x04ad53a68c2120f9a81288b1377adbe7477b7cec1b9b5ff57d5e331ee7f9e6c2372f997b48cf3faa91023f77754ef63ec49dcd5a61b681b53cda894616c28422c0"
+ in
+
+ let eth_signature =
+ ( (* r *)
+ Bignum_bigint.of_string
+ "0x893bc3facf19becba979e31d37ed1b222faab09b8c554a17072f6fbfc1e5658f"
+ , (* s *)
+ Bignum_bigint.of_string
+ "0x1119ef751f0fc3c1ec4d1eeb9db64c9f416ce1aa3267d7b98d8426ab35f0c422"
+ )
+ in
+ let tx_msg_hash =
+ Bignum_bigint.of_string
+ "0xf7c5983cdb051f68aa84444c4b8ecfdbf60548fe3f5f3f2d19cc5d3c096f0b5b"
+ in
+
+ assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ;
+
+ let _cs =
+ test_verify Secp256k1.params eth_pubkey eth_signature tx_msg_hash
+ in
+
+ (* Test 3: ECDSA verify test with yet another real Ethereum mainnet signature
+ * Tx: https://etherscan.io/tx/0x4eb2087dc31dda8fc1bd8680624cd2ae0c1ed0d880de1daefb6fddac208d08fb
+ *
+ * Raw tx: 0x02f90114011c8405f5e100850d90b9d72982f4a8948a3749936e723325c6b645a0901470cd9e790b9480b8a8b88d4fde00000000000000000000000085210d346e2baa59a486dd19cf9d18f1325d9ffc00000000000000000000000039f083386e75120d2c6c152900219849dbdaa7e60000000000000000000000000000000000000000000000000000000000000b7100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000360c6ebec080a0a8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1a031532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1
+ * Msg hash: 0xccdea6d5fce0363b9fbc2cf9a14087fc67c79fbdf55b25789ee2d51dcd82dbc1
+ * Raw pubkey: 0x042b7a248bf6fa2acc079d4f451c68c56a40ef81aeaf6a89c10ed6d692f7a6fdea0c05f95d601c3ab4f75d9253d356ab7af4d7d2ac250e0832581d08f1e224a976
+ * Raw signature: 0xa8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe131532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d11c
+ * r := 0xa8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1
+ * s := 0x31532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1
+ * v := 0
+ *)
+ let eth_pubkey =
+ Ethereum.pubkey_hex_to_point
+ "0x042b7a248bf6fa2acc079d4f451c68c56a40ef81aeaf6a89c10ed6d692f7a6fdea0c05f95d601c3ab4f75d9253d356ab7af4d7d2ac250e0832581d08f1e224a976"
+ in
+
+ let eth_signature =
+ ( (* r *)
+ Bignum_bigint.of_string
+ "0xa8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1"
+ , (* s *)
+ Bignum_bigint.of_string
+ "0x31532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1"
+ )
+ in
+ let tx_msg_hash =
+ Bignum_bigint.of_string
+ "0xccdea6d5fce0363b9fbc2cf9a14087fc67c79fbdf55b25789ee2d51dcd82dbc1"
+ in
+
+ assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ;
+
+ let cs =
+ test_verify Secp256k1.params eth_pubkey eth_signature tx_msg_hash
+ in
+
+ assert (
+ Common.is_error (fun () ->
+ (* Bad signature *)
+ let bad_eth_signature =
+ ( (* r *)
+ Bignum_bigint.of_string
+ "0xc8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1"
+ , (* s *)
+ Bignum_bigint.of_string
+ "0x31532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1"
+ )
+ in
+ test_verify Secp256k1.params eth_pubkey bad_eth_signature tx_msg_hash ) ) ;
+
+ (* Test 4: Constraint system reuse
+ * Tx: https://etherscan.io/tx/0xfc7d65547eb5192c2f35b7e190b4792a9ebf79876f164ead32288e9fe2b7e4f3
+ *
+ * Raw tx: 0x02f8730113843b9aca00851405ffdc00825b0494a9d1e08c7793af67e9d92fe308d5697fb81d3e4388299ce7c69d7b9c1780c001a06d5a635efe29deca27e52e96dd2d4056cff1a4b51f88d363f1c3802a26cd67a0a07c34d16c2831ee6265d6d2a55cee6e3273f41480424686d44fe709ce7cfd1567
+ * Msg hash: 0x62c771b337f1a0070dddb863b953017aa12918fc37f338419f7664fda443ce93
+ * Raw pubkey: 0x041d4911ee95f0858df65b942fe88cd54d6c06f73fc9e716db1e153d9994b16930e0284e96e308ef77f1d588aa446237111ab370eeab84059a08980e7e7ab0c467
+ * Raw signature: 0x6d5a635efe29deca27e52e96dd2d4056cff1a4b51f88d363f1c3802a26cd67a07c34d16c2831ee6265d6d2a55cee6e3273f41480424686d44fe709ce7cfd15671b
+ * r := 0xa8c5ae8e178c29a3de4a70ef0d22cbb29a8a0013cfa81fea66885556573debe1
+ * s := 0x31532f9be326029161a4b7bedb80ea4d20b1293cbefb51cc570e72e6aa4ef4d1
+ * v := 1
+ *)
+ let eth_pubkey =
+ Ethereum.pubkey_hex_to_point
+ "0x041d4911ee95f0858df65b942fe88cd54d6c06f73fc9e716db1e153d9994b16930e0284e96e308ef77f1d588aa446237111ab370eeab84059a08980e7e7ab0c467"
+ in
+
+ let eth_signature =
+ ( (* r *)
+ Bignum_bigint.of_string
+ "0x6d5a635efe29deca27e52e96dd2d4056cff1a4b51f88d363f1c3802a26cd67a0"
+ , (* s *)
+ Bignum_bigint.of_string
+ "0x7c34d16c2831ee6265d6d2a55cee6e3273f41480424686d44fe709ce7cfd1567"
+ )
+ in
+ let tx_msg_hash =
+ Bignum_bigint.of_string
+ "0x62c771b337f1a0070dddb863b953017aa12918fc37f338419f7664fda443ce93"
+ in
+
+ assert (Ec_group.is_on_curve_bignum_point Secp256k1.params eth_pubkey) ;
+
+ let _cs =
+ test_verify ~cs Secp256k1.params eth_pubkey eth_signature tx_msg_hash
+ in
+
+ (* Test without using precomputed curve doubles *)
+ let _cs =
+ test_verify ~use_precomputed_gen_doubles:false Secp256k1.params eth_pubkey
+ eth_signature tx_msg_hash
+ in
+
+ () )
+
+let%test_unit "Ecdsa.verify_light" =
+ if tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Light ecdsa verify circuit for manual checks *)
+ let test_verify_light ?cs ?(use_precomputed_gen_doubles = true)
+ ?(scalar_mul_bit_length = 0) (curve : Curve_params.t)
+ (pubkey : Affine.bignum_point)
+ (signature : Bignum_bigint.t * Bignum_bigint.t)
+ (msg_hash : Bignum_bigint.t) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ (* Prepare test inputs *)
+ let curve =
+ Curve_params.to_circuit_constants
+ (module Runner.Impl)
+ curve ~use_precomputed_gen_doubles
+ in
+ let pubkey =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey
+ in
+ Foreign_field.result_row (module Runner.Impl) (fst pubkey) ;
+ Foreign_field.result_row (module Runner.Impl) (snd pubkey) ;
+ let signature =
+ ( Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ (fst signature)
+ , Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ (snd signature) )
+ in
+ Foreign_field.result_row (module Runner.Impl) (fst signature) ;
+ Foreign_field.result_row (module Runner.Impl) (snd signature) ;
+ let msg_hash =
+ Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ msg_hash
+ in
+ Foreign_field.result_row (module Runner.Impl) msg_hash ;
+
+ (* Create external checks contexts for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_base_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+ let unused_scalar_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Omit pubkey subgroup check *)
+
+ (* Omit checking r, s \in [1, n) *)
+
+ (* Verify ECDSA signature *)
+ verify
+ (module Runner.Impl)
+ ~use_precomputed_gen_doubles ~scalar_mul_bit_length
+ unused_base_checks unused_scalar_checks curve pubkey signature
+ msg_hash ;
+
+ (* The base field external check counts depend on curve and scalar size. We elide
+ * checking these because we want this test function able to be used with different
+ * curves, scalars and other parameters.
+ *)
+
+ (* Check scalar field external check counts *)
+ assert (Mina_stdlib.List.Length.equal unused_scalar_checks.bounds 5) ;
+ assert (
+ Mina_stdlib.List.Length.equal unused_scalar_checks.multi_ranges 3 ) ;
+ assert (
+ Mina_stdlib.List.Length.equal
+ unused_scalar_checks.compact_multi_ranges 3 ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (* Tiny secp256k1 signature test: results in 2-bit u1 and u2 scalars
+ * Extracted with k = 1 -> secret key = 57896044618658097711785492504343953926418782139537452191302581570759080747168 *)
+ let pubkey =
+ ( Bignum_bigint.of_string
+ "86918276961810349294276103416548851884759982251107"
+ , Bignum_bigint.of_string
+ "28597260016173315074988046521176122746119865902901063272803125467328307387891"
+ )
+ in
+ let signature =
+ ( (* r = Gx *)
+ Bignum_bigint.of_string
+ "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+ , (* s = r/2 *)
+ Bignum_bigint.of_string
+ "27533131511138671834789359447584267163125301726888797087750093680194558364620"
+ )
+ in
+ let msg_hash =
+ (* z = 2s *)
+ Bignum_bigint.of_string
+ "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+ in
+
+ assert (Ec_group.is_on_curve_bignum_point Secp256k1.params pubkey) ;
+
+ let _cs =
+ test_verify_light Secp256k1.params ~scalar_mul_bit_length:2 pubkey
+ signature msg_hash
+ in
+ let _cs =
+ test_verify_light Secp256k1.params ~use_precomputed_gen_doubles:false
+ ~scalar_mul_bit_length:2 pubkey signature msg_hash
+ in
+
+ () )
+
+let%test_unit "Ecdsa.secp256k1_verify_tiny_full" =
+ if tests_enabled then (
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Tiny full circuit for ecdsa on secp256k1 manual checks.
+ * Note: pubkey, signature and msg_hash need to be specially crafted to produce 2-bit scalars
+ *)
+ let secp256k1_verify_tiny_full ?cs ?(use_precomputed_gen_doubles = true)
+ (pubkey : Affine.bignum_point)
+ (signature : Bignum_bigint.t * Bignum_bigint.t)
+ (msg_hash : Bignum_bigint.t) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ (* Prepare test inputs *)
+ let curve =
+ Curve_params.to_circuit_constants
+ (module Runner.Impl)
+ Secp256k1.params ~use_precomputed_gen_doubles
+ in
+ let pubkey =
+ Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey
+ in
+ Foreign_field.result_row (module Runner.Impl) (fst pubkey) ;
+ Foreign_field.result_row (module Runner.Impl) (snd pubkey) ;
+ let signature =
+ ( Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ (fst signature)
+ , Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ (snd signature) )
+ in
+ Foreign_field.result_row (module Runner.Impl) (fst signature) ;
+ Foreign_field.result_row (module Runner.Impl) (snd signature) ;
+ let msg_hash =
+ Foreign_field.Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ msg_hash
+ in
+ Foreign_field.result_row (module Runner.Impl) msg_hash ;
+
+ (* Create external checks contexts for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let base_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+ let scalar_checks =
+ Foreign_field.External_checks.create (module Runner.Impl)
+ in
+
+ (* Omit pubkey subgroup check *)
+
+ (* Omit checking r, s \in [1, n) *)
+
+ (* Verify ECDSA signature *)
+ verify
+ (module Runner.Impl)
+ ~use_precomputed_gen_doubles ~scalar_mul_bit_length:2 base_checks
+ scalar_checks curve pubkey signature msg_hash ;
+
+ (*
+ * Perform base field external checks
+ *)
+
+ (* Sanity check *)
+ let base_bound_checks_count = ref (42 + 2 + 42 + 2 + 6 + 2 + 3) in
+ if not Bignum_bigint.(curve.bignum.a = zero) then
+ base_bound_checks_count := !base_bound_checks_count + 2 ;
+ if not Bignum_bigint.(curve.bignum.b = zero) then
+ base_bound_checks_count := !base_bound_checks_count + 1 ;
+ assert (
+ Mina_stdlib.List.Length.equal base_checks.bounds
+ !base_bound_checks_count ) ;
+ assert (Mina_stdlib.List.Length.equal base_checks.multi_ranges 40) ;
+ assert (
+ Mina_stdlib.List.Length.equal base_checks.compact_multi_ranges 40 ) ;
+
+ (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *)
+ Foreign_field.constrain_external_checks
+ (module Runner.Impl)
+ base_checks curve.modulus ;
+
+ (*
+ * Perform scalar field external checks
+ *)
+
+ (* Sanity checks *)
+ assert (Mina_stdlib.List.Length.equal scalar_checks.bounds 5) ;
+ assert (Mina_stdlib.List.Length.equal scalar_checks.multi_ranges 3) ;
+ assert (
+ Mina_stdlib.List.Length.equal scalar_checks.compact_multi_ranges 3 ) ;
+
+ (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *)
+ Foreign_field.constrain_external_checks
+ (module Runner.Impl)
+ scalar_checks curve.order ;
+
+ () )
+ in
+
+ cs
+ in
+
+ (* Tiny secp256k1 signature test: results in 2-bit u1 and u2 scalars
+ * Extracted with k = 1 -> secret key = 57896044618658097711785492504343953926418782139537452191302581570759080747168 *)
+ let pubkey =
+ (* secret key d = (s - z)/r *)
+ ( Bignum_bigint.of_string
+ "86918276961810349294276103416548851884759982251107"
+ , Bignum_bigint.of_string
+ "28597260016173315074988046521176122746119865902901063272803125467328307387891"
+ )
+ in
+ let signature =
+ ( (* r = Gx *)
+ Bignum_bigint.of_string
+ "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+ , (* s = r/2 *)
+ Bignum_bigint.of_string
+ "27533131511138671834789359447584267163125301726888797087750093680194558364620"
+ )
+ in
+ let msg_hash =
+ (* z = 2s *)
+ Bignum_bigint.of_string
+ "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+ in
+
+ assert (Ec_group.is_on_curve_bignum_point Secp256k1.params pubkey) ;
+
+ let _cs =
+ secp256k1_verify_tiny_full ~use_precomputed_gen_doubles:false pubkey
+ signature msg_hash
+ in
+
+ () )
+
+let%test_unit "Ecdsa.verify_full_no_subgroup_check" =
+  if tests_enabled then (
+    let open Kimchi_gadgets_test_runner in
+    (* Initialize the SRS cache. *)
+    let () =
+      try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+    in
+
+    (* Prove ECDSA signature verification in ZK (no subgroup check)! *)
+    let test_verify_full_no_subgroup_check ?cs
+        ?(use_precomputed_gen_doubles = true) ?(scalar_mul_bit_length = 0)
+        (curve : Curve_params.t) (pubkey : Affine.bignum_point)
+        (signature : Bignum_bigint.t * Bignum_bigint.t)
+        (msg_hash : Bignum_bigint.t) =
+      (* Generate and verify proof *)
+      let cs, _proof_keypair, _proof =
+        Runner.generate_and_verify_proof ?cs (fun () ->
+            (* Prepare test inputs *)
+            let curve =
+              Curve_params.to_circuit_constants
+                (module Runner.Impl)
+                curve ~use_precomputed_gen_doubles
+            in
+            let pubkey =
+              Affine.of_bignum_bigint_coordinates (module Runner.Impl) pubkey
+            in
+            let signature =
+              ( Foreign_field.Element.Standard.of_bignum_bigint
+                  (module Runner.Impl)
+                  (fst signature)
+              , Foreign_field.Element.Standard.of_bignum_bigint
+                  (module Runner.Impl)
+                  (snd signature) )
+            in
+            let msg_hash =
+              Foreign_field.Element.Standard.of_bignum_bigint
+                (module Runner.Impl)
+                msg_hash
+            in
+
+            (* Create external checks contexts for tracking extra constraints
+               that are required for soundness *)
+            let base_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+            let scalar_checks =
+              Foreign_field.External_checks.create (module Runner.Impl)
+            in
+
+            (* Subgroup check for pubkey is too expensive for test without chunking *)
+
+            (* Check r, s \in [1, n) *)
+            signature_scalar_check
+              (module Runner.Impl)
+              scalar_checks curve signature ;
+
+            (* Verify ECDSA signature *)
+            verify
+              (module Runner.Impl)
+              ~use_precomputed_gen_doubles ~scalar_mul_bit_length base_checks
+              scalar_checks curve pubkey signature msg_hash ;
+
+            (*
+             * Perform base field external checks
+             *)
+            Foreign_field.constrain_external_checks
+              (module Runner.Impl)
+              base_checks curve.modulus ;
+
+            (*
+             * Perform scalar field external checks
+             *)
+            Foreign_field.constrain_external_checks
+              (module Runner.Impl)
+              scalar_checks curve.order ;
+
+            () )
+      in
+
+      cs
+    in
+
+    (* Test 1: No chunking (big test that doesn't require chunking)
+     * Uses precomputed generator doubles.
+     * Extracted s,d such that the u1 and u2 scalars are equal to m = 95117056129877063566687163501128961107874747202063760588013341337 (216 bits) *)
+    let pubkey =
+      (* secret key d = (s - z)/r *)
+      ( Bignum_bigint.of_string
+          "28335432349034412295843546619549969371276098848890005110917167585721026348383"
+      , Bignum_bigint.of_string
+          "40779711449769771629236800666139862371172776689379727569918249313574127557987"
+      )
+    in
+    let signature =
+      ( (* r = Gx *)
+        Bignum_bigint.of_string
+          "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+      , (* s = r/m *)
+        Bignum_bigint.of_string
+          "92890023769187417206640608811117482540691917151111621018323984641303111040093"
+      )
+    in
+    let msg_hash =
+      (* z = ms *)
+      Bignum_bigint.of_string
+        "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+    in
+
+    assert (Ec_group.is_on_curve_bignum_point Secp256k1.params pubkey) ;
+
+    let _cs =
+      test_verify_full_no_subgroup_check Secp256k1.params
+        ~scalar_mul_bit_length:216 pubkey signature msg_hash
+    in
+
+    (* Test 2: No chunking (big test that doesn't require chunking)
+     * Extracted s,d such that the u1 and u2 scalars are equal to m = 177225723614878382952356121702918977654 (128 bits) *)
+    let pubkey =
+      (* secret key d = (s - z)/r *)
+      ( Bignum_bigint.of_string
+          "6559447345535823731364817861985473100513487071640065635466595453031721007862"
+      , Bignum_bigint.of_string
+          "74970879557849263394678708702512922877596422437120940411392434995042287566169"
+      )
+    in
+    let signature =
+      ( (* r = Gx *)
+        Bignum_bigint.of_string
+          "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+      , (* s = r/m *)
+        Bignum_bigint.of_string
+          "66524399747416926971392827702286928407253072170352243437129959464602950571595"
+      )
+    in
+    let msg_hash =
+      (* z = ms *)
+      Bignum_bigint.of_string
+        "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+    in
+
+    let _cs =
+      test_verify_full_no_subgroup_check Secp256k1.params
+        ~use_precomputed_gen_doubles:false ~scalar_mul_bit_length:128 pubkey
+        signature msg_hash
+    in
+
+    () )
diff --git a/src/lib/crypto/kimchi_backend/gadgets/ethereum.ml b/src/lib/crypto/kimchi_backend/gadgets/ethereum.ml
new file mode 100644
index 00000000000..0dac6edfefe
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/ethereum.ml
@@ -0,0 +1,7 @@
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+
+let pubkey_hex_to_point (hex : string) : Bignum_bigint.t * Bignum_bigint.t = (* parse hex pubkey string into affine (x, y) bigints *)
+  assert (132 = String.length hex) ; (* 2-char "0x" + 2-char prefix + 64-char x + 64-char y; prefix bytes (presumably "04", uncompressed) are not validated — TODO confirm *)
+  let x_hex = "0x" ^ String.sub hex 4 64 in (* x coordinate: chars [4, 68) *)
+  let y_hex = "0x" ^ String.sub hex 68 64 in (* y coordinate: chars [68, 132) *)
+  (Bignum_bigint.of_string x_hex, Bignum_bigint.of_string y_hex)
diff --git a/src/lib/crypto/kimchi_backend/gadgets/foreign_field.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.ml.disabled
new file mode 100644
index 00000000000..560084198cb
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.ml.disabled
@@ -0,0 +1,2224 @@
+open Core_kernel
+
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+let tests_enabled = true (* NOTE(review): presumably gates this file's inline let%test_unit bodies — confirm *)
+
+let tuple5_of_array array = (* destructure a 5-element array into a 5-tuple; asserts on any other length *)
+  match array with
+  | [| a1; a2; a3; a4; a5 |] ->
+      (a1, a2, a3, a4, a5)
+  | _ ->
+      assert false
+
+let tuple15_of_array array = (* destructure a 15-element array into a 15-tuple; asserts on any other length *)
+  match array with
+  | [| a1; a2; a3; a4; a5; a6; a7; a8; a9; a10; a11; a12; a13; a14; a15 |] ->
+      (a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15)
+  | _ ->
+      assert false
+
+(* 2^2L, i.e. Common.two_to_limb squared (L = limb size in bits) *)
+let two_to_2limb = Bignum_bigint.(pow Common.two_to_limb (of_int 2))
+
+let two_to_limb_field (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) =
+  Common.(bignum_bigint_to_field (module Circuit) two_to_limb)
+
+let two_to_2limb_field (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) =
+  Common.(bignum_bigint_to_field (module Circuit) two_to_2limb)
+
+(* Binary modulus 2^t = 2^3L (see Common.two_to_3limb) *)
+let binary_modulus = Common.two_to_3limb
+
+(* Maximum foreign field modulus for multiplication m = sqrt(2^t * n), see RFC for more details
+ * For simplicity and efficiency we use the approximation m = floor(sqrt(2^t * n))
+ * Distinct from this approximation is the maximum prime foreign field modulus
+ * for both Pallas and Vesta given our CRT scheme:
+ * 926336713898529563388567880069503262826888842373627227613104999999999999999607 *)
+let max_foreign_field_modulus (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) :
+    Bignum_bigint.t =
+  (* m = floor(sqrt(2^t * n)) *)
+  let product =
+    (* We need Zarith for sqrt *)
+    Bignum_bigint.to_zarith_bigint
+    @@ Bignum_bigint.(binary_modulus * Circuit.Field.size)
+    (* Zarith.sqrt truncates (rounds down to int) ~ floor *)
+  in
+  Bignum_bigint.of_zarith_bigint @@ Z.sqrt product
+
+(* Type of foreign field operation: addition or subtraction *)
+type op_mode = Add | Sub
+
+(* Foreign field modulus is abstract on two parameters
+ * - Field type
+ * - Limbs structure
+ *
+ * There are 2 specific limb structures required
+ * - Standard mode : 3 limbs of L-bits each
+ * - Compact mode : 2 limbs where the lowest is 2L bits and the highest is L bits
+ *)
+
+type 'field standard_limbs = 'field * 'field * 'field
+
+type 'field compact_limbs = 'field * 'field
+
+(* Convert Bignum_bigint.t to Bignum_bigint standard_limbs *)
+let bignum_bigint_to_standard_limbs (bigint : Bignum_bigint.t) :
+    Bignum_bigint.t standard_limbs =
+  let l12, l0 = Common.(bignum_bigint_div_rem bigint two_to_limb) in (* (quotient, remainder) split at 2^L *)
+  let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in
+  (l0, l1, l2)
+
+(* Convert Bignum_bigint.t to field standard_limbs *)
+let bignum_bigint_to_field_const_standard_limbs (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+    (bigint : Bignum_bigint.t) : f standard_limbs =
+  let l0, l1, l2 = bignum_bigint_to_standard_limbs bigint in
+  ( Common.bignum_bigint_to_field (module Circuit) l0
+  , Common.bignum_bigint_to_field (module Circuit) l1
+  , Common.bignum_bigint_to_field (module Circuit) l2 )
+
+(* Convert Bignum_bigint.t to Bignum_bigint compact_limbs *)
+let bignum_bigint_to_compact_limbs (bigint : Bignum_bigint.t) :
+    Bignum_bigint.t compact_limbs =
+  let l2, l01 = Common.bignum_bigint_div_rem bigint two_to_2limb in (* (quotient, remainder) split at 2^2L *)
+  (l01, l2)
+
+(* Convert Bignum_bigint.t to field compact_limbs *)
+let bignum_bigint_to_field_const_compact_limbs (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+    (bigint : Bignum_bigint.t) : f compact_limbs =
+  let l01, l2 = bignum_bigint_to_compact_limbs bigint in
+  ( Common.bignum_bigint_to_field (module Circuit) l01
+  , Common.bignum_bigint_to_field (module Circuit) l2 )
+
+(* Convert field standard_limbs to Bignum_bigint.t standard_limbs *)
+let field_const_standard_limbs_to_bignum_bigint_standard_limbs (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+    (field_limbs : f standard_limbs) : Bignum_bigint.t standard_limbs =
+  let l0, l1, l2 = field_limbs in
+  ( Common.field_to_bignum_bigint (module Circuit) l0
+  , Common.field_to_bignum_bigint (module Circuit) l1
+  , Common.field_to_bignum_bigint (module Circuit) l2 )
+
+(* Convert field standard_limbs to Bignum_bigint.t *)
+let field_const_standard_limbs_to_bignum_bigint (type f)
+    (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+    (field_limbs : f standard_limbs) : Bignum_bigint.t =
+  let l0, l1, l2 =
+    field_const_standard_limbs_to_bignum_bigint_standard_limbs
+      (module Circuit)
+      field_limbs
+  in
+  Bignum_bigint.(l0 + (Common.two_to_limb * l1) + (two_to_2limb * l2))
+
+(* Foreign field element interface *)
+(* TODO: It would be better if this were created with functor that
+ * takes as arguments the native field and the foreign field modulus.
+ * Then when creating foreign field elements it could check that
+ * they are valid (less than the foreign field modulus). We'd need a
+ * mode to override this last check for bound additions.
+ *)
+module type Element_intf = sig
+  type 'field t
+
+  type 'a limbs_type
+
+  module Cvar = Snarky_backendless.Cvar
+
+  (* Create foreign field element from Cvar limbs *)
+  val of_limbs : 'field Cvar.t limbs_type -> 'field t
+
+  (* Create foreign field element from field limbs *)
+  val of_field_limbs :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field limbs_type
+    -> 'field t
+
+  (* Create foreign field element from Bignum_bigint.t *)
+  val of_bignum_bigint :
+       (module Snark_intf.Run with type field = 'field)
+    -> Bignum_bigint.t
+    -> 'field t
+
+  (* Create constant foreign field element from Bignum_bigint.t *)
+  val const_of_bignum_bigint :
+       (module Snark_intf.Run with type field = 'field)
+    -> Bignum_bigint.t
+    -> 'field t
+
+  (* Convert foreign field element into Cvar limbs *)
+  val to_limbs : 'field t -> 'field Cvar.t limbs_type
+
+  (* Map foreign field element's Cvar limbs into some other limbs with the mapping function func *)
+  val map : 'field t -> ('field Cvar.t -> 'g) -> 'g limbs_type
+
+  (* One constant *)
+  val one : (module Snark_intf.Run with type field = 'field) -> 'field t
+
+  (* Convert foreign field element into field limbs *)
+  val to_field_limbs_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field limbs_type
+
+  (* Convert foreign field element into Bignum_bigint.t limbs *)
+  val to_bignum_bigint_limbs_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> Bignum_bigint.t limbs_type
+
+  (* Convert foreign field element into a Bignum_bigint.t *)
+  val to_bignum_bigint_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> Bignum_bigint.t
+
+  (* Convert foreign field element to string *)
+  val to_string_as_prover :
+    (module Snark_intf.Run with type field = 'field) -> 'field t -> string
+
+  (* Constrain zero check computation with boolean output *)
+  val is_zero :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field Cvar.t Snark_intf.Boolean0.t
+
+  (* Compare if two foreign field elements are equal *)
+  val equal_as_prover :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field t
+    -> bool
+
+  (* Add copy constraints that two foreign field elements are equal *)
+  val assert_equal :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> 'field t
+    -> unit
+
+  (* Create and constrain foreign field element from Bignum_bigint.t *)
+  val check_here_const_of_bignum_bigint :
+       (module Snark_intf.Run with type field = 'field)
+    -> Bignum_bigint.t
+    -> 'field t
+
+  (* Add conditional constraints to select foreign field element *)
+  val if_ :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field Cvar.t Snark_intf.Boolean0.t
+    -> then_:'field t
+    -> else_:'field t
+    -> 'field t
+
+  (* Decompose and constrain foreign field element into list of boolean cvars *)
+  val unpack :
+       (module Snark_intf.Run with type field = 'field)
+    -> 'field t
+    -> length:int
+    -> 'field Cvar.t Snark_intf.Boolean0.t list
+end
+
+(* Foreign field element structures (standard 3-limb and compact 2-limb forms) *)
+module Element : sig
+  (* Foreign field element (standard limbs) *)
+  module Standard : sig
+    include Element_intf with type 'a limbs_type = 'a standard_limbs
+
+    (* Check that the foreign element is smaller than a given field modulus *)
+    val fits_as_prover :
+         (module Snark_intf.Run with type field = 'field)
+      -> 'field t
+      -> 'field standard_limbs
+      -> bool
+  end
+
+  (* Foreign field element (compact limbs) *)
+  module Compact : Element_intf with type 'a limbs_type = 'a compact_limbs
+end = struct
+  (* Standard limbs foreign field element *)
+  module Standard = struct
+    module Cvar = Snarky_backendless.Cvar
+
+    type 'field limbs_type = 'field standard_limbs
+
+    type 'field t = 'field Cvar.t standard_limbs
+
+    let of_limbs x = x (* Cvar limbs are already the underlying representation *)
+
+    let of_field_limbs (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (x : field limbs_type) : field t =
+      let open Circuit in
+      let x =
+        exists (Typ.array ~length:3 Field.typ) ~compute:(fun () ->
+            let x0, x1, x2 = x in
+            [| x0; x1; x2 |] )
+        |> Common.tuple3_of_array
+      in
+      of_limbs x
+
+    let of_bignum_bigint (type field)
+        (module Circuit : Snark_intf.Run with type field = field) x : field t =
+      let open Circuit in
+      let l12, l0 = Common.(bignum_bigint_div_rem x two_to_limb) in
+      let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in
+      let limb_vars =
+        exists (Typ.array ~length:3 Field.typ) ~compute:(fun () ->
+            [| Common.bignum_bigint_to_field (module Circuit) l0
+             ; Common.bignum_bigint_to_field (module Circuit) l1
+             ; Common.bignum_bigint_to_field (module Circuit) l2
+            |] )
+      in
+      of_limbs (limb_vars.(0), limb_vars.(1), limb_vars.(2))
+
+    let const_of_bignum_bigint (type field)
+        (module Circuit : Snark_intf.Run with type field = field) x : field t =
+      let open Circuit in
+      let l12, l0 = Common.(bignum_bigint_div_rem x two_to_limb) in
+      let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in
+      of_limbs
+        Field.
+          ( constant @@ Common.bignum_bigint_to_field (module Circuit) l0
+          , constant @@ Common.bignum_bigint_to_field (module Circuit) l1
+          , constant @@ Common.bignum_bigint_to_field (module Circuit) l2 )
+
+    let to_limbs x = x
+
+    let map (x : 'field t) (func : 'field Cvar.t -> 'g) : 'g limbs_type =
+      let l0, l1, l2 = to_limbs x in
+      (func l0, func l1, func l2)
+
+    let to_field_limbs_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        : field limbs_type =
+      map x (Common.cvar_field_to_field_as_prover (module Circuit))
+
+    let to_bignum_bigint_limbs_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        : Bignum_bigint.t limbs_type =
+      map x (Common.cvar_field_to_bignum_bigint_as_prover (module Circuit))
+
+    let one (type field)
+        (module Circuit : Snark_intf.Run with type field = field) : field t =
+      of_bignum_bigint (module Circuit) Bignum_bigint.one
+
+    let to_bignum_bigint_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        : Bignum_bigint.t =
+      let l0, l1, l2 = to_bignum_bigint_limbs_as_prover (module Circuit) x in
+      Bignum_bigint.(l0 + (Common.two_to_limb * l1) + (two_to_2limb * l2))
+
+    let to_string_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) a : string =
+      sprintf "%s" @@ Bignum_bigint.to_string
+      @@ to_bignum_bigint_as_prover (module Circuit) a
+
+    let is_zero (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        : Circuit.Boolean.var =
+      let open Circuit in
+      let x0, x1, x2 = to_limbs x in
+      let x0_is_zero = Field.(equal x0 zero) in
+      let x1_is_zero = Field.(equal x1 zero) in
+      let x2_is_zero = Field.(equal x2 zero) in
+      Boolean.(x0_is_zero && x1_is_zero && x2_is_zero)
+
+    let equal_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (left : field t) (right : field t) : bool =
+      let open Circuit in
+      let left0, left1, left2 =
+        to_field_limbs_as_prover (module Circuit) left
+      in
+      let right0, right1, right2 =
+        to_field_limbs_as_prover (module Circuit) right
+      in
+      Field.Constant.(
+        equal left0 right0 && equal left1 right1 && equal left2 right2)
+
+    let assert_equal (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (left : field t) (right : field t) : unit =
+      let open Circuit in
+      let left0, left1, left2 = to_limbs left in
+      let right0, right1, right2 = to_limbs right in
+      Field.Assert.equal left0 right0 ;
+      Field.Assert.equal left1 right1 ;
+      Field.Assert.equal left2 right2
+
+    let check_here_const_of_bignum_bigint (type field)
+        (module Circuit : Snark_intf.Run with type field = field) x : field t =
+      let const_x = const_of_bignum_bigint (module Circuit) x in
+      let var_x = of_bignum_bigint (module Circuit) x in
+      assert_equal (module Circuit) const_x var_x ;
+      const_x
+
+    let fits_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        (modulus : field standard_limbs) : bool =
+      let modulus =
+        field_const_standard_limbs_to_bignum_bigint (module Circuit) modulus
+      in
+      Bignum_bigint.(to_bignum_bigint_as_prover (module Circuit) x < modulus)
+
+    let if_ (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (b : Circuit.Boolean.var) ~(then_ : field t) ~(else_ : field t) :
+        field t =
+      let open Circuit in
+      let then0, then1, then2 = to_limbs then_ in
+      let else0, else1, else2 = to_limbs else_ in
+      of_limbs
+        ( Field.if_ b ~then_:then0 ~else_:else0
+        , Field.if_ b ~then_:then1 ~else_:else1
+        , Field.if_ b ~then_:then2 ~else_:else2 )
+
+    let unpack (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        ~(length : int) : Circuit.Boolean.var list =
+      let open Circuit in
+      (* TODO: Performance improvement, we could use this trick from Halo paper
+       * https://github.com/MinaProtocol/mina/blob/43e2994b64b9d3e99055d644ac6279d39c22ced5/src/lib/pickles/scalar_challenge.ml#L12
+       *)
+      let l0, l1, l2 = to_limbs x in
+      fst
+      @@ List.fold [ l0; l1; l2 ] ~init:([], length)
+           ~f:(fun (lst, length) limb ->
+             let bits_to_copy = min length Common.limb_bits in
+             ( lst @ Field.unpack limb ~length:bits_to_copy
+             , length - bits_to_copy ) )
+  end
+
+  (* Compact limbs foreign field element *)
+  module Compact = struct
+    module Cvar = Snarky_backendless.Cvar
+
+    type 'field limbs_type = 'field compact_limbs
+
+    type 'field t = 'field Cvar.t compact_limbs
+
+    let of_limbs x = x (* Cvar limbs are already the underlying representation *)
+
+    let of_field_limbs (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (x : field limbs_type) : field t =
+      let open Circuit in
+      let x =
+        exists Typ.(Field.typ * Field.typ) ~compute:(fun () -> (fst x, snd x))
+      in
+      of_limbs x
+
+    let of_bignum_bigint (type field)
+        (module Circuit : Snark_intf.Run with type field = field) x : field t =
+      let open Circuit in
+      let l2, l01 = Common.(bignum_bigint_div_rem x two_to_2limb) in
+
+      let limb_vars =
+        exists (Typ.array ~length:2 Field.typ) ~compute:(fun () ->
+            [| Common.bignum_bigint_to_field (module Circuit) l01
+             ; Common.bignum_bigint_to_field (module Circuit) l2
+            |] )
+      in
+      of_limbs (limb_vars.(0), limb_vars.(1))
+
+    let to_limbs x = x
+
+    let const_of_bignum_bigint (type field)
+        (module Circuit : Snark_intf.Run with type field = field) x : field t =
+      let open Circuit in
+      let l2, l01 = Common.(bignum_bigint_div_rem x two_to_2limb) in
+      of_limbs
+        Field.
+          ( constant @@ Common.bignum_bigint_to_field (module Circuit) l01
+          , constant @@ Common.bignum_bigint_to_field (module Circuit) l2 )
+
+    let map (x : 'field t) (func : 'field Cvar.t -> 'g) : 'g limbs_type =
+      let l0, l1 = to_limbs x in
+      (func l0, func l1)
+
+    let one (type field)
+        (module Circuit : Snark_intf.Run with type field = field) : field t =
+      of_bignum_bigint (module Circuit) Bignum_bigint.one
+
+    let to_field_limbs_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        : field limbs_type =
+      map x (Common.cvar_field_to_field_as_prover (module Circuit))
+
+    let to_bignum_bigint_limbs_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        : Bignum_bigint.t limbs_type =
+      map x (Common.cvar_field_to_bignum_bigint_as_prover (module Circuit))
+
+    let to_bignum_bigint_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        =
+      let l01, l2 = to_bignum_bigint_limbs_as_prover (module Circuit) x in
+      Bignum_bigint.(l01 + (two_to_2limb * l2))
+
+    let to_string_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field) a : string =
+      sprintf "%s" @@ Bignum_bigint.to_string
+      @@ to_bignum_bigint_as_prover (module Circuit) a
+
+    let is_zero (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        : Circuit.Boolean.var =
+      let open Circuit in
+      let x01, x2 = to_limbs x in
+      let x01_is_zero = Field.(equal x01 zero) in
+      let x2_is_zero = Field.(equal x2 zero) in
+      Boolean.(x01_is_zero && x2_is_zero)
+
+    let equal_as_prover (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (left : field t) (right : field t) : bool =
+      let open Circuit in
+      let left01, left2 = to_field_limbs_as_prover (module Circuit) left in
+      let right01, right2 = to_field_limbs_as_prover (module Circuit) right in
+      Field.Constant.(equal left01 right01 && equal left2 right2)
+
+    let assert_equal (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (left : field t) (right : field t) : unit =
+      let open Circuit in
+      let left01, left2 = to_limbs left in
+      let right01, right2 = to_limbs right in
+      Field.Assert.equal left01 right01 ;
+      Field.Assert.equal left2 right2
+
+    let check_here_const_of_bignum_bigint (type field)
+        (module Circuit : Snark_intf.Run with type field = field) x : field t =
+      let const_x = const_of_bignum_bigint (module Circuit) x in
+      let var_x = of_bignum_bigint (module Circuit) x in
+      assert_equal (module Circuit) const_x var_x ;
+      const_x
+
+    let if_ (type field)
+        (module Circuit : Snark_intf.Run with type field = field)
+        (b : Circuit.Boolean.var) ~(then_ : field t) ~(else_ : field t) :
+        field t =
+      let open Circuit in
+      let then01, then2 = to_limbs then_ in
+      let else01, else2 = to_limbs else_ in
+      of_limbs
+        ( Field.if_ b ~then_:then01 ~else_:else01
+        , Field.if_ b ~then_:then2 ~else_:else2 )
+
+    let unpack (type field)
+        (module Circuit : Snark_intf.Run with type field = field) (x : field t)
+        ~(length : int) : Circuit.Boolean.var list =
+      (* TODO: Performance improvement, we could use this trick from Halo paper
+       * https://github.com/MinaProtocol/mina/blob/43e2994b64b9d3e99055d644ac6279d39c22ced5/src/lib/pickles/scalar_challenge.ml#L12
+       *)
+      let open Circuit in
+      let l01, l2 = to_limbs x in
+      fst
+      @@ List.foldi [ l01; l2 ] ~init:([], length)
+           ~f:(fun i (lst, length) limb ->
+             let bits_to_copy = min length ((2 - i) * Common.limb_bits) in
+             ( lst @ Field.unpack limb ~length:bits_to_copy
+             , length - bits_to_copy ) )
+  end
+end
+
+(* Structure for tracking external checks that must be made
+ * (using other gadgets) in order to achieve soundness for a
+ * given multiplication *)
+module External_checks = struct
+  module Cvar = Snarky_backendless.Cvar
+
+  type 'field t =
+    { mutable multi_ranges : 'field Cvar.t standard_limbs list
+    ; mutable compact_multi_ranges : 'field Cvar.t compact_limbs list
+    ; mutable bounds : 'field Cvar.t standard_limbs list
+    }
+
+  let create (type field)
+      (module Circuit : Snark_intf.Run with type field = field) : field t =
+    { multi_ranges = []; compact_multi_ranges = []; bounds = [] }
+
+  (* Track a multi-range-check *)
+  (* TODO: improve names of these from append_ to add_, push_ or insert_ *)
+  let append_multi_range_check (external_checks : 'field t)
+      (x : 'field Cvar.t standard_limbs) =
+    external_checks.multi_ranges <- x :: external_checks.multi_ranges
+
+  (* Track a compact-multi-range-check *)
+  let append_compact_multi_range_check (external_checks : 'field t)
+      (x : 'field Cvar.t compact_limbs) =
+    external_checks.compact_multi_ranges <-
+      x :: external_checks.compact_multi_ranges
+
+  (* Track a bound check (i.e. valid_element check) *)
+  let append_bound_check (external_checks : 'field t)
+      (x : 'field Cvar.t standard_limbs) =
+    external_checks.bounds <- x :: external_checks.bounds
+end
+
+(* Common auxiliary functions for foreign field gadgets *)
+
+(* Check that the foreign modulus is less than the maximum allowed *)
+let check_modulus_bignum_bigint (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (foreign_field_modulus : Bignum_bigint.t) =
+  (* Note that the maximum foreign field modulus possible for addition is much
+   * larger than that supported by multiplication.
+   *
+   * Specifically, since the 88-bit limbs are embedded in a native field element
+   * of ~2^255 bits and foreign field addition increases the number of bits
+   * logarithmically, for addition we can actually support a maximum field modulus
+   * of 2^264 - 1 (i.e. binary_modulus - 1) for circuits up to length ~ 2^79 - 1,
+   * which is far larger than the maximum circuit size supported by Kimchi.
+   *
+   * However, for compatibility with multiplication operations, we must use the
+   * same maximum as foreign field multiplication.
+   *)
+  assert (
+    Bignum_bigint.(
+      foreign_field_modulus < max_foreign_field_modulus (module Circuit)) )
+
+(* Check that the foreign modulus is less than the maximum allowed *)
+let check_modulus (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (foreign_field_modulus : f standard_limbs) =
+  let foreign_field_modulus =
+    field_const_standard_limbs_to_bignum_bigint
+      (module Circuit)
+      foreign_field_modulus
+  in
+
+  check_modulus_bignum_bigint (module Circuit) foreign_field_modulus
+
+(* Represents two limbs as one single field element with twice as many bits *)
+let as_prover_compact_limb (type f)
+    (module Circuit : Snark_intf.Run with type field = f) (lo : f) (hi : f) : f
+    =
+  Circuit.Field.Constant.(lo + (hi * two_to_limb_field (module Circuit))) (* lo + 2^L * hi *)
+
+(* FOREIGN FIELD ADDITION GADGET *)
+
+(* Internal computation for foreign field addition *)
+let sum_setup (type f) (module Circuit : Snark_intf.Run with type field = f)
+ (left_input : f Element.Standard.t) (right_input : f Element.Standard.t)
+ (operation : op_mode) (foreign_field_modulus : f standard_limbs) :
+ f Element.Standard.t * f * Circuit.Field.t =
+ let open Circuit in
+ (* Decompose modulus into limbs *)
+ let foreign_field_modulus0, foreign_field_modulus1, foreign_field_modulus2 =
+ foreign_field_modulus
+ in
+ (* Decompose left input into limbs *)
+ let left_input0, left_input1, left_input2 =
+ Element.Standard.to_limbs left_input
+ in
+ (* Decompose right input into limbs. If final check, right_input2 will contain 2^limb *)
+ let right_input0, right_input1, right_input2 =
+ Element.Standard.to_limbs right_input
+ in
+
+ (* Addition or subtraction *)
+ let sign =
+ match operation with
+ | Sub ->
+ Field.Constant.(negate one)
+ | Add ->
+ Field.Constant.one
+ in
+
+ (* Given a left and right inputs to an addition or subtraction, and a modulus, it computes
+ * all necessary values needed for the witness layout. Meaning, it returns an [FFAddValues] instance
+ * - the result of the addition/subtraction as a ForeignElement
+ * - the sign of the operation
+ * - the overflow flag
+ * - the carry value *)
+ let result0, result1, result2, field_overflow, carry =
+ exists (Typ.array ~length:5 Field.typ) ~compute:(fun () ->
+ (* Compute bigint version of the inputs *)
+ let modulus =
+ field_const_standard_limbs_to_bignum_bigint
+ (module Circuit)
+ foreign_field_modulus
+ in
+ let left =
+ Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ left_input
+ in
+ let right =
+ Element.Standard.to_bignum_bigint_as_prover
+ (module Circuit)
+ right_input
+ in
+
+ (* Compute values for the ffadd *)
+
+ (* Overflow if addition and greater than modulus or
+ * underflow if subtraction and less than zero
+ *)
+ let has_overflow =
+ match operation with
+ | Sub ->
+ Bignum_bigint.(left < right)
+ | Add ->
+ Bignum_bigint.(left + right >= modulus)
+ in
+
+ (* 0 for no overflow
+ * -1 for underflow
+ * +1 for overflow
+ *)
+ let field_overflow =
+ if has_overflow then sign else Field.Constant.zero
+ in
+
+ (* Compute the result
+ * result = left + sign * right - field_overflow * modulus
+ * TODO: unluckily, we cannot do it in one line if we keep these types, because one
+ * cannot combine field elements and biguints in the same operation automatically
+ *)
+ let is_sub = match operation with Sub -> true | Add -> false in
+ let result =
+ Element.Standard.of_bignum_bigint (module Circuit)
+ @@ Bignum_bigint.(
+ if is_sub then
+ if not has_overflow then (* normal subtraction *)
+ left - right
+ else (* underflow *)
+ modulus + left - right
+ else if not has_overflow then (* normal addition *)
+ left + right
+ else (* overflow *)
+ left + right - modulus)
+ in
+
+ (* c = [ (a1 * 2^88 + a0) + s * (b1 * 2^88 + b0) - q * (f1 * 2^88 + f0) - (r1 * 2^88 + r0) ] / 2^176
+ * <=>
+ * c = r2 - a2 - s*b2 + q*f2 *)
+ let left_input0, left_input1, left_input2 =
+ Element.Standard.to_field_limbs_as_prover (module Circuit) left_input
+ in
+ let right_input0, right_input1, right_input2 =
+ Element.Standard.to_field_limbs_as_prover (module Circuit) right_input
+ in
+ let result0, result1, result2 =
+ Element.Standard.to_field_limbs_as_prover (module Circuit) result
+ in
+
+ (* Compute the carry value *)
+ let carry_bot =
+ Field.Constant.(
+ ( as_prover_compact_limb (module Circuit) left_input0 left_input1
+ + as_prover_compact_limb (module Circuit) right_input0 right_input1
+ * sign
+ - as_prover_compact_limb
+ (module Circuit)
+ foreign_field_modulus0 foreign_field_modulus1
+ * field_overflow
+ - as_prover_compact_limb (module Circuit) result0 result1 )
+ / two_to_2limb_field (module Circuit))
+ in
+
+ let carry_top =
+ Field.Constant.(
+ result2 - left_input2 - (sign * right_input2)
+ + (field_overflow * foreign_field_modulus2))
+ in
+
+ (* Check that both ways of computing the carry value are equal *)
+ assert (Field.Constant.equal carry_top carry_bot) ;
+
+ (* Return the ffadd values *)
+ [| result0; result1; result2; field_overflow; carry_bot |] )
+ |> tuple5_of_array
+ in
+
+ (* Create the gate *)
+ with_label "ffadd_gate" (fun () ->
+ (* Set up FFAdd gate *)
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (ForeignFieldAdd
+ { left_input_lo = left_input0
+ ; left_input_mi = left_input1
+ ; left_input_hi = left_input2
+ ; right_input_lo = right_input0
+ ; right_input_mi = right_input1
+ ; right_input_hi = right_input2
+ ; field_overflow
+ ; carry
+ ; foreign_field_modulus0
+ ; foreign_field_modulus1
+ ; foreign_field_modulus2
+ ; sign
+ } )
+ } ) ;
+
+ (* Return the result *)
+ (Element.Standard.of_limbs (result0, result1, result2), sign, field_overflow)
+
+(* Gadget for creating an addition or subtraction result row (Zero gate with result)
+ *
+ * Appends a single Raw Zero gate whose first three cells hold the limbs of
+ * [result]. The FFAdd gadget exposes its result on the row that follows it,
+ * so this row is placed immediately after the corresponding FFAdd gate
+ * (see its use after [sum_setup] in [valid_element]).
+ *
+ * Inputs:
+ *   label  := optional label for the constraint (default "result_zero_row")
+ *   result := 3-limb foreign field element to expose on the row
+ *)
+let result_row (type f) (module Circuit : Snark_intf.Run with type field = f)
+    ?(label = "result_zero_row") (result : f Element.Standard.t) =
+  let open Circuit in
+  (* Decompose the result into its three limbs, which become the row's values *)
+  let result0, result1, result2 = Element.Standard.to_limbs result in
+  with_label label (fun () ->
+      assert_
+        { annotation = Some __LOC__
+        ; basic =
+            Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+              (Raw
+                 { kind = Zero
+                 ; values = [| result0; result1; result2 |]
+                 ; coeffs = [||]
+                 } )
+        } )
+
+(* Gadget to check the supplied value is a valid foreign field element for the
+ * supplied foreign field modulus
+ *
+ * This gadget checks in the circuit that a value is less than the foreign field modulus.
+ * Part of this involves computing a bound value that is both added to external_checks
+ * and also returned. The caller may use either one, depending on the situation.
+ *
+ * Inputs:
+ *   external_checks       := Context to track required external checks
+ *   value                 := the value to check
+ *   foreign_field_modulus := the modulus of the foreign field
+ *
+ * Outputs:
+ *   Inserts the gates (described below) into the circuit
+ *   Adds bound value to be multi-range-checked to external_checks
+ *   Returns bound value
+ *
+ * Effects to the circuit:
+ *   - 1 FFAdd gate
+ *   - 1 Zero gate
+ *)
+let valid_element (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f External_checks.t) (value : f Element.Standard.t)
+    (foreign_field_modulus : f standard_limbs) : f Element.Standard.t =
+  let open Circuit in
+  (* Compute the value for the right input of the addition as 2^264,
+   * i.e. limbs (0, 0, 2^88): only the high limb is non-zero *)
+  let offset0 = Field.zero in
+  let offset1 = Field.zero in
+  let offset2 =
+    exists Field.typ ~compute:(fun () -> two_to_limb_field (module Circuit))
+  in
+  (* Checks that these cvars have constant values are added as generics *)
+  let offset = Element.Standard.of_limbs (offset0, offset1, offset2) in
+
+  (* Prover-only sanity check that the value fits in the foreign field *)
+  as_prover (fun () ->
+      assert (
+        Element.Standard.fits_as_prover
+          (module Circuit)
+          value foreign_field_modulus ) ;
+      () ) ;
+
+  (* Create FFAdd gate to compute the bound value (i.e. part of valid_element check) *)
+  let bound, sign, ovf =
+    sum_setup (module Circuit) value offset Add foreign_field_modulus
+  in
+  (* Result row: expose the bound on the row following the FFAdd gate *)
+  result_row (module Circuit) ~label:"final_add_zero_gate" bound ;
+
+  (* Prover-only sanity check *)
+  as_prover (fun () ->
+      (* For an addition of 2^264 to an in-range value, the sign must be +1
+       * and the overflow flag must be exactly 1 *)
+      let ovf = Common.cvar_field_to_field_as_prover (module Circuit) ovf in
+      assert (Field.Constant.(equal sign one)) ;
+      assert (Field.Constant.(equal ovf one)) ) ;
+
+  (* Set up copy constraint enforcing in-circuit that the overflow is one *)
+  Field.Assert.equal ovf Field.one ;
+
+  (* Check that the highest limb of the right input (offset) is 2^88 *)
+  let two_to_88 = two_to_limb_field (module Circuit) in
+  Field.Assert.equal (Field.constant two_to_88) offset2 ;
+
+  (* Defer the multi-range-check of the bound limbs to external_checks *)
+  External_checks.append_multi_range_check external_checks
+  @@ Element.Standard.to_limbs bound ;
+
+  (* Return the bound value *)
+  bound
+
+(* Gadget to constrain all deferred (external) checks accumulated in
+ * [external_checks], using the supplied modulus.
+ *
+ * Inserts gates, in order:
+ *   1) a bound-addition (valid_element) gate pair per tracked bound
+ *   2) a multi-range-check gadget per tracked multi-range
+ *   3) a compact-multi-range-check gadget per tracked compact multi-range
+ *)
+let constrain_external_checks (type field)
+    (module Circuit : Snark_intf.Run with type field = field)
+    (external_checks : field External_checks.t) (modulus : field standard_limbs)
+    =
+  (* 1) Add gates for external bound additions.
+   *    Note: internally this also adds multi-range-checks for the
+   *    computed bound to external_checks.multi_ranges, which
+   *    are then constrained in (2)
+   *)
+  List.iter external_checks.bounds ~f:(fun value ->
+      let _bound =
+        valid_element
+          (module Circuit)
+          external_checks
+          (Element.Standard.of_limbs value)
+          modulus
+      in
+      () ) ;
+
+  (* 2) Add gates for external multi-range-checks *)
+  List.iter external_checks.multi_ranges ~f:(fun multi_range ->
+      let v0, v1, v2 = multi_range in
+      Range_check.multi (module Circuit) v0 v1 v2 ;
+      () ) ;
+
+  (* 3) Add gates for external compact-multi-range-checks *)
+  List.iter external_checks.compact_multi_ranges ~f:(fun compact_multi_range ->
+      let v01, v2 = compact_multi_range in
+      Range_check.compact_multi (module Circuit) v01 v2 ;
+      () )
+
+(* FOREIGN FIELD ADDITION CHAIN GADGET *)
+
+(** Gadget for a chain of foreign field sums (additions or subtractions)
+ *
+ * Inputs:
+ *   inputs                := All the inputs to the chain of sums
+ *   operations            := List of operation modes Add or Sub indicating whether the
+ *                            corresponding addition is a subtraction
+ *   foreign_field_modulus := The modulus of the foreign field (all the same)
+ *
+ * Outputs:
+ *   Inserts the gates (described below) into the circuit
+ *   Returns the final result of the chain of sums
+ *
+ * For n+1 inputs, the gadget creates n foreign field addition gates, followed by a final
+ * foreign field addition gate for the bound check (i.e. valid_element check). For this,
+ * an additional multi range check must also be performed.
+ * By default, the range check takes place right after the final Raw row.
+ *)
+let sum_chain (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (inputs : f Element.Standard.t list) (operations : op_mode list)
+    (foreign_field_modulus : f standard_limbs) : f Element.Standard.t =
+  let open Circuit in
+  (* Check foreign field modulus < max allowed *)
+  check_modulus (module Circuit) foreign_field_modulus ;
+  (* Check that the number of inputs is correct: n operations need n+1 operands *)
+  let n = List.length operations in
+  assert (List.length inputs = n + 1) ;
+
+  (* Initialize first left input and check it fits in the foreign mod.
+   * A single-cell array serves as a mutable reference holding the running result. *)
+  let left = [| List.hd_exn inputs |] in
+  as_prover (fun () ->
+      assert (
+        Element.Standard.fits_as_prover
+          (module Circuit)
+          left.(0) foreign_field_modulus ) ;
+      () ) ;
+
+  (* For all n additions, compute its values and create gates *)
+  for i = 0 to n - 1 do
+    let op = List.nth_exn operations i in
+    let right = List.nth_exn inputs (i + 1) in
+    (* Prover-only check that the next operand is smaller than the foreign modulus *)
+    as_prover (fun () ->
+        assert (
+          Element.Standard.fits_as_prover
+            (module Circuit)
+            right foreign_field_modulus ) ;
+        () ) ;
+
+    (* Create the foreign field addition row *)
+    let result, _sign, _ovf =
+      sum_setup (module Circuit) left.(0) right op foreign_field_modulus
+    in
+
+    (* Update left input for next iteration *)
+    left.(0) <- result ; ()
+  done ;
+
+  (* Add the final gate for the bound *)
+  (* result + (2^264 - f) = bound *)
+  let result = left.(0) in
+  (* The bound's multi-range-check is constrained inline right below,
+   * so the external checks context is intentionally discarded *)
+  let unused_external_checks = External_checks.create (module Circuit) in
+  let bound =
+    valid_element
+      (module Circuit)
+      unused_external_checks result foreign_field_modulus
+  in
+  let bound0, bound1, bound2 = Element.Standard.to_limbs bound in
+
+  (* Include Multi range check for the bound right after *)
+  Range_check.multi (module Circuit) bound0 bound1 bound2 ;
+
+  (* Return result *)
+  result
+
+(* FOREIGN FIELD ADDITION SINGLE GADGET *)
+
+(* Definition of a gadget for a single foreign field addition
+ *
+ * Inputs:
+ *   full                  := Flag for whether to perform a full addition with
+ *                            valid_element check on the result (default true)
+ *                            or just a single FFAdd row (false)
+ *   left_input            := 3 limbs foreign field element
+ *   right_input           := 3 limbs foreign field element
+ *   foreign_field_modulus := The modulus of the foreign field
+ *
+ * Outputs:
+ *   Inserts the gates (described below) into the circuit
+ *   Returns the result of the addition as a 3 limbs element
+ *
+ * In default (full) mode it adds an FFAdd gate, a Zero gate, an FFAdd gate for
+ * the bound check, a Zero gate after that bound check, and a Multi Range Check
+ * gadget. In non-full mode it adds only the single FFAdd gate.
+ *)
+let add (type f) (module Circuit : Snark_intf.Run with type field = f)
+    ?(full = true) (left_input : f Element.Standard.t)
+    (right_input : f Element.Standard.t)
+    (foreign_field_modulus : f standard_limbs) : f Element.Standard.t =
+  if full then
+    (* Full mode: a one-operation chain, which also performs the
+     * valid_element bound check on the result *)
+    sum_chain
+      (module Circuit)
+      [ left_input; right_input ]
+      [ Add ] foreign_field_modulus
+  else
+    (* Lightweight mode: a single FFAdd row, no validity check on the result *)
+    let result, _sign, _ovf =
+      sum_setup
+        (module Circuit)
+        left_input right_input Add foreign_field_modulus
+    in
+    result
+
+(* Definition of a gadget for a single foreign field subtraction
+ *
+ * Inputs:
+ *   full                  := Flag for whether to perform a full subtraction with
+ *                            valid_element check on the result (default true)
+ *                            or just a single FFAdd row (false)
+ *   left_input            := 3 limbs foreign field element
+ *   right_input           := 3 limbs foreign field element
+ *   foreign_field_modulus := The modulus of the foreign field
+ *
+ * Outputs:
+ *   Inserts the gates (described below) into the circuit
+ *   Returns the result of the subtraction as a 3 limbs element
+ *
+ * In default (full) mode it adds an FFAdd gate, a Zero gate, an FFAdd gate for
+ * the bound check, a Zero gate after that bound check, and a Multi Range Check
+ * gadget. In non-full mode it adds only the single FFAdd gate.
+ *)
+let sub (type f) (module Circuit : Snark_intf.Run with type field = f)
+    ?(full = true) (left_input : f Element.Standard.t)
+    (right_input : f Element.Standard.t)
+    (foreign_field_modulus : f standard_limbs) : f Element.Standard.t =
+  if full then
+    (* Full mode: a one-operation chain, which also performs the
+     * valid_element bound check on the result *)
+    sum_chain
+      (module Circuit)
+      [ left_input; right_input ]
+      [ Sub ] foreign_field_modulus
+  else
+    (* Lightweight mode: a single FFAdd row, no validity check on the result *)
+    let result, _sign, _ovf =
+      sum_setup
+        (module Circuit)
+        left_input right_input Sub foreign_field_modulus
+    in
+    result
+
+(* FOREIGN FIELD MULTIPLICATION *)
+
+(* Compute the non-zero intermediate products of foreign field multiplication
+ *
+ * For more details see the "Intermediate products" Section of
+ * the [Foreign Field Multiplication RFC](https://o1-labs.github.io/proof-systems/rfcs/foreign_field_mul.html)
+ *
+ * Preconditions: this entire function is witness code and, therefore, must be
+ * only called from an exists construct.
+ *)
+let compute_intermediate_products (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (left_input : f Element.Standard.t) (right_input : f Element.Standard.t)
+    (quotient : f standard_limbs) (neg_foreign_field_modulus : f standard_limbs)
+    : f * f * f =
+  let open Circuit in
+  (* Limb decompositions: a = left input, b = right input,
+   * q = quotient, f' = negated foreign field modulus *)
+  let a0, a1, a2 =
+    Element.Standard.to_field_limbs_as_prover (module Circuit) left_input
+  in
+  let b0, b1, b2 =
+    Element.Standard.to_field_limbs_as_prover (module Circuit) right_input
+  in
+  let q0, q1, q2 = quotient in
+  let f0, f1, f2 = neg_foreign_field_modulus in
+  (* p0 = a0 * b0 + q0 * f'0 *)
+  let product0 = Field.Constant.((a0 * b0) + (q0 * f0)) in
+  (* p1 = a0 * b1 + a1 * b0 + q0 * f'1 + q1 * f'0 *)
+  let product1 =
+    Field.Constant.((a0 * b1) + (a1 * b0) + (q0 * f1) + (q1 * f0))
+  in
+  (* p2 = a0 * b2 + a2 * b0 + a1 * b1 + q0 * f'2 + q2 * f'0 + q1 * f'1 *)
+  let product2 =
+    Field.Constant.(
+      (a0 * b2) + (a2 * b0) + (a1 * b1) + (q0 * f2) + (q2 * f0) + (q1 * f1))
+  in
+  (product0, product1, product2)
+
+(* Compute the intermediate sums q'01 and q'2 used by the quotient bound check
+ * For more details see the "Optimizations" Section of
+ * the [Foreign Field Multiplication RFC](https://o1-labs.github.io/proof-systems/rfcs/foreign_field_mul.html) *)
+let compute_intermediate_sums (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (quotient : f standard_limbs) (neg_foreign_field_modulus : f standard_limbs)
+    : f * f =
+  let open Circuit in
+  (* Limb decompositions: q = quotient, f' = negated foreign field modulus *)
+  let q0, q1, q2 = quotient in
+  let f0, f1, f2 = neg_foreign_field_modulus in
+  let two_to_l = two_to_limb_field (module Circuit) in
+  (* q01 = q0 + 2^L * q1 *)
+  let q01 = Field.Constant.(q0 + (two_to_l * q1)) in
+  (* f'01 = f'0 + 2^L * f'1 *)
+  let f01 = Field.Constant.(f0 + (two_to_l * f1)) in
+  (* Return (q'01, q'2) = (q01 + f'01, q2 + f'2) *)
+  (Field.Constant.(q01 + f01), Field.Constant.(q2 + f2))
+
+(* Compute witness variables related to foreign field multiplication *)
+let compute_witness_variables (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (products : Bignum_bigint.t standard_limbs)
+    (remainder : Bignum_bigint.t standard_limbs) : f * f * f * f * f * f =
+  let p0, p1, p2 = products in
+  let r0, r1, r2 = remainder in
+
+  (* C1-C2: Split the middle intermediate product as
+   * p1 = 2^L * product1_hi + product1_lo and then
+   * product1_hi = 2^L * product1_hi_1 + product1_hi_0 *)
+  let product1_hi, product1_lo =
+    Common.(bignum_bigint_div_rem p1 two_to_limb)
+  in
+  let product1_hi_1, product1_hi_0 =
+    Common.(bignum_bigint_div_rem product1_hi two_to_limb)
+  in
+
+  (* C3-C5: Compute v0 = the top 2 bits of (p0 + 2^L * p10 - r0 - 2^L * r1) / 2^2L
+   * N.b. To avoid an underflow error, the equation must sum the intermediate
+   * product terms before subtracting limbs of the remainder. *)
+  let carry0 =
+    Bignum_bigint.(
+      ( p0
+      + (Common.two_to_limb * product1_lo)
+      - r0
+      - (Common.two_to_limb * r1) )
+      / two_to_2limb)
+  in
+
+  (* C6-C7: Compute v1 = the top L + 3 bits of (p2 + p11 + v0 - r2) / 2^L
+   * N.b. Same as above, sum the intermediate product terms before
+   * subtracting the remainder to avoid underflow. *)
+  let carry1 =
+    Bignum_bigint.((p2 + product1_hi + carry0 - r2) / Common.two_to_limb)
+  in
+  (* Split v1 into its high (v11) and low (v10) parts *)
+  let carry1_hi, carry1_lo =
+    Common.(bignum_bigint_div_rem carry1 two_to_limb)
+  in
+
+  (* Convert the witness values into native field elements *)
+  ( Common.bignum_bigint_to_field (module Circuit) product1_lo
+  , Common.bignum_bigint_to_field (module Circuit) product1_hi_0
+  , Common.bignum_bigint_to_field (module Circuit) product1_hi_1
+  , Common.bignum_bigint_to_field (module Circuit) carry0
+  , Common.bignum_bigint_to_field (module Circuit) carry1_lo
+  , Common.bignum_bigint_to_field (module Circuit) carry1_hi )
+
+(* Perform the integer bound addition computation x' = x + f'
+ * Asserts (prover-side) that the bound fits in the binary modulus 2^t *)
+let compute_bound (x : Bignum_bigint.t)
+    (neg_foreign_field_modulus : Bignum_bigint.t) : Bignum_bigint.t =
+  let bound = Bignum_bigint.(x + neg_foreign_field_modulus) in
+  assert (Bignum_bigint.(bound < binary_modulus)) ;
+  bound
+
+(* Compute the carry bit witness of the quotient bound addition *)
+let compute_bound_witness_carry (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    (sums : Bignum_bigint.t compact_limbs)
+    (bound : Bignum_bigint.t compact_limbs) : f =
+  (* Only the low compact limbs participate in the carry computation *)
+  let s01, _s2 = sums in
+  let b01, _b2 = bound in
+
+  (* C9: the multi-range-check witness data is created externally by the caller *)
+
+  (* C10-C11: Compute q'_carry01 = (s01 - q'01) / 2^2L *)
+  let carry, _remainder =
+    Common.bignum_bigint_div_rem Bignum_bigint.(s01 - b01) two_to_2limb
+  in
+  Common.bignum_bigint_to_field (module Circuit) carry
+
+(* Foreign field multiplication gadget definition
+ *
+ * Inputs:
+ *   external_checks       := Context to track required external checks
+ *   bound_check_result    := Whether to add a bound check on the remainder to
+ *                            external_checks (default true)
+ *   left_input            := 3 limbs foreign field element
+ *   right_input           := 3 limbs foreign field element
+ *   foreign_field_modulus := The modulus of the foreign field
+ *
+ * Outputs:
+ *   Inserts the ForeignFieldMul gate (with its paired next row) into the circuit
+ *   Appends the required multi-range-check and compact-multi-range-check
+ *   requests (and, optionally, the result bound check) to external_checks
+ *   Returns remainder = left_input * right_input mod foreign_field_modulus
+ *)
+let mul (type f) (module Circuit : Snark_intf.Run with type field = f)
+    (external_checks : f External_checks.t) ?(bound_check_result = true)
+    (left_input : f Element.Standard.t) (right_input : f Element.Standard.t)
+    (foreign_field_modulus : f standard_limbs) : f Element.Standard.t =
+  let open Circuit in
+  (* Check foreign field modulus < max allowed *)
+  check_modulus (module Circuit) foreign_field_modulus ;
+
+  (* Compute gate coefficients
+   * This happens when circuit is created / not part of witness (e.g. exists, As_prover code)
+   *)
+  let foreign_field_modulus0, foreign_field_modulus1, foreign_field_modulus2 =
+    foreign_field_modulus
+  in
+  let ( neg_foreign_field_modulus
+      , ( neg_foreign_field_modulus0
+        , neg_foreign_field_modulus1
+        , neg_foreign_field_modulus2 ) ) =
+    let foreign_field_modulus =
+      field_const_standard_limbs_to_bignum_bigint
+        (module Circuit)
+        foreign_field_modulus
+    in
+    (* Compute negated foreign field modulus f' = 2^t - f public parameter *)
+    let neg_foreign_field_modulus =
+      Bignum_bigint.(binary_modulus - foreign_field_modulus)
+    in
+    ( neg_foreign_field_modulus
+    , bignum_bigint_to_field_const_standard_limbs
+        (module Circuit)
+        neg_foreign_field_modulus )
+  in
+
+  (* Compute witness values *)
+  let ( carry1_lo
+      , carry1_hi
+      , product1_hi_1
+      , carry0
+      , quotient0
+      , quotient1
+      , quotient2
+      , quotient_bound_carry
+      , remainder0
+      , remainder1
+      , remainder2
+      , quotient_bound01
+      , quotient_bound2
+      , product1_lo
+      , product1_hi_0 ) =
+    exists (Typ.array ~length:15 Field.typ) ~compute:(fun () ->
+        (* Compute quotient remainder and negative foreign field modulus *)
+        let quotient, remainder =
+          (* Bignum_bigint computations *)
+          let left_input =
+            Element.Standard.to_bignum_bigint_as_prover
+              (module Circuit)
+              left_input
+          in
+          let right_input =
+            Element.Standard.to_bignum_bigint_as_prover
+              (module Circuit)
+              right_input
+          in
+          let foreign_field_modulus =
+            field_const_standard_limbs_to_bignum_bigint
+              (module Circuit)
+              foreign_field_modulus
+          in
+
+          (* Compute quotient and remainder using foreign field modulus *)
+          let quotient, remainder =
+            Common.bignum_bigint_div_rem
+              Bignum_bigint.(left_input * right_input)
+              foreign_field_modulus
+          in
+          (quotient, remainder)
+        in
+
+        (* Compute the intermediate products *)
+        let products =
+          let quotient =
+            bignum_bigint_to_field_const_standard_limbs
+              (module Circuit)
+              quotient
+          in
+          let neg_foreign_field_modulus =
+            bignum_bigint_to_field_const_standard_limbs
+              (module Circuit)
+              neg_foreign_field_modulus
+          in
+          let product0, product1, product2 =
+            compute_intermediate_products
+              (module Circuit)
+              left_input right_input quotient neg_foreign_field_modulus
+          in
+
+          ( Common.field_to_bignum_bigint (module Circuit) product0
+          , Common.field_to_bignum_bigint (module Circuit) product1
+          , Common.field_to_bignum_bigint (module Circuit) product2 )
+        in
+
+        (* Compute the intermediate sums *)
+        let sums =
+          let quotient =
+            bignum_bigint_to_field_const_standard_limbs
+              (module Circuit)
+              quotient
+          in
+          let neg_foreign_field_modulus =
+            bignum_bigint_to_field_const_standard_limbs
+              (module Circuit)
+              neg_foreign_field_modulus
+          in
+          let sum01, sum2 =
+            compute_intermediate_sums
+              (module Circuit)
+              quotient neg_foreign_field_modulus
+          in
+          ( Common.field_to_bignum_bigint (module Circuit) sum01
+          , Common.field_to_bignum_bigint (module Circuit) sum2 )
+        in
+
+        (* Compute witness variables *)
+        let ( product1_lo
+            , product1_hi_0
+            , product1_hi_1
+            , carry0
+            , carry1_lo
+            , carry1_hi ) =
+          compute_witness_variables
+            (module Circuit)
+            products
+            (bignum_bigint_to_standard_limbs remainder)
+        in
+
+        (* Compute bounds for multi-range-checks on quotient and remainder *)
+        let quotient_bound = compute_bound quotient neg_foreign_field_modulus in
+
+        (* Compute quotient bound addition witness variables *)
+        let quotient_bound_carry =
+          compute_bound_witness_carry
+            (module Circuit)
+            sums
+            (bignum_bigint_to_compact_limbs quotient_bound)
+        in
+
+        (* Compute the rest of the witness data *)
+        let quotient0, quotient1, quotient2 =
+          bignum_bigint_to_field_const_standard_limbs (module Circuit) quotient
+        in
+        let remainder0, remainder1, remainder2 =
+          bignum_bigint_to_field_const_standard_limbs (module Circuit) remainder
+        in
+        let quotient_bound01, quotient_bound2 =
+          bignum_bigint_to_field_const_compact_limbs
+            (module Circuit)
+            quotient_bound
+        in
+
+        (* Pack the witness in the order expected by tuple15_of_array, which
+         * must match the destructuring pattern above *)
+        [| carry1_lo
+         ; carry1_hi
+         ; product1_hi_1
+         ; carry0
+         ; quotient0
+         ; quotient1
+         ; quotient2
+         ; quotient_bound_carry
+         ; remainder0
+         ; remainder1
+         ; remainder2
+         ; quotient_bound01
+         ; quotient_bound2
+         ; product1_lo
+         ; product1_hi_0
+        |] )
+    |> tuple15_of_array
+  in
+
+  (* Add external checks *)
+  (* Multi-range check for the carry and intermediate product limbs *)
+  External_checks.append_multi_range_check external_checks
+    (carry1_lo, product1_lo, product1_hi_0) ;
+  (* Compact multi-range check for the quotient bound *)
+  External_checks.append_compact_multi_range_check external_checks
+    (quotient_bound01, quotient_bound2) ;
+  (* Optionally defer the bound (valid_element) check on the result *)
+  if bound_check_result then
+    External_checks.append_bound_check external_checks
+      (remainder0, remainder1, remainder2) ;
+
+  (* Decompose the inputs into the limb cvars wired into the gate *)
+  let left_input0, left_input1, left_input2 =
+    Element.Standard.to_limbs left_input
+  in
+  let right_input0, right_input1, right_input2 =
+    Element.Standard.to_limbs right_input
+  in
+
+  (* Create ForeignFieldMul gate *)
+  with_label "foreign_field_mul" (fun () ->
+      assert_
+        { annotation = Some __LOC__
+        ; basic =
+            Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+              (ForeignFieldMul
+                 { (* Current row *) left_input0
+                 ; left_input1
+                 ; left_input2
+                 ; right_input0
+                 ; right_input1
+                 ; right_input2
+                 ; carry1_lo
+                 ; carry1_hi
+                 ; carry0
+                 ; quotient0
+                 ; quotient1
+                 ; quotient2
+                 ; quotient_bound_carry
+                 ; product1_hi_1
+                 ; (* Next row *) remainder0
+                 ; remainder1
+                 ; remainder2
+                 ; quotient_bound01
+                 ; quotient_bound2
+                 ; product1_lo
+                 ; product1_hi_0
+                 ; (* Coefficients *) foreign_field_modulus0
+                 ; foreign_field_modulus1
+                 ; foreign_field_modulus2
+                 ; neg_foreign_field_modulus0
+                 ; neg_foreign_field_modulus1
+                 ; neg_foreign_field_modulus2
+                 } )
+        } ) ;
+  (* Return the remainder as the product *)
+  Element.Standard.of_limbs (remainder0, remainder1, remainder2)
+
+(* Gadget to constrain conversion of a bytes array (output of Keccak gadget)
+ * into a foreign field element with standard limbs (input of ECDSA gadget).
+ * Include the endianness of the bytes list.
+ *
+ * Inputs:
+ *   endian      := endianness of the supplied bytestring
+ *   bytestring  := list of circuit field elements, one byte per element
+ *   fmod        := foreign field modulus in standard limbs
+ *   fmod_bitlen := bit length of the foreign field modulus
+ *
+ * Outputs:
+ *   Inserts the constraints described below into the circuit
+ *   Returns the bytestring reduced modulo fmod as a standard-limb element
+ *)
+let bytes_to_standard_element (type f)
+    (module Circuit : Snark_intf.Run with type field = f)
+    ~(endian : Keccak.endianness) (bytestring : Circuit.Field.t list)
+    (fmod : f standard_limbs) (fmod_bitlen : int) =
+  let open Circuit in
+  (* Make the input bytestring a big endian value *)
+  let bytestring =
+    match endian with Little -> List.rev bytestring | Big -> bytestring
+  in
+
+  (* Convert the bytestring into an array for indexed access *)
+  let bytestring = Array.of_list bytestring in
+
+  (* C1: Check modulus_bit_length = # of bits you unpack
+   * This is partly implicit in the circuit given the number of byte outputs of Keccak:
+   * · input_bitlen < fmod_bitlen : OK
+   * · input_bitlen = fmod_bitlen : OK
+   * · input_bitlen > fmod_bitlen : CONSTRAIN
+   * Check that the most significant byte of the input is less than 2^(fmod_bitlen % 8)
+   *)
+  let input_bitlen = Array.length bytestring * 8 in
+  if input_bitlen > fmod_bitlen then
+    (* For the most significant byte, constrain that it uses fewer bits than required *)
+    Lookup.less_than_bits
+      (module Circuit)
+      ~bits:(fmod_bitlen % 8) bytestring.(0) ;
+  (* C2: Constrain bytes into standard foreign field element limbs => foreign field element z *)
+  let elem =
+    Element.Standard.of_bignum_bigint (module Circuit)
+    @@ Common.cvar_field_bytes_to_bignum_bigint_as_prover (module Circuit)
+    @@ Array.to_list bytestring
+  in
+  (* C3: Reduce z modulo foreign_field_modulus
+   *
+   * Constrain z' = z + 0 modulo foreign_field_modulus using foreign field addition gate
+   *
+   * Note: this is sufficient because z cannot be double the size due to bit length constraint
+   *)
+  let zero = Element.Standard.of_limbs (Field.zero, Field.zero, Field.zero) in
+  (* C4: Range check z' < f *)
+  (* Altogether this is a call to Foreign_field.add in default mode *)
+  let output = add (module Circuit) elem zero fmod in
+
+  (* return z' *)
+  output
+
+(*********)
+(* Tests *)
+(*********)
+
+let%test_unit "foreign_field arithmetics gadgets" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test foreign_field_add gadget
+ * Inputs:
+ * - left_input
+ * - right_input
+ * - foreign_field_modulus
+ * Checks with multi range checks the size of the inputs.
+ *)
+ let test_add ?cs (left_input : Bignum_bigint.t)
+ (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t)
+ =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test inputs *)
+ let expected =
+ Bignum_bigint.((left_input + right_input) % foreign_field_modulus)
+ in
+ let foreign_field_modulus =
+ bignum_bigint_to_field_const_standard_limbs
+ (module Runner.Impl)
+ foreign_field_modulus
+ in
+ let left_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) left_input
+ in
+ let right_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) right_input
+ in
+ (* Create the gadget *)
+ let sum =
+ add
+ (module Runner.Impl)
+ left_input right_input foreign_field_modulus
+ in
+ (* Create external checks context for tracking extra constraints *)
+ let external_checks = External_checks.create (module Runner.Impl) in
+ (* Check that the inputs were foreign field elements*)
+ let _out =
+ valid_element
+ (module Runner.Impl)
+ external_checks left_input foreign_field_modulus
+ in
+ let _out =
+ valid_element
+ (module Runner.Impl)
+ external_checks right_input foreign_field_modulus
+ in
+
+ assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 2) ;
+ List.iter external_checks.multi_ranges ~f:(fun multi_range ->
+ let v0, v1, v2 = multi_range in
+ Range_check.multi (module Runner.Impl) v0 v1 v2 ;
+ () ) ;
+
+ as_prover (fun () ->
+ let expected =
+ Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ expected
+ in
+ assert (
+ Element.Standard.equal_as_prover
+ (module Runner.Impl)
+ expected sum ) ) ;
+ () )
+ in
+ cs
+ in
+
+ (* Helper to test foreign_field_mul gadget with external checks
+ * Inputs:
+ * - inputs
+ * - foreign_field_modulus
+ * - is_sub: list of operations to perform
+ *)
+ let test_add_chain ?cs (inputs : Bignum_bigint.t list)
+ (operations : op_mode list) (foreign_field_modulus : Bignum_bigint.t) =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* compute result of the chain *)
+ let n = List.length operations in
+ let chain_result = [| List.nth_exn inputs 0 |] in
+ for i = 0 to n - 1 do
+ let operation = List.nth_exn operations i in
+ let op_sign =
+ match operation with
+ | Add ->
+ Bignum_bigint.one
+ | Sub ->
+ Bignum_bigint.of_int (-1)
+ in
+ let inp = List.nth_exn inputs (i + 1) in
+ let sum =
+ Bignum_bigint.(
+ (chain_result.(0) + (op_sign * inp)) % foreign_field_modulus)
+ in
+ chain_result.(0) <- sum ; ()
+ done ;
+
+ let inputs =
+ List.map
+ ~f:(fun x ->
+ Element.Standard.of_bignum_bigint (module Runner.Impl) x )
+ inputs
+ in
+ let foreign_field_modulus =
+ bignum_bigint_to_field_const_standard_limbs
+ (module Runner.Impl)
+ foreign_field_modulus
+ in
+
+ (* Create the gadget *)
+ let sum =
+ sum_chain
+ (module Runner.Impl)
+ inputs operations foreign_field_modulus
+ in
+ (* Check sum matches expected result *)
+ as_prover (fun () ->
+ let expected =
+ Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ chain_result.(0)
+ in
+ assert (
+ Element.Standard.equal_as_prover
+ (module Runner.Impl)
+ expected sum ) ) ;
+ () )
+ in
+ cs
+ in
+
+ (* Helper to test foreign_field_mul gadget
+ * Inputs:
+ * cs := optional constraint system to reuse
+ * left_input := left multiplicand
+ * right_input := right multiplicand
+ * foreign_field_modulus := foreign field modulus
+ *)
+ let test_mul ?cs (left_input : Bignum_bigint.t)
+ (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t)
+ =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test inputs *)
+ let expected =
+ Bignum_bigint.(left_input * right_input % foreign_field_modulus)
+ in
+ let foreign_field_modulus =
+ bignum_bigint_to_field_const_standard_limbs
+ (module Runner.Impl)
+ foreign_field_modulus
+ in
+ let left_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) left_input
+ in
+ let right_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) right_input
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (unused in this simple test) *)
+ let unused_external_checks =
+ External_checks.create (module Runner.Impl)
+ in
+
+ (* Create the gadget *)
+ let product =
+ mul
+ (module Runner.Impl)
+ unused_external_checks left_input right_input
+ foreign_field_modulus
+ in
+ (* Check product matches expected result *)
+ as_prover (fun () ->
+ let expected =
+ Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ expected
+ in
+ assert (
+ Element.Standard.equal_as_prover
+ (module Runner.Impl)
+ expected product ) ) ;
+ () )
+ in
+
+ cs
+ in
+
+ (* Helper to test foreign_field_mul gadget with external checks
+ * Inputs:
+ * cs := optional constraint system to reuse
+ * left_input := left multiplicand
+ * right_input := right multiplicand
+ * foreign_field_modulus := foreign field modulus
+ *)
+ let test_mul_full ?cs (left_input : Bignum_bigint.t)
+ (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t)
+ =
+ (* Generate and verify first proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test inputs *)
+ let expected =
+ Bignum_bigint.(left_input * right_input % foreign_field_modulus)
+ in
+ let foreign_field_modulus =
+ bignum_bigint_to_field_const_standard_limbs
+ (module Runner.Impl)
+ foreign_field_modulus
+ in
+ let left_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) left_input
+ in
+ let right_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) right_input
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness *)
+ let external_checks = External_checks.create (module Runner.Impl) in
+
+ (* External checks for this test (example, circuit designer has complete flexibility about organization)
+ * Layout
+ * 0) ForeignFieldMul
+ * 1) Zero
+ * 2) ForeignFieldAdd (result bound addition)
+ * 3) Zero (result bound addition)
+ * 4) ForeignFieldAdd (left bound addition)
+ * 5) Zero (left bound addition)
+ * 6) ForeignFieldAdd (right bound addition)
+ * 7) Zero (right bound addition)
+ * 8-11) multi-range-check (right bound)
+ * 12-15) multi-range-check (left bound)
+ * 16-19) multi-range-check (result bound)
+ * 20-23) multi-range-check (product1_lo, product1_hi_0, carry1_lo)
+ * 24-27) compact-multi-range-check (quotient)
+ *)
+
+ (* Create the foreign field mul gadget *)
+ let product =
+ mul
+ (module Runner.Impl)
+ external_checks left_input right_input foreign_field_modulus
+ in
+
+ (* Sanity check product matches expected result *)
+ as_prover (fun () ->
+ let expected =
+ Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ expected
+ in
+ assert (
+ Element.Standard.equal_as_prover
+ (module Runner.Impl)
+ expected product ) ) ;
+
+ (* Add multi-range-check left input *)
+ External_checks.append_bound_check external_checks
+ @@ Element.Standard.to_limbs left_input ;
+
+ (* Add multi-range-check right input *)
+ External_checks.append_bound_check external_checks
+ @@ Element.Standard.to_limbs right_input ;
+
+ (*
+ * Perform external checks
+ *)
+ assert (Mina_stdlib.List.Length.equal external_checks.bounds 3) ;
+ assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 1) ;
+ assert (
+ Mina_stdlib.List.Length.equal external_checks.compact_multi_ranges
+ 1 ) ;
+
+ (* Add gates for bound checks, multi-range-checks and compact-multi-range-checks *)
+ constrain_external_checks
+ (module Runner.Impl)
+ external_checks foreign_field_modulus )
+ in
+
+ cs
+ in
+
+ (* Helper to test foreign field arithmetics together
+ * It computes a * b + a - b
+ *)
+ let test_ff ?cs (left_input : Bignum_bigint.t)
+ (right_input : Bignum_bigint.t) (foreign_field_modulus : Bignum_bigint.t)
+ =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Prepare test inputs *)
+ let expected_mul =
+ Bignum_bigint.(left_input * right_input % foreign_field_modulus)
+ in
+ let expected_add =
+ Bignum_bigint.(
+ (expected_mul + left_input) % foreign_field_modulus)
+ in
+ let expected_sub =
+ Bignum_bigint.(
+ (expected_add - right_input) % foreign_field_modulus)
+ in
+ let foreign_field_modulus =
+ bignum_bigint_to_field_const_standard_limbs
+ (module Runner.Impl)
+ foreign_field_modulus
+ in
+ let left_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) left_input
+ in
+ let right_input =
+ Element.Standard.of_bignum_bigint (module Runner.Impl) right_input
+ in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness *)
+ let unused_external_checks =
+ External_checks.create (module Runner.Impl)
+ in
+
+ let product =
+ mul
+ (module Runner.Impl)
+ unused_external_checks left_input right_input
+ foreign_field_modulus
+ in
+
+ let addition =
+ add (module Runner.Impl) product left_input foreign_field_modulus
+ in
+ let subtraction =
+ sub
+ (module Runner.Impl)
+ addition right_input foreign_field_modulus
+ in
+ let external_checks = External_checks.create (module Runner.Impl) in
+
+          (* Check that the inputs were valid foreign field elements
+           * (i.e. less than the foreign field modulus) *)
+ let _out =
+ valid_element
+ (module Runner.Impl)
+ external_checks left_input foreign_field_modulus
+ in
+ let _out =
+ valid_element
+ (module Runner.Impl)
+ external_checks right_input foreign_field_modulus
+ in
+
+ assert (Mina_stdlib.List.Length.equal external_checks.multi_ranges 2) ;
+ List.iter external_checks.multi_ranges ~f:(fun multi_range ->
+ let v0, v1, v2 = multi_range in
+ Range_check.multi (module Runner.Impl) v0 v1 v2 ;
+ () ) ;
+
+ (* Check product matches expected result *)
+ as_prover (fun () ->
+ let expected_mul =
+ Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ expected_mul
+ in
+ let expected_add =
+ Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ expected_add
+ in
+ let expected_sub =
+ Element.Standard.of_bignum_bigint
+ (module Runner.Impl)
+ expected_sub
+ in
+ assert (
+ Element.Standard.equal_as_prover
+ (module Runner.Impl)
+ expected_mul product ) ;
+ assert (
+ Element.Standard.equal_as_prover
+ (module Runner.Impl)
+ expected_add addition ) ;
+ assert (
+ Element.Standard.equal_as_prover
+ (module Runner.Impl)
+ expected_sub subtraction ) ) )
+ in
+ cs
+ in
+
+ (* Test constants *)
+ let secp256k1_modulus =
+ Common.bignum_bigint_of_hex
+ "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f"
+ in
+ let secp256k1_max = Bignum_bigint.(secp256k1_modulus - Bignum_bigint.one) in
+ let secp256k1_sqrt = Common.bignum_bigint_sqrt secp256k1_max in
+ let pallas_modulus =
+ Common.bignum_bigint_of_hex
+ "40000000000000000000000000000000224698fc094cf91b992d30ed00000001"
+ in
+ let pallas_max = Bignum_bigint.(pallas_modulus - Bignum_bigint.one) in
+ let pallas_sqrt = Common.bignum_bigint_sqrt pallas_max in
+ let vesta_modulus =
+ Common.bignum_bigint_of_hex
+ "40000000000000000000000000000000224698fc0994a8dd8c46eb2100000001"
+ in
+ let vesta_max = Bignum_bigint.(vesta_modulus - Bignum_bigint.one) in
+
+ (* FFAdd TESTS *)
+ (* Single tests *)
+ let cs =
+ test_add
+ (Common.bignum_bigint_of_hex
+ "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" )
+ (Common.bignum_bigint_of_hex
+ "80000000000000000000000000000000000000000000000000000000000000d0" )
+ secp256k1_modulus
+ in
+ let _cs = test_add ~cs secp256k1_max secp256k1_max secp256k1_modulus in
+ let _cs = test_add ~cs pallas_max pallas_max secp256k1_modulus in
+ let _cs = test_add ~cs vesta_modulus pallas_modulus secp256k1_modulus in
+ let cs = test_add Bignum_bigint.zero Bignum_bigint.zero secp256k1_modulus in
+ let _cs =
+ test_add ~cs Bignum_bigint.zero Bignum_bigint.zero secp256k1_modulus
+ in
+ let _cs =
+ test_add ~cs
+ (Common.bignum_bigint_of_hex
+ "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" )
+ (Common.bignum_bigint_of_hex
+ "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2" )
+ secp256k1_modulus
+ in
+
+ assert (
+ Common.is_error (fun () ->
+ (* check that the inputs need to be smaller than the modulus *)
+ let _cs =
+ test_add ~cs secp256k1_modulus secp256k1_modulus secp256k1_modulus
+ in
+ () ) ) ;
+
+ assert (
+ Common.is_error (fun () ->
+ (* check wrong cs fails *)
+ let _cs =
+ test_add ~cs secp256k1_modulus secp256k1_modulus pallas_modulus
+ in
+ () ) ) ;
+
+ (* Chain tests *)
+ let cs =
+ test_add_chain
+ [ pallas_max
+ ; pallas_max
+ ; Common.bignum_bigint_of_hex
+ "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d"
+ ; Common.bignum_bigint_of_hex
+ "69cc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ ; vesta_max
+ ]
+ [ Add; Sub; Sub; Add ] vesta_modulus
+ in
+ let _cs =
+ test_add_chain ~cs
+ [ vesta_max
+ ; pallas_max
+ ; Common.bignum_bigint_of_hex
+ "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236"
+ ; Common.bignum_bigint_of_hex
+ "1342835834869e59534942304a03534963893045203528b523532232543"
+ ; Common.bignum_bigint_of_hex
+ "1f2d8f0d0cd52771bfb86ffdf651ddddbbddeeeebbbaaaaffccee20d"
+ ]
+ [ Add; Sub; Sub; Add ] vesta_modulus
+ in
+ (* Check that the number of inputs need to be coherent with number of operations *)
+ assert (
+ Common.is_error (fun () ->
+ let _cs =
+ test_add_chain ~cs [ pallas_max; pallas_max ] [ Add; Sub; Sub; Add ]
+ secp256k1_modulus
+ in
+ () ) ) ;
+
+ (* FFMul TESTS*)
+
+ (* Positive tests *)
+ (* zero_mul: 0 * 0 *)
+ let cs = test_mul Bignum_bigint.zero Bignum_bigint.zero secp256k1_modulus in
+ (* one_mul: max * 1 *)
+ let _cs = test_mul ~cs secp256k1_max Bignum_bigint.one secp256k1_modulus in
+ (* max_native_square: pallas_sqrt * pallas_sqrt *)
+ let _cs = test_mul ~cs pallas_sqrt pallas_sqrt secp256k1_modulus in
+ (* max_foreign_square: secp256k1_sqrt * secp256k1_sqrt *)
+ let _cs = test_mul ~cs secp256k1_sqrt secp256k1_sqrt secp256k1_modulus in
+ (* max_native_multiplicands: pallas_max * pallas_max *)
+ let _cs = test_mul ~cs pallas_max pallas_max secp256k1_modulus in
+ (* max_foreign_multiplicands: secp256k1_max * secp256k1_max *)
+ let _cs = test_mul ~cs secp256k1_max secp256k1_max secp256k1_modulus in
+ (* nonzero carry0 bits *)
+ let _cs =
+ test_mul ~cs
+ (Common.bignum_bigint_of_hex
+ "fbbbd91e03b48cebbac38855289060f8b29fa6ad3cffffffffffffffffffffff" )
+ (Common.bignum_bigint_of_hex
+ "d551c3d990f42b6d780275d9ca7e30e72941aa29dcffffffffffffffffffffff" )
+ secp256k1_modulus
+ in
+ (* test nonzero carry10 *)
+ let _cs =
+ test_mul
+ (Common.bignum_bigint_of_hex
+ "4000000000000000000000000000000000000000000000000000000000000000" )
+ (Common.bignum_bigint_of_hex
+ "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0" )
+ Bignum_bigint.(pow (of_int 2) (of_int 259))
+ in
+ (* test nonzero carry1_hi *)
+ let _cs =
+ test_mul
+ (Common.bignum_bigint_of_hex
+ "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" )
+ (Common.bignum_bigint_of_hex
+ "8000000000000000000000000000000000000000000000000000000000000000d0" )
+ Bignum_bigint.(pow (of_int 2) (of_int 259) - one)
+ in
+ (* test nonzero_second_bit_carry1_hi *)
+ let _cs =
+ test_mul ~cs
+ (Common.bignum_bigint_of_hex
+ "ffffffffffffffffffffffffffffffffffffffffffffffff8a9dec7cfd1acdeb" )
+ (Common.bignum_bigint_of_hex
+ "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2e" )
+ secp256k1_modulus
+ in
+ (* test random_multiplicands_carry1_lo *)
+ let _cs =
+ test_mul ~cs
+ (Common.bignum_bigint_of_hex
+ "ffd913aa9e17a63c7a0ff2354218037aafcd6ecaa67f56af1de882594a434dd3" )
+ (Common.bignum_bigint_of_hex
+ "7d313d6b42719a39acea5f51de9d50cd6a4ec7147c003557e114289e9d57dffc" )
+ secp256k1_modulus
+ in
+ (* test random_multiplicands_valid *)
+ let _cs =
+ test_mul ~cs
+ (Common.bignum_bigint_of_hex
+ "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" )
+ (Common.bignum_bigint_of_hex
+ "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2" )
+ secp256k1_modulus
+ in
+ (* test smaller foreign field modulus *)
+ let _cs =
+ test_mul
+ (Common.bignum_bigint_of_hex
+ "5945fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c" )
+ (Common.bignum_bigint_of_hex
+ "747109f882b8e26947dfcd887273c0b0720618cb7f6d407c9ba74dbe0eda22f" )
+ (Common.bignum_bigint_of_hex
+ "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" )
+ in
+ (* vesta non-native on pallas native modulus *)
+ let _cs =
+ test_mul
+ (Common.bignum_bigint_of_hex
+ "69cc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15" )
+ (Common.bignum_bigint_of_hex
+ "1fffe27b14baa740db0c8bb6656de61d2871a64093908af6181f46351a1c1909" )
+ vesta_modulus
+ in
+
+ (* Full test including all external checks *)
+ let cs =
+ test_mul_full
+ (Common.bignum_bigint_of_hex "2")
+ (Common.bignum_bigint_of_hex "3")
+ secp256k1_modulus
+ in
+
+ let _cs =
+ test_mul_full ~cs
+ (Common.bignum_bigint_of_hex
+ "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d" )
+ (Common.bignum_bigint_of_hex
+ "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2" )
+ secp256k1_modulus
+ in
+
+ (* COMBINED TESTS *)
+ let _cs =
+ test_ff
+ (Common.bignum_bigint_of_hex
+ "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" )
+ (Common.bignum_bigint_of_hex
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" )
+ secp256k1_modulus
+ in
+ () ) ;
+ ()
+
+let%test_unit "foreign_field equal_as_prover" =
+ if tests_enabled then
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+ (* Check equal_as_prover *)
+ let _cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof (fun () ->
+ let open Runner.Impl in
+ let x =
+ Element.Standard.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "5925fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c"
+ in
+ let y =
+ Element.Standard.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ let z =
+ Element.Standard.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ as_prover (fun () ->
+ assert (
+ not (Element.Standard.equal_as_prover (module Runner.Impl) x y) ) ;
+ assert (Element.Standard.equal_as_prover (module Runner.Impl) y z) ) ;
+
+ let x =
+ Element.Compact.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "5925fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c"
+ in
+ let y =
+ Element.Compact.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ let z =
+ Element.Compact.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ as_prover (fun () ->
+ assert (
+ not (Element.Compact.equal_as_prover (module Runner.Impl) x y) ) ;
+ assert (Element.Compact.equal_as_prover (module Runner.Impl) y z) ) ;
+
+ (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
+ let fake =
+ exists Field.typ ~compute:(fun () -> Field.Constant.zero)
+ in
+ Boolean.Assert.is_true (Field.equal fake Field.zero) ;
+ () )
+ in
+ ()
+
+let%test_unit "foreign_field equal_as_prover" =
+ if tests_enabled then
+ let open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+ (* Check equal_as_prover *)
+ let _cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof (fun () ->
+ let open Runner.Impl in
+ let x =
+ Element.Standard.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "5925fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c"
+ in
+ let y =
+ Element.Standard.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ let z =
+ Element.Standard.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ as_prover (fun () ->
+ assert (
+ not (Element.Standard.equal_as_prover (module Runner.Impl) x y) ) ;
+ assert (Element.Standard.equal_as_prover (module Runner.Impl) y z) ) ;
+
+ let x =
+ Element.Compact.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "5925fa400436f458cb9e994dcd315ded43e9b60eb68e2ae7b5cf1d07b48ca1c"
+ in
+ let y =
+ Element.Compact.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ let z =
+ Element.Compact.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "69bc93598e05239aa77b85d172a9785f6f0405af91d91094f693305da68bf15"
+ in
+ as_prover (fun () ->
+ assert (
+ not (Element.Compact.equal_as_prover (module Runner.Impl) x y) ) ;
+ assert (Element.Compact.equal_as_prover (module Runner.Impl) y z) ) ;
+
+ (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
+ let fake =
+ exists Field.typ ~compute:(fun () -> Field.Constant.zero)
+ in
+ Boolean.Assert.is_true (Field.equal fake Field.zero) ;
+ () )
+ in
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/foreign_field.mli.disabled b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.mli.disabled
new file mode 100644
index 00000000000..e0c51dacca1
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/foreign_field.mli.disabled
@@ -0,0 +1,348 @@
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+(** Conventions used
+ * 1. Functions prefixed with "as_prover_" only happen during proving
+ * and not during circuit creation
+ * 2. Functions suffixed with "_as_prover" can only be called outside
+ * the circuit. Specifically, this means within an exists, within
+ * an as_prover or in an "as_prover_" prefixed function)
+ *)
+
+(** Foreign field modulus is abstract on two parameters
+ * Field type
+ * Limbs structure
+ *
+ * There are 2 specific limb structures required
+ * Standard mode := 3 limbs of L-bits each
+ * Compact mode := 2 limbs where the lowest is 2L bits and the highest is L bits
+ *)
+type 'field standard_limbs = 'field * 'field * 'field
+
+type 'field compact_limbs = 'field * 'field
+
+val bignum_bigint_to_field_const_standard_limbs :
+ (module Snark_intf.Run with type field = 'field)
+ -> Bignum_bigint.t
+ -> 'field standard_limbs
+
+val field_const_standard_limbs_to_bignum_bigint :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field standard_limbs
+ -> Bignum_bigint.t
+
+val check_modulus :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field standard_limbs
+ -> unit
+
+val check_modulus_bignum_bigint :
+ (module Snark_intf.Run with type field = 'field) -> Bignum_bigint.t -> unit
+
+(** Foreign field element base type - not used directly *)
+module type Element_intf = sig
+ type 'field t
+
+ type 'a limbs_type
+
+ module Cvar = Snarky_backendless.Cvar
+
+ (** Create foreign field element from Cvar limbs *)
+ val of_limbs : 'field Cvar.t limbs_type -> 'field t
+
+ (** Create foreign field element from field limbs *)
+ val of_field_limbs :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field limbs_type
+ -> 'field t
+
+ (** Create foreign field element from Bignum_bigint.t *)
+ val of_bignum_bigint :
+ (module Snark_intf.Run with type field = 'field)
+ -> Bignum_bigint.t
+ -> 'field t
+
+ (** Create constant foreign field element from Bignum_bigint.t *)
+ val const_of_bignum_bigint :
+ (module Snark_intf.Run with type field = 'field)
+ -> Bignum_bigint.t
+ -> 'field t
+
+ (** Convert foreign field element into Cvar limbs *)
+ val to_limbs : 'field t -> 'field Cvar.t limbs_type
+
+ (** Map foreign field element's Cvar limbs into some other limbs with the mapping function func *)
+ val map : 'field t -> ('field Cvar.t -> 'g) -> 'g limbs_type
+
+ (** One constant *)
+ val one : (module Snark_intf.Run with type field = 'field) -> 'field t
+
+ (** Convert foreign field element into field limbs *)
+ val to_field_limbs_as_prover :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field t
+ -> 'field limbs_type
+
+ (** Convert foreign field element into Bignum_bigint.t limbs *)
+ val to_bignum_bigint_limbs_as_prover :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field t
+ -> Bignum_bigint.t limbs_type
+
+ (** Convert foreign field element into a Bignum_bigint.t *)
+ val to_bignum_bigint_as_prover :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field t
+ -> Bignum_bigint.t
+
+ (** Convert foreign field affine point to string *)
+ val to_string_as_prover :
+ (module Snark_intf.Run with type field = 'field) -> 'field t -> string
+
+ (** Constrain zero check computation with boolean output *)
+ val is_zero :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field t
+ -> 'field Cvar.t Snark_intf.Boolean0.t
+
+ (** Compare if two foreign field elements are equal *)
+ val equal_as_prover :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field t
+ -> 'field t
+ -> bool
+
+ (** Add copy constraints that two foreign field elements are equal *)
+ val assert_equal :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field t
+ -> 'field t
+ -> unit
+
+ (* Create and constrain foreign field element from Bignum_bigint.t *)
+ val check_here_const_of_bignum_bigint :
+ (module Snark_intf.Run with type field = 'field)
+ -> Bignum_bigint.t
+ -> 'field t
+
+ (** Add conditional constraints to select foreign field element *)
+ val if_ :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field Cvar.t Snark_intf.Boolean0.t
+ -> then_:'field t
+ -> else_:'field t
+ -> 'field t
+
+ (** Decompose and constrain foreign field element into list of boolean cvars *)
+ val unpack :
+ (module Snark_intf.Run with type field = 'field)
+ -> 'field t
+ -> length:int
+ -> 'field Cvar.t Snark_intf.Boolean0.t list
+end
+
+module Element : sig
+ (** Foreign field element type (standard limbs) *)
+ module Standard : sig
+ include Element_intf with type 'a limbs_type = 'a standard_limbs
+ end
+end
+
+(** Context for tracking external checks that must be made
+ * (using other gadgets) in order to achieve soundness for a
+ * given multiplication
+ *)
+module External_checks : sig
+ module Cvar = Snarky_backendless.Cvar
+
+ type 'field t =
+ { mutable multi_ranges : 'field Cvar.t standard_limbs list
+ ; mutable compact_multi_ranges : 'field Cvar.t compact_limbs list
+ ; mutable bounds : 'field Cvar.t standard_limbs list
+ }
+
+ val create : (module Snark_intf.Run with type field = 'field) -> 'field t
+
+ val append_multi_range_check :
+ 'field t -> 'field Cvar.t standard_limbs -> unit
+
+ val append_compact_multi_range_check :
+ 'field t -> 'field Cvar.t compact_limbs -> unit
+
+ val append_bound_check : 'field t -> 'field Cvar.t standard_limbs -> unit
+end
+
+(* Type of operation *)
+type op_mode = Add | Sub
+
+(** Gadget to check the supplied value is a valid foreign field element for the
+ * supplied foreign field modulus
+ *
+ * This gadget checks in the circuit that a value is less than the foreign field modulus.
+ * Part of this involves computing a bound value that is both added to external_checks
+ * and also returned. The caller may use either one, depending on the situation.
+ *
+ * Inputs:
+ * external_checks := Context to track required external checks
+ * value := the value to check
+ * foreign_field_modulus := the modulus of the foreign field
+ *
+ * Outputs:
+ * Inserts the gates (described below) into the circuit
+ * Adds bound value to be multi-range-checked to external_checks
+ * Returns bound value
+ *
+ * Effects to the circuit:
+ * - 1 FFAdd gate
+ * - 1 Zero gate
+ *)
+val valid_element :
+ (module Snark_intf.Run with type field = 'f)
+ -> 'f External_checks.t (* external_checks context *)
+ -> 'f Element.Standard.t (* value *)
+ -> 'f standard_limbs (* foreign_field_modulus *)
+ -> 'f Element.Standard.t
+(* result *)
+
+(** Gadget to constrain external checks using supplied modulus *)
+val constrain_external_checks :
+ (module Snark_intf.Run with type field = 'f)
+ -> 'f External_checks.t
+ -> 'f standard_limbs
+ -> unit
+
+(** Gadget for a chain of foreign field sums (additions or subtractions)
+ *
+ * Inputs:
+ * inputs := All the inputs to the chain of sums
+ * operations := List of operation modes Add or Sub indicating whether the
+ * corresponding addition is a subtraction
+ * foreign_field_modulus := The modulus of the foreign field (all the same)
+ *
+ * Outputs:
+ * Inserts the gates (described below) into the circuit
+ * Returns the final result of the chain of sums
+ *
+ * For n+1 inputs, the gadget creates n foreign field addition gates, followed by a final
+ * foreign field addition gate for the bound check (i.e. valid_element check). For this, a
+ * an additional multi range check must also be performed.
+ * By default, the range check takes place right after the final Raw row.
+ *)
+val sum_chain :
+ (module Snark_intf.Run with type field = 'f)
+ -> 'f Element.Standard.t list (* inputs *)
+ -> op_mode list (* operations *)
+ -> 'f standard_limbs (* foreign_field_modulus *)
+ -> 'f Element.Standard.t
+(* result *)
+
+(** Gadget for a single foreign field addition
+ *
+ * Inputs:
+ * full := flag for whether to perform a full addition with valid_element check
+ * on the result (default true) or just a single FFAdd row (false)
+ * left_input := 3 limbs foreign field element
+ * right_input := 3 limbs foreign field element
+ * foreign_field_modulus := The modulus of the foreign field
+ *
+ * Outputs:
+ * Inserts the gates (described below) into the circuit
+ * Returns the result of the addition as a 3 limbs element
+ *
+ * In default mode:
+ * It adds a FFAdd gate,
+ * followed by a Zero gate,
+ * a FFAdd gate for the bound check,
+ * a Zero gate after this bound check,
+ * and a Multi Range Check gadget.
+ *
+ * In false mode:
+ * It adds a FFAdd gate.
+ *)
+val add :
+ (module Snark_intf.Run with type field = 'f)
+ -> ?full:bool (* full *)
+ -> 'f Element.Standard.t (* left_input *)
+ -> 'f Element.Standard.t (* right_input *)
+ -> 'f standard_limbs (* foreign_field_modulus *)
+ -> 'f Element.Standard.t
+(* result *)
+
+(** Gadget for a single foreign field subtraction
+ *
+ * Inputs:
+ * full := flag for whether to perform a full subtraction with valid_element check
+ * on the result (default true) or just a single FFAdd row (false)
+ * left_input := 3 limbs foreign field element
+ * right_input := 3 limbs foreign field element
+ * foreign_field_modulus := The modulus of the foreign field
+ *
+ * Outputs:
+ * Inserts the gates (described below) into the circuit
+ * Returns the result of the addition as a 3 limbs element
+ *
+ * In default mode:
+ * It adds a FFAdd gate,
+ * followed by a Zero gate,
+ * a FFAdd gate for the bound check,
+ * a Zero gate after this bound check,
+ * and a Multi Range Check gadget.
+ *
+ * In false mode:
+ * It adds a FFAdd gate.
+ *)
+val sub :
+ (module Snark_intf.Run with type field = 'f)
+ -> ?full:bool (* full *)
+ -> 'f Element.Standard.t (* left_input *)
+ -> 'f Element.Standard.t (* right_input *)
+ -> 'f standard_limbs (* foreign_field_modulus *)
+ -> 'f Element.Standard.t
+(* result *)
+
+(* Gadget for creating an addition or subtraction result row (Zero gate with result) *)
+val result_row :
+ (module Snark_intf.Run with type field = 'f)
+ -> ?label:string
+ -> 'f Element.Standard.t
+ -> unit
+
+(** Gadget for foreign field multiplication
+ *
+ * Constrains that
+ *
+ * left_input * right_input = quotient * foreign_field_modulus + remainder
+ *
+ * where remainder is the product.
+ *
+ * Inputs:
+ * external_checks := Context to track required external checks
+ * left_input := Multiplicand foreign field element
+ * right_input := Multiplicand foreign field element
+ * foreign_field_modulus := Must be less than the max foreign field modulus
+ *
+ * Outputs:
+ * Inserts the ForeignFieldMul gate, followed by Zero gate into the circuit
+ * Appends required values to external_checks
+ * Returns the product
+ *)
+val mul :
+ (module Snark_intf.Run with type field = 'f)
+ -> 'f External_checks.t (* external_checks *)
+ -> ?bound_check_result:bool
+ -> 'f Element.Standard.t (* left_input *)
+ -> 'f Element.Standard.t (* right_input *)
+ -> 'f standard_limbs (* foreign_field_modulus *)
+ -> 'f Element.Standard.t
+(* product *)
+
+(** Gadget to constrain conversion of bytes list (output of Keccak gadget)
+ into foreign field element with standard limbs (input of ECDSA gadget).
+ Include the endianness of the bytes list. *)
+val bytes_to_standard_element :
+ (module Snark_intf.Run with type field = 'f)
+ -> endian:Keccak.endianness
+ -> 'f Snarky_backendless.Cvar.t list
+ -> 'f standard_limbs
+ -> int
+ -> 'f Element.Standard.t
diff --git a/src/lib/crypto/kimchi_backend/gadgets/generic.ml b/src/lib/crypto/kimchi_backend/gadgets/generic.ml
index 535edec1974..7184e5754f6 100644
--- a/src/lib/crypto/kimchi_backend/gadgets/generic.ml
+++ b/src/lib/crypto/kimchi_backend/gadgets/generic.ml
@@ -2,7 +2,9 @@ open Core_kernel
open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
-(* EXAMPLE generic addition gate gadget *)
+let tests_enabled = true
+
+(* Generic addition gate gadget *)
let add (type f)
(module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
(left_input : Circuit.Field.t) (right_input : Circuit.Field.t) :
@@ -16,6 +18,7 @@ let add (type f)
Field.Constant.add left_input right_input )
in
+ let neg_one = Field.Constant.(negate one) in
(* Set up generic add gate *)
with_label "generic_add_gadget" (fun () ->
assert_
@@ -25,14 +28,47 @@ let add (type f)
(Basic
{ l = (Field.Constant.one, left_input)
; r = (Field.Constant.one, right_input)
- ; o = (Option.value_exn Field.(to_constant (negate one)), sum)
+ ; o = (neg_one, sum)
; m = Field.Constant.zero
; c = Field.Constant.zero
} )
} ;
sum )
-(* EXAMPLE generic multiplication gate gadget *)
+(* Generic subtraction gate gadget *)
+let sub (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (left_input : Circuit.Field.t) (right_input : Circuit.Field.t) :
+ Circuit.Field.t =
+ let open Circuit in
+ (* Witness computation; difference = left_input - right_input *)
+ let difference =
+ exists Field.typ ~compute:(fun () ->
+ let left_input = As_prover.read Field.typ left_input in
+ let right_input = As_prover.read Field.typ right_input in
+ Field.Constant.sub left_input right_input )
+ in
+
+ (* Negative one gate coefficient *)
+ let neg_one = Field.Constant.(negate one) in
+
+ (* Set up generic sub gate *)
+ with_label "generic_sub_gadget" (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (Basic
+ { l = (Field.Constant.one, left_input)
+ ; r = (neg_one, right_input)
+ ; o = (neg_one, difference)
+ ; m = Field.Constant.zero
+ ; c = Field.Constant.zero
+ } )
+ } ;
+ difference )
+
+(* Generic multiplication gate gadget *)
let mul (type f)
(module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
(left_input : Circuit.Field.t) (right_input : Circuit.Field.t) :
@@ -46,6 +82,7 @@ let mul (type f)
Field.Constant.mul left_input right_input )
in
+ let neg_one = Field.Constant.(negate one) in
(* Set up generic mul gate *)
with_label "generic_mul_gadget" (fun () ->
assert_
@@ -55,27 +92,34 @@ let mul (type f)
(Basic
{ l = (Field.Constant.zero, left_input)
; r = (Field.Constant.zero, right_input)
- ; o = (Option.value_exn Field.(to_constant (negate one)), prod)
+ ; o = (neg_one, prod)
; m = Field.Constant.one
; c = Field.Constant.zero
} )
} ;
prod )
+(*********)
+(* Tests *)
+(*********)
+
let%test_unit "generic gadgets" =
- (* Import the gadget test runner *)
- let open Kimchi_gadgets_test_runner in
- (* Initialize the SRS cache. *)
- let () = Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] in
-
- (* Helper to test generic add gate gadget
- * Inputs operands and expected output: left_input + right_input = sum
- * Returns true if constraints are satisfied, false otherwise.
- *)
- let test_generic_add left_input right_input sum =
- try
- let _proof_keypair, _proof =
- Runner.generate_and_verify_proof (fun () ->
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test generic add gate gadget
+ * Inputs operands and expected output: left_input + right_input = sum
+ * Returns true if constraints are satisfied, false otherwise.
+ *)
+ let test_generic_add ?cs left_input right_input sum =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
let open Runner.Impl in
(* Set up snarky variables for inputs and outputs *)
let left_input =
@@ -95,18 +139,50 @@ let%test_unit "generic gadgets" =
(* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
Boolean.Assert.is_true (Field.equal sum sum) )
in
- true
- with _ -> false
- in
- (* Helper to test generic multimplication gate gadget
- * Inputs operands and expected output: left_input * right_input = prod
- * Returns true if constraints are satisfied, false otherwise.
- *)
- let test_generic_mul left_input right_input prod =
- try
- let _proof_keypair, _proof =
- Runner.generate_and_verify_proof (fun () ->
+ cs
+ in
+
+ (* Helper to test generic sub gate gadget
+ * Inputs operands and expected output: left_input - right_input = difference
+ * Returns the created constraint system if the constraints are satisfied, raises otherwise.
+ *)
+ let test_generic_sub ?cs left_input right_input difference =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky variables for inputs and outputs *)
+ let left_input =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_int left_input )
+ in
+ let right_input =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_int right_input )
+ in
+ let difference =
+ exists Field.typ ~compute:(fun () ->
+ Field.Constant.of_int difference )
+ in
+ (* Use the generic sub gate gadget *)
+ let result = sub (module Runner.Impl) left_input right_input in
+ Field.Assert.equal difference result ;
+ (* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
+ Boolean.Assert.is_true (Field.equal difference difference) )
+ in
+
+ cs
+ in
+
+ (* Helper to test generic multiplication gate gadget
+ * Inputs operands and expected output: left_input * right_input = prod
+ * Returns the created constraint system if the constraints are satisfied, raises otherwise.
+ *)
+ let test_generic_mul ?cs left_input right_input prod =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
let open Runner.Impl in
(* Set up snarky variables for inputs and outputs *)
let left_input =
@@ -126,24 +202,31 @@ let%test_unit "generic gadgets" =
(* Pad with a "dummy" constraint b/c Kimchi requires at least 2 *)
Boolean.Assert.is_true (Field.equal prod prod) )
in
- true
- with _ -> false
- in
- (* TEST generic add gadget *)
- (* Positive tests *)
- assert (Bool.equal (test_generic_add 0 0 0) true) ;
- assert (Bool.equal (test_generic_add 1 2 3) true) ;
- (* Negatve tests *)
- assert (Bool.equal (test_generic_add 1 0 0) false) ;
- assert (Bool.equal (test_generic_add 2 4 7) false) ;
-
- (* TEST generic mul gadget *)
- (* Positive tests *)
- assert (Bool.equal (test_generic_mul 0 0 0) true) ;
- assert (Bool.equal (test_generic_mul 1 2 2) true) ;
- (* Negatve tests *)
- assert (Bool.equal (test_generic_mul 1 0 1) false) ;
- assert (Bool.equal (test_generic_mul 2 4 7) false) ;
+ cs
+ in
+
+ (* TEST generic add gadget *)
+ (* Positive tests *)
+ let cs = test_generic_add 0 0 0 in
+ let _cs = test_generic_add ~cs 1 2 3 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_generic_add ~cs 1 0 0)) ;
+ assert (Common.is_error (fun () -> test_generic_add ~cs 2 4 7)) ;
+
+ (* TEST generic sub gadget *)
+ (* Positive tests *)
+ let cs = test_generic_sub 0 0 0 in
+ let _cs = test_generic_sub ~cs 2 1 1 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_generic_sub ~cs 4 2 1)) ;
+ assert (Common.is_error (fun () -> test_generic_sub ~cs 13 4 10)) ;
+ (* TEST generic mul gadget *)
+ (* Positive tests *)
+ let cs = test_generic_mul 0 0 0 in
+ let _cs = test_generic_mul ~cs 1 2 2 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_generic_mul ~cs 1 0 1)) ;
+ assert (Common.is_error (fun () -> test_generic_mul ~cs 2 4 7)) ) ;
()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/generic.mli b/src/lib/crypto/kimchi_backend/gadgets/generic.mli
new file mode 100644
index 00000000000..32fd3035055
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/generic.mli
@@ -0,0 +1,32 @@
+(** Generic addition gate gadget
+ * Constrains left_input + right_input = sum
+ * Returns sum
+ *)
+val add :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* left_input *)
+ -> 'f Snarky_backendless.Cvar.t (* right_input *)
+ -> 'f Snarky_backendless.Cvar.t
+(* sum *)
+
+(** Generic subtraction gate gadget
+ * Constrains left_input - right_input = difference
+ * Returns difference
+ *)
+val sub :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* left_input *)
+ -> 'f Snarky_backendless.Cvar.t (* right_input *)
+ -> 'f Snarky_backendless.Cvar.t
+(* difference *)
+
+(** Generic multiplication gate gadget
+ * Constrains left_input * right_input = product
+ * Returns product
+ *)
+val mul :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* left_input *)
+ -> 'f Snarky_backendless.Cvar.t (* right_input *)
+ -> 'f Snarky_backendless.Cvar.t
+(* product *)
diff --git a/src/lib/crypto/kimchi_backend/gadgets/keccak.ml b/src/lib/crypto/kimchi_backend/gadgets/keccak.ml
new file mode 100644
index 00000000000..100d06c1439
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/keccak.ml
@@ -0,0 +1,831 @@
+open Core_kernel
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+let tests_enabled = true
+
+(* Endianness type *)
+type endianness = Big | Little
+
+(* DEFINITIONS OF CONSTANTS FOR KECCAK *)
+
+(* Length of the square matrix side of Keccak states *)
+let keccak_dim = 5
+
+(* value `l` in Keccak, ranges from 0 to 6 (7 possible values) *)
+let keccak_ell = 6
+
+(* width of the lane of the state, meaning the length of each word in bits (64) *)
+let keccak_word = Int.pow 2 keccak_ell
+
+(* number of bytes that fit in a word (8) *)
+let bytes_per_word = keccak_word / 8
+
+(* length of the state in bits, meaning the 5x5 matrix of words in bits (1600) *)
+let keccak_state_length = Int.pow keccak_dim 2 * keccak_word
+
+(* number of rounds of the Keccak permutation function depending on the value `l` (24) *)
+let keccak_rounds = 12 + (2 * keccak_ell)
+
+(* Creates the 5x5 table of rotation offset for Keccak modulo 64
+ * | x \ y | 0 | 1 | 2 | 3 | 4 |
+ * | ----- | -- | -- | -- | -- | -- |
+ * | 0 | 0 | 36 | 3 | 41 | 18 |
+ * | 1 | 1 | 44 | 10 | 45 | 2 |
+ * | 2 | 62 | 6 | 43 | 15 | 61 |
+ * | 3 | 28 | 55 | 25 | 21 | 56 |
+ * | 4 | 27 | 20 | 39 | 8 | 14 |
+*)
+let rot_table =
+ [| [| 0; 36; 3; 41; 18 |]
+ ; [| 1; 44; 10; 45; 2 |]
+ ; [| 62; 6; 43; 15; 61 |]
+ ; [| 28; 55; 25; 21; 56 |]
+ ; [| 27; 20; 39; 8; 14 |]
+ |]
+
+let round_consts =
+ [| "0000000000000001"
+ ; "0000000000008082"
+ ; "800000000000808A"
+ ; "8000000080008000"
+ ; "000000000000808B"
+ ; "0000000080000001"
+ ; "8000000080008081"
+ ; "8000000000008009"
+ ; "000000000000008A"
+ ; "0000000000000088"
+ ; "0000000080008009"
+ ; "000000008000000A"
+ ; "000000008000808B"
+ ; "800000000000008B"
+ ; "8000000000008089"
+ ; "8000000000008003"
+ ; "8000000000008002"
+ ; "8000000000000080"
+ ; "000000000000800A"
+ ; "800000008000000A"
+ ; "8000000080008081"
+ ; "8000000000008080"
+ ; "0000000080000001"
+ ; "8000000080008008"
+ |]
+
+(* Auxiliary function to check composition of 8 bytes into a 64-bit word *)
+let check_bytes_to_word (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (word : Circuit.Field.t) (word_bytes : Circuit.Field.t array) =
+ let open Circuit in
+ let composition =
+ Array.foldi word_bytes ~init:Field.zero ~f:(fun i acc x ->
+ let shift = Field.constant @@ Common.two_pow (module Circuit) (8 * i) in
+ Field.(acc + (x * shift)) )
+ in
+ Field.Assert.equal word composition
+
+(* Internal struct for Keccak State *)
+
+module State = struct
+ type 'a matrix = 'a array array
+
+ (* Creates a state formed by a matrix of 5x5 Cvar zeros *)
+ let zeros (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f) :
+ Circuit.Field.t matrix =
+ let open Circuit in
+ let state =
+ Array.make_matrix ~dimx:keccak_dim ~dimy:keccak_dim Field.zero
+ in
+ state
+
+ (* Updates the cells of a state with new values *)
+ let update (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ~(prev : Circuit.Field.t matrix) ~(next : Circuit.Field.t matrix) =
+ for x = 0 to keccak_dim - 1 do
+ prev.(x) <- next.(x)
+ done
+
+ (* Converts a list of bytes to a matrix of Field elements *)
+ let of_bytes (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (bytestring : Circuit.Field.t list) : Circuit.Field.t matrix =
+ let open Circuit in
+ assert (List.length bytestring = 200) ;
+ let bytestring = Array.of_list bytestring in
+ let state =
+ Array.make_matrix ~dimx:keccak_dim ~dimy:keccak_dim Field.zero
+ in
+ for y = 0 to keccak_dim - 1 do
+ for x = 0 to keccak_dim - 1 do
+ let idx = bytes_per_word * ((keccak_dim * y) + x) in
+ (* Create an array containing the 8 bytes starting on idx that correspond to the word in [x,y] *)
+ let word_bytes = Array.sub bytestring ~pos:idx ~len:bytes_per_word in
+ for z = 0 to bytes_per_word - 1 do
+ (* Field element containing value 2^(8*z) *)
+ let shift_field =
+ Common.bignum_bigint_to_field
+ (module Circuit)
+ Bignum_bigint.(pow (of_int 2) (of_int (Int.( * ) 8 z)))
+ in
+ let shift = Field.constant shift_field in
+ state.(x).(y) <- Field.(state.(x).(y) + (shift * word_bytes.(z)))
+ done
+ done
+ done ;
+
+ state
+
+ (* Converts a state of cvars to a list of bytes as cvars and creates constraints for it *)
+ let as_prover_to_bytes (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t matrix) : Circuit.Field.t list =
+ let open Circuit in
+ assert (
+ Array.length state = keccak_dim && Array.length state.(0) = keccak_dim ) ;
+ let state_length_in_bytes = keccak_state_length / 8 in
+ let bytestring =
+ Array.init state_length_in_bytes ~f:(fun idx ->
+ exists Field.typ ~compute:(fun () ->
+ (* idx = z + 8 * ((dim * y) + x) *)
+ let z = idx % bytes_per_word in
+ let x = idx / bytes_per_word % keccak_dim in
+ let y = idx / bytes_per_word / keccak_dim in
+ (* [7 6 5 4 3 2 1 0] [x=0,y=1] [x=0,y=2] [x=0,y=3] [x=0,y=4]
+ * [x=1,y=0] [x=1,y=1] [x=1,y=2] [x=1,y=3] [x=1,y=4]
+ * [x=2,y=0] [x=2,y=1] [x=2,y=2] [x=2,y=3] [x=2,y=4]
+ * [x=3,y=0] [x=3,y=1] [x=3,y=2] [x=3,y=3] [x=3,y=4]
+ * [x=4,y=0] [x=4,y=1] [x=4,y=2] [x=4,y=3] [x=4,y=4]
+ *)
+ let word =
+ Common.cvar_field_to_bignum_bigint_as_prover
+ (module Circuit)
+ state.(x).(y)
+ in
+ let byte =
+ Common.bignum_bigint_to_field
+ (module Circuit)
+ Bignum_bigint.((word asr Int.(8 * z)) land of_int 0xff)
+ in
+ byte ) )
+ in
+ (* Check all words are composed correctly from bytes *)
+ for y = 0 to keccak_dim - 1 do
+ for x = 0 to keccak_dim - 1 do
+ let idx = bytes_per_word * ((keccak_dim * y) + x) in
+ (* Create an array containing the 8 bytes starting on idx that correspond to the word in [x,y] *)
+ let word_bytes = Array.sub bytestring ~pos:idx ~len:bytes_per_word in
+ (* Assert correct decomposition of bytes from state *)
+ check_bytes_to_word (module Circuit) state.(x).(y) word_bytes
+ done
+ done ;
+
+ Array.to_list bytestring
+
+ let xor (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (input1 : Circuit.Field.t matrix) (input2 : Circuit.Field.t matrix) :
+ Circuit.Field.t matrix =
+ assert (
+ Array.length input1 = keccak_dim && Array.length input1.(0) = keccak_dim ) ;
+ assert (
+ Array.length input2 = keccak_dim && Array.length input2.(0) = keccak_dim ) ;
+
+ (* Calls Bitwise.bxor64 on each pair (x,y) of the states input1 and input2
+ and outputs the output Cvars as a new matrix *)
+ Array.map2_exn input1 input2
+ ~f:(Array.map2_exn ~f:(Bitwise.bxor64 (module Circuit)))
+end
+
+(* KECCAK HASH FUNCTION IMPLEMENTATION *)
+
+(* Computes the number of required extra bytes to pad a message of length bytes *)
+let bytes_to_pad (rate : int) (length : int) =
+ (rate / 8) - (length mod (rate / 8))
+
+(* Pads a message M as:
+ * M || pad[x](|M|)
+ * Padding rule 0x06 ..0*..1.
+ * The padded message vector will start with the message vector
+ * followed by the 0*1 rule to fulfil a length that is a multiple of rate (in bytes)
+ * (This means a 0110 sequence, followed with as many 0s as needed, and a final 1 bit)
+ *)
+let pad_nist (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (message : Circuit.Field.t list) (rate : int) : Circuit.Field.t list =
+ let open Circuit in
+ (* Find out desired length of the padding in bytes *)
+ (* If message is already rate bits, need to pad full rate again *)
+ let extra_bytes = bytes_to_pad rate (List.length message) in
+ (* 0x06 0x00 ... 0x00 0x80 or 0x86 *)
+ let last_field = Common.two_pow (module Circuit) 7 in
+ let last = Field.constant last_field in
+ (* Create the padding vector *)
+ let pad = Array.init extra_bytes ~f:(fun _ -> Field.zero) in
+ pad.(0) <- Field.of_int 6 ;
+ pad.(extra_bytes - 1) <- Field.add pad.(extra_bytes - 1) last ;
+ (* Cast the padding array to a list *)
+ let pad = Array.to_list pad in
+ (* Return the padded message *)
+ message @ pad
+
+(* Pads a message M as:
+ * M || pad[x](|M|)
+ * Padding rule 10*1.
+ * The padded message vector will start with the message vector
+ * followed by the 10*1 rule to fulfil a length that is a multiple of rate (in bytes)
+ * (This means a 1 bit, followed with as many 0s as needed, and a final 1 bit)
+*)
+let pad_101 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (message : Circuit.Field.t list) (rate : int) : Circuit.Field.t list =
+ let open Circuit in
+ (* Find out desired length of the padding in bytes *)
+ (* If message is already rate bits, need to pad full rate again *)
+ let extra_bytes = bytes_to_pad rate (List.length message) in
+ (* 0x01 0x00 ... 0x00 0x80 or 0x81 *)
+ let last_field = Common.two_pow (module Circuit) 7 in
+ let last = Field.constant @@ last_field in
+ (* Create the padding vector *)
+ let pad = Array.init extra_bytes ~f:(fun _ -> Field.zero) in
+ pad.(0) <- Field.one ;
+ pad.(extra_bytes - 1) <- Field.add pad.(extra_bytes - 1) last ;
+ (* Cast the padding array to a list *)
+ (* Return the padded message *)
+ message @ Array.to_list pad
+
+(*
+ * First algorithm in the compression step of Keccak for 64-bit words.
+ * C[x] = A[x,0] xor A[x,1] xor A[x,2] xor A[x,3] xor A[x,4]
+ * D[x] = C[x-1] xor ROT(C[x+1], 1)
+ * E[x,y] = A[x,y] xor D[x]
+ * In the Keccak reference, it corresponds to the `theta` algorithm.
+ * We use the first index of the state array as the x coordinate and the second index as the y coordinate.
+ *)
+let theta (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) : Circuit.Field.t State.matrix =
+ let state_a = state in
+ (* XOR the elements of each row together *)
+ (* for all x in {0..4}: C[x] = A[x,0] xor A[x,1] xor A[x,2] xor A[x,3] xor A[x,4] *)
+ let state_c =
+ Array.map state_a ~f:(Array.reduce_exn ~f:(Bitwise.bxor64 (module Circuit)))
+ in
+ (* for all x in {0..4}: D[x] = C[x-1] xor ROT(C[x+1], 1) *)
+ let state_d =
+ Array.init keccak_dim ~f:(fun x ->
+ Bitwise.(
+ bxor64
+ (module Circuit)
+ (* using (x + m mod m) to avoid negative values *)
+ state_c.((x + keccak_dim - 1) mod keccak_dim)
+ (rot64 (module Circuit) state_c.((x + 1) mod keccak_dim) 1 Left)) )
+ in
+ (* for all x in {0..4} and y in {0..4}: E[x,y] = A[x,y] xor D[x] *)
+ (* return E *)
+ Array.map2_exn state_a state_d ~f:(fun state_a state_d ->
+ Array.map state_a ~f:(Bitwise.bxor64 (module Circuit) state_d) )
+
+(*
+ * Second and third steps in the compression step of Keccak for 64-bit words.
+ * B[y,2x+3y] = ROT(E[x,y], r[x,y])
+ * which is equivalent to the `rho` algorithm followed by the `pi` algorithm in the Keccak reference as follows:
+ * rho:
+ * A[0,0] = a[0,0]
+ * | x | = | 1 |
+ * | y | = | 0 |
+ * for t = 0 to 23 do
+ * A[x,y] = ROT(a[x,y], (t+1)(t+2)/2 mod 64)))
+ * | x | = | 0 1 | | x |
+ * | | = | | * | |
+ * | y | = | 2 3 | | y |
+ * end for
+ * pi:
+ * for x = 0 to 4 do
+ * for y = 0 to 4 do
+ * | X | = | 0 1 | | x |
+ * | | = | | * | |
+ * | Y | = | 2 3 | | y |
+ * A[X,Y] = a[x,y]
+ * end for
+ * end for
+ * We use the first index of the state array as the x coordinate and the second index as the y coordinate.
+ *)
+let pi_rho (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) : Circuit.Field.t State.matrix =
+ let state_e = state in
+ let state_b = State.zeros (module Circuit) in
+ (* for all x in {0..4} and y in {0..4}: B[y,2x+3y] = ROT(E[x,y], r[x,y]) *)
+ for x = 0 to keccak_dim - 1 do
+ for y = 0 to keccak_dim - 1 do
+ (* No need to use module since this is always positive *)
+ state_b.(y).(((2 * x) + (3 * y)) mod keccak_dim) <-
+ Bitwise.rot64 (module Circuit) state_e.(x).(y) rot_table.(x).(y) Left
+ done
+ done ;
+ state_b
+
+(*
+ * Fourth step of the compression function of Keccak for 64-bit words.
+ * F[x,y] = B[x,y] xor ((not B[x+1,y]) and B[x+2,y])
+ * It corresponds to the chi algorithm in the Keccak reference.
+ * for y = 0 to 4 do
+ * for x = 0 to 4 do
+ * A[x,y] = a[x,y] xor ((not a[x+1,y]) and a[x+2,y])
+ * end for
+ * end for
+ *)
+let chi (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) : Circuit.Field.t State.matrix =
+ let state_b = state in
+ let state_f = State.zeros (module Circuit) in
+ (* for all x in {0..4} and y in {0..4}: F[x,y] = B[x,y] xor ((not B[x+1,y]) and B[x+2,y]) *)
+ for x = 0 to keccak_dim - 1 do
+ for y = 0 to keccak_dim - 1 do
+ state_f.(x).(y) <-
+ Bitwise.(
+ bxor64
+ (module Circuit)
+ state_b.(x).(y)
+ (band64
+ (module Circuit)
+ (bnot64_unchecked (module Circuit) state_b.((x + 1) mod 5).(y))
+ state_b.((x + 2) mod 5).(y) ))
+ done
+ done ;
+ (* We can use unchecked NOT because the length of the input is constrained to be
+ 64 bits thanks to the fact that it is the output of a previous Xor64 *)
+ state_f
+
+(*
+ * Fifth step of the permutation function of Keccak for 64-bit words.
+ * It takes the word located at the position (0,0) of the state and XORs it with the round constant.
+ *)
+let iota (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) (rc : Circuit.Field.t) :
+ Circuit.Field.t State.matrix =
+ (* Round constants for this round for the iota algorithm *)
+ let state_g = state in
+ state_g.(0).(0) <- Bitwise.(bxor64 (module Circuit) state_g.(0).(0) rc) ;
+ (* Checking that it is the right round constant is implicit from reusing the right cvar *)
+ state_g
+
+(* The round applies the lambda function and then chi and iota
+ * It consists of the concatenation of the theta, rho, and pi algorithms.
+ * lambda = pi o rho o theta
+ * Thus:
+ * iota o chi o pi o rho o theta
+ *)
+let round (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) (rc : Circuit.Field.t) :
+ Circuit.Field.t State.matrix =
+ let state_a = state in
+ let state_e = theta (module Circuit) state_a in
+ let state_b = pi_rho (module Circuit) state_e in
+ let state_f = chi (module Circuit) state_b in
+ let state_d = iota (module Circuit) state_f rc in
+ state_d
+
+(* Keccak permutation function with a constant number of rounds *)
+let permutation (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) (rc : Circuit.Field.t array) :
+ Circuit.Field.t State.matrix =
+ for i = 0 to keccak_rounds - 1 do
+ let state_i = round (module Circuit) state rc.(i) in
+ (* Update state for next step *)
+ State.update (module Circuit) ~prev:state ~next:state_i
+ done ;
+ state
+
+(* Absorb padded message into a keccak state with given rate and capacity *)
+let absorb (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (padded_message : Circuit.Field.t list) ~(capacity : int) ~(rate : int)
+ ~(rc : Circuit.Field.t array) : Circuit.Field.t State.matrix =
+ let open Circuit in
+ let root_state = State.zeros (module Circuit) in
+ let state = root_state in
+
+ (* split into blocks of rate bits *)
+ (* for each block of rate bits in the padded message -> this is rate/8 bytes *)
+ let chunks = List.chunks_of padded_message ~length:(rate / 8) in
+ (* (capacity / 8) zero bytes *)
+ let zeros = Array.to_list @@ Array.create ~len:(capacity / 8) Field.zero in
+ for i = 0 to List.length chunks - 1 do
+ let block = List.nth_exn chunks i in
+ (* pad the block with 0s to up to 1600 bits *)
+ let padded_block = block @ zeros in
+ (* padded with zeros each block until they are 1600 bit long *)
+ assert (List.length padded_block * 8 = keccak_state_length) ;
+ let block_state = State.of_bytes (module Circuit) padded_block in
+ (* xor the state with the padded block *)
+ let state_xor = State.xor (module Circuit) state block_state in
+ (* apply the permutation function to the xored state *)
+ let state_perm = permutation (module Circuit) state_xor rc in
+ State.update (module Circuit) ~prev:state ~next:state_perm
+ done ;
+
+ state
+
+(* Squeeze state until it has a desired length in bits *)
+let squeeze (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (state : Circuit.Field.t State.matrix) ~(length : int) ~(rate : int)
+ ~(rc : Circuit.Field.t array) : Circuit.Field.t list =
+ let copy (bytestring : Circuit.Field.t list)
+ (output_array : Circuit.Field.t array) ~(start : int) ~(length : int) =
+ for i = 0 to length - 1 do
+ output_array.(start + i) <- List.nth_exn bytestring i
+ done ;
+ ()
+ in
+
+ let open Circuit in
+ (* bytes per squeeze *)
+ let bytes_per_squeeze = rate / 8 in
+ (* number of squeezes *)
+ let squeezes = (length / rate) + 1 in
+ (* multiple of rate that is larger than output_length, in bytes *)
+ let output_length = squeezes * bytes_per_squeeze in
+ (* array with sufficient space to store the output *)
+ let output_array = Array.create ~len:output_length Field.zero in
+ (* first state to be squeezed *)
+ let bytestring = State.as_prover_to_bytes (module Circuit) state in
+ let output_bytes = List.take bytestring bytes_per_squeeze in
+ copy output_bytes output_array ~start:0 ~length:bytes_per_squeeze ;
+ (* for the rest of squeezes *)
+ for i = 1 to squeezes - 1 do
+ (* apply the permutation function to the state *)
+ let new_state = permutation (module Circuit) state rc in
+ State.update (module Circuit) ~prev:state ~next:new_state ;
+ (* append the output of the permutation function to the output *)
+ let bytestring_i = State.as_prover_to_bytes (module Circuit) state in
+ let output_bytes_i = List.take bytestring_i bytes_per_squeeze in
+ copy output_bytes_i output_array ~start:(bytes_per_squeeze * i)
+ ~length:bytes_per_squeeze ;
+ ()
+ done ;
+ (* Obtain the hash selecting the first bitlength/8 bytes of the output array *)
+ let hashed = Array.sub output_array ~pos:0 ~len:(length / 8) in
+
+ Array.to_list hashed
+
+(* Keccak sponge function for 1600 bits of state width
+ * Need to split the message into blocks of 1088 bits.
+ *)
+let sponge (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (padded_message : Circuit.Field.t list) ~(length : int) ~(capacity : int)
+ ~(rate : int) : Circuit.Field.t list =
+ let open Circuit in
+ (* check that the padded message is a multiple of rate *)
+ assert (List.length padded_message * 8 mod rate = 0) ;
+ (* setup cvars for round constants *)
+ let rc =
+ exists (Typ.array ~length:24 Field.typ) ~compute:(fun () ->
+ Array.map round_consts ~f:(Common.field_of_hex (module Circuit)) )
+ in
+ (* absorb *)
+ let state = absorb (module Circuit) padded_message ~capacity ~rate ~rc in
+ (* squeeze *)
+ let hashed = squeeze (module Circuit) state ~length ~rate ~rc in
+ hashed
+
+(* Checks in the circuit that a list of cvars are at most 8 bits each *)
+let check_bytes (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (inputs : Circuit.Field.t list) : unit =
+ let open Circuit in
+ (* Create a second list of shifted inputs with 4 more bits*)
+ let shifted =
+ Core_kernel.List.map ~f:(fun x -> Field.(of_int 16 * x)) inputs
+ in
+ (* We need to lookup that both the inputs and the shifted values are less than 12 bits *)
+ (* Altogether means that it was less than 8 bits *)
+ let lookups = inputs @ shifted in
+ (* Make sure that a multiple of 3 cvars is in the list *)
+ let lookups =
+ match List.length lookups % 3 with
+ | 2 ->
+ lookups @ [ Field.zero ]
+ | 1 ->
+ lookups @ [ Field.zero; Field.zero ]
+ | _ ->
+ lookups
+ in
+ (* We can fit 3 12-bit lookups per row *)
+ for i = 0 to (List.length lookups / 3) - 1 do
+ Lookup.three_12bit
+ (module Circuit)
+ (List.nth_exn lookups (3 * i))
+ (List.nth_exn lookups ((3 * i) + 1))
+ (List.nth_exn lookups ((3 * i) + 2)) ;
+ ()
+ done ;
+ ()
+
+(*
+* Keccak hash function with input message passed as list of Cvar bytes.
+* The message will be parsed as follows:
+* - the first byte of the message will be the least significant byte of the first word of the state (A[0][0])
+* - the 10*1 pad will take place after the message, until reaching the bit length rate.
+* - then, {0} pad will take place to finish the 1600 bits of the state.
+*)
+let hash (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false)
+ (message : Circuit.Field.t list) ~(length : int) ~(capacity : int)
+ (nist_version : bool) : Circuit.Field.t list =
+ assert (capacity > 0) ;
+ assert (capacity < keccak_state_length) ;
+ assert (length > 0) ;
+ assert (length mod 8 = 0) ;
+ (* Set input to Big Endian format *)
+ let message =
+ match inp_endian with Big -> message | Little -> List.rev message
+ in
+ (* Check each cvar input is 8 bits at most if it was not done before at creation time*)
+ if byte_checks then check_bytes (module Circuit) message ;
+ let rate = keccak_state_length - capacity in
+ let padded =
+ match nist_version with
+ | true ->
+ pad_nist (module Circuit) message rate
+ | false ->
+ pad_101 (module Circuit) message rate
+ in
+ let hash = sponge (module Circuit) padded ~length ~capacity ~rate in
+ (* Check each cvar output is 8 bits at most. Always because they are created here *)
+ check_bytes (module Circuit) hash ;
+ (* Set input to desired endianness *)
+ let hash = match out_endian with Big -> hash | Little -> List.rev hash in
+ (* Check each cvar output is 8 bits at most *)
+ hash
+
+(* Gadget for NIST SHA-3 function for output lengths 224/256/384/512.
+ * Input and output endianness can be specified. Default is big endian.
+ *)
+let nist_sha3 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false) (len : int)
+ (message : Circuit.Field.t list) : Circuit.Field.t list =
+ let hash =
+ match len with
+ | 224 ->
+ hash
+ (module Circuit)
+ message ~length:224 ~capacity:448 true ~inp_endian ~out_endian
+ ~byte_checks
+ | 256 ->
+ hash
+ (module Circuit)
+ message ~length:256 ~capacity:512 true ~inp_endian ~out_endian
+ ~byte_checks
+ | 384 ->
+ hash
+ (module Circuit)
+ message ~length:384 ~capacity:768 true ~inp_endian ~out_endian
+ ~byte_checks
+ | 512 ->
+ hash
+ (module Circuit)
+ message ~length:512 ~capacity:1024 true ~inp_endian ~out_endian
+ ~byte_checks
+ | _ ->
+ assert false
+ in
+ hash
+
+(* Gadget for Keccak hash function for the parameters used in Ethereum.
+ * Input and output endianness can be specified. Default is big endian.
+ *)
+let ethereum (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false)
+ (message : Circuit.Field.t list) : Circuit.Field.t list =
+ hash
+ (module Circuit)
+ message ~length:256 ~capacity:512 false ~inp_endian ~out_endian ~byte_checks
+
+(* Gadget for pre-NIST SHA-3 function for output lengths 224/256/384/512.
+ * Input and output endianness can be specified. Default is big endian.
+ * Note that when calling with output length 256 this is equivalent to the ethereum function
+ *)
+let pre_nist (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(inp_endian = Big) ?(out_endian = Big) ?(byte_checks = false) (len : int)
+ (message : Circuit.Field.t list) : Circuit.Field.t list =
+ match len with
+ | 224 ->
+ hash
+ (module Circuit)
+ message ~length:224 ~capacity:448 false ~inp_endian ~out_endian
+ ~byte_checks
+ | 256 ->
+ ethereum (module Circuit) message ~inp_endian ~out_endian ~byte_checks
+ | 384 ->
+ hash
+ (module Circuit)
+ message ~length:384 ~capacity:768 false ~inp_endian ~out_endian
+ ~byte_checks
+ | 512 ->
+ hash
+ (module Circuit)
+ message ~length:512 ~capacity:1024 false ~inp_endian ~out_endian
+ ~byte_checks
+ | _ ->
+ assert false
+
+(* KECCAK GADGET TESTS *)
+
+let%test_unit "keccak gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ let test_keccak ?cs ?inp_endian ?out_endian ~nist ~len message expected =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ assert (String.length message % 2 = 0) ;
+ let message =
+ Array.to_list
+ @@ exists
+ (Typ.array ~length:(String.length message / 2) Field.typ)
+ ~compute:(fun () ->
+ Array.of_list
+ @@ Common.field_bytes_of_hex (module Runner.Impl) message
+ )
+ in
+ let hashed =
+ Array.of_list
+ @@
+ match nist with
+ | true ->
+ nist_sha3
+ (module Runner.Impl)
+ len message ?inp_endian ?out_endian ~byte_checks:true
+ | false ->
+ pre_nist
+ (module Runner.Impl)
+ len message ?inp_endian ?out_endian ~byte_checks:true
+ in
+
+ let expected =
+ Array.of_list
+ @@ Common.field_bytes_of_hex (module Runner.Impl) expected
+ in
+ (* Check expected hash output *)
+ as_prover (fun () ->
+ for i = 0 to Array.length hashed - 1 do
+ let byte_hash =
+ Common.cvar_field_to_bignum_bigint_as_prover
+ (module Runner.Impl)
+ hashed.(i)
+ in
+ let byte_exp =
+ Common.field_to_bignum_bigint
+ (module Runner.Impl)
+ expected.(i)
+ in
+ assert (Bignum_bigint.(byte_hash = byte_exp))
+ done ;
+ () ) ;
+ () )
+ in
+ cs
+ in
+
+ (* Positive tests *)
+ let cs_eth256_1byte =
+ test_keccak ~nist:false ~len:256 "30"
+ "044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d"
+ in
+
+ let cs_nist512_1byte =
+ test_keccak ~nist:true ~len:512 "30"
+ "2d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c"
+ in
+
+ (* I am the owner of the NFT with id X on the Ethereum chain *)
+ let _cs =
+ test_keccak ~nist:false ~len:256
+ "4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e"
+ "63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36"
+ in
+ let _cs =
+ test_keccak ~nist:false ~len:512
+ "4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e"
+ "848cf716c2d64444d2049f215326b44c25a007127d2871c1b6004a9c3d102f637f31acb4501e59f3a0160066c8814816f4dc58a869f37f740e09b9a8757fa259"
+ in
+
+ (* The following two tests use 2 blocks instead *)
+ (* For Keccak *)
+ let _cs =
+ test_keccak ~nist:false ~len:256
+ "044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36"
+ "560deb1d387f72dba729f0bd0231ad45998dda4b53951645322cf95c7b6261d9"
+ in
+ (* For NIST *)
+ let _cs =
+ test_keccak ~nist:true ~len:256
+ "044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36"
+ "1784354c4bbfa5f54e5db23041089e65a807a7b970e3cfdba95e2fbe63b1c0e4"
+ in
+
+ (* Padding of input 1080 bits and 1088 bits *)
+ (* 135 bits, uses the following single padding byte as 0x81 *)
+ let cs135 =
+ test_keccak ~nist:false ~len:256
+ "391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4"
+ "7d5655391ede9ca2945f32ad9696f464be8004389151ce444c89f688278f2e1d"
+ in
+
+ (* 136 bits, 2 blocks and second is just padding *)
+ let cs136 =
+ test_keccak ~nist:false ~len:256
+ "ff391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4"
+ "37694fd4ba137be747eb25a85b259af5563e0a7a3010d42bd15963ac631b9d3f"
+ in
+
+ (* Input already looks like padded *)
+ let _cs =
+ test_keccak ~cs:cs135 ~nist:false ~len:256
+ "800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001"
+ "0edbbae289596c7da9fafe65931c5dce3439fb487b8286d6c1970e44eea39feb"
+ in
+
+ let _cs =
+ test_keccak ~cs:cs136 ~nist:false ~len:256
+ "80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001"
+ "bbf1f49a2cc5678aa62196d0c3108d89425b81780e1e90bcec03b4fb5f834714"
+ in
+
+ (* Reusing *)
+ let _cs =
+ test_keccak ~cs:cs_eth256_1byte ~nist:false ~len:256 "00"
+ "bc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a"
+ in
+
+ let cs2 =
+ test_keccak ~nist:false ~len:256 "a2c0"
+ "9856642c690c036527b8274db1b6f58c0429a88d9f3b9298597645991f4f58f0"
+ in
+
+ let _cs =
+ test_keccak ~cs:cs2 ~nist:false ~len:256 "0a2c"
+ "295b48ad49eff61c3abfd399c672232434d89a4ef3ca763b9dbebb60dbb32a8b"
+ in
+
+ (* Endianness *)
+ let _cs =
+ test_keccak ~nist:false ~len:256 ~inp_endian:Little ~out_endian:Little
+ "2c0a"
+ "8b2ab3db60bbbe9d3b76caf34e9ad834242372c699d3bf3a1cf6ef49ad485b29"
+ in
+
+ (* Negative tests *)
+ (* Check cannot use bad hex inputs *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~nist:false ~len:256 "a2c"
+ "07f02d241eeba9c909a1be75e08d9e8ac3e61d9e24fa452a6785083e1527c467" ) ) ;
+
+ (* Check cannot use bad hex inputs *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~nist:true ~len:256 "0"
+ "f39f4526920bb4c096e5722d64161ea0eb6dbd0b4ff0d812f31d56fb96142084" ) ) ;
+
+ (* Cannot reuse CS for different output length *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~cs:cs_nist512_1byte ~nist:true ~len:256 "30"
+ "f9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e4" ) ) ;
+
+ (* Checking cannot reuse CS for same length but different padding *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~cs:cs_eth256_1byte ~nist:true ~len:256
+ "4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e"
+ "63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36" ) ) ;
+
+ (* Cannot reuse cs with different endianness *)
+ assert (
+ Common.is_error (fun () ->
+ test_keccak ~cs:cs2 ~nist:false ~len:256 ~inp_endian:Little
+ ~out_endian:Little "2c0a"
+ "8b2ab3db60bbbe9d3b76caf34e9ad834242372c699d3bf3a1cf6ef49ad485b29" ) ) ;
+
+ () ) ;
+
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/keccak.mli b/src/lib/crypto/kimchi_backend/gadgets/keccak.mli
new file mode 100644
index 00000000000..b3889a9cc4c
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/keccak.mli
@@ -0,0 +1,58 @@
+(* Endianness type *)
+type endianness = Big | Little
+
+(** Gadget for NIST SHA-3 function for output lengths 224/256/384/512
+ * Input:
+ * - Endianness of the input (default is Big).
+ * - Endianness of the output (default is Big).
+ * - Flag to enable input byte checks (default is false). Outputs are always constrained.
+ * - int representing the output length of the hash function (224|256|384|512)
+ * - Arbitrary length list of Cvars representing the input to the hash function where each of them is a byte
+ * Output:
+ * - List of `int` Cvars representing the output of the hash function where each of them is a byte
+ *)
+val nist_sha3 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?inp_endian:endianness
+ -> ?out_endian:endianness
+ -> ?byte_checks:bool
+ -> int
+ -> 'f Snarky_backendless.Cvar.t list
+ -> 'f Snarky_backendless.Cvar.t list
+
+(** Gadget for Keccak hash function for the parameters used in Ethereum
+ * Input:
+ * - Endianness of the input (default is Big).
+ * - Endianness of the output (default is Big).
+ * - Flag to enable input byte checks (default is false). Outputs are always constrained.
+ * - Arbitrary length list of Cvars representing the input to the hash function where each of them is a byte
+ * Output:
+ * - List of 256 Cvars representing the output of the hash function where each of them is a byte
+ *)
+val ethereum :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?inp_endian:endianness
+ -> ?out_endian:endianness
+ -> ?byte_checks:bool
+ -> 'f Snarky_backendless.Cvar.t list
+ -> 'f Snarky_backendless.Cvar.t list
+
+(*** Gadget for pre-NIST SHA-3 function for output lengths 224/256/384/512.
+ * Note that when calling with output length 256 this is equivalent to the ethereum function
+ * Input:
+ * - Endianness of the input (default is Big).
+ * - Endianness of the output (default is Big).
+ * - Flag to enable input byte checks (default is false). Outputs are always constrained.
+ * - int representing the output length of the hash function (224|256|384|512)
+ * - Arbitrary length list of Cvars representing the input to the hash function where each of them is a byte
+ * Output:
+ * - List of `int` Cvars representing the output of the hash function where each of them is a byte
+ *)
+val pre_nist :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?inp_endian:endianness
+ -> ?out_endian:endianness
+ -> ?byte_checks:bool
+ -> int
+ -> 'f Snarky_backendless.Cvar.t list
+ -> 'f Snarky_backendless.Cvar.t list
diff --git a/src/lib/crypto/kimchi_backend/gadgets/lookup.ml b/src/lib/crypto/kimchi_backend/gadgets/lookup.ml
new file mode 100644
index 00000000000..dee61cc7983
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/lookup.ml
@@ -0,0 +1,101 @@
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+let tests_enabled = true
+
+(* Looks up three values (at most 12 bits each)
+ * BEWARE: it needs in the circuit at least one gate (even if dummy) that uses the 12-bit lookup table for it to work
+ *)
+let three_12bit (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v0 : Circuit.Field.t) (v1 : Circuit.Field.t) (v2 : Circuit.Field.t) : unit
+ =
+ let open Circuit in
+ with_label "triple_lookup" (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (Lookup
+ { w0 = Field.one
+ ; w1 = v0
+ ; w2 = Field.zero
+ ; w3 = v1
+ ; w4 = Field.zero
+ ; w5 = v2
+ ; w6 = Field.zero
+ } )
+ } ) ;
+ ()
+
+(* Check that one value is at most X bits (at most 12), default is 12.
+ * BEWARE: it needs in the circuit at least one gate (even if dummy) that uses the 12-bit lookup table for it to work
+ *)
+let less_than_bits (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ?(bits = 12) (value : Circuit.Field.t) : unit =
+ let open Circuit in
+ assert (bits > 0 && bits <= 12) ;
+ (* In order to check that a value is less than 2^x bits value < 2^x
+ you first check that value < 2^12 bits using the lookup table
+ and then that the value * shift < 2^12 where shift = 2^(12-x)
+ (because moving shift to the right hand side that gives value < 2^x) *)
+ let shift =
+ exists Field.typ ~compute:(fun () ->
+ let power = Core_kernel.Int.pow 2 (12 - bits) in
+ Field.Constant.of_int power )
+ in
+ three_12bit (module Circuit) value Field.(value * shift) Field.zero ;
+ ()
+
+(*********)
+(* Tests *)
+(*********)
+
+let%test_unit "lookup gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test lookup less than gadget for both variables and constants
+ * Inputs value to be checked and number of bits
+ * Returns true if constraints are satisfied, false otherwise.
+ *)
+ let test_lookup ?cs ~bits value =
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ (* Set up snarky constant *)
+ let const = Field.constant @@ Field.Constant.of_int value in
+ (* Set up snarky variable *)
+ let value =
+ exists Field.typ ~compute:(fun () -> Field.Constant.of_int value)
+ in
+ (* Use the lookup gadget *)
+ less_than_bits (module Runner.Impl) ~bits value ;
+ less_than_bits (module Runner.Impl) ~bits const ;
+ (* Use a dummy range check to load the table *)
+ Range_check.bits64 (module Runner.Impl) Field.zero ;
+ () )
+ in
+ cs
+ in
+
+ (* TEST lookup gadget *)
+ (* Positive tests *)
+ let cs12 = test_lookup ~bits:12 4095 in
+ let cs8 = test_lookup ~bits:8 255 in
+ let cs1 = test_lookup ~bits:1 0 in
+ let _cs = test_lookup ~cs:cs1 ~bits:1 1 in
+ (* Negative tests *)
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs12 ~bits:12 4096)) ;
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs12 ~bits:12 (-1))) ;
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs8 ~bits:8 256)) ;
+ assert (Common.is_error (fun () -> test_lookup ~cs:cs1 ~bits:1 2)) ;
+ () ) ;
+
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/lookup.mli b/src/lib/crypto/kimchi_backend/gadgets/lookup.mli
new file mode 100644
index 00000000000..dcaa9cc77be
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/lookup.mli
@@ -0,0 +1,20 @@
+(*TODO: perhaps move this to an internal file, as the dummy gate could be misleading for users *)
+
+(** Looks up three values (at most 12 bits each)
+ * BEWARE: it needs in the circuit at least one gate (even if dummy) that uses the 12-bit lookup table for it to work
+ *)
+val three_12bit :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* v0 *)
+ -> 'f Snarky_backendless.Cvar.t (* v1 *)
+ -> 'f Snarky_backendless.Cvar.t (* v2 *)
+ -> unit
+
+(** Check that one value is at most X bits (at most 12). Default is 12.
+ * BEWARE: it needs in the circuit at least one gate (even if dummy) that uses the 12-bit lookup table for it to work
+ *)
+val less_than_bits :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> ?bits:int (* bits *)
+ -> 'f Snarky_backendless.Cvar.t (* value *)
+ -> unit
diff --git a/src/lib/crypto/kimchi_backend/gadgets/range_check.ml b/src/lib/crypto/kimchi_backend/gadgets/range_check.ml
new file mode 100644
index 00000000000..83cd95b9bfa
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/range_check.ml
@@ -0,0 +1,425 @@
+open Core_kernel
+
+open Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint
+
+let tests_enabled = true
+
+(* Helper to create RangeCheck0 gate, configured in various ways
+ * - is_64bit : create 64-bit range check
+ * - is_compact : compact limbs mode (only used by compact multi-range-check)
+ *)
+let range_check0 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ~(label : string) ?(is_compact : bool = false) (v0 : Circuit.Field.t)
+ (v0p0 : Circuit.Field.t) (v0p1 : Circuit.Field.t) =
+ let open Circuit in
+ (* Define shorthand helper *)
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+
+ (* Sanity check v0p0 and v0p1 correspond to the correct bits of v0 *)
+ as_prover (fun () ->
+ let open Circuit.Field in
+ let v0p0_expected = of_bits v0 76 88 in
+ let v0p1_expected = of_bits v0 64 76 in
+
+ Assert.equal v0p0 v0p0_expected ;
+ Assert.equal v0p1 v0p1_expected ) ;
+
+ (* Create sublimbs *)
+ let v0p2 = of_bits v0 52 64 in
+ let v0p3 = of_bits v0 40 52 in
+ let v0p4 = of_bits v0 28 40 in
+ let v0p5 = of_bits v0 16 28 in
+ let v0c0 = of_bits v0 14 16 in
+ let v0c1 = of_bits v0 12 14 in
+ let v0c2 = of_bits v0 10 12 in
+ let v0c3 = of_bits v0 8 10 in
+ let v0c4 = of_bits v0 6 8 in
+ let v0c5 = of_bits v0 4 6 in
+ let v0c6 = of_bits v0 2 4 in
+ let v0c7 = of_bits v0 0 2 in
+
+ (* Set up compact mode coefficient *)
+ let compact =
+ if is_compact then Field.Constant.one else Field.Constant.zero
+ in
+
+ (* Create RangeCheck0 gate *)
+ with_label label (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (RangeCheck0
+ { (* Current row *) v0
+ ; v0p0
+ ; v0p1
+ ; v0p2
+ ; v0p3
+ ; v0p4
+ ; v0p5
+ ; v0c0
+ ; v0c1
+ ; v0c2
+ ; v0c3
+ ; v0c4
+ ; v0c5
+ ; v0c6
+ ; v0c7
+ ; (* Coefficients *)
+ compact
+ } )
+ } )
+
+(* Helper to create RangeCheck1 gate *)
+let range_check1 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ ~(label : string) (v0p0 : Circuit.Field.t) (v0p1 : Circuit.Field.t)
+ (v1p0 : Circuit.Field.t) (v1p1 : Circuit.Field.t) (v2 : Circuit.Field.t)
+ (v12 : Circuit.Field.t) =
+ let open Circuit in
+ (* Define shorthand helper *)
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+
+ (* Create sublimbs - current row *)
+ let v2c0 = of_bits v2 86 88 in
+ let v2p0 = of_bits v2 74 86 in
+ let v2p1 = of_bits v2 62 74 in
+ let v2p2 = of_bits v2 50 62 in
+ let v2p3 = of_bits v2 38 50 in
+ let v2c1 = of_bits v2 36 38 in
+ let v2c2 = of_bits v2 34 36 in
+ let v2c3 = of_bits v2 32 34 in
+ let v2c4 = of_bits v2 30 32 in
+ let v2c5 = of_bits v2 28 30 in
+ let v2c6 = of_bits v2 26 28 in
+ let v2c7 = of_bits v2 24 26 in
+ let v2c8 = of_bits v2 22 24 in
+
+ (* Create sublimbs - next row *)
+ let v2c9 = of_bits v2 20 22 in
+ let v2c10 = of_bits v2 18 20 in
+ let v2c11 = of_bits v2 16 18 in
+ let v2c12 = of_bits v2 14 16 in
+ let v2c13 = of_bits v2 12 14 in
+ let v2c14 = of_bits v2 10 12 in
+ let v2c15 = of_bits v2 8 10 in
+ let v2c16 = of_bits v2 6 8 in
+ let v2c17 = of_bits v2 4 6 in
+ let v2c18 = of_bits v2 2 4 in
+ let v2c19 = of_bits v2 0 2 in
+
+ (* Create RangeCheck1 gate *)
+ with_label label (fun () ->
+ assert_
+ { annotation = Some __LOC__
+ ; basic =
+ Kimchi_backend_common.Plonk_constraint_system.Plonk_constraint.T
+ (RangeCheck1
+ { (* Current row *) v2
+ ; v12
+ ; v2c0
+ ; v2p0
+ ; v2p1
+ ; v2p2
+ ; v2p3
+ ; v2c1
+ ; v2c2
+ ; v2c3
+ ; v2c4
+ ; v2c5
+ ; v2c6
+ ; v2c7
+ ; v2c8
+ ; (* Next row *) v2c9
+ ; v2c10
+ ; v2c11
+ ; v0p0
+ ; v0p1
+ ; v1p0
+ ; v1p1
+ ; v2c12
+ ; v2c13
+ ; v2c14
+ ; v2c15
+ ; v2c16
+ ; v2c17
+ ; v2c18
+ ; v2c19
+ } )
+ } )
+
+(* 64-bit range-check gadget - checks v0 \in [0, 2^64) *)
+let bits64 (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v0 : Circuit.Field.t) =
+ range_check0
+ (module Circuit)
+ ~label:"range_check64" ~is_compact:false v0 Circuit.Field.zero
+ Circuit.Field.zero
+
+(* multi-range-check gadget - checks v0,v1,v2 \in [0, 2^88) *)
+let multi (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v0 : Circuit.Field.t) (v1 : Circuit.Field.t) (v2 : Circuit.Field.t) =
+ let open Circuit in
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+ let v0p0 = of_bits v0 76 88 in
+ let v0p1 = of_bits v0 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"multi_range_check" ~is_compact:false v0 v0p0 v0p1 ;
+ let v1p0 = of_bits v1 76 88 in
+ let v1p1 = of_bits v1 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"multi_range_check" ~is_compact:false v1 v1p0 v1p1 ;
+ let zero = exists Field.typ ~compute:(fun () -> Field.Constant.zero) in
+ range_check1
+ (module Circuit)
+ ~label:"multi_range_check" v0p0 v0p1 v1p0 v1p1 v2 zero
+
+(* compact multi-range-check gadget - checks
+ * - v0,v1,v2 \in [0, 2^88)
+ * - v01 = v0 + 2^88 * v1
+ *)
+let compact_multi (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (v01 : Circuit.Field.t) (v2 : Circuit.Field.t) =
+ let open Circuit in
+ (* Set up helper *)
+ let bignum_bigint_to_field = Common.bignum_bigint_to_field (module Circuit) in
+ (* Prepare range-check values *)
+ let v1, v0 =
+ exists
+ Typ.(Field.typ * Field.typ)
+ ~compute:(fun () ->
+ (* Decompose v0 and v1 from v01 = 2^L * v1 + v0 *)
+ let v01 =
+ Common.field_to_bignum_bigint
+ (module Circuit)
+ (As_prover.read Field.typ v01)
+ in
+ let v1, v0 = Common.(bignum_bigint_div_rem v01 two_to_limb) in
+ (bignum_bigint_to_field v1, bignum_bigint_to_field v0) )
+ in
+ let of_bits =
+ Common.as_prover_cvar_field_bits_le_to_cvar_field (module Circuit)
+ in
+ let v2p0 = of_bits v2 76 88 in
+ let v2p1 = of_bits v2 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"compact_multi_range_check" ~is_compact:false v2 v2p0 v2p1 ;
+ let v0p0 = of_bits v0 76 88 in
+ let v0p1 = of_bits v0 64 76 in
+ range_check0
+ (module Circuit)
+ ~label:"compact_multi_range_check" ~is_compact:true v0 v0p0 v0p1 ;
+ range_check1
+ (module Circuit)
+ ~label:"compact_multi_range_check" v2p0 v2p1 v0p0 v0p1 v1 v01
+
+(*********)
+(* Tests *)
+(*********)
+
+let%test_unit "range_check64 gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test range_check64 gadget
+ * Input: value to be range checked in [0, 2^64)
+ *)
+ let test_range_check64 ?cs base10 =
+ let open Runner.Impl in
+ let value = Common.field_of_base10 (module Runner.Impl) base10 in
+
+ let make_circuit value =
+ (* Circuit definition *)
+ let value = exists Field.typ ~compute:(fun () -> value) in
+ bits64 (module Runner.Impl) value ;
+ (* Padding *)
+ Boolean.Assert.is_true (Field.equal value value)
+ in
+
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () -> make_circuit value)
+ in
+ cs
+ in
+
+ (* Positive tests *)
+ let cs = test_range_check64 "0" in
+ let _cs = test_range_check64 ~cs "4294967" in
+ let _cs = test_range_check64 ~cs "18446744073709551615" in
+ (* 2^64 - 1 *)
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ test_range_check64 ~cs "18446744073709551616" (* 2^64 *) ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_range_check64 ~cs "170141183460469231731687303715884105728"
+ (* 2^127 *) ) ) ) ;
+ ()
+
+let%test_unit "multi_range_check gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test multi_range_check gadget *)
+ let test_multi_range_check ?cs v0 v1 v2 =
+ let open Runner.Impl in
+ let v0 = Common.field_of_base10 (module Runner.Impl) v0 in
+ let v1 = Common.field_of_base10 (module Runner.Impl) v1 in
+ let v2 = Common.field_of_base10 (module Runner.Impl) v2 in
+
+ let make_circuit v0 v1 v2 =
+ (* Circuit definition *)
+ let values =
+ exists (Typ.array ~length:3 Field.typ) ~compute:(fun () ->
+ [| v0; v1; v2 |] )
+ in
+ multi (module Runner.Impl) values.(0) values.(1) values.(2)
+ in
+
+ (* Generate and verify proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () -> make_circuit v0 v1 v2)
+ in
+
+ cs
+ in
+
+ (* Positive tests *)
+ let cs =
+ test_multi_range_check "0" "4294967" "309485009821345068724781055"
+ in
+ let _cs =
+ test_multi_range_check ~cs "267475740839011166017999907"
+ "120402749546803056196583080" "1159834292458813579124542"
+ in
+ let _cs =
+ test_multi_range_check ~cs "309485009821345068724781055"
+ "309485009821345068724781055" "309485009821345068724781055"
+ in
+ let _cs = test_multi_range_check ~cs "0" "0" "0" in
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ test_multi_range_check ~cs "0" "4294967" "309485009821345068724781056" ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_multi_range_check ~cs "0" "309485009821345068724781056"
+ "309485009821345068724781055" ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_multi_range_check ~cs "309485009821345068724781056" "4294967"
+ "309485009821345068724781055" ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_multi_range_check ~cs
+ "28948022309329048855892746252171976963317496166410141009864396001978282409984"
+ "0170141183460469231731687303715884105728"
+ "170141183460469231731687303715884105728" ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_multi_range_check ~cs "0" "0"
+ "28948022309329048855892746252171976963317496166410141009864396001978282409984" ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_multi_range_check ~cs "0170141183460469231731687303715884105728"
+ "0"
+ "28948022309329048855892746252171976963317496166410141009864396001978282409984" ) )
+ ) ;
+ ()
+
+let%test_unit "compact_multi_range_check gadget" =
+ if tests_enabled then (
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Helper to test compact_multi_range_check gadget *)
+ let test_compact_multi_range_check v01 v2 : unit =
+ let open Runner.Impl in
+ let v01 = Common.field_of_base10 (module Runner.Impl) v01 in
+ let v2 = Common.field_of_base10 (module Runner.Impl) v2 in
+
+ let make_circuit v01 v2 =
+ (* Circuit definition *)
+ let v01, v2 =
+ exists Typ.(Field.typ * Field.typ) ~compute:(fun () -> (v01, v2))
+ in
+ compact_multi (module Runner.Impl) v01 v2
+ in
+
+ (* Generate and verify first proof *)
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof (fun () -> make_circuit v01 v2)
+ in
+
+ (* Set up another witness *)
+ let mutate_witness value =
+ Field.Constant.(if equal zero value then value + one else value - one)
+ in
+ let v01 = mutate_witness v01 in
+ let v2 = mutate_witness v2 in
+
+ (* Generate and verify second proof, reusing constraint system *)
+ let _cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ~cs (fun () -> make_circuit v01 v2)
+ in
+
+ ()
+ in
+
+ (* Positive tests *)
+ test_compact_multi_range_check "0" "0" ;
+ test_compact_multi_range_check
+ "95780971304118053647396689196894323976171195136475135" (* 2^176 - 1 *)
+ "309485009821345068724781055"
+ (* 2^88 - 1 *) ;
+ (* Negative tests *)
+ assert (
+ Common.is_error (fun () ->
+ test_compact_multi_range_check
+ "28948022309329048855892746252171976963317496166410141009864396001978282409984"
+ "0" ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_compact_multi_range_check "0"
+ "28948022309329048855892746252171976963317496166410141009864396001978282409984" ) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_compact_multi_range_check
+ "95780971304118053647396689196894323976171195136475136" (* 2^176 *)
+ "309485009821345068724781055" ) (* 2^88 - 1 *) ) ;
+ assert (
+ Common.is_error (fun () ->
+ test_compact_multi_range_check
+ "95780971304118053647396689196894323976171195136475135"
+ (* 2^176 - 1 *)
+ "309485009821345068724781056" ) (* 2^88 *) ) ) ;
+ ()
diff --git a/src/lib/crypto/kimchi_backend/gadgets/range_check.mli b/src/lib/crypto/kimchi_backend/gadgets/range_check.mli
new file mode 100644
index 00000000000..71d74849bfd
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/range_check.mli
@@ -0,0 +1,22 @@
+(** 64-bit range-check gadget - checks value \in [0, 2^64) *)
+val bits64 :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* value *)
+ -> unit
+
+(** multi-range-check gadget - checks v0,v1,v2 \in [0, 2^88) *)
+val multi :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* v0 *)
+ -> 'f Snarky_backendless.Cvar.t (* v1 *)
+ -> 'f Snarky_backendless.Cvar.t (* v2 *)
+ -> unit
+
+(** compact multi-range-check gadget - checks
+ * - v0,v1,v2 \in [0, 2^88)
+ * - v01 = v0 + 2^88 * v1 *)
+val compact_multi :
+ (module Snarky_backendless.Snark_intf.Run with type field = 'f)
+ -> 'f Snarky_backendless.Cvar.t (* v01 *)
+ -> 'f Snarky_backendless.Cvar.t (* v2 *)
+ -> unit
diff --git a/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml b/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml
index fa0f2fb0254..6fbd886af1b 100644
--- a/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml
+++ b/src/lib/crypto/kimchi_backend/gadgets/runner/example/example.ml
@@ -11,9 +11,9 @@ let () = Tick.Keypair.set_urs_info []
Note that this adds more than 1 constraint, because there is an assertion in
kimchi that there is more than 1 gate (which is probably an error).
*)
-let example ~valid_witness () =
- let _proof_keypair, _proof =
- generate_and_verify_proof (fun () ->
+let example ?cs ~valid_witness () =
+ let cs, _proof_keypair, _proof =
+ generate_and_verify_proof ?cs (fun () ->
let open Impl in
(* Create a fresh snarky variable. *)
let a =
@@ -36,16 +36,16 @@ let example ~valid_witness () =
(* Assert equality directly via the permutation argument. *)
Field.Assert.equal a_squared a_plus_b )
in
- ()
+ cs
(* Generate a proof with a valid witness. *)
-let () = example ~valid_witness:true ()
+let _cs = example ~valid_witness:true ()
(* Sanity-check: ensure that the proof with an invalid witness fails. *)
let () =
let test_failed =
try
- example ~valid_witness:false () ;
+ let _cs = example ~valid_witness:false () in
false
with _ -> true
in
diff --git a/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml b/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml
index 1a4607a221e..2a2c0956673 100644
--- a/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml
+++ b/src/lib/crypto/kimchi_backend/gadgets/runner/runner.ml
@@ -2,11 +2,15 @@
module Tick = Kimchi_backend.Pasta.Vesta_based_plonk
module Impl = Snarky_backendless.Snark.Run.Make (Tick)
-let generate_and_verify_proof circuit =
+let generate_and_verify_proof ?cs circuit =
(* Generate constraint system for the circuit *)
let constraint_system =
- Impl.constraint_system ~input_typ:Impl.Typ.unit ~return_typ:Impl.Typ.unit
- (fun () () -> circuit ())
+ match cs with
+ | Some cs ->
+ cs
+ | None ->
+ Impl.constraint_system ~input_typ:Impl.Typ.unit
+ ~return_typ:Impl.Typ.unit (fun () () -> circuit ())
in
(* Generate the indexes from the constraint system *)
let proof_keypair =
@@ -28,10 +32,11 @@ let generate_and_verify_proof circuit =
(fun () () -> circuit ())
()
in
+
(* Verify proof *)
let verifier_index = Tick.Keypair.vk proof_keypair in
(* We have an empty public input; create an empty vector. *)
let public_input = Kimchi_bindings.FieldVectors.Fp.create () in
(* Assert that the proof verifies. *)
assert (Tick.Proof.verify ~message:[] proof verifier_index public_input) ;
- (proof_keypair, proof)
+ (constraint_system, proof_keypair, proof)
diff --git a/src/lib/crypto/kimchi_backend/gadgets/secp256k1.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/secp256k1.ml.disabled
new file mode 100644
index 00000000000..78c3214645a
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/secp256k1.ml.disabled
@@ -0,0 +1,30 @@
+(* secp256k1 elliptic curve parameters *)
+
+module Bignum_bigint = Snarky_backendless.Backend_extended.Bignum_bigint
+module Snark_intf = Snarky_backendless.Snark_intf
+
+let params =
+ Curve_params.
+ { modulus =
+ Bignum_bigint.of_string
+ "115792089237316195423570985008687907853269984665640564039457584007908834671663"
+ ; order =
+ Bignum_bigint.of_string
+ "115792089237316195423570985008687907852837564279074904382605163141518161494337"
+ ; a = Bignum_bigint.of_int 0
+ ; b = Bignum_bigint.of_int 7
+ ; gen =
+ ( Bignum_bigint.of_string
+ "55066263022277343669578718895168534326250603453777594175500187360389116729240"
+ , Bignum_bigint.of_string
+ "32670510020758816978083085130507043184471273380659243275938904335757337482424"
+ )
+ ; ia =
+ Curve_params.ia_of_strings
+ ( "73748207725492941843355928046090697797026070566443284126849221438943867210749"
+ , "71805440039692371678177852429904809925653495989672587996663750265844216498843"
+ )
+ ( "73748207725492941843355928046090697797026070566443284126849221438943867210749"
+ , "43986649197623823745393132578783097927616488675967976042793833742064618172820"
+ )
+ }
diff --git a/src/lib/crypto/kimchi_backend/gadgets/test.ml.disabled b/src/lib/crypto/kimchi_backend/gadgets/test.ml.disabled
new file mode 100644
index 00000000000..d38285ec684
--- /dev/null
+++ b/src/lib/crypto/kimchi_backend/gadgets/test.ml.disabled
@@ -0,0 +1,156 @@
+let tests_enabled = true
+
+let%test_unit "custom gates integration" =
+ ( if tests_enabled then
+ let (* Import the gadget test runner *)
+ open Kimchi_gadgets_test_runner in
+ let open Foreign_field in
+ (* Initialize the SRS cache. *)
+ let () =
+ try Kimchi_pasta.Vesta_based_plonk.Keypair.set_urs_info [] with _ -> ()
+ in
+
+ (* Convert Bignum_bigint.t to Bignum_bigint standard_limbs *)
+ let bignum_bigint_to_standard_limbs (bigint : Bignum_bigint.t) :
+ Bignum_bigint.t standard_limbs =
+ let l12, l0 = Common.(bignum_bigint_div_rem bigint two_to_limb) in
+ let l2, l1 = Common.(bignum_bigint_div_rem l12 two_to_limb) in
+ (l0, l1, l2)
+ in
+
+ (* Convert Bignum_bigint.t to field standard_limbs *)
+ let bignum_bigint_to_field_const_standard_limbs (type f)
+ (module Circuit : Snarky_backendless.Snark_intf.Run with type field = f)
+ (bigint : Bignum_bigint.t) : f standard_limbs =
+ let l0, l1, l2 = bignum_bigint_to_standard_limbs bigint in
+ ( Common.bignum_bigint_to_field (module Circuit) l0
+ , Common.bignum_bigint_to_field (module Circuit) l1
+ , Common.bignum_bigint_to_field (module Circuit) l2 )
+ in
+
+ (* Helper to test all custom gates for Ethereum primitives.
+ * The circuit being created is the following:
+ * - rotate first 64-bit word by 5 bits to the right
+ * - multiply by 2^176
+ * - xor it with the second word which is a native field element (255 bits)
+ * - and it with the first word (254 bits)
+ * - not the output for 254 bits
+ * - create limbs for the output and decompose
+ * - multiply it with itself (256 bits)
+ * - ffadd it with the third input which is a foreign element (256 bits)
+ * - multi range check the 3 limbs of the output
+ *)
+ let test_gates ?cs word_64bit native_elem foreign_elem =
+ let cs, _proof_keypair, _proof =
+ Runner.generate_and_verify_proof ?cs (fun () ->
+ let open Runner.Impl in
+ let open Bitwise in
+ let secp256k1_modulus =
+ bignum_bigint_to_field_const_standard_limbs (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex
+ "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f"
+ in
+
+ (* Set up snarky variables for inputs and outputs *)
+ let word_64bit =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) word_64bit )
+ in
+ let native_elem =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex (module Runner.Impl) native_elem )
+ in
+ let foreign_elem =
+ Element.Standard.of_bignum_bigint (module Runner.Impl)
+ @@ Common.bignum_bigint_of_hex foreign_elem
+ in
+ let out_rot = rot64 (module Runner.Impl) word_64bit 5 Right in
+
+ let two_to_88 =
+ exists Field.typ ~compute:(fun () ->
+ Common.field_of_hex
+ (module Runner.Impl)
+ "10000000000000000000000" )
+ in
+ let two_to_176 = Field.(two_to_88 * two_to_88) in
+
+ let out_mul = Generic.mul (module Runner.Impl) out_rot two_to_176 in
+ let out_xor = bxor (module Runner.Impl) out_mul native_elem 255 in
+ let out_and = band (module Runner.Impl) out_xor word_64bit 254 in
+ let out_not_c = bnot_checked (module Runner.Impl) out_and 254 in
+ let out_not_u = bnot_unchecked (module Runner.Impl) out_and 254 in
+ Field.Assert.equal out_not_u out_not_c ;
+
+ let l0, l1, l2 =
+ exists (Typ.array ~length:3 Field.typ) ~compute:(fun () ->
+ let big =
+ Common.cvar_field_to_bignum_bigint_as_prover
+ (module Runner.Impl)
+ out_not_c
+ in
+ let two_to_88 = Bignum_bigint.(pow (of_int 2) (of_int 88)) in
+ let two_to_176 =
+ Bignum_bigint.(pow (of_int 2) (of_int 176))
+ in
+ let l2 = Bignum_bigint.(big / two_to_176) in
+ let l1 =
+ Bignum_bigint.((big - (l2 * two_to_176)) / two_to_88)
+ in
+ let l0 =
+ Bignum_bigint.(big - (l2 * two_to_176) - (l1 * two_to_88))
+ in
+ let l2 =
+ Common.bignum_bigint_to_field (module Runner.Impl) l2
+ in
+ let l1 =
+ Common.bignum_bigint_to_field (module Runner.Impl) l1
+ in
+ let l0 =
+ Common.bignum_bigint_to_field (module Runner.Impl) l0
+ in
+ [| l0; l1; l2 |] )
+ |> Common.tuple3_of_array
+ in
+ let out_l1 = Generic.mul (module Runner.Impl) l1 two_to_88 in
+ let out_l1l0 = Generic.add (module Runner.Impl) out_l1 l0 in
+ let out_l2 = Generic.mul (module Runner.Impl) l2 two_to_176 in
+ let out_limbs = Generic.add (module Runner.Impl) out_l1l0 out_l2 in
+ Field.Assert.equal out_limbs out_not_c ;
+ let limbs = Element.Standard.of_limbs (l0, l1, l2) in
+
+ (* Create external checks context for tracking extra constraints
+ that are required for soundness (not used in this test) *)
+ let unused_external_checks =
+ External_checks.create (module Runner.Impl)
+ in
+
+ let out_ffmul =
+ Foreign_field.mul
+ (module Runner.Impl)
+ unused_external_checks limbs limbs secp256k1_modulus
+ in
+
+ let out_ffadd =
+ Foreign_field.add
+ (module Runner.Impl)
+ out_ffmul foreign_elem secp256k1_modulus
+ in
+ let l0, l1, l2 = Element.Standard.to_limbs out_ffadd in
+ Range_check.multi (module Runner.Impl) l0 l1 l2 ;
+ () )
+ in
+ cs
+ in
+
+ let cs =
+ test_gates "7b3f28d7496d75f0"
+ "3fffe27b14baa740db0c8bb6656de61d2871a64093908af6181f46351a1c1909"
+ "b58c271d1f2b1c632a61a548872580228430495e9635842591d9118236bacfa2"
+ in
+ let _cs =
+ test_gates ~cs "84c0d728b6928a0f"
+ "1f2d8f0d0cd52771bfb86ffdf651b7907e2e0fa87f7c9c2a41b0918e2a7820d"
+ "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
+ in
+ () ) ;
+ ()
diff --git a/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml b/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml
index 8caba494c7f..245a7c0ad83 100644
--- a/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/basic/kimchi_pasta_basic.ml
@@ -147,3 +147,9 @@ module Fp_poly_comm = Kimchi_backend_common.Poly_comm.Make (struct
fun unshifted shifted : t -> { shifted; unshifted }
end
end)
+
+(* poseidon params *)
+
+let poseidon_params_fp = Sponge.Params.(map pasta_p_kimchi ~f:Fp.of_string)
+
+let poseidon_params_fq = Sponge.Params.(map pasta_q_kimchi ~f:Fq.of_string)
diff --git a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml
index cb59e2d0344..1cec41aa925 100644
--- a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/pallas_constraint_system.ml
@@ -4,8 +4,5 @@ open Kimchi_pasta_basic
include
Plonk_constraint_system.Make (Fq) (Kimchi_bindings.Protocol.Gates.Vector.Fq)
(struct
- let params =
- Sponge.Params.(
- map pasta_q_kimchi ~f:(fun x ->
- Fq.of_bigint (Bigint256.of_decimal_string x) ))
+ let params = poseidon_params_fq
end)
diff --git a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml
index 364673a5b85..0fff63228cd 100644
--- a/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/constraint_system/caml/vesta_constraint_system.ml
@@ -4,8 +4,5 @@ open Kimchi_pasta_basic
include
Plonk_constraint_system.Make (Fp) (Kimchi_bindings.Protocol.Gates.Vector.Fp)
(struct
- let params =
- Sponge.Params.(
- map pasta_p_kimchi ~f:(fun x ->
- Fp.of_bigint (Bigint256.of_decimal_string x) ))
+ let params = poseidon_params_fp
end)
diff --git a/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml b/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml
index f9bca06f792..6f9a0648a29 100644
--- a/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/constraint_system/intf.ml
@@ -27,23 +27,43 @@ module type With_accessors = sig
end
module type Full = sig
- include With_accessors
-
type fp
type gates
+ include
+ With_accessors
+ with type t = (fp, gates) Kimchi_backend_common.Plonk_constraint_system.t
+
val add_constraint :
?label:string
-> t
-> (fp Snarky_backendless.Cvar.t, fp) Snarky_backendless.Constraint.basic
-> unit
- val compute_witness : t -> (int -> fp) -> fp array array
+ val compute_witness :
+ t -> (int -> fp) -> fp array array * fp Kimchi_types.runtime_table array
val finalize : t -> unit
- val finalize_and_get_gates : t -> gates
+ val finalize_and_get_gates :
+ t
+ -> gates
+ * fp Kimchi_types.lookup_table array
+ * fp Kimchi_types.runtime_table_cfg array
+
+ (** Return the size of all the fixed lookup tables concatenated, without the
+ built-in XOR and RangeCheck tables *)
+ val get_concatenated_fixed_lookup_table_size : t -> int
+
+ (** Return the size of all the runtime lookup tables concatenated *)
+ val get_concatenated_runtime_lookup_table_size : t -> int
+
+ (** Finalize the fixed lookup tables. The function can not be called twice *)
+ val finalize_fixed_lookup_tables : t -> unit
+
+ (** Finalize the runtime lookup table configurations. The function can not be called twice. *)
+ val finalize_runtime_lookup_tables : t -> unit
val digest : t -> Md5.t
diff --git a/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml b/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml
index e9c889716f3..6c5de528b2c 100644
--- a/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/pallas_based_plonk.ml
@@ -77,13 +77,18 @@ module Proof = Plonk_dlog_proof.Make (struct
, Pasta_bindings.Fq.t )
Kimchi_types.prover_proof
+ type with_public_evals =
+ ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
+ , Pasta_bindings.Fq.t )
+ Kimchi_types.proof_with_public
+
include Kimchi_bindings.Protocol.Proof.Fq
let batch_verify vks ts =
Promise.run_in_thread (fun () -> batch_verify vks ts)
- let create_aux ~f:create (pk : Keypair.t) primary auxiliary prev_chals
- prev_comms =
+ let create_aux ~f:create (pk : Keypair.t) ~primary ~auxiliary ~prev_chals
+ ~prev_comms =
(* external values contains [1, primary..., auxiliary ] *)
let external_values i =
let open Field.Vector in
@@ -92,7 +97,7 @@ module Proof = Plonk_dlog_proof.Make (struct
in
(* compute witness *)
- let computed_witness =
+ let computed_witness, runtime_tables =
R1CS_constraint_system.compute_witness pk.cs external_values
in
let num_rows = Array.length computed_witness.(0) in
@@ -106,16 +111,17 @@ module Proof = Plonk_dlog_proof.Make (struct
done ;
witness )
in
- create pk.index witness_cols prev_chals prev_comms
+ create pk.index witness_cols runtime_tables prev_chals prev_comms
- let create_async (pk : Keypair.t) primary auxiliary prev_chals prev_comms =
- create_aux pk primary auxiliary prev_chals prev_comms
- ~f:(fun pk auxiliary_input prev_challenges prev_sgs ->
+ let create_async (pk : Keypair.t) ~primary ~auxiliary ~prev_chals
+ ~prev_comms =
+ create_aux pk ~primary ~auxiliary ~prev_chals ~prev_comms
+ ~f:(fun pk auxiliary_input runtime_tables prev_challenges prev_sgs ->
Promise.run_in_thread (fun () ->
- create pk auxiliary_input prev_challenges prev_sgs ) )
+ create pk auxiliary_input runtime_tables prev_challenges prev_sgs ) )
- let create (pk : Keypair.t) primary auxiliary prev_chals prev_comms =
- create_aux pk primary auxiliary prev_chals prev_comms ~f:create
+ let create (pk : Keypair.t) ~primary ~auxiliary ~prev_chals ~prev_comms =
+ create_aux pk ~primary ~auxiliary ~prev_chals ~prev_comms ~f:create
end
module Verifier_index = Kimchi_bindings.Protocol.VerifierIndex.Fq
@@ -165,5 +171,7 @@ module Oracles = Plonk_dlog_oracles.Make (struct
include Kimchi_bindings.Protocol.Oracles.Fq
let create = with_lagrange create
+
+ let create_with_public_evals = with_lagrange create_with_public_evals
end
end)
diff --git a/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml b/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml
index e68427ebdad..163fc45a334 100644
--- a/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml
+++ b/src/lib/crypto/kimchi_backend/pasta/vesta_based_plonk.ml
@@ -76,6 +76,11 @@ module Proof = Plonk_dlog_proof.Make (struct
, Pasta_bindings.Fp.t )
Kimchi_types.prover_proof
+ type with_public_evals =
+ ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
+ , Pasta_bindings.Fp.t )
+ Kimchi_types.proof_with_public
+
include Kimchi_bindings.Protocol.Proof.Fp
let batch_verify vks ts =
@@ -91,7 +96,7 @@ module Proof = Plonk_dlog_proof.Make (struct
in
(* compute witness *)
- let computed_witness =
+ let computed_witness, runtime_tables =
R1CS_constraint_system.compute_witness pk.cs external_values
in
let num_rows = Array.length computed_witness.(0) in
@@ -105,15 +110,16 @@ module Proof = Plonk_dlog_proof.Make (struct
done ;
witness )
in
- create pk.index witness_cols prev_chals prev_comms
+ create pk.index witness_cols runtime_tables prev_chals prev_comms
- let create_async (pk : Keypair.t) primary auxiliary prev_chals prev_comms =
+ let create_async (pk : Keypair.t) ~primary ~auxiliary ~prev_chals
+ ~prev_comms =
create_aux pk primary auxiliary prev_chals prev_comms
- ~f:(fun pk auxiliary_input prev_challenges prev_sgs ->
+ ~f:(fun pk auxiliary_input runtime_tables prev_challenges prev_sgs ->
Promise.run_in_thread (fun () ->
- create pk auxiliary_input prev_challenges prev_sgs ) )
+ create pk auxiliary_input runtime_tables prev_challenges prev_sgs ) )
- let create (pk : Keypair.t) primary auxiliary prev_chals prev_comms =
+ let create (pk : Keypair.t) ~primary ~auxiliary ~prev_chals ~prev_comms =
create_aux pk primary auxiliary prev_chals prev_comms ~f:create
end
@@ -164,5 +170,7 @@ module Oracles = Plonk_dlog_oracles.Make (struct
include Kimchi_bindings.Protocol.Oracles.Fp
let create = with_lagrange create
+
+ let create_with_public_evals = with_lagrange create_with_public_evals
end
end)
diff --git a/src/lib/crypto/kimchi_backend/tests.ml b/src/lib/crypto/kimchi_backend/tests.ml
index d6ea5f33e5a..0e849e44d2e 100644
--- a/src/lib/crypto/kimchi_backend/tests.ml
+++ b/src/lib/crypto/kimchi_backend/tests.ml
@@ -24,8 +24,6 @@ let%test_unit "of_affine" =
| Infinity ->
assert false
in
- Pasta_bindings.Fp.print x ;
- Pasta_bindings.Fp.print y ;
Pasta_bindings.Pallas.(ignore (of_affine_coordinates x y : t))
let%test_unit "vector test" =
diff --git a/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock b/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock
index daafb5ddf20..effa7be4fff 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock
+++ b/src/lib/crypto/kimchi_bindings/stubs/Cargo.lock
@@ -373,9 +373,9 @@ checksum = "d102f1a462fdcdddce88d6d46c06c074a2d2749b262230333726b06c52bb7585"
[[package]]
name = "either"
-version = "1.8.1"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
+checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "fnv"
@@ -439,6 +439,9 @@ name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+dependencies = [
+ "serde",
+]
[[package]]
name = "ident_case"
@@ -446,6 +449,14 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
+[[package]]
+name = "internal-tracing"
+version = "0.1.0"
+dependencies = [
+ "ocaml",
+ "ocaml-gen",
+]
+
[[package]]
name = "itertools"
version = "0.10.5"
@@ -473,6 +484,7 @@ dependencies = [
"disjoint-set",
"groupmap",
"hex",
+ "internal-tracing",
"itertools",
"mina-curves",
"mina-poseidon",
@@ -488,7 +500,7 @@ dependencies = [
"rand",
"rand_core",
"rayon",
- "rmp-serde 1.1.1",
+ "rmp-serde 1.1.2",
"serde",
"serde_with",
"strum",
@@ -622,9 +634,9 @@ dependencies = [
[[package]]
name = "num-traits"
-version = "0.2.15"
+version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2"
dependencies = [
"autocfg",
"libm",
@@ -781,6 +793,7 @@ dependencies = [
"rand",
"rand_core",
"rayon",
+ "rmp-serde 1.1.2",
"serde",
"serde_with",
"thiserror",
@@ -809,9 +822,9 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.31"
+version = "1.0.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0"
+checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965"
dependencies = [
"proc-macro2",
]
@@ -876,9 +889,9 @@ dependencies = [
[[package]]
name = "rmp"
-version = "0.8.11"
+version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44519172358fd6d58656c86ab8e7fbc9e1490c3e8f14d35ed78ca0dd07403c9f"
+checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20"
dependencies = [
"byteorder",
"num-traits",
@@ -898,9 +911,9 @@ dependencies = [
[[package]]
name = "rmp-serde"
-version = "1.1.1"
+version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5b13be192e0220b8afb7222aa5813cb62cc269ebb5cac346ca6487681d2913e"
+checksum = "bffea85eea980d8a74453e5d02a8d93028f3c34725de143085a844ebe953258a"
dependencies = [
"byteorder",
"rmp",
@@ -954,29 +967,29 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.171"
+version = "1.0.176"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9"
+checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.171"
+version = "1.0.176"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682"
+checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.26",
+ "syn 2.0.27",
]
[[package]]
name = "serde_json"
-version = "1.0.103"
+version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b"
+checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c"
dependencies = [
"itoa",
"ryu",
@@ -1087,9 +1100,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.26"
+version = "2.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970"
+checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0"
dependencies = [
"proc-macro2",
"quote",
@@ -1110,22 +1123,22 @@ dependencies = [
[[package]]
name = "thiserror"
-version = "1.0.43"
+version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42"
+checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.43"
+version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f"
+checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.26",
+ "syn 2.0.27",
]
[[package]]
@@ -1234,5 +1247,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.26",
+ "syn 2.0.27",
]
diff --git a/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml b/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml
index 855f46310cd..39354e4c3a1 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml
+++ b/src/lib/crypto/kimchi_bindings/stubs/Cargo.toml
@@ -40,4 +40,4 @@ kimchi = { path = "../../proof-systems/kimchi", features = ["ocaml_types"] }
# ocaml-specific
ocaml = { version = "0.22.2", features = ["no-caml-startup"] }
-ocaml-gen = "0.1.0"
+ocaml-gen = "0.1.5"
diff --git a/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml b/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml
index 257e4ba84f9..07a4d89b681 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml
+++ b/src/lib/crypto/kimchi_bindings/stubs/kimchi_bindings.ml
@@ -194,8 +194,15 @@ module Protocol = struct
module Fp = struct
type nonrec t
- external create : Gates.Vector.Fp.t -> int -> int -> SRS.Fp.t -> t
- = "caml_pasta_fp_plonk_index_create"
+ external create :
+ Gates.Vector.Fp.t
+ -> int
+ -> Pasta_bindings.Fp.t Kimchi_types.lookup_table array
+ -> Pasta_bindings.Fp.t Kimchi_types.runtime_table_cfg array
+ -> int
+ -> SRS.Fp.t
+ -> t
+ = "caml_pasta_fp_plonk_index_create_bytecode" "caml_pasta_fp_plonk_index_create"
external max_degree : t -> int = "caml_pasta_fp_plonk_index_max_degree"
@@ -221,8 +228,15 @@ module Protocol = struct
module Fq = struct
type nonrec t
- external create : Gates.Vector.Fq.t -> int -> int -> SRS.Fq.t -> t
- = "caml_pasta_fq_plonk_index_create"
+ external create :
+ Gates.Vector.Fq.t
+ -> int
+ -> Pasta_bindings.Fq.t Kimchi_types.lookup_table array
+ -> Pasta_bindings.Fq.t Kimchi_types.runtime_table_cfg array
+ -> int
+ -> SRS.Fq.t
+ -> t
+ = "caml_pasta_fq_plonk_index_create_bytecode" "caml_pasta_fq_plonk_index_create"
external max_degree : t -> int = "caml_pasta_fq_plonk_index_max_degree"
@@ -315,6 +329,19 @@ module Protocol = struct
-> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
Kimchi_types.prover_proof
+ -> t = "fp_oracles_create_no_public"
+
+ external create_with_public_evals :
+ Pasta_bindings.Fq.t Kimchi_types.or_infinity Kimchi_types.poly_comm
+ array
+ -> ( Pasta_bindings.Fp.t
+ , SRS.Fp.t
+ , Pasta_bindings.Fq.t Kimchi_types.or_infinity Kimchi_types.poly_comm
+ )
+ Kimchi_types.VerifierIndex.verifier_index
+ -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
+ , Pasta_bindings.Fp.t )
+ Kimchi_types.proof_with_public
-> t = "fp_oracles_create"
external dummy : unit -> Pasta_bindings.Fp.t Kimchi_types.random_oracles
@@ -340,6 +367,19 @@ module Protocol = struct
-> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
, Pasta_bindings.Fq.t )
Kimchi_types.prover_proof
+ -> t = "fq_oracles_create_no_public"
+
+ external create_with_public_evals :
+ Pasta_bindings.Fp.t Kimchi_types.or_infinity Kimchi_types.poly_comm
+ array
+ -> ( Pasta_bindings.Fq.t
+ , SRS.Fq.t
+ , Pasta_bindings.Fp.t Kimchi_types.or_infinity Kimchi_types.poly_comm
+ )
+ Kimchi_types.VerifierIndex.verifier_index
+ -> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
+ , Pasta_bindings.Fq.t )
+ Kimchi_types.proof_with_public
-> t = "fq_oracles_create"
external dummy : unit -> Pasta_bindings.Fq.t Kimchi_types.random_oracles
@@ -357,20 +397,31 @@ module Protocol = struct
external create :
Index.Fp.t
-> FieldVectors.Fp.t array
+ -> Pasta_bindings.Fp.t Kimchi_types.runtime_table array
-> Pasta_bindings.Fp.t array
-> Pasta_bindings.Fq.t Kimchi_types.or_infinity array
-> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof = "caml_pasta_fp_plonk_proof_create"
+ Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_create"
+
+ external create_and_verify :
+ Index.Fp.t
+ -> FieldVectors.Fp.t array
+ -> Pasta_bindings.Fp.t Kimchi_types.runtime_table array
+ -> Pasta_bindings.Fp.t array
+ -> Pasta_bindings.Fq.t Kimchi_types.or_infinity array
+ -> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
+ , Pasta_bindings.Fp.t )
+ Kimchi_types.proof_with_public
+ = "caml_pasta_fp_plonk_proof_create_and_verify"
external example_with_lookup :
SRS.Fp.t
- -> bool
-> Index.Fp.t
* Pasta_bindings.Fp.t
* ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
= "caml_pasta_fp_plonk_proof_example_with_lookup"
external example_with_ffadd :
@@ -379,7 +430,7 @@ module Protocol = struct
* Pasta_bindings.Fp.t
* ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
= "caml_pasta_fp_plonk_proof_example_with_ffadd"
external example_with_xor :
@@ -388,7 +439,7 @@ module Protocol = struct
* (Pasta_bindings.Fp.t * Pasta_bindings.Fp.t)
* ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
= "caml_pasta_fp_plonk_proof_example_with_xor"
external example_with_rot :
@@ -397,7 +448,7 @@ module Protocol = struct
* (Pasta_bindings.Fp.t * Pasta_bindings.Fp.t)
* ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
= "caml_pasta_fp_plonk_proof_example_with_rot"
external example_with_foreign_field_mul :
@@ -405,7 +456,7 @@ module Protocol = struct
-> Index.Fp.t
* ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
= "caml_pasta_fp_plonk_proof_example_with_foreign_field_mul"
external example_with_range_check :
@@ -413,7 +464,7 @@ module Protocol = struct
-> Index.Fp.t
* ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
= "caml_pasta_fp_plonk_proof_example_with_range_check"
external example_with_range_check0 :
@@ -421,7 +472,7 @@ module Protocol = struct
-> Index.Fp.t
* ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
= "caml_pasta_fp_plonk_proof_example_with_range_check0"
external verify :
@@ -432,7 +483,7 @@ module Protocol = struct
Kimchi_types.VerifierIndex.verifier_index
-> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
-> bool = "caml_pasta_fp_plonk_proof_verify"
external batch_verify :
@@ -444,7 +495,7 @@ module Protocol = struct
array
-> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
array
-> bool = "caml_pasta_fp_plonk_proof_batch_verify"
@@ -452,26 +503,28 @@ module Protocol = struct
unit
-> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof = "caml_pasta_fp_plonk_proof_dummy"
+ Kimchi_types.proof_with_public = "caml_pasta_fp_plonk_proof_dummy"
external deep_copy :
( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
-> ( Pasta_bindings.Fq.t Kimchi_types.or_infinity
, Pasta_bindings.Fp.t )
- Kimchi_types.prover_proof = "caml_pasta_fp_plonk_proof_deep_copy"
+ Kimchi_types.proof_with_public
+ = "caml_pasta_fp_plonk_proof_deep_copy"
end
module Fq = struct
external create :
Index.Fq.t
-> FieldVectors.Fq.t array
+ -> Pasta_bindings.Fq.t Kimchi_types.runtime_table array
-> Pasta_bindings.Fq.t array
-> Pasta_bindings.Fp.t Kimchi_types.or_infinity array
-> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
, Pasta_bindings.Fq.t )
- Kimchi_types.prover_proof = "caml_pasta_fq_plonk_proof_create"
+ Kimchi_types.proof_with_public = "caml_pasta_fq_plonk_proof_create"
external verify :
( Pasta_bindings.Fq.t
@@ -481,7 +534,7 @@ module Protocol = struct
Kimchi_types.VerifierIndex.verifier_index
-> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
, Pasta_bindings.Fq.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
-> bool = "caml_pasta_fq_plonk_proof_verify"
external batch_verify :
@@ -493,7 +546,7 @@ module Protocol = struct
array
-> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
, Pasta_bindings.Fq.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
array
-> bool = "caml_pasta_fq_plonk_proof_batch_verify"
@@ -501,15 +554,16 @@ module Protocol = struct
unit
-> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
, Pasta_bindings.Fq.t )
- Kimchi_types.prover_proof = "caml_pasta_fq_plonk_proof_dummy"
+ Kimchi_types.proof_with_public = "caml_pasta_fq_plonk_proof_dummy"
external deep_copy :
( Pasta_bindings.Fp.t Kimchi_types.or_infinity
, Pasta_bindings.Fq.t )
- Kimchi_types.prover_proof
+ Kimchi_types.proof_with_public
-> ( Pasta_bindings.Fp.t Kimchi_types.or_infinity
, Pasta_bindings.Fq.t )
- Kimchi_types.prover_proof = "caml_pasta_fq_plonk_proof_deep_copy"
+ Kimchi_types.proof_with_public
+ = "caml_pasta_fq_plonk_proof_deep_copy"
end
end
end
diff --git a/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml b/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml
index a9e97d76e89..6bec0f01f84 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml
+++ b/src/lib/crypto/kimchi_bindings/stubs/kimchi_types.ml
@@ -98,6 +98,13 @@ type nonrec 'caml_g lookup_commitments =
; runtime : 'caml_g poly_comm option
}
+type nonrec 'caml_f runtime_table_cfg =
+ { id : int32; first_column : 'caml_f array }
+
+type nonrec 'caml_f lookup_table = { id : int32; data : 'caml_f array array }
+
+type nonrec 'caml_f runtime_table = { id : int32; data : 'caml_f array }
+
type nonrec 'caml_g prover_commitments =
{ w_comm :
'caml_g poly_comm
@@ -129,6 +136,11 @@ type nonrec ('caml_g, 'caml_f) prover_proof =
; prev_challenges : ('caml_g, 'caml_f) recursion_challenge array
}
+type nonrec ('caml_g, 'caml_f) proof_with_public =
+ { public_evals : 'caml_f array point_evaluations option
+ ; proof : ('caml_g, 'caml_f) prover_proof
+ }
+
type nonrec wire = { row : int; col : int }
type nonrec gate_type =
@@ -225,6 +237,12 @@ module VerifierIndex = struct
; mul_comm : 'poly_comm
; emul_comm : 'poly_comm
; endomul_scalar_comm : 'poly_comm
+ ; xor_comm : 'poly_comm option
+ ; range_check0_comm : 'poly_comm option
+ ; range_check1_comm : 'poly_comm option
+ ; foreign_field_add_comm : 'poly_comm option
+ ; foreign_field_mul_comm : 'poly_comm option
+ ; rot_comm : 'poly_comm option
}
type nonrec ('fr, 'srs, 'poly_comm) verifier_index =
@@ -236,5 +254,6 @@ module VerifierIndex = struct
; evals : 'poly_comm verification_evals
; shifts : 'fr array
; lookup_index : 'poly_comm Lookup.t option
+ ; zk_rows : int
}
end
diff --git a/src/lib/crypto/kimchi_bindings/stubs/pasta_bindings.ml b/src/lib/crypto/kimchi_bindings/stubs/pasta_bindings.ml
index b0ae675599d..59a16191084 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/pasta_bindings.ml
+++ b/src/lib/crypto/kimchi_bindings/stubs/pasta_bindings.ml
@@ -61,6 +61,8 @@ module Fp = struct
external print : t -> unit = "caml_pasta_fp_print"
+ external print_rust : t -> unit = "caml_pasta_fp_print_rust"
+
external copy : t -> t -> unit = "caml_pasta_fp_copy"
external mut_add : t -> t -> unit = "caml_pasta_fp_mut_add"
@@ -128,6 +130,8 @@ module Fq = struct
external print : t -> unit = "caml_pasta_fq_print"
+ external print_rust : t -> unit = "caml_pasta_fq_print_rust"
+
external copy : t -> t -> unit = "caml_pasta_fq_copy"
external mut_add : t -> t -> unit = "caml_pasta_fq_mut_add"
diff --git a/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml b/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml
index 8ecda82d680..5a82cfe5ab4 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml
+++ b/src/lib/crypto/kimchi_bindings/stubs/rust-toolchain.toml
@@ -11,4 +11,4 @@
# 4. figure out the hashes of the (now obsolete) docker images used in CI rules that are failing, grep for these hashes and replace them with the new hashes
[toolchain]
-channel = "1.67.0"
+channel = "1.72"
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs
index 90570304d3c..e9f54f65219 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/bigint_256.rs
@@ -239,12 +239,12 @@ mod tests {
#[test]
fn biguint() {
let x = 10000.to_biguint().unwrap();
- println!("biguint.to_string: {}", x.to_string());
+ println!("biguint.to_string: {}", x);
let y = CamlBigInteger256::try_from(x.clone()).unwrap();
println!("camlbigint.to_string: {}", y.to_string());
//assert!(&y.to_string() == "10000");
let x2: BigUint = y.into();
assert!(x2 == x);
- println!("biguint.to_string: {}", x2.to_string());
+ println!("biguint.to_string: {}", x2);
}
}
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fp.rs b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fp.rs
index 3d9144f1303..a97eb16b06c 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fp.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fp.rs
@@ -207,6 +207,12 @@ pub fn caml_pasta_fp_print(x: ocaml::Pointer) {
);
}
+#[ocaml_gen::func]
+#[ocaml::func]
+pub fn caml_pasta_fp_print_rust(x: ocaml::Pointer) {
+ println!("{}", x.as_ref().0);
+}
+
#[ocaml_gen::func]
#[ocaml::func]
pub fn caml_pasta_fp_copy(mut x: ocaml::Pointer, y: ocaml::Pointer) {
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fq.rs b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fq.rs
index 8fbca9da595..d8bafc7384c 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fq.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/arkworks/pasta_fq.rs
@@ -208,6 +208,12 @@ pub fn caml_pasta_fq_print(x: ocaml::Pointer) {
);
}
+#[ocaml_gen::func]
+#[ocaml::func]
+pub fn caml_pasta_fq_print_rust(x: ocaml::Pointer) {
+ println!("{}", x.as_ref().0);
+}
+
#[ocaml_gen::func]
#[ocaml::func]
pub fn caml_pasta_fq_copy(mut x: ocaml::Pointer, y: ocaml::Pointer) {
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs b/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs
index 11cb493b9c4..a3b3463b0e3 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/lib.rs
@@ -61,7 +61,9 @@ pub use {
wires::caml::CamlWire,
},
kimchi::proof::caml::CamlProofEvaluations,
- kimchi::prover::caml::{CamlLookupCommitments, CamlProverCommitments, CamlProverProof},
+ kimchi::prover::caml::{
+ CamlLookupCommitments, CamlProofWithPublic, CamlProverCommitments, CamlProverProof,
+ },
mina_poseidon::sponge::caml::CamlScalarChallenge,
poly_commitment::commitment::caml::{CamlOpeningProof, CamlPolyComm},
};
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/main.rs b/src/lib/crypto/kimchi_bindings/stubs/src/main.rs
index afc3c250bec..5b74b542a3f 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/main.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/main.rs
@@ -1,6 +1,10 @@
use kimchi::circuits::{
expr::FeatureFlag,
- lookup::lookups::{LookupFeatures, LookupPattern, LookupPatterns},
+ lookup::{
+ lookups::{LookupFeatures, LookupPattern, LookupPatterns},
+ runtime_tables::caml::{CamlRuntimeTable, CamlRuntimeTableCfg},
+ tables::caml::CamlLookupTable,
+ },
};
use kimchi::proof::{caml::CamlRecursionChallenge, PointEvaluations};
use ocaml_gen::{decl_fake_generic, decl_func, decl_module, decl_type, decl_type_alias, Env};
@@ -29,6 +33,7 @@ use wires_15_stubs::{
CamlOpeningProof,
CamlPolyComm,
CamlProofEvaluations,
+ CamlProofWithPublic,
CamlProverCommitments,
CamlProverProof,
CamlRandomOracles,
@@ -100,8 +105,13 @@ fn generate_types_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_type!(w, env, CamlRecursionChallenge:: => "recursion_challenge");
decl_type!(w, env, CamlOpeningProof:: => "opening_proof");
decl_type!(w, env, CamlLookupCommitments:: => "lookup_commitments");
+
+ decl_type!(w, env, CamlRuntimeTableCfg:: => "runtime_table_cfg");
+ decl_type!(w, env, CamlLookupTable:: => "lookup_table");
+ decl_type!(w, env, CamlRuntimeTable:: => "runtime_table");
decl_type!(w, env, CamlProverCommitments:: => "prover_commitments");
decl_type!(w, env, CamlProverProof => "prover_proof");
+ decl_type!(w, env, CamlProofWithPublic => "proof_with_public");
decl_type!(w, env, CamlWire => "wire");
decl_type!(w, env, GateType => "gate_type");
@@ -167,6 +177,7 @@ fn generate_pasta_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_func!(w, env, caml_pasta_fp_to_string => "to_string");
decl_func!(w, env, caml_pasta_fp_of_string => "of_string");
decl_func!(w, env, caml_pasta_fp_print => "print");
+ decl_func!(w, env, caml_pasta_fp_print_rust => "print_rust");
decl_func!(w, env, caml_pasta_fp_copy => "copy");
decl_func!(w, env, caml_pasta_fp_mut_add => "mut_add");
decl_func!(w, env, caml_pasta_fp_mut_sub => "mut_sub");
@@ -203,6 +214,7 @@ fn generate_pasta_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_func!(w, env, caml_pasta_fq_to_string => "to_string");
decl_func!(w, env, caml_pasta_fq_of_string => "of_string");
decl_func!(w, env, caml_pasta_fq_print => "print");
+ decl_func!(w, env, caml_pasta_fq_print_rust => "print_rust");
decl_func!(w, env, caml_pasta_fq_copy => "copy");
decl_func!(w, env, caml_pasta_fq_mut_add => "mut_add");
decl_func!(w, env, caml_pasta_fq_mut_sub => "mut_sub");
@@ -431,7 +443,8 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_module!(w, env, "Fp", {
decl_type_alias!(w, env, "t" => CamlOracles);
- decl_func!(w, env, fp_oracles_create => "create");
+ decl_func!(w, env, fp_oracles_create_no_public => "create");
+ decl_func!(w, env, fp_oracles_create => "create_with_public_evals");
decl_func!(w, env, fp_oracles_dummy => "dummy");
decl_func!(w, env, fp_oracles_deep_copy => "deep_copy");
});
@@ -439,7 +452,8 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_module!(w, env, "Fq", {
decl_type_alias!(w, env, "t" => CamlOracles);
- decl_func!(w, env, fq_oracles_create => "create");
+ decl_func!(w, env, fq_oracles_create_no_public => "create");
+ decl_func!(w, env, fq_oracles_create => "create_with_public_evals");
decl_func!(w, env, fq_oracles_dummy => "dummy");
decl_func!(w, env, fq_oracles_deep_copy => "deep_copy");
});
@@ -448,6 +462,7 @@ fn generate_kimchi_bindings(mut w: impl std::io::Write, env: &mut Env) {
decl_module!(w, env, "Proof", {
decl_module!(w, env, "Fp", {
decl_func!(w, env, caml_pasta_fp_plonk_proof_create => "create");
+ decl_func!(w, env, caml_pasta_fp_plonk_proof_create_and_verify => "create_and_verify");
decl_func!(w, env, caml_pasta_fp_plonk_proof_example_with_lookup => "example_with_lookup");
decl_func!(w, env, caml_pasta_fp_plonk_proof_example_with_ffadd => "example_with_ffadd");
decl_func!(w, env, caml_pasta_fp_plonk_proof_example_with_xor => "example_with_xor");
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs b/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs
index c6a714f23af..db3acc979a6 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/oracles.rs
@@ -2,7 +2,10 @@ use crate::pasta_fp_plonk_verifier_index::CamlPastaFpPlonkVerifierIndex;
use ark_ff::One;
use kimchi::circuits::scalars::{caml::CamlRandomOracles, RandomOracles};
use kimchi::proof::ProverProof;
-use kimchi::{prover::caml::CamlProverProof, verifier_index::VerifierIndex};
+use kimchi::{
+ prover::caml::{CamlProofWithPublic, CamlProverProof},
+ verifier_index::VerifierIndex,
+};
use mina_poseidon::{
self,
constants::PlonkSpongeConstantsKimchi,
@@ -11,6 +14,8 @@ use mina_poseidon::{
};
use paste::paste;
use poly_commitment::commitment::{caml::CamlPolyComm, shift_scalar, PolyComm};
+use poly_commitment::evaluation_proof::OpeningProof;
+use poly_commitment::SRS;
#[derive(ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)]
pub struct CamlOracles {
@@ -22,20 +27,95 @@ pub struct CamlOracles {
macro_rules! impl_oracles {
($CamlF: ty, $F: ty, $CamlG: ty, $G: ty, $index: ty, $curve_params: ty) => {
-
paste! {
#[ocaml_gen::func]
#[ocaml::func]
pub fn [<$F:snake _oracles_create>](
+ lgr_comm: Vec>,
+ index: $index,
+ proof: CamlProofWithPublic<$CamlG, $CamlF>,
+ ) -> Result, ocaml::Error> {
+ let index: VerifierIndex<$G, OpeningProof<$G>> = index.into();
+
+ let lgr_comm: Vec> = lgr_comm
+ .into_iter()
+ .take(proof.proof.public.len())
+ .map(Into::into)
+ .collect();
+ let lgr_comm_refs: Vec<_> = lgr_comm.iter().collect();
+
+ let p_comm = PolyComm::<$G>::multi_scalar_mul(
+ &lgr_comm_refs,
+ &proof
+ .proof
+ .public
+ .iter()
+ .map(Into::<$F>::into)
+ .map(|s| -s)
+ .collect::>(),
+ );
+
+ let p_comm = {
+ index
+ .srs()
+ .mask_custom(
+ p_comm.clone(),
+ &p_comm.map(|_| $F::one()),
+ )
+ .unwrap()
+ .commitment
+ };
+
+ let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into();
+
+ let oracles_result =
+ proof.oracles::<
+ DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>,
+ DefaultFrSponge<$F, PlonkSpongeConstantsKimchi>,
+ >(&index, &p_comm, Some(&public_input))?;
+
+ let (mut sponge, combined_inner_product, p_eval, digest, oracles) = (
+ oracles_result.fq_sponge,
+ oracles_result.combined_inner_product,
+ oracles_result.public_evals,
+ oracles_result.digest,
+ oracles_result.oracles,
+ );
+
+ sponge.absorb_fr(&[shift_scalar::<$G>(combined_inner_product)]);
+
+ let opening_prechallenges = proof
+ .proof
+ .prechallenges(&mut sponge)
+ .into_iter()
+ .map(|x| x.0.into())
+ .collect();
+
+ Ok(CamlOracles {
+ o: oracles.into(),
+ p_eval: (p_eval[0][0].into(), p_eval[1][0].into()),
+ opening_prechallenges,
+ digest_before_evaluations: digest.into(),
+ })
+ }
+
+ #[ocaml_gen::func]
+ #[ocaml::func]
+ pub fn [<$F:snake _oracles_create_no_public>](
lgr_comm: Vec>,
index: $index,
proof: CamlProverProof<$CamlG, $CamlF>,
) -> Result, ocaml::Error> {
- let index: VerifierIndex<$G> = index.into();
+ let proof = CamlProofWithPublic {
+ proof,
+ public_evals: None,
+ };
+
+ let index: VerifierIndex<$G, OpeningProof<$G>> = index.into();
let lgr_comm: Vec> = lgr_comm
.into_iter()
- .take(proof.public.len())
+ .take(proof.proof.public.len())
.map(Into::into)
.collect();
let lgr_comm_refs: Vec<_> = lgr_comm.iter().collect();
@@ -43,6 +123,7 @@ macro_rules! impl_oracles {
let p_comm = PolyComm::<$G>::multi_scalar_mul(
&lgr_comm_refs,
&proof
+ .proof
.public
.iter()
.map(Into::<$F>::into)
@@ -61,10 +142,13 @@ macro_rules! impl_oracles {
.commitment
};
- let (proof, public_input): (ProverProof<$G>, Vec<$F>) = proof.into();
+ let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into();
let oracles_result =
- proof.oracles::, DefaultFrSponge<$F, PlonkSpongeConstantsKimchi>>(&index, &p_comm, &public_input)?;
+ proof.oracles::<
+ DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>,
+ DefaultFrSponge<$F, PlonkSpongeConstantsKimchi>,
+ >(&index, &p_comm, Some(&public_input))?;
let (mut sponge, combined_inner_product, p_eval, digest, oracles) = (
oracles_result.fq_sponge,
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs
index 494d262a243..4f1a3cba8a1 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_index.rs
@@ -1,9 +1,15 @@
+use crate::arkworks::CamlFp;
use crate::{gate_vector::fp::CamlPastaFpPlonkGateVectorPtr, srs::fp::CamlFpSrs};
use ark_poly::EvaluationDomain;
+use kimchi::circuits::lookup::runtime_tables::caml::CamlRuntimeTableCfg;
+use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg;
+use kimchi::circuits::lookup::tables::caml::CamlLookupTable;
+use kimchi::circuits::lookup::tables::LookupTable;
use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate};
use kimchi::{linearization::expr_linearization, prover_index::ProverIndex};
use mina_curves::pasta::{Fp, Pallas, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge};
+use poly_commitment::{evaluation_proof::OpeningProof, SRS as _};
use serde::{Deserialize, Serialize};
use std::{
fs::{File, OpenOptions},
@@ -12,7 +18,7 @@ use std::{
/// Boxed so that we don't store large proving indexes in the OCaml heap.
#[derive(ocaml_gen::CustomType)]
-pub struct CamlPastaFpPlonkIndex(pub Box>);
+pub struct CamlPastaFpPlonkIndex(pub Box>>);
pub type CamlPastaFpPlonkIndexPtr<'a> = ocaml::Pointer<'a, CamlPastaFpPlonkIndex>;
extern "C" fn caml_pasta_fp_plonk_index_finalize(v: ocaml::Raw) {
@@ -39,6 +45,8 @@ impl ocaml::custom::Custom for CamlPastaFpPlonkIndex {
pub fn caml_pasta_fp_plonk_index_create(
gates: CamlPastaFpPlonkGateVectorPtr,
public: ocaml::Int,
+ lookup_tables: Vec>,
+ runtime_tables: Vec>,
prev_challenges: ocaml::Int,
srs: CamlFpSrs,
) -> Result {
@@ -53,18 +61,26 @@ pub fn caml_pasta_fp_plonk_index_create(
})
.collect();
+ let runtime_tables: Vec> =
+ runtime_tables.into_iter().map(Into::into).collect();
+
+ let lookup_tables: Vec> = lookup_tables.into_iter().map(Into::into).collect();
+
// create constraint system
let cs = match ConstraintSystem::::create(gates)
.public(public as usize)
.prev_challenges(prev_challenges as usize)
+ .max_poly_size(Some(srs.0.max_poly_size()))
+ .lookup(lookup_tables)
+ .runtime(if runtime_tables.is_empty() {
+ None
+ } else {
+ Some(runtime_tables)
+ })
.build()
{
- Err(_) => {
- return Err(ocaml::Error::failwith(
- "caml_pasta_fp_plonk_index_create: could not create constraint system",
- )
- .err()
- .unwrap())
+ Err(e) => {
+ return Err(e.into())
}
Ok(cs) => cs,
};
@@ -80,7 +96,7 @@ pub fn caml_pasta_fp_plonk_index_create(
}
// create index
- let mut index = ProverIndex::::create(cs, endo_q, srs.clone());
+ let mut index = ProverIndex::>::create(cs, endo_q, srs.clone());
// Compute and cache the verifier index digest
index.compute_verifier_index_digest::>();
@@ -143,7 +159,9 @@ pub fn caml_pasta_fp_plonk_index_read(
}
// deserialize the index
- let mut t = ProverIndex::::deserialize(&mut rmp_serde::Deserializer::new(r))?;
+ let mut t = ProverIndex::>::deserialize(
+ &mut rmp_serde::Deserializer::new(r),
+ )?;
t.srs = srs.clone();
let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true);
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs
index 75e36747a7e..06686bdb72b 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_proof.rs
@@ -9,7 +9,11 @@ use ark_ec::AffineCurve;
use ark_ff::One;
use array_init::array_init;
use groupmap::GroupMap;
-use kimchi::prover_index::ProverIndex;
+use kimchi::verifier::verify;
+use kimchi::{
+ circuits::lookup::runtime_tables::{caml::CamlRuntimeTable, RuntimeTable},
+ prover_index::ProverIndex,
+};
use kimchi::{circuits::polynomial::COLUMNS, verifier::batch_verify};
use kimchi::{
proof::{
@@ -17,7 +21,7 @@ use kimchi::{
},
verifier::Context,
};
-use kimchi::{prover::caml::CamlProverProof, verifier_index::VerifierIndex};
+use kimchi::{prover::caml::CamlProofWithPublic, verifier_index::VerifierIndex};
use mina_curves::pasta::{Fp, Fq, Pallas, Vesta, VestaParameters};
use mina_poseidon::{
constants::PlonkSpongeConstantsKimchi,
@@ -36,9 +40,78 @@ type EFrSponge = DefaultFrSponge;
pub fn caml_pasta_fp_plonk_proof_create(
index: CamlPastaFpPlonkIndexPtr<'static>,
witness: Vec,
+ runtime_tables: Vec>,
+ prev_challenges: Vec,
+ prev_sgs: Vec,
+) -> Result, ocaml::Error> {
+ {
+ let ptr: &mut poly_commitment::srs::SRS =
+ unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
+ ptr.add_lagrange_basis(index.as_ref().0.cs.domain.d1);
+ }
+ let prev = if prev_challenges.is_empty() {
+ Vec::new()
+ } else {
+ let challenges_per_sg = prev_challenges.len() / prev_sgs.len();
+ prev_sgs
+ .into_iter()
+ .map(Into::::into)
+ .enumerate()
+ .map(|(i, sg)| {
+ let chals = prev_challenges[(i * challenges_per_sg)..(i + 1) * challenges_per_sg]
+ .iter()
+ .map(Into::::into)
+ .collect();
+ let comm = PolyComm:: {
+ unshifted: vec![sg],
+ shifted: None,
+ };
+ RecursionChallenge { chals, comm }
+ })
+ .collect()
+ };
+
+ let witness: Vec> = witness.iter().map(|x| (*x.0).clone()).collect();
+ let witness: [Vec<_>; COLUMNS] = witness
+ .try_into()
+ .map_err(|_| ocaml::Error::Message("the witness should be a column of 15 vectors"))?;
+ let index: &ProverIndex> = &index.as_ref().0;
+ let runtime_tables: Vec> =
+ runtime_tables.into_iter().map(Into::into).collect();
+
+ // public input
+ let public_input = witness[0][0..index.cs.public].to_vec();
+
+ // NB: This method is designed only to be used by tests. However, since creating a new reference will cause `drop` to be called on it once we are done with it. Since `drop` calls `caml_shutdown` internally, we *really, really* do not want to do this, but we have no other way to get at the active runtime.
+ // TODO: There's actually a way to get a handle to the runtime as a function argument. Switch
+ // to doing this instead.
+ let runtime = unsafe { ocaml::Runtime::recover_handle() };
+
+ // Release the runtime lock so that other threads can run using it while we generate the proof.
+ runtime.releasing_runtime(|| {
+ let group_map = GroupMap::::setup();
+ let proof = ProverProof::create_recursive::(
+ &group_map,
+ witness,
+ &runtime_tables,
+ index,
+ prev,
+ None,
+ )
+ .map_err(|e| ocaml::Error::Error(e.into()))?;
+ Ok((proof, public_input).into())
+ })
+}
+
+#[ocaml_gen::func]
+#[ocaml::func]
+pub fn caml_pasta_fp_plonk_proof_create_and_verify(
+ index: CamlPastaFpPlonkIndexPtr<'static>,
+ witness: Vec,
+ runtime_tables: Vec>,
prev_challenges: Vec,
prev_sgs: Vec,
-) -> Result, ocaml::Error> {
+) -> Result, ocaml::Error> {
{
let ptr: &mut poly_commitment::srs::SRS =
unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
@@ -70,7 +143,9 @@ pub fn caml_pasta_fp_plonk_proof_create(
let witness: [Vec<_>; COLUMNS] = witness
.try_into()
.map_err(|_| ocaml::Error::Message("the witness should be a column of 15 vectors"))?;
- let index: &ProverIndex = &index.as_ref().0;
+ let index: &ProverIndex> = &index.as_ref().0;
+ let runtime_tables: Vec> =
+ runtime_tables.into_iter().map(Into::into).collect();
// public input
let public_input = witness[0][0..index.cs.public].to_vec();
@@ -86,12 +161,23 @@ pub fn caml_pasta_fp_plonk_proof_create(
let proof = ProverProof::create_recursive::(
&group_map,
witness,
- &[],
+ &runtime_tables,
index,
prev,
None,
)
.map_err(|e| ocaml::Error::Error(e.into()))?;
+
+ let verifier_index = index.verifier_index();
+
+ // Verify proof
+ verify::>(
+ &group_map,
+ &verifier_index,
+ &proof,
+ &public_input,
+ )?;
+
Ok((proof, public_input).into())
})
}
@@ -100,17 +186,16 @@ pub fn caml_pasta_fp_plonk_proof_create(
#[ocaml::func]
pub fn caml_pasta_fp_plonk_proof_example_with_lookup(
srs: CamlFpSrs,
- indexed: bool,
) -> (
CamlPastaFpPlonkIndex,
CamlFp,
- CamlProverProof,
+ CamlProofWithPublic,
) {
use ark_ff::Zero;
use kimchi::circuits::{
constraints::ConstraintSystem,
gate::{CircuitGate, GateType},
- lookup::runtime_tables::{RuntimeTable, RuntimeTableCfg, RuntimeTableSpec},
+ lookup::runtime_tables::{RuntimeTable, RuntimeTableCfg},
polynomial::COLUMNS,
wires::Wire,
};
@@ -121,16 +206,9 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup(
let mut runtime_tables_setup = vec![];
for table_id in 0..num_tables {
- let cfg = if indexed {
- RuntimeTableCfg::Indexed(RuntimeTableSpec {
- id: table_id as i32,
- len: 5,
- })
- } else {
- RuntimeTableCfg::Custom {
- id: table_id as i32,
- first_column: [8u32, 9, 8, 7, 1].into_iter().map(Into::into).collect(),
- }
+ let cfg = RuntimeTableCfg {
+ id: table_id,
+ first_column: [8u32, 9, 8, 7, 1].into_iter().map(Into::into).collect(),
};
runtime_tables_setup.push(cfg);
}
@@ -168,7 +246,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup(
// create queries into our runtime lookup table
let lookup_cols = &mut lookup_cols[1..];
for chunk in lookup_cols.chunks_mut(2) {
- chunk[0][row] = if indexed { 1u32.into() } else { 9u32.into() }; // index
+ chunk[0][row] = 9u32.into(); // index
chunk[1][row] = 2u32.into(); // value
}
}
@@ -188,7 +266,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup(
ptr.add_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
- let index = ProverIndex::::create(cs, endo_q, srs.0);
+ let index = ProverIndex::>::create(cs, endo_q, srs.0);
let group_map = ::Map::setup();
let public_input = witness[0][0];
let proof = ProverProof::create_recursive::(
@@ -214,7 +292,10 @@ pub fn caml_pasta_fp_plonk_proof_example_with_lookup(
#[ocaml::func]
pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul(
srs: CamlFpSrs,
-) -> (CamlPastaFpPlonkIndex, CamlProverProof) {
+) -> (
+ CamlPastaFpPlonkIndex,
+ CamlProofWithPublic,
+) {
use ark_ff::Zero;
use kimchi::circuits::{
constraints::ConstraintSystem,
@@ -352,7 +433,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul(
ptr.add_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
- let index = ProverIndex::::create(cs, endo_q, srs.0);
+ let index = ProverIndex::>::create(cs, endo_q, srs.0);
let group_map = ::Map::setup();
let proof = ProverProof::create_recursive::(
&group_map,
@@ -373,7 +454,10 @@ pub fn caml_pasta_fp_plonk_proof_example_with_foreign_field_mul(
#[ocaml::func]
pub fn caml_pasta_fp_plonk_proof_example_with_range_check(
srs: CamlFpSrs,
-) -> (CamlPastaFpPlonkIndex, CamlProverProof) {
+) -> (
+ CamlPastaFpPlonkIndex,
+ CamlProofWithPublic,
+) {
use ark_ff::Zero;
use kimchi::circuits::{
constraints::ConstraintSystem, gate::CircuitGate, polynomials::range_check, wires::Wire,
@@ -418,7 +502,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check(
ptr.add_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
- let index = ProverIndex::::create(cs, endo_q, srs.0);
+ let index = ProverIndex::>::create(cs, endo_q, srs.0);
let group_map = ::Map::setup();
let proof = ProverProof::create_recursive::(
&group_map,
@@ -439,7 +523,10 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check(
#[ocaml::func]
pub fn caml_pasta_fp_plonk_proof_example_with_range_check0(
srs: CamlFpSrs,
-) -> (CamlPastaFpPlonkIndex, CamlProverProof) {
+) -> (
+ CamlPastaFpPlonkIndex,
+ CamlProofWithPublic,
+) {
use ark_ff::Zero;
use kimchi::circuits::{
constraints::ConstraintSystem,
@@ -490,7 +577,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_range_check0(
ptr.add_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
- let index = ProverIndex::::create(cs, endo_q, srs.0);
+ let index = ProverIndex::>::create(cs, endo_q, srs.0);
let group_map = ::Map::setup();
let proof = ProverProof::create_recursive::(
&group_map,
@@ -514,7 +601,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_ffadd(
) -> (
CamlPastaFpPlonkIndex,
CamlFp,
- CamlProverProof,
+ CamlProofWithPublic,
) {
use ark_ff::Zero;
use kimchi::circuits::{
@@ -615,7 +702,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_ffadd(
ptr.add_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
- let index = ProverIndex::::create(cs, endo_q, srs.0);
+ let index = ProverIndex::>::create(cs, endo_q, srs.0);
let group_map = ::Map::setup();
let public_input = witness[0][0];
let proof = ProverProof::create_recursive::(
@@ -641,7 +728,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_xor(
) -> (
CamlPastaFpPlonkIndex,
(CamlFp, CamlFp),
- CamlProverProof,
+ CamlProofWithPublic,
) {
use ark_ff::Zero;
use kimchi::circuits::{
@@ -704,7 +791,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_xor(
ptr.add_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
- let index = ProverIndex::::create(cs, endo_q, srs.0);
+ let index = ProverIndex::>::create(cs, endo_q, srs.0);
let group_map = ::Map::setup();
let public_input = (witness[0][0], witness[0][1]);
let proof = ProverProof::create_recursive::(
@@ -730,7 +817,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot(
) -> (
CamlPastaFpPlonkIndex,
(CamlFp, CamlFp),
- CamlProverProof,
+ CamlProofWithPublic,
) {
use ark_ff::Zero;
use kimchi::circuits::{
@@ -798,7 +885,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot(
ptr.add_lagrange_basis(cs.domain.d1);
let (endo_q, _endo_r) = endos::();
- let index = ProverIndex::::create(cs, endo_q, srs.0);
+ let index = ProverIndex::>::create(cs, endo_q, srs.0);
let group_map = ::Map::setup();
let public_input = (witness[0][0], witness[0][1]);
let proof = ProverProof::create_recursive::(
@@ -821,7 +908,7 @@ pub fn caml_pasta_fp_plonk_proof_example_with_rot(
#[ocaml::func]
pub fn caml_pasta_fp_plonk_proof_verify(
index: CamlPastaFpPlonkVerifierIndex,
- proof: CamlProverProof,
+ proof: CamlProofWithPublic,
) -> bool {
let group_map = ::Map::setup();
@@ -837,6 +924,7 @@ pub fn caml_pasta_fp_plonk_proof_verify(
Vesta,
DefaultFqSponge,
DefaultFrSponge,
+ OpeningProof,
>(&group_map, &[context])
.is_ok()
}
@@ -845,18 +933,19 @@ pub fn caml_pasta_fp_plonk_proof_verify(
#[ocaml::func]
pub fn caml_pasta_fp_plonk_proof_batch_verify(
indexes: Vec,
- proofs: Vec>,
+ proofs: Vec>,
) -> bool {
let ts: Vec<_> = indexes
.into_iter()
.zip(proofs.into_iter())
.map(|(caml_index, caml_proof)| {
- let verifier_index: VerifierIndex = caml_index.into();
- let (proof, public_input): (ProverProof<_>, Vec<_>) = caml_proof.into();
+ let verifier_index: VerifierIndex> = caml_index.into();
+ let (proof, public_input): (ProverProof>, Vec<_>) =
+ caml_proof.into();
(verifier_index, proof, public_input)
})
.collect();
- let ts_ref: Vec<_> = ts
+ let ts_ref: Vec>> = ts
.iter()
.map(|(verifier_index, proof, public_input)| Context {
verifier_index,
@@ -870,13 +959,14 @@ pub fn caml_pasta_fp_plonk_proof_batch_verify(
Vesta,
DefaultFqSponge,
DefaultFrSponge,
+ OpeningProof,
>(&group_map, &ts_ref)
.is_ok()
}
#[ocaml_gen::func]
#[ocaml::func]
-pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProverProof {
+pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProofWithPublic {
fn comm() -> PolyComm {
let g = Vesta::prime_subgroup_generator();
PolyComm {
@@ -904,6 +994,7 @@ pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProverProof
zeta_omega: vec![Fp::one()],
};
let evals = ProofEvaluations {
+ public: Some(eval()),
w: array_init(|_| eval()),
coefficients: array_init(|_| eval()),
z: eval(),
@@ -951,7 +1042,7 @@ pub fn caml_pasta_fp_plonk_proof_dummy() -> CamlProverProof
#[ocaml_gen::func]
#[ocaml::func]
pub fn caml_pasta_fp_plonk_proof_deep_copy(
- x: CamlProverProof,
-) -> CamlProverProof {
+ x: CamlProofWithPublic,
+) -> CamlProofWithPublic {
x
}
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs
index 8ac7e0b2ff7..b4cde02c17a 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fp_plonk_verifier_index.rs
@@ -10,20 +10,22 @@ use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain};
use kimchi::circuits::constraints::FeatureFlags;
use kimchi::circuits::lookup::lookups::{LookupFeatures, LookupPatterns};
use kimchi::circuits::polynomials::permutation::Shifts;
-use kimchi::circuits::polynomials::permutation::{zk_polynomial, zk_w3};
+use kimchi::circuits::polynomials::permutation::{permutation_vanishing_polynomial, zk_w};
use kimchi::circuits::wires::{COLUMNS, PERMUTS};
use kimchi::{linearization::expr_linearization, verifier_index::VerifierIndex};
use mina_curves::pasta::{Fp, Pallas, Vesta};
use poly_commitment::commitment::caml::CamlPolyComm;
+use poly_commitment::evaluation_proof::OpeningProof;
use poly_commitment::{commitment::PolyComm, srs::SRS};
use std::convert::TryInto;
use std::path::Path;
+use std::sync::Arc;
pub type CamlPastaFpPlonkVerifierIndex =
CamlPlonkVerifierIndex>;
-impl From> for CamlPastaFpPlonkVerifierIndex {
- fn from(vi: VerifierIndex) -> Self {
+impl From>> for CamlPastaFpPlonkVerifierIndex {
+ fn from(vi: VerifierIndex>) -> Self {
Self {
domain: CamlPlonkDomain {
log_size_of_group: vi.domain.log_size_of_group as isize,
@@ -32,7 +34,7 @@ impl From> for CamlPastaFpPlonkVerifierIndex {
max_poly_size: vi.max_poly_size as isize,
public: vi.public as isize,
prev_challenges: vi.prev_challenges as isize,
- srs: CamlFpSrs(vi.srs.get().expect("have an srs").clone()),
+ srs: CamlFpSrs(vi.srs.clone()),
evals: CamlPlonkVerificationEvals {
sigma_comm: vi.sigma_comm.to_vec().iter().map(Into::into).collect(),
coefficients_comm: vi
@@ -47,15 +49,23 @@ impl From> for CamlPastaFpPlonkVerifierIndex {
mul_comm: vi.mul_comm.into(),
emul_comm: vi.emul_comm.into(),
endomul_scalar_comm: vi.endomul_scalar_comm.into(),
+
+ xor_comm: vi.xor_comm.map(Into::into),
+ range_check0_comm: vi.range_check0_comm.map(Into::into),
+ range_check1_comm: vi.range_check1_comm.map(Into::into),
+ foreign_field_add_comm: vi.foreign_field_add_comm.map(Into::into),
+ foreign_field_mul_comm: vi.foreign_field_mul_comm.map(Into::into),
+ rot_comm: vi.rot_comm.map(Into::into),
},
shifts: vi.shift.to_vec().iter().map(Into::into).collect(),
lookup_index: vi.lookup_index.map(Into::into),
+ zk_rows: vi.zk_rows as isize,
}
}
}
// TODO: This should really be a TryFrom or TryInto
-impl From for VerifierIndex {
+impl From for VerifierIndex> {
fn from(index: CamlPastaFpPlonkVerifierIndex) -> Self {
let evals = index.evals;
let shifts = index.shifts;
@@ -76,38 +86,43 @@ impl From for VerifierIndex {
let shift: [Fp; PERMUTS] = shifts.try_into().expect("wrong size");
let feature_flags = FeatureFlags {
- range_check0: false,
- range_check1: false,
- foreign_field_add: false,
- foreign_field_mul: false,
- rot: false,
- xor: false,
- lookup_features: LookupFeatures {
- patterns: LookupPatterns {
- xor: false,
- lookup: false,
- range_check: false,
- foreign_field_mul: false,
- },
- joint_lookup_used: false,
- uses_runtime_tables: false,
+ range_check0: evals.range_check0_comm.is_some(),
+ range_check1: evals.range_check1_comm.is_some(),
+ foreign_field_add: evals.foreign_field_add_comm.is_some(),
+ foreign_field_mul: evals.foreign_field_mul_comm.is_some(),
+ rot: evals.rot_comm.is_some(),
+ xor: evals.xor_comm.is_some(),
+ lookup_features: {
+ if let Some(li) = index.lookup_index.as_ref() {
+ li.lookup_info.features
+ } else {
+ LookupFeatures {
+ patterns: LookupPatterns {
+ xor: false,
+ lookup: false,
+ range_check: false,
+ foreign_field_mul: false,
+ },
+ joint_lookup_used: false,
+ uses_runtime_tables: false,
+ }
+ }
},
};
// TODO dummy_lookup_value ?
- let (linearization, powers_of_alpha) = expr_linearization(Some(&feature_flags), true);
+ let (linearization, powers_of_alpha) =
+ expr_linearization(Some(&feature_flags), true);
- VerifierIndex:: {
+ VerifierIndex::> {
domain,
max_poly_size: index.max_poly_size as usize,
public: index.public as usize,
prev_challenges: index.prev_challenges as usize,
powers_of_alpha,
- srs: {
- let res = once_cell::sync::OnceCell::new();
- res.set(index.srs.0).unwrap();
- res
- },
+ srs: { Arc::clone(&index.srs.0) },
+
+ zk_rows: index.zk_rows as u64,
sigma_comm,
coefficients_comm,
@@ -120,23 +135,26 @@ impl From for VerifierIndex {
emul_comm: evals.emul_comm.into(),
endomul_scalar_comm: evals.endomul_scalar_comm.into(),
- xor_comm: None,
-
- range_check0_comm: None,
- range_check1_comm: None,
- foreign_field_add_comm: None,
- foreign_field_mul_comm: None,
- rot_comm: None,
+ xor_comm: evals.xor_comm.map(Into::into),
+ range_check0_comm: evals.range_check0_comm.map(Into::into),
+ range_check1_comm: evals.range_check1_comm.map(Into::into),
+ foreign_field_add_comm: evals.foreign_field_add_comm.map(Into::into),
+ foreign_field_mul_comm: evals.foreign_field_mul_comm.map(Into::into),
+ rot_comm: evals.rot_comm.map(Into::into),
shift,
- zkpm: {
+ permutation_vanishing_polynomial_m: {
let res = once_cell::sync::OnceCell::new();
- res.set(zk_polynomial(domain)).unwrap();
+ res.set(permutation_vanishing_polynomial(
+ domain,
+ index.zk_rows as u64,
+ ))
+ .unwrap();
res
},
w: {
let res = once_cell::sync::OnceCell::new();
- res.set(zk_w3(domain)).unwrap();
+ res.set(zk_w(domain, index.zk_rows as u64)).unwrap();
res
},
endo: endo_q,
@@ -151,16 +169,20 @@ pub fn read_raw(
offset: Option,
srs: CamlFpSrs,
path: String,
-) -> Result, ocaml::Error> {
+) -> Result>, ocaml::Error> {
let path = Path::new(&path);
let (endo_q, _endo_r) = poly_commitment::srs::endos::();
- VerifierIndex::::from_file(Some(srs.0), path, offset.map(|x| x as u64), endo_q).map_err(
- |_e| {
- ocaml::Error::invalid_argument("caml_pasta_fp_plonk_verifier_index_raw_read")
- .err()
- .unwrap()
- },
+ VerifierIndex::>::from_file(
+ srs.0,
+ path,
+ offset.map(|x| x as u64),
+ endo_q,
)
+ .map_err(|_e| {
+ ocaml::Error::invalid_argument("caml_pasta_fp_plonk_verifier_index_raw_read")
+ .err()
+ .unwrap()
+ })
}
//
@@ -185,7 +207,7 @@ pub fn caml_pasta_fp_plonk_verifier_index_write(
index: CamlPastaFpPlonkVerifierIndex,
path: String,
) -> Result<(), ocaml::Error> {
- let index: VerifierIndex = index.into();
+ let index: VerifierIndex> = index.into();
let path = Path::new(&path);
index.to_file(path, append).map_err(|_e| {
ocaml::Error::invalid_argument("caml_pasta_fp_plonk_verifier_index_raw_read")
@@ -248,9 +270,16 @@ pub fn caml_pasta_fp_plonk_verifier_index_dummy() -> CamlPastaFpPlonkVerifierInd
mul_comm: comm(),
emul_comm: comm(),
endomul_scalar_comm: comm(),
+ xor_comm: None,
+ range_check0_comm: None,
+ range_check1_comm: None,
+ foreign_field_add_comm: None,
+ foreign_field_mul_comm: None,
+ rot_comm: None,
},
shifts: (0..PERMUTS - 1).map(|_| Fp::one().into()).collect(),
lookup_index: None,
+ zk_rows: 3,
}
}
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs
index 0203b98bf18..0229fb9c469 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_index.rs
@@ -1,9 +1,15 @@
+use crate::arkworks::CamlFq;
use crate::{gate_vector::fq::CamlPastaFqPlonkGateVectorPtr, srs::fq::CamlFqSrs};
use ark_poly::EvaluationDomain;
+use kimchi::circuits::lookup::runtime_tables::caml::CamlRuntimeTableCfg;
+use kimchi::circuits::lookup::runtime_tables::RuntimeTableCfg;
+use kimchi::circuits::lookup::tables::caml::CamlLookupTable;
+use kimchi::circuits::lookup::tables::LookupTable;
use kimchi::circuits::{constraints::ConstraintSystem, gate::CircuitGate};
use kimchi::{linearization::expr_linearization, prover_index::ProverIndex};
use mina_curves::pasta::{Fq, Pallas, PallasParameters, Vesta};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge};
+use poly_commitment::{evaluation_proof::OpeningProof};
use serde::{Deserialize, Serialize};
use std::{
fs::{File, OpenOptions},
@@ -12,7 +18,7 @@ use std::{
/// Boxed so that we don't store large proving indexes in the OCaml heap.
#[derive(ocaml_gen::CustomType)]
-pub struct CamlPastaFqPlonkIndex(pub Box>);
+pub struct CamlPastaFqPlonkIndex(pub Box>>);
pub type CamlPastaFqPlonkIndexPtr<'a> = ocaml::Pointer<'a, CamlPastaFqPlonkIndex>;
extern "C" fn caml_pasta_fq_plonk_index_finalize(v: ocaml::Raw) {
@@ -39,6 +45,8 @@ impl ocaml::custom::Custom for CamlPastaFqPlonkIndex {
pub fn caml_pasta_fq_plonk_index_create(
gates: CamlPastaFqPlonkGateVectorPtr,
public: ocaml::Int,
+ lookup_tables: Vec>,
+ runtime_tables: Vec>,
prev_challenges: ocaml::Int,
srs: CamlFqSrs,
) -> Result {
@@ -53,18 +61,25 @@ pub fn caml_pasta_fq_plonk_index_create(
})
.collect();
+ let runtime_tables: Vec> =
+ runtime_tables.into_iter().map(Into::into).collect();
+
+ let lookup_tables: Vec> = lookup_tables.into_iter().map(Into::into).collect();
+
// create constraint system
let cs = match ConstraintSystem::::create(gates)
.public(public as usize)
.prev_challenges(prev_challenges as usize)
+ .lookup(lookup_tables)
+ .runtime(if runtime_tables.is_empty() {
+ None
+ } else {
+ Some(runtime_tables)
+ })
.build()
{
- Err(_) => {
- return Err(ocaml::Error::failwith(
- "caml_pasta_fq_plonk_index_create: could not create constraint system",
- )
- .err()
- .unwrap())
+ Err(e) => {
+ return Err(e.into())
}
Ok(cs) => cs,
};
@@ -80,7 +95,7 @@ pub fn caml_pasta_fq_plonk_index_create(
}
// create index
- let mut index = ProverIndex::::create(cs, endo_q, srs.clone());
+ let mut index = ProverIndex::>::create(cs, endo_q, srs.clone());
// Compute and cache the verifier index digest
index.compute_verifier_index_digest::>();
@@ -143,7 +158,9 @@ pub fn caml_pasta_fq_plonk_index_read(
}
// deserialize the index
- let mut t = ProverIndex::::deserialize(&mut rmp_serde::Deserializer::new(r))?;
+ let mut t = ProverIndex::>::deserialize(
+ &mut rmp_serde::Deserializer::new(r),
+ )?;
t.srs = srs.clone();
let (linearization, powers_of_alpha) = expr_linearization(Some(&t.cs.feature_flags), true);
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs
index 3bea626a23b..b54071f636d 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_proof.rs
@@ -8,7 +8,10 @@ use ark_ec::AffineCurve;
use ark_ff::One;
use array_init::array_init;
use groupmap::GroupMap;
-use kimchi::prover_index::ProverIndex;
+use kimchi::{
+ circuits::lookup::runtime_tables::{caml::CamlRuntimeTable, RuntimeTable},
+ prover_index::ProverIndex,
+};
use kimchi::{circuits::polynomial::COLUMNS, verifier::batch_verify};
use kimchi::{
proof::{
@@ -16,7 +19,7 @@ use kimchi::{
},
verifier::Context,
};
-use kimchi::{prover::caml::CamlProverProof, verifier_index::VerifierIndex};
+use kimchi::{prover::caml::CamlProofWithPublic, verifier_index::VerifierIndex};
use mina_curves::pasta::{Fp, Fq, Pallas, PallasParameters};
use mina_poseidon::{
constants::PlonkSpongeConstantsKimchi,
@@ -32,9 +35,10 @@ use std::convert::TryInto;
pub fn caml_pasta_fq_plonk_proof_create(
index: CamlPastaFqPlonkIndexPtr<'static>,
witness: Vec,
+ runtime_tables: Vec>,
prev_challenges: Vec,
prev_sgs: Vec,
-) -> Result, ocaml::Error> {
+) -> Result, ocaml::Error> {
{
let ptr: &mut poly_commitment::srs::SRS =
unsafe { &mut *(std::sync::Arc::as_ptr(&index.as_ref().0.srs) as *mut _) };
@@ -66,7 +70,10 @@ pub fn caml_pasta_fq_plonk_proof_create(
let witness: [Vec<_>; COLUMNS] = witness
.try_into()
.expect("the witness should be a column of 15 vectors");
- let index: &ProverIndex = &index.as_ref().0;
+ let index: &ProverIndex> = &index.as_ref().0;
+
+ let runtime_tables: Vec> =
+ runtime_tables.into_iter().map(Into::into).collect();
// public input
let public_input = witness[0][0..index.cs.public].to_vec();
@@ -82,7 +89,7 @@ pub fn caml_pasta_fq_plonk_proof_create(
let proof = ProverProof::create_recursive::<
DefaultFqSponge,
DefaultFrSponge,
- >(&group_map, witness, &[], index, prev, None)
+ >(&group_map, witness, &runtime_tables, index, prev, None)
.map_err(|e| ocaml::Error::Error(e.into()))?;
Ok((proof, public_input).into())
})
@@ -92,7 +99,7 @@ pub fn caml_pasta_fq_plonk_proof_create(
#[ocaml::func]
pub fn caml_pasta_fq_plonk_proof_verify(
index: CamlPastaFqPlonkVerifierIndex,
- proof: CamlProverProof,
+ proof: CamlProofWithPublic,
) -> bool {
let group_map = ::Map::setup();
@@ -108,6 +115,7 @@ pub fn caml_pasta_fq_plonk_proof_verify(
Pallas,
DefaultFqSponge,
DefaultFrSponge,
+ OpeningProof,
>(&group_map, &[context])
.is_ok()
}
@@ -116,18 +124,19 @@ pub fn caml_pasta_fq_plonk_proof_verify(
#[ocaml::func]
pub fn caml_pasta_fq_plonk_proof_batch_verify(
indexes: Vec,
- proofs: Vec>,
+ proofs: Vec>,
) -> bool {
let ts: Vec<_> = indexes
.into_iter()
.zip(proofs.into_iter())
.map(|(caml_index, caml_proof)| {
- let verifier_index: VerifierIndex = caml_index.into();
- let (proof, public_input): (ProverProof<_>, Vec<_>) = caml_proof.into();
+ let verifier_index: VerifierIndex> = caml_index.into();
+ let (proof, public_input): (ProverProof>, Vec<_>) =
+ caml_proof.into();
(verifier_index, proof, public_input)
})
.collect();
- let ts_ref: Vec<_> = ts
+ let ts_ref: Vec>> = ts
.iter()
.map(|(verifier_index, proof, public_input)| Context {
verifier_index,
@@ -141,13 +150,14 @@ pub fn caml_pasta_fq_plonk_proof_batch_verify(
Pallas,
DefaultFqSponge,
DefaultFrSponge,
+ OpeningProof,
>(&group_map, &ts_ref)
.is_ok()
}
#[ocaml_gen::func]
#[ocaml::func]
-pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProverProof {
+pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProofWithPublic {
fn comm() -> PolyComm {
let g = Pallas::prime_subgroup_generator();
PolyComm {
@@ -175,6 +185,7 @@ pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProverProof
zeta_omega: vec![Fq::one()],
};
let evals = ProofEvaluations {
+ public: Some(eval()),
w: array_init(|_| eval()),
coefficients: array_init(|_| eval()),
z: eval(),
@@ -222,7 +233,7 @@ pub fn caml_pasta_fq_plonk_proof_dummy() -> CamlProverProof
#[ocaml_gen::func]
#[ocaml::func]
pub fn caml_pasta_fq_plonk_proof_deep_copy(
- x: CamlProverProof,
-) -> CamlProverProof {
+ x: CamlProofWithPublic,
+) -> CamlProofWithPublic {
x
}
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs
index d470c8d1de3..ab05d84c0c2 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/pasta_fq_plonk_verifier_index.rs
@@ -10,20 +10,21 @@ use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as Domain};
use kimchi::circuits::constraints::FeatureFlags;
use kimchi::circuits::lookup::lookups::{LookupFeatures, LookupPatterns};
use kimchi::circuits::polynomials::permutation::Shifts;
-use kimchi::circuits::polynomials::permutation::{zk_polynomial, zk_w3};
+use kimchi::circuits::polynomials::permutation::{permutation_vanishing_polynomial, zk_w};
use kimchi::circuits::wires::{COLUMNS, PERMUTS};
use kimchi::{linearization::expr_linearization, verifier_index::VerifierIndex};
use mina_curves::pasta::{Fq, Pallas, Vesta};
-use poly_commitment::commitment::caml::CamlPolyComm;
+use poly_commitment::{commitment::caml::CamlPolyComm, evaluation_proof::OpeningProof};
use poly_commitment::{commitment::PolyComm, srs::SRS};
use std::convert::TryInto;
use std::path::Path;
+use std::sync::Arc;
pub type CamlPastaFqPlonkVerifierIndex =
CamlPlonkVerifierIndex>;
-impl From> for CamlPastaFqPlonkVerifierIndex {
- fn from(vi: VerifierIndex) -> Self {
+impl From>> for CamlPastaFqPlonkVerifierIndex {
+ fn from(vi: VerifierIndex>) -> Self {
Self {
domain: CamlPlonkDomain {
log_size_of_group: vi.domain.log_size_of_group as isize,
@@ -32,7 +33,7 @@ impl From> for CamlPastaFqPlonkVerifierIndex {
max_poly_size: vi.max_poly_size as isize,
public: vi.public as isize,
prev_challenges: vi.prev_challenges as isize,
- srs: CamlFqSrs(vi.srs.get().expect("have an srs").clone()),
+ srs: CamlFqSrs(vi.srs.clone()),
evals: CamlPlonkVerificationEvals {
sigma_comm: vi.sigma_comm.to_vec().iter().map(Into::into).collect(),
coefficients_comm: vi
@@ -47,15 +48,23 @@ impl From> for CamlPastaFqPlonkVerifierIndex {
mul_comm: vi.mul_comm.into(),
emul_comm: vi.emul_comm.into(),
endomul_scalar_comm: vi.endomul_scalar_comm.into(),
+
+ xor_comm: vi.xor_comm.map(Into::into),
+ range_check0_comm: vi.range_check0_comm.map(Into::into),
+ range_check1_comm: vi.range_check1_comm.map(Into::into),
+ foreign_field_add_comm: vi.foreign_field_add_comm.map(Into::into),
+ foreign_field_mul_comm: vi.foreign_field_mul_comm.map(Into::into),
+ rot_comm: vi.rot_comm.map(Into::into),
},
shifts: vi.shift.to_vec().iter().map(Into::into).collect(),
lookup_index: vi.lookup_index.map(Into::into),
+ zk_rows: vi.zk_rows as isize,
}
}
}
// TODO: This should really be a TryFrom or TryInto
-impl From for VerifierIndex {
+impl From for VerifierIndex> {
fn from(index: CamlPastaFqPlonkVerifierIndex) -> Self {
let evals = index.evals;
let shifts = index.shifts;
@@ -76,38 +85,43 @@ impl From for VerifierIndex {
let shift: [Fq; PERMUTS] = shifts.try_into().expect("wrong size");
let feature_flags = FeatureFlags {
- range_check0: false,
- range_check1: false,
- foreign_field_add: false,
- foreign_field_mul: false,
- rot: false,
- xor: false,
- lookup_features: LookupFeatures {
- patterns: LookupPatterns {
- xor: false,
- lookup: false,
- range_check: false,
- foreign_field_mul: false,
- },
- joint_lookup_used: false,
- uses_runtime_tables: false,
+ range_check0: evals.range_check0_comm.is_some(),
+ range_check1: evals.range_check1_comm.is_some(),
+ foreign_field_add: evals.foreign_field_add_comm.is_some(),
+ foreign_field_mul: evals.foreign_field_mul_comm.is_some(),
+ rot: evals.rot_comm.is_some(),
+ xor: evals.xor_comm.is_some(),
+ lookup_features: {
+ if let Some(li) = index.lookup_index.as_ref() {
+ li.lookup_info.features
+ } else {
+ LookupFeatures {
+ patterns: LookupPatterns {
+ xor: false,
+ lookup: false,
+ range_check: false,
+ foreign_field_mul: false,
+ },
+ joint_lookup_used: false,
+ uses_runtime_tables: false,
+ }
+ }
},
};
// TODO dummy_lookup_value ?
- let (linearization, powers_of_alpha) = expr_linearization(Some(&feature_flags), true);
+ let (linearization, powers_of_alpha) =
+ expr_linearization(Some(&feature_flags), true);
- VerifierIndex:: {
+ VerifierIndex::> {
domain,
max_poly_size: index.max_poly_size as usize,
public: index.public as usize,
prev_challenges: index.prev_challenges as usize,
powers_of_alpha,
- srs: {
- let res = once_cell::sync::OnceCell::new();
- res.set(index.srs.0).unwrap();
- res
- },
+ srs: { Arc::clone(&index.srs.0) },
+
+ zk_rows: index.zk_rows as u64,
sigma_comm,
coefficients_comm,
@@ -120,23 +134,26 @@ impl From for VerifierIndex {
emul_comm: evals.emul_comm.into(),
endomul_scalar_comm: evals.endomul_scalar_comm.into(),
- xor_comm: None,
-
- range_check0_comm: None,
- range_check1_comm: None,
- foreign_field_add_comm: None,
- foreign_field_mul_comm: None,
- rot_comm: None,
+ xor_comm: evals.xor_comm.map(Into::into),
+ range_check0_comm: evals.range_check0_comm.map(Into::into),
+ range_check1_comm: evals.range_check1_comm.map(Into::into),
+ foreign_field_add_comm: evals.foreign_field_add_comm.map(Into::into),
+ foreign_field_mul_comm: evals.foreign_field_mul_comm.map(Into::into),
+ rot_comm: evals.rot_comm.map(Into::into),
shift,
- zkpm: {
+ permutation_vanishing_polynomial_m: {
let res = once_cell::sync::OnceCell::new();
- res.set(zk_polynomial(domain)).unwrap();
+ res.set(permutation_vanishing_polynomial(
+ domain,
+ index.zk_rows as u64,
+ ))
+ .unwrap();
res
},
w: {
let res = once_cell::sync::OnceCell::new();
- res.set(zk_w3(domain)).unwrap();
+ res.set(zk_w(domain, index.zk_rows as u64)).unwrap();
res
},
endo: endo_q,
@@ -151,16 +168,20 @@ pub fn read_raw(
offset: Option,
srs: CamlFqSrs,
path: String,
-) -> Result, ocaml::Error> {
+) -> Result>, ocaml::Error> {
let path = Path::new(&path);
let (endo_q, _endo_r) = poly_commitment::srs::endos::();
- VerifierIndex::::from_file(Some(srs.0), path, offset.map(|x| x as u64), endo_q).map_err(
- |_e| {
- ocaml::Error::invalid_argument("caml_pasta_fq_plonk_verifier_index_raw_read")
- .err()
- .unwrap()
- },
+ VerifierIndex::>::from_file(
+ srs.0,
+ path,
+ offset.map(|x| x as u64),
+ endo_q,
)
+ .map_err(|_e| {
+ ocaml::Error::invalid_argument("caml_pasta_fq_plonk_verifier_index_raw_read")
+ .err()
+ .unwrap()
+ })
}
//
@@ -185,7 +206,7 @@ pub fn caml_pasta_fq_plonk_verifier_index_write(
index: CamlPastaFqPlonkVerifierIndex,
path: String,
) -> Result<(), ocaml::Error> {
- let index: VerifierIndex = index.into();
+ let index: VerifierIndex> = index.into();
let path = Path::new(&path);
index.to_file(path, append).map_err(|_e| {
ocaml::Error::invalid_argument("caml_pasta_fq_plonk_verifier_index_raw_read")
@@ -248,9 +269,16 @@ pub fn caml_pasta_fq_plonk_verifier_index_dummy() -> CamlPastaFqPlonkVerifierInd
mul_comm: comm(),
endomul_scalar_comm: comm(),
emul_comm: comm(),
+ xor_comm: None,
+ range_check0_comm: None,
+ range_check1_comm: None,
+ foreign_field_add_comm: None,
+ foreign_field_mul_comm: None,
+ rot_comm: None,
},
shifts: (0..PERMUTS - 1).map(|_| Fq::one().into()).collect(),
lookup_index: None,
+ zk_rows: 3,
}
}
diff --git a/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs b/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs
index 33b3278c60b..52d45fa3527 100644
--- a/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs
+++ b/src/lib/crypto/kimchi_bindings/stubs/src/plonk_verifier_index.rs
@@ -20,6 +20,12 @@ pub struct CamlPlonkVerificationEvals {
pub mul_comm: PolyComm,
pub emul_comm: PolyComm,
pub endomul_scalar_comm: PolyComm,
+ pub xor_comm: Option,
+ pub range_check0_comm: Option