chore: Cl/ci3.2 #10919

Merged 22 commits on Jan 2, 2025.
Showing changes from 10 of the 22 commits.
149 changes: 75 additions & 74 deletions barretenberg/acir_tests/bootstrap.sh
@@ -6,131 +6,129 @@ export CRS_PATH=$HOME/.bb-crs

function build {
set -eu
if [ ! -d acir_tests ]; then
cp -R ../../noir/noir-repo/test_programs/execution_success acir_tests
# Running these requires extra glue code, so they're skipped.
rm -rf acir_tests/{diamond_deps_0,workspace,workspace_default_member}
# TODO(https://github.com/AztecProtocol/barretenberg/issues/1108): problem regardless of the proof system used.
rm -rf acir_tests/regression_5045
fi

github_group "acir_tests build"

rm -rf acir_tests
cp -R ../../noir/noir-repo/test_programs/execution_success acir_tests
# Running these requires extra glue code, so they're skipped.
rm -rf acir_tests/{diamond_deps_0,workspace,workspace_default_member}
# TODO(https://github.com/AztecProtocol/barretenberg/issues/1108): problem regardless of the proof system used.
rm -rf acir_tests/regression_5045

# COMPILE=2 only compiles the test.
github_group "acir_tests compiling"
parallel --joblog joblog.txt --line-buffered 'COMPILE=2 ./run_test.sh $(basename {})' ::: ./acir_tests/*
github_endgroup
denoise "parallel --joblog joblog.txt --line-buffered 'COMPILE=2 ./run_test.sh \$(basename {})' ::: ./acir_tests/*"

# TODO: This actually breaks things, but shouldn't. We want to do it here and not maintain manually.
# Regenerate verify_honk_proof recursive input.
# local bb=$(realpath ../cpp/build/bin/bb)
# (cd ./acir_tests/assert_statement && \
# $bb write_recursion_inputs_honk -b ./target/program.json -o ../verify_honk_proof --recursive)

github_group "acir_tests updating yarn"
# Update yarn.lock so it can be committed.
# Be lenient about bb.js hash changing, even if we try to minimize the occurrences.
(cd browser-test-app && yarn add --dev @aztec/bb.js@../../ts && yarn)
(cd headless-test && yarn)
(cd sol-test && yarn)
denoise "cd browser-test-app && yarn add --dev @aztec/bb.js@../../ts && yarn"
denoise "cd headless-test && yarn"
denoise "cd sol-test && yarn"
# The md5sum of everything is the same after each yarn call.
# Yet seemingly yarn's content hash will churn unless we reset timestamps
find {headless-test,browser-test-app} -exec touch -t 197001010000 {} + 2>/dev/null || true
github_endgroup

github_group "acir_tests building browser-test-app"
(cd browser-test-app && yarn build)
denoise "cd browser-test-app && yarn build"

github_endgroup
}
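Most of the rewritten build steps above go through denoise instead of explicit github_group/github_endgroup pairs. A minimal sketch of a denoise-style wrapper, assuming its only job is to suppress output unless the wrapped command fails; the real ci3 helper may also print progress and manage log groups:

function denoise {
  local log status=0
  log=$(mktemp)
  # Run the command line in a subshell (so "cd foo && yarn" can't change the
  # caller's directory) and capture everything it prints.
  (eval "$*") >"$log" 2>&1 || status=$?
  # Replay the captured output only if the command failed.
  if [ $status -ne 0 ]; then
    cat "$log"
  fi
  rm -f "$log"
  return $status
}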

function hash {
cache_content_hash ../../noir/.rebuild_patterns ../../noir/.rebuild_patterns_tests ../../barretenberg/cpp/.rebuild_patterns ../../barretenberg/ts/.rebuild_patterns
cache_content_hash \
../../noir/.rebuild_patterns \
../../noir/.rebuild_patterns_tests \
../../barretenberg/cpp/.rebuild_patterns \
../../barretenberg/ts/.rebuild_patterns
}

function test {
set -eu
github_group "acir_tests testing"

local hash=$(hash)
if ! test_should_run barretenberg-acir-tests-$hash; then
github_endgroup
return
fi
test_should_run barretenberg-acir-tests-$hash || return 0

github_group "acir_tests testing"

# TODO: These are some magic numbers that fit our dev/ci environments. They ultimately need to work on lower hardware.
export HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-8}
# local jobs=$(($(nproc) / HARDWARE_CONCURRENCY))
local jobs=64

# Create temporary file descriptor 3 and redirect anything written to it to parallel's stdin.
exec 3> >(parallel -j$jobs --tag --line-buffered --joblog joblog.txt)
local pid=$!
trap "kill -SIGTERM $pid 2>/dev/null || true" EXIT
test_cmds | (cd $root; parallel -j$jobs --tag --line-buffered --joblog joblog.txt)

# Run function for syntactic simplicity.
run() {
echo "$*" >&3
}
cache_upload_flag barretenberg-acir-tests-$hash
github_endgroup
}
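The "magic numbers" in test() are easier to judge with a quick worst-case calculation; the figures below are illustrative only and assume each parallel job really saturates HARDWARE_CONCURRENCY threads:

export HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-8}
jobs=64
# Hard-coded jobs=64 can demand up to 64 * 8 = 512 threads at once.
echo "worst-case threads: $((jobs * HARDWARE_CONCURRENCY))"
# The commented-out derivation would instead scale with the machine,
# e.g. 16 jobs on a hypothetical 128-core runner:
echo "derived jobs on 128 cores: $((128 / HARDWARE_CONCURRENCY))"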

# Prints to stdout, one per line, the command to execute each individual test.
# Paths are all relative to the repository root.
function test_cmds {
local plonk_tests=$(find ./acir_tests -maxdepth 1 -mindepth 1 -type d | \
grep -vE 'verify_honk_proof|double_verify_honk_proof')
local honk_tests=$(find ./acir_tests -maxdepth 1 -mindepth 1 -type d | \
grep -vE 'single_verify_proof|double_verify_proof|double_verify_nested_proof')

local run_test=$(realpath --relative-to=$root ./run_test.sh)
local run_test_browser=$(realpath --relative-to=$root ./run_test_browser.sh)
local bbjs_bin="../ts/dest/node/main.js"

# barretenberg-acir-tests-sol:
run FLOW=sol ./run_test.sh assert_statement
run FLOW=sol ./run_test.sh double_verify_proof
run FLOW=sol ./run_test.sh double_verify_nested_proof
run FLOW=sol_honk ./run_test.sh assert_statement
run FLOW=sol_honk ./run_test.sh 1_mul
run FLOW=sol_honk ./run_test.sh slices
run FLOW=sol_honk ./run_test.sh verify_honk_proof
echo FLOW=sol $run_test assert_statement
echo FLOW=sol $run_test double_verify_proof
echo FLOW=sol $run_test double_verify_nested_proof
echo FLOW=sol_honk $run_test assert_statement
echo FLOW=sol_honk $run_test 1_mul
echo FLOW=sol_honk $run_test slices
echo FLOW=sol_honk $run_test verify_honk_proof

# barretenberg-acir-tests-bb.js:
# Browser tests.
run BROWSER=chrome THREAD_MODEL=mt PORT=8080 ./run_test_browser.sh verify_honk_proof
run BROWSER=chrome THREAD_MODEL=st PORT=8081 ./run_test_browser.sh 1_mul
run BROWSER=webkit THREAD_MODEL=mt PORT=8082 ./run_test_browser.sh verify_honk_proof
run BROWSER=webkit THREAD_MODEL=st PORT=8083 ./run_test_browser.sh 1_mul
# Run ecdsa_secp256r1_3x through bb.js on node to check 256k support.
run BIN=../ts/dest/node/main.js FLOW=prove_then_verify ./run_test.sh ecdsa_secp256r1_3x
# Run the prove then verify flow for UltraHonk. This makes sure we have the same circuit for different witness inputs.
run BIN=../ts/dest/node/main.js SYS=ultra_honk FLOW=prove_then_verify ./run_test.sh 6_array
# Run a single arbitrary test not involving recursion through bb.js for MegaHonk
run BIN=../ts/dest/node/main.js SYS=mega_honk FLOW=prove_and_verify ./run_test.sh 6_array
# Run 1_mul through bb.js build, all_cmds flow, to test all cli args.
run BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_test.sh 1_mul
echo BROWSER=chrome THREAD_MODEL=mt PORT=8080 $run_test_browser verify_honk_proof
echo BROWSER=chrome THREAD_MODEL=st PORT=8081 $run_test_browser 1_mul
echo BROWSER=webkit THREAD_MODEL=mt PORT=8082 $run_test_browser verify_honk_proof
echo BROWSER=webkit THREAD_MODEL=st PORT=8083 $run_test_browser 1_mul
# Run ecdsa_secp256r1_3x through bb.js on node to check 256k support.
echo BIN=$bbjs_bin FLOW=prove_then_verify $run_test ecdsa_secp256r1_3x
# Run the prove then verify flow for UltraHonk. This makes sure we have the same circuit for different witness inputs.
echo BIN=$bbjs_bin SYS=ultra_honk FLOW=prove_then_verify $run_test 6_array
# Run a single arbitrary test not involving recursion through bb.js for MegaHonk.
echo BIN=$bbjs_bin SYS=mega_honk FLOW=prove_and_verify $run_test 6_array
# Run 1_mul through bb.js build, all_cmds flow, to test all cli args.
echo BIN=$bbjs_bin FLOW=all_cmds $run_test 1_mul

# barretenberg-acir-tests-bb:
# Fold and verify an ACIR program stack using ClientIvc, recursively verify as part of the Tube circuit and produce and verify a Honk proof
run FLOW=prove_then_verify_tube ./run_test.sh 6_array
# Run 1_mul through native bb build, all_cmds flow, to test all cli args.
run FLOW=all_cmds ./run_test.sh 1_mul
echo FLOW=prove_then_verify_tube $run_test 6_array
# Run 1_mul through native bb build, all_cmds flow, to test all cli args.
echo FLOW=all_cmds $run_test 1_mul

# barretenberg-acir-tests-bb-ultra-plonk:
# Exclude honk tests.
for t in $plonk_tests; do
run FLOW=prove_then_verify ./run_test.sh $(basename $t)
echo FLOW=prove_then_verify $run_test $(basename $t)
done
run FLOW=prove_then_verify RECURSIVE=true ./run_test.sh assert_statement
run FLOW=prove_then_verify RECURSIVE=true ./run_test.sh double_verify_proof
echo FLOW=prove_then_verify RECURSIVE=true $run_test assert_statement
echo FLOW=prove_then_verify RECURSIVE=true $run_test double_verify_proof

# barretenberg-acir-tests-bb-ultra-honk:
# Exclude plonk tests.
for t in $honk_tests; do
run SYS=ultra_honk FLOW=prove_then_verify ./run_test.sh $(basename $t)
echo SYS=ultra_honk FLOW=prove_then_verify $run_test $(basename $t)
done
run SYS=ultra_honk FLOW=prove_then_verify RECURSIVE=true ./run_test.sh assert_statement
run SYS=ultra_honk FLOW=prove_then_verify RECURSIVE=true ./run_test.sh double_verify_honk_proof
run SYS=ultra_honk FLOW=prove_and_verify_program ./run_test.sh merkle_insert
echo SYS=ultra_honk FLOW=prove_then_verify RECURSIVE=true $run_test assert_statement
echo SYS=ultra_honk FLOW=prove_then_verify RECURSIVE=true $run_test double_verify_honk_proof
echo SYS=ultra_honk FLOW=prove_and_verify_program $run_test merkle_insert

# barretenberg-acir-tests-bb-client-ivc:
run FLOW=prove_then_verify_client_ivc ./run_test.sh 6_array
run FLOW=prove_then_verify_client_ivc ./run_test.sh databus
run FLOW=prove_then_verify_client_ivc ./run_test.sh databus_two_calldata

# Close parallels input file descriptor and wait for completion.
exec 3>&-
wait $pid

cache_upload_flag barretenberg-acir-tests-$hash
github_endgroup
echo FLOW=prove_then_verify_client_ivc $run_test 6_array
echo FLOW=prove_then_verify_client_ivc $run_test databus
echo FLOW=prove_then_verify_client_ivc $run_test databus_two_calldata
}
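Because test_cmds only prints one command per line, with paths relative to the repository root, the same list can be consumed by test() above or filtered by hand. A usage sketch, assuming GNU parallel is installed and the commands are run from barretenberg/acir_tests, with ../.. standing in for the repository root:

# List every test command without running anything.
./bootstrap.sh test-cmds

# Run only the ultra_honk flows, eight at a time, from the repository root.
./bootstrap.sh test-cmds | grep ultra_honk | (cd ../.. && parallel -j8 --tag --line-buffered)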

export -f build test
@@ -143,17 +141,20 @@ case "$cmd" in
""|"fast")
;;
"full")
denoise build
build
;;
"ci")
denoise build
denoise test
build
test
;;
"hash")
hash
;;
"test")
denoise test
test
;;
"test-cmds")
test_cmds
;;
*)
echo "Unknown command: $cmd"
4 changes: 2 additions & 2 deletions barretenberg/acir_tests/browser-test-app/yarn.lock
@@ -7,7 +7,7 @@ __metadata:

"@aztec/bb.js@file:../../ts::locator=browser-test-app%40workspace%3A.":
version: 0.67.1
resolution: "@aztec/bb.js@file:../../ts#../../ts::hash=29e47a&locator=browser-test-app%40workspace%3A."
resolution: "@aztec/bb.js@file:../../ts#../../ts::hash=cd38cd&locator=browser-test-app%40workspace%3A."
dependencies:
comlink: "npm:^4.4.1"
commander: "npm:^12.1.0"
@@ -17,7 +17,7 @@ __metadata:
tslib: "npm:^2.4.0"
bin:
bb.js: ./dest/node/main.js
checksum: 10c0/c01128ff74f29b6bbc5c46362792525ef5612c5fc8787341551bcf457ba9816a971e24a74292ab230c47b0b9efe8d7e0d1cabd44247e1b6e718727d0b6372400
checksum: 10c0/c6c1476f5f5d5cc1ea7022043e00870ee0743fd73a532c171586ab74bac53f3888c648bd4057de5a602e4a556cbb5d91454f57e0875ab002ccc87e7f83f12e43
languageName: node
linkType: hard

2 changes: 2 additions & 0 deletions barretenberg/acir_tests/run_test_browser.sh
@@ -6,6 +6,8 @@ cleanup() {
}
trap cleanup EXIT

cd $(dirname $0)

# Skipping firefox because this headless firefox is so slow.
export BROWSER=${BROWSER:-chrome,webkit}

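The added cd $(dirname $0) makes run_test_browser.sh independent of the caller's working directory, which matters now that test commands are emitted relative to the repository root. Illustrative invocations only (paths shown as examples):

# Both now resolve the script's internal relative paths the same way:
barretenberg/acir_tests/run_test_browser.sh 1_mul   # from the repository root
./run_test_browser.sh 1_mul                         # from barretenberg/acir_tests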
59 changes: 34 additions & 25 deletions barretenberg/cpp/bootstrap.sh
@@ -73,31 +73,31 @@ function build {
}

function test {
if test_should_run barretenberg-test-$hash; then
github_group "bb test"

echo "Check formatting..."
./format.sh check

echo "Building tests..."
denoise cmake --preset $preset -Bbuild "&&" cmake --build build

# Download ignition transcripts.
# TODO: Use the flattened crs. These old transcripts are a pain.
echo "Downloading srs..."
denoise "cd ./srs_db && ./download_ignition.sh 3 && ./download_grumpkin.sh"
if [ ! -d ./srs_db/grumpkin ]; then
# The Grumpkin SRS is generated manually at the moment, only up to a large enough size for tests
# If tests require more points, the parameter can be increased here. Note: IPA requires
# dyadic_circuit_size + 1 points so in general this number will be a power of two plus 1
cd ./build && cmake --build . --parallel --target grumpkin_srs_gen && ./bin/grumpkin_srs_gen 32769
fi

echo "Testing..."
(cd build && GTEST_COLOR=1 denoise ctest -j32 --output-on-failure)
cache_upload_flag barretenberg-test-$hash
github_endgroup
fi
test_should_run barretenberg-test-$hash || return 0

github_group "bb test"

echo "Check formatting..."
./format.sh check

echo "Building tests..."
denoise cmake --preset $preset -Bbuild "&&" cmake --build build

# Download ignition transcripts.
# TODO: Use the flattened crs. These old transcripts are a pain.
echo "Downloading srs..."
denoise "cd ./srs_db && ./download_ignition.sh 3 && ./download_grumpkin.sh"
# if [ ! -d ./srs_db/grumpkin ]; then
# # The Grumpkin SRS is generated manually at the moment, only up to a large enough size for tests
# # If tests require more points, the parameter can be increased here. Note: IPA requires
# # dyadic_circuit_size + 1 points so in general this number will be a power of two plus 1
# (cd ./build && cmake --build . --parallel --target grumpkin_srs_gen && ./bin/grumpkin_srs_gen 32769)
# fi

echo "Testing..."
(cd build && GTEST_COLOR=1 denoise ctest -j32 --output-on-failure)
cache_upload_flag barretenberg-test-$hash
github_endgroup
}

case "$cmd" in
@@ -121,6 +121,15 @@ case "$cmd" in
"hash")
echo $hash
;;
"test-cmds")
cd build
for bin in ./bin/*_tests; do
bin_path=$(realpath --relative-to=$root $bin)
$bin --gtest_list_tests | \
awk -vbin=$bin_path '/^[a-zA-Z]/ {suite=$1} /^[ ]/ {print bin " --gtest_filter=" suite$1}' | \
sed 's/\.$//' | grep -v 'DISABLED_'; \
done
;;
*)
echo "Unknown command: $cmd"
exit 1
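The new test-cmds case turns each Google Test binary into one runnable command per test, presumably so the outer CI layer can schedule tests individually. A sketch of the transformation, using a hypothetical binary and suite name:

# Hypothetical output of ./bin/foo_tests --gtest_list_tests:
#   UltraCircuit.
#     Basic
#     DISABLED_Slow
# The awk | sed | grep pipeline above would then emit, relative to the repo root:
#   barretenberg/cpp/build/bin/foo_tests --gtest_filter=UltraCircuit.Basic
# (the DISABLED_ test is filtered out).

# From barretenberg/cpp, preview the generated commands:
./bootstrap.sh test-cmds | head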
@@ -433,7 +433,7 @@ void uint_extended_multiplication(State& state)
*/
static void DoPippengerSetup(const benchmark::State&)
{
bb::srs::init_crs_factory("../srs_db/ignition");
bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path());
}

/**
@@ -23,8 +23,8 @@ class ClientIVCBench : public benchmark::Fixture {

void SetUp([[maybe_unused]] const ::benchmark::State& state) override
{
bb::srs::init_crs_factory("../srs_db/ignition");
bb::srs::init_grumpkin_crs_factory("../srs_db/grumpkin");
bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path());
bb::srs::init_grumpkin_crs_factory(bb::srs::get_grumpkin_crs_path());
}
};

@@ -44,7 +44,7 @@ Builder generate_trace(size_t target_num_gates)

void eccvm_generate_prover(State& state) noexcept
{
bb::srs::init_grumpkin_crs_factory("../srs_db/grumpkin");
bb::srs::init_grumpkin_crs_factory(bb::srs::get_grumpkin_crs_path());

size_t target_num_gates = 1 << static_cast<size_t>(state.range(0));
for (auto _ : state) {
@@ -55,7 +55,7 @@ void eccvm_prove(State& state) noexcept

void eccvm_prove(State& state) noexcept
{
bb::srs::init_grumpkin_crs_factory("../srs_db/grumpkin");
bb::srs::init_grumpkin_crs_factory(bb::srs::get_grumpkin_crs_path());

size_t target_num_gates = 1 << static_cast<size_t>(state.range(0));
Builder builder = generate_trace(target_num_gates);
@@ -20,7 +20,7 @@ std::vector<std::shared_ptr<NativeTranscript>> prover_transcripts(MAX_POLYNOMIAL
std::vector<OpeningClaim<Curve>> opening_claims(MAX_POLYNOMIAL_DEGREE_LOG2 - MIN_POLYNOMIAL_DEGREE_LOG2 + 1);
static void DoSetup(const benchmark::State&)
{
srs::init_grumpkin_crs_factory("../srs_db/grumpkin");
srs::init_grumpkin_crs_factory(bb::srs::get_grumpkin_crs_path());
ck = std::make_shared<CommitmentKey<Curve>>(1 << MAX_POLYNOMIAL_DEGREE_LOG2);
vk = std::make_shared<VerifierCommitmentKey<Curve>>(1 << MAX_POLYNOMIAL_DEGREE_LOG2,
srs::get_grumpkin_crs_factory());
@@ -65,7 +65,7 @@ const auto init = []() {
// constexpr double add_to_mixed_add_complexity = 1.36;

auto reference_string =
std::make_shared<bb::srs::factories::FileProverCrs<curve::BN254>>(NUM_POINTS, "../srs_db/ignition");
std::make_shared<bb::srs::factories::FileProverCrs<curve::BN254>>(NUM_POINTS, bb::srs::get_ignition_crs_path());

int pippenger()
{
@@ -104,7 +104,7 @@ int coset_fft_regular()

int main()
{
bb::srs::init_crs_factory("../srs_db/ignition");
bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path());
std::cout << "initializing" << std::endl;
init();
std::cout << "executing normal fft" << std::endl;
@@ -53,7 +53,7 @@ void fold_k(State& state) noexcept
using ProtogalaxyProver = ProtogalaxyProver_<DeciderProvingKeys_<Flavor, k + 1>>;
using Builder = typename Flavor::CircuitBuilder;

bb::srs::init_crs_factory("../srs_db/ignition");
bb::srs::init_crs_factory(bb::srs::get_ignition_crs_path());

auto log2_num_gates = static_cast<size_t>(state.range(0));
