-
Notifications
You must be signed in to change notification settings - Fork 593
chore: honk int audit #20509
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
chore: honk int audit #20509
Changes from all commits
4819ff9
dcc1ecb
9c9a850
6e9d32a
0764f4e
8bd7ea2
2381dc9
9eeffee
bed5f36
0c3aa02
c88de54
6216367
8762210
adb0379
4bdc8e4
569d622
098ed34
c3a6c85
a533c38
bd2dc11
2bec7c9
eb476e0
38c45a9
51b81b5
aeed0a2
da66220
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -13,7 +13,7 @@ Note: Paths relative to `aztec-packages/barretenberg/cpp/src/barretenberg` | |
| 4. `honk/library/grand_product_library.hpp` | ||
| 5. `honk/proof_system/logderivative_library.hpp` | ||
| 6. `honk/proof_system/types/proof.hpp` | ||
| 7. `honk/types/circuit_type.hpp` | ||
| 7. `common/type_traits.hpp` | ||
| 8. `honk/types/public_inputs_type.hpp` | ||
| 9. `honk/utils/honk_key_gen.hpp` | ||
| 10. `honk/utils/testing.cpp` | ||
|
|
@@ -38,46 +38,42 @@ Note: Paths relative to `aztec-packages/barretenberg/cpp/src/barretenberg` | |
| 25. `flavor/ultra_keccak_flavor.hpp` | ||
| 26. `flavor/ultra_keccak_zk_flavor.hpp` | ||
| 27. `flavor/ultra_recursive_flavor.hpp` | ||
| 28. `flavor/ultra_rollup_flavor.hpp` | ||
| 29. `flavor/ultra_rollup_recursive_flavor.hpp` | ||
| 30. `flavor/ultra_zk_flavor.hpp` | ||
| 31. `flavor/ultra_zk_recursive_flavor.hpp` | ||
| 28. `flavor/ultra_zk_flavor.hpp` | ||
| 29. `flavor/ultra_zk_recursive_flavor.hpp` | ||
|
|
||
| ### Ultra Honk Prover/Verifier | ||
| 32. `ultra_honk/oink_prover.cpp` | ||
| 33. `ultra_honk/oink_prover.hpp` | ||
| 34. `ultra_honk/oink_verifier.cpp` | ||
| 35. `ultra_honk/oink_verifier.hpp` | ||
| 36. `ultra_honk/prover_instance.cpp` | ||
| 37. `ultra_honk/prover_instance.hpp` | ||
| 38. `ultra_honk/ultra_prover.cpp` | ||
| 39. `ultra_honk/ultra_prover.hpp` | ||
| 40. `ultra_honk/ultra_verifier.cpp` | ||
| 41. `ultra_honk/ultra_verifier.hpp` | ||
| 42. `ultra_honk/verifier_instance.hpp` | ||
| 43. `ultra_honk/witness_computation.cpp` | ||
| 44. `ultra_honk/witness_computation.hpp` | ||
| 30. `ultra_honk/oink_prover.cpp` | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. decided to merge |
||
| 31. `ultra_honk/oink_prover.hpp` | ||
| 32. `ultra_honk/oink_verifier.cpp` | ||
| 33. `ultra_honk/oink_verifier.hpp` | ||
| 34. `ultra_honk/prover_instance.cpp` | ||
| 35. `ultra_honk/prover_instance.hpp` | ||
| 36. `ultra_honk/ultra_prover.cpp` | ||
| 37. `ultra_honk/ultra_prover.hpp` | ||
| 38. `ultra_honk/ultra_verifier.cpp` | ||
| 39. `ultra_honk/ultra_verifier.hpp` | ||
| 40. `ultra_honk/verifier_instance.hpp` | ||
|
|
||
| ### Stdlib Honk Verifier | ||
| 45. `stdlib/honk_verifier/ipa_accumulator.hpp` | ||
| 46. `stdlib/proof/proof.hpp` | ||
| 41. `stdlib/honk_verifier/ipa_accumulator.hpp` | ||
| 42. `stdlib/proof/proof.hpp` | ||
|
|
||
| ### Public Input Components | ||
| 47. `public_input_component/public_component_key.hpp` | ||
| 48. `public_input_component/public_input_component.hpp` | ||
| 43. `public_input_component/public_component_key.hpp` | ||
| 44. `public_input_component/public_input_component.hpp` | ||
|
|
||
| ### DSL/ACIR Recursion | ||
| 49. `dsl/acir_format/recursion_constraint.hpp` | ||
| 50. `dsl/acir_format/recursion_constraint.cpp` | ||
| 51. `dsl/acir_format/recursion_constraint_output.hpp` | ||
| 52. `dsl/acir_format/recursion_constraint_output.cpp` | ||
| 53. `dsl/acir_format/honk_recursion_constraint.hpp` | ||
| 54. `dsl/acir_format/honk_recursion_constraint.cpp` | ||
| 53. `dsl/acir_format/mock_verifier_inputs.hpp` | ||
| 45. `dsl/acir_format/recursion_constraint.hpp` | ||
| 46. `dsl/acir_format/recursion_constraint.cpp` | ||
| 47. `dsl/acir_format/recursion_constraint_output.hpp` | ||
| 48. `dsl/acir_format/recursion_constraint_output.cpp` | ||
| 49. `dsl/acir_format/honk_recursion_constraint.hpp` | ||
| 50. `dsl/acir_format/honk_recursion_constraint.cpp` | ||
| 51. `dsl/acir_format/mock_verifier_inputs.hpp` | ||
|
|
||
| ## Summary of Module | ||
|
|
||
| The Honk proving system is Barretenberg's core SNARK proving system implementing Ultra and Mega arithmetization schemes with Sumcheck-based argument of knowledge. The module handles proof generation and verification for complex arithmetic circuits using the Sumcheck protocol, log-derivative lookup arguments for table lookups, and grand product permutation checks for copy constraints. The flavor definitions provide compile-time configuration for different proving system variants (Ultra, Mega, UltraRollup, UltraKeccak) including their recursive and zero-knowledge versions, specifying polynomial types, commitment schemes, and relation sets. The Oink protocol separates preprocessing rounds (wire commitments, sorted list accumulator) from the main proving phase. The trace_to_polynomials component converts execution traces from circuit builders into polynomial representations, populating wires, selectors, and permutation polynomials. The ultra_honk implementation provides the main prover and verifier logic, witness computation, and integration with polynomial commitment schemes (KZG and IPA). | ||
| The Honk proving system is Barretenberg's core SNARK proving system implementing Ultra and Mega arithmetization schemes with Sumcheck-based argument of knowledge. The module handles proof generation and verification for complex arithmetic circuits using the Sumcheck protocol, log-derivative lookup arguments for table lookups, and grand product permutation checks for copy constraints. The flavor definitions provide compile-time configuration for different proving system variants (Ultra, Mega, UltraKeccak) including their recursive and zero-knowledge versions, specifying polynomial types, commitment schemes, and relation sets. The Oink protocol separates preprocessing rounds (wire commitments, sorted list accumulator) from the main proving phase. The trace_to_polynomials component converts execution traces from circuit builders into polynomial representations, populating wires, selectors, and permutation polynomials. The ultra_honk implementation provides the main prover and verifier logic, witness computation, and integration with polynomial commitment schemes (KZG and IPA). | ||
|
|
||
| ## Test Files | ||
| 1. `honk/relation_checker.cpp` | ||
|
|
@@ -96,6 +92,9 @@ The Honk proving system is Barretenberg's core SNARK proving system implementing | |
| 14. `ultra_honk/rom_ram.test.cpp` | ||
| 15. `ultra_honk/range_constraint.test.cpp` | ||
| 16. `dsl/acir_format/honk_recursion_constraint.test.cpp` | ||
| 17. `stdlib/honk_verifier/ultra_recursive_verifier.test.cpp` | ||
|
|
||
| ## Security Mechanisms | ||
|
|
||
| - **Boomerang value detection**: `RecursiveVerifierTest::GraphAnalysisOfRecursiveVerifier` in `stdlib/honk_verifier/ultra_recursive_verifier.test.cpp` uses `StaticAnalyzer` to detect potentially unconstrained witnesses in the recursive verifier. | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -728,7 +728,7 @@ This chain ensures the op queue history is maintained correctly. The Merge proto | |
| ```cpp | ||
| // In OinkVerifier::verify() (called by HypernovaFoldingVerifier for each instance) | ||
| FF vk_hash = vk->hash_with_origin_tagging(*transcript); | ||
| transcript->add_to_hash_buffer(domain_separator + "vk_hash", vk_hash); | ||
| transcript->add_to_hash_buffer("vk_hash", vk_hash); | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. got rid of "domain_separators" |
||
| // All subsequent challenges now depend on this hash | ||
| ``` | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -22,14 +22,8 @@ namespace bb { | |
| // Constructor | ||
| Chonk::Chonk(size_t num_circuits) | ||
| : num_circuits(num_circuits) | ||
| , goblin(bn254_commitment_key) | ||
| { | ||
| BB_ASSERT_GT(num_circuits, 0UL, "Number of circuits must be specified and greater than 0."); | ||
| // Allocate BN254 commitment key based on translator circuit size. | ||
| // https://github.com/AztecProtocol/barretenberg/issues/1319): Account for Translator only when it's necessary | ||
| size_t commitment_key_size = 1UL << TranslatorFlavor::CONST_TRANSLATOR_LOG_N; | ||
| info("BN254 commitment key size: ", commitment_key_size); | ||
| bn254_commitment_key = CommitmentKey<curve::BN254>(commitment_key_size); | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -408,14 +402,6 @@ void Chonk::accumulate(ClientCircuit& circuit, const std::shared_ptr<MegaVerific | |
| debug_incoming_circuit(circuit, prover_instance, precomputed_vk); | ||
| #endif | ||
|
|
||
| // If the current circuit exceeds the current size of the commitment key, reinitialize accordingly. | ||
| // TODO(https://github.com/AztecProtocol/barretenberg/issues/1319) | ||
| if (prover_instance->dyadic_size() > bn254_commitment_key.srs_size) { | ||
| bn254_commitment_key = CommitmentKey<curve::BN254>(prover_instance->dyadic_size()); | ||
| goblin.commitment_key = bn254_commitment_key; | ||
| } | ||
| prover_instance->commitment_key = bn254_commitment_key; | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. prover instance doesn't store ck anymore |
||
|
|
||
| // We're accumulating a kernel if the verification queue is empty (because the kernel circuit contains recursive | ||
| // verifiers for all the entries previously present in the verification queue) and if it's not the first accumulate | ||
| // call (which will always be for an app circuit). | ||
|
|
@@ -460,7 +446,7 @@ void Chonk::accumulate(ClientCircuit& circuit, const std::shared_ptr<MegaVerific | |
| prover.fold(std::move(prover_accumulator), prover_instance, precomputed_vk); | ||
| // Decider uses the NEW prover_accumulator (result of fold) | ||
| DeciderProver decider(prover_accumulation_transcript); | ||
| decider_proof = decider.construct_proof(bn254_commitment_key, prover_accumulator); | ||
| decider_proof = decider.construct_proof(prover_accumulator); | ||
| break; | ||
| } | ||
| case QUEUE_TYPE::MEGA: | ||
|
|
@@ -535,7 +521,7 @@ void Chonk::hide_op_queue_content_in_hiding(ClientCircuit& circuit) | |
| HonkProof Chonk::construct_honk_proof_for_hiding_kernel(ClientCircuit& circuit, | ||
| const std::shared_ptr<MegaVerificationKey>& verification_key) | ||
| { | ||
| auto hiding_prover_inst = std::make_shared<DeciderZKProvingKey>(circuit, bn254_commitment_key); | ||
| auto hiding_prover_inst = std::make_shared<DeciderZKProvingKey>(circuit); | ||
|
|
||
| // Hiding kernel is proven by a MegaZKProver | ||
| MegaZKProver prover(hiding_prover_inst, verification_key, transcript); | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -533,64 +533,44 @@ template <typename Curve, bool HasZK = false> class ShpleminiVerifier_ { | |
| * corresponding entries in both vectors. | ||
| * | ||
| */ | ||
| // TODO(https://github.com/AztecProtocol/barretenberg/issues/1151) Avoid erasing vector elements. | ||
| static void remove_repeated_commitments(std::vector<Commitment>& commitments, | ||
| std::vector<Fr>& scalars, | ||
| const RepeatedCommitmentsData& repeated_commitments, | ||
| bool has_zk) | ||
| { | ||
| // We started populating commitments and scalars by adding Shplonk:Q commitment and the corresponding scalar | ||
| // factor 1. In the case of ZK, we also added Gemini:masking_poly_comm before populating the vector with | ||
| // commitments to prover polynomials | ||
| // The commitments/scalars vectors start with Shplonk:Q (and Gemini:masking_poly_comm if ZK) | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Re the TODO above, surely this isn't a problem for such small containers, no?
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. yeah, and the issue is closed |
||
| // before the prover polynomial commitments, so offset the AllEntities indices accordingly. | ||
| const size_t offset = has_zk ? 2 : 1; | ||
|
|
||
| // Extract the indices from the container, which is normally created in a given Flavor | ||
| const size_t& first_range_to_be_shifted_start = repeated_commitments.first_range_to_be_shifted_start + offset; | ||
| const size_t& first_range_shifted_start = repeated_commitments.first_range_shifted_start + offset; | ||
| const size_t& first_range_size = repeated_commitments.first_range_size; | ||
| const auto& r1 = repeated_commitments.first; | ||
| const auto& r2 = repeated_commitments.second; | ||
| const size_t first_original_start = r1.original_start + offset; | ||
| const size_t first_duplicate_start = r1.duplicate_start + offset; | ||
| const size_t second_original_start = r2.original_start + offset; | ||
| const size_t second_duplicate_start = r2.duplicate_start + offset; | ||
|
|
||
| const size_t& second_range_to_be_shifted_start = repeated_commitments.second_range_to_be_shifted_start + offset; | ||
| const size_t& second_range_shifted_start = repeated_commitments.second_range_shifted_start + offset; | ||
| const size_t& second_range_size = repeated_commitments.second_range_size; | ||
|
|
||
| // Iterate over the first range of to-be-shifted scalars and their shifted counterparts | ||
| for (size_t i = 0; i < first_range_size; i++) { | ||
| size_t idx_to_be_shifted = i + first_range_to_be_shifted_start; | ||
| size_t idx_shifted = i + first_range_shifted_start; | ||
| scalars[idx_to_be_shifted] = scalars[idx_to_be_shifted] + scalars[idx_shifted]; | ||
| // Fold duplicate scalars into their originals | ||
| for (size_t i = 0; i < r1.count; i++) { | ||
| scalars[i + first_original_start] = scalars[i + first_original_start] + scalars[i + first_duplicate_start]; | ||
| } | ||
|
|
||
| // Iterate over the second range of to-be-shifted precomputed scalars and their shifted counterparts (if | ||
| // provided) | ||
| for (size_t i = 0; i < second_range_size; i++) { | ||
| size_t idx_to_be_shifted = i + second_range_to_be_shifted_start; | ||
| size_t idx_shifted = i + second_range_shifted_start; | ||
| scalars[idx_to_be_shifted] = scalars[idx_to_be_shifted] + scalars[idx_shifted]; | ||
| for (size_t i = 0; i < r2.count; i++) { | ||
| scalars[i + second_original_start] = | ||
| scalars[i + second_original_start] + scalars[i + second_duplicate_start]; | ||
| } | ||
|
|
||
| if (second_range_shifted_start > first_range_shifted_start) { | ||
| // Erase the shifted scalars and commitments from the second range (if provided) | ||
| for (size_t i = 0; i < second_range_size; ++i) { | ||
| scalars.erase(scalars.begin() + static_cast<std::ptrdiff_t>(second_range_shifted_start)); | ||
| commitments.erase(commitments.begin() + static_cast<std::ptrdiff_t>(second_range_shifted_start)); | ||
| } | ||
|
|
||
| // Erase the shifted scalars and commitments from the first range | ||
| for (size_t i = 0; i < first_range_size; ++i) { | ||
| scalars.erase(scalars.begin() + static_cast<std::ptrdiff_t>(first_range_shifted_start)); | ||
| commitments.erase(commitments.begin() + static_cast<std::ptrdiff_t>(first_range_shifted_start)); | ||
| // Erase the duplicate entries (higher-index range first to preserve lower indices) | ||
| auto erase_range = [&](size_t start, size_t count) { | ||
| for (size_t i = 0; i < count; ++i) { | ||
| scalars.erase(scalars.begin() + static_cast<std::ptrdiff_t>(start)); | ||
| commitments.erase(commitments.begin() + static_cast<std::ptrdiff_t>(start)); | ||
| } | ||
| }; | ||
| if (second_duplicate_start > first_duplicate_start) { | ||
| erase_range(second_duplicate_start, r2.count); | ||
| erase_range(first_duplicate_start, r1.count); | ||
| } else { | ||
| // Erase the shifted scalars and commitments from the first range | ||
| for (size_t i = 0; i < first_range_size; ++i) { | ||
| scalars.erase(scalars.begin() + static_cast<std::ptrdiff_t>(first_range_shifted_start)); | ||
| commitments.erase(commitments.begin() + static_cast<std::ptrdiff_t>(first_range_shifted_start)); | ||
| } | ||
| // Erase the shifted scalars and commitments from the second range (if provided) | ||
| for (size_t i = 0; i < second_range_size; ++i) { | ||
| scalars.erase(scalars.begin() + static_cast<std::ptrdiff_t>(second_range_shifted_start)); | ||
| commitments.erase(commitments.begin() + static_cast<std::ptrdiff_t>(second_range_shifted_start)); | ||
| } | ||
| erase_range(first_duplicate_start, r1.count); | ||
| erase_range(second_duplicate_start, r2.count); | ||
| } | ||
| } | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,9 @@ | ||
| #pragma once | ||
| #include <concepts> | ||
|
|
||
| namespace bb { | ||
|
|
||
| template <typename T, typename... U> | ||
| concept IsAnyOf = (std::same_as<T, U> || ...); | ||
|
|
||
| } // namespace bb |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
rollup flavors gone