From ad169374943ef49c32eabc66483a7be28a711565 Mon Sep 17 00:00:00 2001 From: ledwards2225 <98505400+ledwards2225@users.noreply.github.com> Date: Tue, 10 Oct 2023 08:30:39 -0700 Subject: [PATCH] feat: Separate aggregation protocol (#2736) This PR moves the Goblin ECC op queue transcript aggregation protocol from the main Honk protocol to its own separate mini-protocol, referred to as "Merge" (based on Zac's original Goblin doc). Zac pointed out that this was likely the right approach once we go to incorporate folding. This is also a necessary step for completing integration of ZeroMorph (and deprecation of Gemini/Shplonk) because the univariate evaluation claims related to this merge protocol would have otherwise needed to be incorporated via Shplonk. This work automatically resolves one of the issues previously described in bberg [723](https://github.com/AztecProtocol/barretenberg/issues/723) related to the size of the transcript polynomials being tied to the size of the present circuit. --- .../goblin/full_goblin_composer.test.cpp | 94 ++++++++++---- .../composer/goblin_ultra_composer.test.cpp | 116 ++++++++++++++--- .../honk/composer/ultra_composer.hpp | 30 +++++ .../barretenberg/honk/flavor/goblin_ultra.hpp | 2 - .../honk/instance/prover_instance.cpp | 1 - .../cpp/src/barretenberg/honk/pcs/claim.hpp | 14 ++ .../goblin_merge/merge_prover.cpp | 120 ++++++++++++++++++ .../goblin_merge/merge_prover.hpp | 44 +++++++ .../goblin_merge/merge_verifier.cpp | 84 ++++++++++++ .../goblin_merge/merge_verifier.hpp | 42 ++++++ .../honk/proof_system/ultra_prover.cpp | 106 ---------------- .../honk/proof_system/ultra_verifier.cpp | 49 ------- .../goblin_ultra_circuit_builder.cpp | 3 - .../verifier/ultra_recursive_verifier.cpp | 51 -------- 14 files changed, 502 insertions(+), 254 deletions(-) create mode 100644 barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.cpp create mode 100644 barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.hpp create mode 100644 barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.cpp create mode 100644 barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.hpp diff --git a/barretenberg/cpp/src/barretenberg/honk/composer/goblin/full_goblin_composer.test.cpp b/barretenberg/cpp/src/barretenberg/honk/composer/goblin/full_goblin_composer.test.cpp index e5d1995fb89..2012cb1547a 100644 --- a/barretenberg/cpp/src/barretenberg/honk/composer/goblin/full_goblin_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/honk/composer/goblin/full_goblin_composer.test.cpp @@ -101,13 +101,56 @@ class FullGoblinComposerTests : public ::testing::Test { // Store the commitment data for use by the prover of the next circuit op_queue->set_commitment_data(op_queue_commitments); } + + /** + * @brief Construct and a verify a Honk proof + * + */ + bool construct_and_verify_honk_proof(auto& composer, auto& builder) + { + auto instance = composer.create_instance(builder); + auto prover = composer.create_prover(instance); + auto verifier = composer.create_verifier(instance); + auto proof = prover.construct_proof(); + bool verified = verifier.verify_proof(proof); + + return verified; + } + + /** + * @brief Construct and verify a Goblin ECC op queue merge proof + * + */ + bool construct_and_verify_merge_proof(auto& composer, auto& op_queue) + { + auto merge_prover = composer.create_merge_prover(op_queue); + auto merge_verifier = composer.create_merge_verifier(10); + auto merge_proof = 
merge_prover.construct_proof(); + bool verified = merge_verifier.verify_proof(merge_proof); + + return verified; + } + + /** + * @brief Construct and verify a Goblin ECC op queue merge proof + * + */ + bool construct_and_verify_eccvm_proof(auto& composer, auto& builder) + { + auto prover = composer.create_prover(builder); + auto proof = prover.construct_proof(); + auto verifier = composer.create_verifier(builder); + bool verified = verifier.verify_proof(proof); + + return verified; + } }; /** * @brief Test proof construction/verification for a circuit with ECC op gates, public inputs, and basic arithmetic * gates * @note We simulate op queue interactions with a previous circuit so the actual circuit under test utilizes an op queue - * with non-empty 'previous' data. This avoid complications with zero-commitments etc. + * with non-empty 'previous' data. This avoids complications with zero-commitments etc. * */ TEST_F(FullGoblinComposerTests, SimpleCircuit) @@ -124,13 +167,16 @@ TEST_F(FullGoblinComposerTests, SimpleCircuit) generate_test_circuit(builder); + // The same composer is used to manage Honk and Merge prover/verifier auto composer = GoblinUltraComposer(); - auto instance = composer.create_instance(builder); - auto prover = composer.create_prover(instance); - auto verifier = composer.create_verifier(instance); - auto proof = prover.construct_proof(); - bool verified = verifier.verify_proof(proof); - EXPECT_EQ(verified, true); + + // Construct and verify Ultra Goblin Honk proof + auto honk_verified = construct_and_verify_honk_proof(composer, builder); + EXPECT_TRUE(honk_verified); + + // Construct and verify op queue merge proof + auto merge_verified = construct_and_verify_merge_proof(composer, op_queue); + EXPECT_TRUE(merge_verified); } // Construct an ECCVM circuit then generate and verify its proof @@ -138,15 +184,10 @@ TEST_F(FullGoblinComposerTests, SimpleCircuit) // Instantiate an ECCVM builder with the vm ops stored in the op queue auto builder = ECCVMBuilder(op_queue->raw_ops); - // // Can fiddle with one of the operands to trigger a failure - // builder.vm_operations[0].z1 *= 2; - + // Construct and verify ECCVM proof auto composer = ECCVMComposer(); - auto prover = composer.create_prover(builder); - auto proof = prover.construct_proof(); - auto verifier = composer.create_verifier(builder); - bool verified = verifier.verify_proof(proof); - ASSERT_TRUE(verified); + auto eccvm_verified = construct_and_verify_eccvm_proof(composer, builder); + EXPECT_TRUE(eccvm_verified); } } @@ -168,13 +209,16 @@ TEST_F(FullGoblinComposerTests, SimpleCircuitFailureCase) generate_test_circuit(builder); + // The same composer is used to manage Honk and Merge prover/verifier auto composer = GoblinUltraComposer(); - auto instance = composer.create_instance(builder); - auto prover = composer.create_prover(instance); - auto verifier = composer.create_verifier(instance); - auto proof = prover.construct_proof(); - bool verified = verifier.verify_proof(proof); - EXPECT_EQ(verified, true); + + // Construct and verify Ultra Goblin Honk proof + auto honk_verified = construct_and_verify_honk_proof(composer, builder); + EXPECT_TRUE(honk_verified); + + // Construct and verify op queue merge proof + auto merge_verified = construct_and_verify_merge_proof(composer, op_queue); + EXPECT_TRUE(merge_verified); } // Construct an ECCVM circuit then generate and verify its proof @@ -185,12 +229,10 @@ TEST_F(FullGoblinComposerTests, SimpleCircuitFailureCase) // Fiddle with one of the operands to trigger a failure 
builder.vm_operations[0].z1 += 1; + // Construct and verify ECCVM proof auto composer = ECCVMComposer(); - auto prover = composer.create_prover(builder); - auto proof = prover.construct_proof(); - auto verifier = composer.create_verifier(builder); - bool verified = verifier.verify_proof(proof); - EXPECT_EQ(verified, false); + auto eccvm_verified = construct_and_verify_eccvm_proof(composer, builder); + EXPECT_FALSE(eccvm_verified); } } diff --git a/barretenberg/cpp/src/barretenberg/honk/composer/goblin_ultra_composer.test.cpp b/barretenberg/cpp/src/barretenberg/honk/composer/goblin_ultra_composer.test.cpp index 5d33532eb1b..ee13dff5b1e 100644 --- a/barretenberg/cpp/src/barretenberg/honk/composer/goblin_ultra_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/honk/composer/goblin_ultra_composer.test.cpp @@ -56,18 +56,11 @@ class GoblinUltraHonkComposerTests : public ::testing::Test { } /** - * @brief Construct a goblin ultra circuit then generate a verify its proof + * @brief Construct and a verify a Honk proof * - * @param op_queue - * @return auto */ - bool construct_test_circuit_then_generate_and_verify_proof(auto& op_queue) + bool construct_and_verify_honk_proof(auto& composer, auto& builder) { - auto builder = proof_system::GoblinUltraCircuitBuilder(op_queue); - - generate_test_circuit(builder); - - auto composer = GoblinUltraComposer(); auto instance = composer.create_instance(builder); auto prover = composer.create_prover(instance); auto verifier = composer.create_verifier(instance); @@ -76,6 +69,20 @@ class GoblinUltraHonkComposerTests : public ::testing::Test { return verified; } + + /** + * @brief Construct and verify a Goblin ECC op queue merge proof + * + */ + bool construct_and_verify_merge_proof(auto& composer, auto& op_queue) + { + auto merge_prover = composer.create_merge_prover(op_queue); + auto merge_verifier = composer.create_merge_verifier(10); + auto merge_proof = merge_prover.construct_proof(); + bool verified = merge_verifier.verify_proof(merge_proof); + + return verified; + } }; /** @@ -92,18 +99,27 @@ TEST_F(GoblinUltraHonkComposerTests, SingleCircuit) // Add mock data to op queue to simulate interaction with a previous circuit op_queue->populate_with_mock_initital_data(); - // Construct a test circuit then generate and verify its proof - auto verified = construct_test_circuit_then_generate_and_verify_proof(op_queue); + auto builder = proof_system::GoblinUltraCircuitBuilder(op_queue); + + generate_test_circuit(builder); - EXPECT_EQ(verified, true); + auto composer = GoblinUltraComposer(); + + // Construct and verify Honk proof + auto honk_verified = construct_and_verify_honk_proof(composer, builder); + EXPECT_TRUE(honk_verified); + + // Construct and verify Goblin ECC op queue Merge proof + auto merge_verified = construct_and_verify_merge_proof(composer, op_queue); + EXPECT_TRUE(merge_verified); } /** - * @brief Test proof construction/verification for a circuit with ECC op gates, public inputs, and basic arithmetic - * gates + * @brief Test Merge proof construction/verification for multiple circuits with ECC op gates, public inputs, and + * basic arithmetic gates * */ -TEST_F(GoblinUltraHonkComposerTests, MultipleCircuits) +TEST_F(GoblinUltraHonkComposerTests, MultipleCircuitsMergeOnly) { // Instantiate EccOpQueue. This will be shared across all circuits in the series auto op_queue = std::make_shared(); @@ -114,7 +130,75 @@ TEST_F(GoblinUltraHonkComposerTests, MultipleCircuits) // Construct multiple test circuits that share an ECC op queue. 
Generate and verify a proof for each. size_t NUM_CIRCUITS = 3; for (size_t i = 0; i < NUM_CIRCUITS; ++i) { - construct_test_circuit_then_generate_and_verify_proof(op_queue); + auto builder = proof_system::GoblinUltraCircuitBuilder(op_queue); + + generate_test_circuit(builder); + + auto composer = GoblinUltraComposer(); + + // Construct and verify Goblin ECC op queue Merge its proof + auto merge_verified = construct_and_verify_merge_proof(composer, op_queue); + EXPECT_TRUE(merge_verified); + } +} + +/** + * @brief Test Honk proof construction/verification for multiple circuits with ECC op gates, public inputs, and + * basic arithmetic gates + * + */ +TEST_F(GoblinUltraHonkComposerTests, MultipleCircuitsHonkOnly) +{ + // Instantiate EccOpQueue. This will be shared across all circuits in the series + auto op_queue = std::make_shared(); + + // Add mock data to op queue to simulate interaction with a previous circuit + op_queue->populate_with_mock_initital_data(); + + // Construct multiple test circuits that share an ECC op queue. Generate and verify a proof for each. + size_t NUM_CIRCUITS = 3; + for (size_t i = 0; i < NUM_CIRCUITS; ++i) { + auto builder = proof_system::GoblinUltraCircuitBuilder(op_queue); + + generate_test_circuit(builder); + + auto composer = GoblinUltraComposer(); + + // Construct and verify Honk proof + auto honk_verified = construct_and_verify_honk_proof(composer, builder); + EXPECT_TRUE(honk_verified); + } +} + +/** + * @brief Test Honk and Merge proof construction/verification for multiple circuits with ECC op gates, public inputs, + * and basic arithmetic gates + * + */ +TEST_F(GoblinUltraHonkComposerTests, MultipleCircuitsHonkAndMerge) +{ + // Instantiate EccOpQueue. This will be shared across all circuits in the series + auto op_queue = std::make_shared(); + + // Add mock data to op queue to simulate interaction with a previous circuit + op_queue->populate_with_mock_initital_data(); + + // Construct multiple test circuits that share an ECC op queue. Generate and verify a proof for each. 
+ size_t NUM_CIRCUITS = 3; + for (size_t i = 0; i < NUM_CIRCUITS; ++i) { + auto builder = proof_system::GoblinUltraCircuitBuilder(op_queue); + + generate_test_circuit(builder); + + auto composer = GoblinUltraComposer(); + + // Construct and verify Honk proof + auto honk_verified = construct_and_verify_honk_proof(composer, builder); + EXPECT_TRUE(honk_verified); + + // Construct and verify Goblin ECC op queue Merge its proof + auto merge_verified = construct_and_verify_merge_proof(composer, op_queue); + EXPECT_TRUE(merge_verified); } // Compute the commitments to the aggregate op queue directly and check that they match those that were computed diff --git a/barretenberg/cpp/src/barretenberg/honk/composer/ultra_composer.hpp b/barretenberg/cpp/src/barretenberg/honk/composer/ultra_composer.hpp index 8cf4c27c8aa..7451cf23bb8 100644 --- a/barretenberg/cpp/src/barretenberg/honk/composer/ultra_composer.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/composer/ultra_composer.hpp @@ -1,5 +1,7 @@ #pragma once #include "barretenberg/honk/instance/prover_instance.hpp" +#include "barretenberg/honk/proof_system/goblin_merge/merge_prover.hpp" +#include "barretenberg/honk/proof_system/goblin_merge/merge_verifier.hpp" #include "barretenberg/honk/proof_system/protogalaxy_prover.hpp" #include "barretenberg/honk/proof_system/protogalaxy_verifier.hpp" #include "barretenberg/honk/proof_system/ultra_prover.hpp" @@ -72,6 +74,34 @@ template class UltraComposer_ { UltraProver_ create_prover(std::shared_ptr); UltraVerifier_ create_verifier(std::shared_ptr); + /** + * @brief Create Prover for Goblin ECC op queue merge protocol + * + * @param op_queue + * @return MergeProver_ + */ + MergeProver_ create_merge_prover(std::shared_ptr op_queue) + { + // Store the previous aggregate op queue size and update the current one + op_queue->set_size_data(); + // Merge requires a commitment key with size equal to that of the current op queue transcript T_i since the + // shift of the current contribution t_i will be of degree equal to deg(T_i) + auto commitment_key = compute_commitment_key(op_queue->get_current_size()); + return MergeProver_(commitment_key, op_queue); + } + + /** + * @brief Create Verifier for Goblin ECC op queue merge protocol + * + * @param size Size of commitment key required to commit to shifted op queue contribution t_i + * @return MergeVerifier_ + */ + MergeVerifier_ create_merge_verifier(size_t size) + { + auto pcs_verification_key = std::make_unique(size, crs_factory_); + return MergeVerifier_(std::move(pcs_verification_key)); + } + ProtoGalaxyProver_ create_folding_prover(std::vector> instances) { ProverInstances insts(instances); diff --git a/barretenberg/cpp/src/barretenberg/honk/flavor/goblin_ultra.hpp b/barretenberg/cpp/src/barretenberg/honk/flavor/goblin_ultra.hpp index 006bba75d6e..95b79c204c2 100644 --- a/barretenberg/cpp/src/barretenberg/honk/flavor/goblin_ultra.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/flavor/goblin_ultra.hpp @@ -288,8 +288,6 @@ class GoblinUltra { size_t num_ecc_op_gates; // needed to determine public input offset - std::shared_ptr op_queue; - // The plookup wires that store plookup read data. 
std::array get_table_column_wires() { return { w_l, w_r, w_o }; }; }; diff --git a/barretenberg/cpp/src/barretenberg/honk/instance/prover_instance.cpp b/barretenberg/cpp/src/barretenberg/honk/instance/prover_instance.cpp index 96154837399..76b3e5eb949 100644 --- a/barretenberg/cpp/src/barretenberg/honk/instance/prover_instance.cpp +++ b/barretenberg/cpp/src/barretenberg/honk/instance/prover_instance.cpp @@ -249,7 +249,6 @@ std::shared_ptr ProverInstance_::compute_pr if constexpr (IsGoblinFlavor) { proving_key->num_ecc_op_gates = num_ecc_op_gates; - proving_key->op_queue = circuit.op_queue; } return proving_key; diff --git a/barretenberg/cpp/src/barretenberg/honk/pcs/claim.hpp b/barretenberg/cpp/src/barretenberg/honk/pcs/claim.hpp index 9daeeb70746..05f405494f8 100644 --- a/barretenberg/cpp/src/barretenberg/honk/pcs/claim.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/pcs/claim.hpp @@ -19,6 +19,20 @@ template class OpeningPair { bool operator==(const OpeningPair& other) const = default; }; +/** + * @brief Polynomial p and an opening pair (r,v) such that p(r) = v + * + * @tparam Params for the given commitment scheme + */ +template class ProverOpeningClaim { + using Fr = typename Curve::ScalarField; + using Polynomial = barretenberg::Polynomial; + + public: + Polynomial polynomial; // p + OpeningPair opening_pair; // (challenge r, evaluation v = p(r)) +}; + /** * @brief Unverified claim (C,r,v) for some witness polynomial p(X) such that * - C = Commit(p(X)) diff --git a/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.cpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.cpp new file mode 100644 index 00000000000..d9e579cc22d --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.cpp @@ -0,0 +1,120 @@ +#include "merge_prover.hpp" + +namespace proof_system::honk { + +/** + * Create MergeProver_ + * + */ +template +MergeProver_::MergeProver_(std::shared_ptr commitment_key, std::shared_ptr op_queue) + : op_queue(op_queue) + , pcs_commitment_key(commitment_key) +{} + +/** + * @brief Prove proper construction of the aggregate Goblin ECC op queue polynomials T_i^(j), j = 1,2,3,4. + * @details Let T_i^(j) be the jth column of the aggregate op queue after incorporating the contribution from the + * present circuit. T_{i-1}^(j) corresponds to the aggregate op queue at the previous stage and $t_i^(j)$ represents + * the contribution from the present circuit only. For each j, we have the relationship T_i = T_{i-1} + right_shift(t_i, + * M_{i-1}), where the shift magnitude M_{i-1} is the length of T_{i-1}. This protocol demonstrates that the aggregate + * op queue has been constructed correctly via a simple Schwartz-Zippel check. Evaluations are proven via batched KZG. + * + * TODO(#746): Prove connection between t_i^{shift}, committed to herein, and t_i, used in the main protocol. See issue + * for details (https://github.com/AztecProtocol/barretenberg/issues/746). + * + * @tparam Flavor + * @return plonk::proof& + */ +template plonk::proof& MergeProver_::construct_proof() +{ + size_t N = op_queue->get_current_size(); + + // Extract T_i, T_{i-1} + auto T_current = op_queue->get_aggregate_transcript(); + auto T_prev = op_queue->get_previous_aggregate_transcript(); + // TODO(#723): Cannot currently support an empty T_{i-1}. Need to be able to properly handle zero commitment. 
+ ASSERT(T_prev[0].size() > 0); + + // Construct t_i^{shift} as T_i - T_{i-1} + std::array t_shift; + for (size_t i = 0; i < Flavor::NUM_WIRES; ++i) { + t_shift[i] = Polynomial(T_current[i]); + t_shift[i] -= T_prev[i]; + } + + // Compute/get commitments [t_i^{shift}], [T_{i-1}], and [T_i] and add to transcript + std::array C_T_current; + for (size_t idx = 0; idx < t_shift.size(); ++idx) { + // Get previous transcript commitment [T_{i-1}] from op queue + auto C_T_prev = op_queue->ultra_ops_commitments[idx]; + // Compute commitment [t_i^{shift}] directly + auto C_t_shift = pcs_commitment_key->commit(t_shift[idx]); + // Compute updated aggregate transcript commitment as [T_i] = [T_{i-1}] + [t_i^{shift}] + C_T_current[idx] = C_T_prev + C_t_shift; + + std::string suffix = std::to_string(idx + 1); + transcript.send_to_verifier("T_PREV_" + suffix, C_T_prev); + transcript.send_to_verifier("t_SHIFT_" + suffix, C_t_shift); + transcript.send_to_verifier("T_CURRENT_" + suffix, C_T_current[idx]); + } + + // Store the commitments [T_{i}] (to be used later in subsequent iterations as [T_{i-1}]). + op_queue->set_commitment_data(C_T_current); + + // Compute evaluations T_i(\kappa), T_{i-1}(\kappa), t_i^{shift}(\kappa), add to transcript. For each polynomial + // we add a univariate opening claim {p(X), (\kappa, p(\kappa))} to the set of claims to be checked via batched KZG. + auto kappa = transcript.get_challenge("kappa"); + + // Add univariate opening claims for each polynomial. + std::vector opening_claims; + // Compute evaluation T_{i-1}(\kappa) + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + auto polynomial = Polynomial(T_prev[idx]); + auto evaluation = polynomial.evaluate(kappa); + transcript.send_to_verifier("T_prev_eval_" + std::to_string(idx + 1), evaluation); + opening_claims.emplace_back(OpeningClaim{ polynomial, { kappa, evaluation } }); + } + // Compute evaluation t_i^{shift}(\kappa) + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + auto evaluation = t_shift[idx].evaluate(kappa); + transcript.send_to_verifier("t_shift_eval_" + std::to_string(idx + 1), evaluation); + opening_claims.emplace_back(OpeningClaim{ t_shift[idx], { kappa, evaluation } }); + } + // Compute evaluation T_i(\kappa) + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + auto polynomial = Polynomial(T_current[idx]); + auto evaluation = polynomial.evaluate(kappa); + transcript.send_to_verifier("T_current_eval_" + std::to_string(idx + 1), evaluation); + opening_claims.emplace_back(OpeningClaim{ polynomial, { kappa, evaluation } }); + } + + auto alpha = transcript.get_challenge("alpha"); + + // Constuct batched polynomial to opened via KZG + auto batched_polynomial = Polynomial(N); + auto batched_eval = FF(0); + auto alpha_pow = FF(1); + for (auto& claim : opening_claims) { + batched_polynomial.add_scaled(claim.polynomial, alpha_pow); + batched_eval += alpha_pow * claim.opening_pair.evaluation; + alpha_pow *= alpha; + } + + // Construct and commit to KZG quotient polynomial q = (f - v) / (X - kappa) + auto quotient = batched_polynomial; + quotient[0] -= batched_eval; + quotient.factor_roots(kappa); + + auto quotient_commitment = pcs_commitment_key->commit(quotient); + transcript.send_to_verifier("KZG:W", quotient_commitment); + + proof.proof_data = transcript.proof_data; + return proof; +} + +template class MergeProver_; +template class MergeProver_; +template class MergeProver_; + +} // namespace proof_system::honk \ No newline at end of file diff --git 
a/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.hpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.hpp new file mode 100644 index 00000000000..5050de81600 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_prover.hpp @@ -0,0 +1,44 @@ +#pragma once + +#include "barretenberg/honk/flavor/goblin_ultra.hpp" +#include "barretenberg/honk/flavor/ultra.hpp" +#include "barretenberg/honk/flavor/ultra_grumpkin.hpp" +#include "barretenberg/honk/pcs/claim.hpp" +#include "barretenberg/honk/transcript/transcript.hpp" +#include "barretenberg/plonk/proof_system/types/proof.hpp" +#include "barretenberg/proof_system/op_queue/ecc_op_queue.hpp" + +namespace proof_system::honk { + +/** + * @brief Prover class for the Goblin ECC op queue transcript merge protocol + * + * @tparam Flavor + */ +template class MergeProver_ { + using FF = typename Flavor::FF; + using Polynomial = typename Flavor::Polynomial; + using CommitmentKey = typename Flavor::CommitmentKey; + using Commitment = typename Flavor::Commitment; + using PCS = typename Flavor::PCS; + using Curve = typename Flavor::Curve; + using OpeningClaim = typename pcs::ProverOpeningClaim; + using OpeningPair = typename pcs::OpeningPair; + + public: + ProverTranscript transcript; + std::shared_ptr op_queue; + std::shared_ptr pcs_commitment_key; + + explicit MergeProver_(std::shared_ptr, std::shared_ptr); + plonk::proof& construct_proof(); + + private: + plonk::proof proof; +}; + +extern template class MergeProver_; +extern template class MergeProver_; +extern template class MergeProver_; + +} // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.cpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.cpp new file mode 100644 index 00000000000..fea6b5611df --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.cpp @@ -0,0 +1,84 @@ +#include "merge_verifier.hpp" + +namespace proof_system::honk { + +template +MergeVerifier_::MergeVerifier_(std::unique_ptr verification_key) + : pcs_verification_key(std::move(verification_key)){}; + +/** + * @brief Verify proper construction of the aggregate Goblin ECC op queue polynomials T_i^(j), j = 1,2,3,4. + * @details Let T_i^(j) be the jth column of the aggregate op queue after incorporating the contribution from the + * present circuit. T_{i-1}^(j) corresponds to the aggregate op queue at the previous stage and $t_i^(j)$ represents + * the contribution from the present circuit only. For each j, we have the relationship T_i = T_{i-1} + right_shift(t_i, + * M_{i-1}), where the shift magnitude M_{i-1} is the length of T_{i-1}. This protocol verfies that the aggregate op + * queue has been constructed correctly via a simple Schwartz-Zippel check. Evaluations are checked via batched KZG. 
+ * + * @tparam Flavor + * @return plonk::proof& + */ +template bool MergeVerifier_::verify_proof(const plonk::proof& proof) +{ + transcript = VerifierTranscript{ proof.proof_data }; + + // Receive commitments [t_i^{shift}], [T_{i-1}], and [T_i] + std::array C_T_prev; + std::array C_t_shift; + std::array C_T_current; + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + C_T_prev[idx] = transcript.template receive_from_prover("T_PREV_" + std::to_string(idx + 1)); + C_t_shift[idx] = transcript.template receive_from_prover("t_SHIFT_" + std::to_string(idx + 1)); + C_T_current[idx] = transcript.template receive_from_prover("T_CURRENT_" + std::to_string(idx + 1)); + } + + FF kappa = transcript.get_challenge("kappa"); + + // Receive transcript poly evaluations and add corresponding univariate opening claims {(\kappa, p(\kappa), [p(X)]} + std::array T_prev_evals; + std::array t_shift_evals; + std::array T_current_evals; + std::vector opening_claims; + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + T_prev_evals[idx] = transcript.template receive_from_prover("T_prev_eval_" + std::to_string(idx + 1)); + opening_claims.emplace_back(pcs::OpeningClaim{ { kappa, T_prev_evals[idx] }, C_T_prev[idx] }); + } + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + t_shift_evals[idx] = transcript.template receive_from_prover("t_shift_eval_" + std::to_string(idx + 1)); + opening_claims.emplace_back(pcs::OpeningClaim{ { kappa, t_shift_evals[idx] }, C_t_shift[idx] }); + } + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + T_current_evals[idx] = transcript.template receive_from_prover("T_current_eval_" + std::to_string(idx + 1)); + opening_claims.emplace_back(pcs::OpeningClaim{ { kappa, T_current_evals[idx] }, C_T_current[idx] }); + } + + // Check the identity T_i(\kappa) = T_{i-1}(\kappa) + t_i^{shift}(\kappa). 
If it fails, return false + bool identity_checked = true; + for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { + identity_checked = identity_checked && (T_current_evals[idx] == T_prev_evals[idx] + t_shift_evals[idx]); + } + + auto alpha = transcript.get_challenge("alpha"); + + // Constuct batched commitment and evaluation from constituents + auto batched_commitment = opening_claims[0].commitment; + auto batched_eval = opening_claims[0].opening_pair.evaluation; + auto alpha_pow = alpha; + for (size_t idx = 1; idx < opening_claims.size(); ++idx) { + auto& claim = opening_claims[idx]; + batched_commitment = batched_commitment + (claim.commitment * alpha_pow); + batched_eval += alpha_pow * claim.opening_pair.evaluation; + alpha_pow *= alpha; + } + + OpeningClaim batched_claim = { { kappa, batched_eval }, batched_commitment }; + + auto verified = PCS::verify(pcs_verification_key, batched_claim, transcript); + + return identity_checked && verified; +} + +template class MergeVerifier_; +template class MergeVerifier_; +template class MergeVerifier_; + +} // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.hpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.hpp new file mode 100644 index 00000000000..b2b0a3d22b4 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/goblin_merge/merge_verifier.hpp @@ -0,0 +1,42 @@ +#pragma once + +#include "barretenberg/honk/flavor/goblin_ultra.hpp" +#include "barretenberg/honk/flavor/ultra.hpp" +#include "barretenberg/honk/flavor/ultra_grumpkin.hpp" +#include "barretenberg/honk/pcs/claim.hpp" +#include "barretenberg/honk/transcript/transcript.hpp" +#include "barretenberg/plonk/proof_system/types/proof.hpp" +#include "barretenberg/proof_system/op_queue/ecc_op_queue.hpp" + +namespace proof_system::honk { + +/** + * @brief Verifier class for the Goblin ECC op queue transcript merge protocol + * + * @tparam Flavor + */ +template class MergeVerifier_ { + using FF = typename Flavor::FF; + using Polynomial = typename Flavor::Polynomial; + using CommitmentKey = typename Flavor::CommitmentKey; + using Commitment = typename Flavor::Commitment; + using PCS = typename Flavor::PCS; + using Curve = typename Flavor::Curve; + using OpeningClaim = typename pcs::OpeningClaim; + using VerificationKey = typename Flavor::VerificationKey; + using VerifierCommitmentKey = typename Flavor::VerifierCommitmentKey; + + public: + VerifierTranscript transcript; + std::shared_ptr op_queue; + std::shared_ptr pcs_verification_key; + + explicit MergeVerifier_(std::unique_ptr verification_key); + bool verify_proof(const plonk::proof& proof); +}; + +extern template class MergeVerifier_; +extern template class MergeVerifier_; +extern template class MergeVerifier_; + +} // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_prover.cpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_prover.cpp index ad3151b45bd..618df552869 100644 --- a/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_prover.cpp @@ -166,109 +166,6 @@ template void UltraProver_::execute_pcs_evaluation_ } } -/** - * @brief Prove proper construction of the aggregate Goblin ECC op queue polynomials T_i^(j), j = 1,2,3,4. 
- * @details Let T_i^(j) be the jth column of the aggregate op queue after incorporating the contribution from the - * present circuit. T_{i-1}^(j) corresponds to the aggregate op queue at the previous stage and $t_i^(j)$ represents - * the contribution from the present circuit only. For each j, we have the relationship T_i = T_{i-1} + right_shift(t_i, - * M_{i-1}), where the shift magnitude M_{i-1} is the length of T_{i-1}. This stage of the protocol demonstrates that - * the aggregate op queue has been constructed correctly. - * - */ -template void UltraProver_::execute_op_queue_transcript_aggregation_round() -{ - if constexpr (IsGoblinFlavor) { - // Extract size M_{i-1} of T_{i-1} from op_queue - size_t prev_op_queue_size = instance->proving_key->op_queue->get_previous_size(); // M_{i-1} - // TODO(#723): Cannot currently support an empty T_{i-1}. Need to be able to properly handle zero commitment. - ASSERT(prev_op_queue_size > 0); - - auto circuit_size = instance->proving_key->circuit_size; - - // TODO(#723): The below assert ensures that M_{i-1} + m_i < n, i.e. the right shifted result can be expressed - // as a size n polynomial. If this is not the case then we should still be able to proceed without increasing - // the circuit size but need to handle with care. - ASSERT(prev_op_queue_size + instance->proving_key->num_ecc_op_gates < circuit_size); // M_{i-1} + m_i < n - - // Construct right-shift of op wires t_i^{shift} so that T_i(X) = T_{i-1}(X) + t_i^{shift}(X). - // Note: The op_wire polynomials (like all others) have constant coefficient equal to zero. Thus to obtain - // t_i^{shift} we must left-shift by 1 then right-shift by M_{i-1}, or equivalently, right-shift by - // M_{i-1} - 1. - std::array right_shifted_op_wires; - auto op_wires = instance->proving_key->get_ecc_op_wires(); - for (size_t i = 0; i < op_wires.size(); ++i) { - // Right shift by M_{i-1} - 1. - right_shifted_op_wires[i].set_to_right_shifted(op_wires[i], prev_op_queue_size - 1); - } - - // Compute/get commitments [t_i^{shift}], [T_{i-1}], and [T_i] and add to transcript - std::array prev_aggregate_op_queue_commitments; - std::array shifted_op_wire_commitments; - std::array aggregate_op_queue_commitments; - for (size_t idx = 0; idx < right_shifted_op_wires.size(); ++idx) { - // Get previous transcript commitment [T_{i-1}] from op queue - prev_aggregate_op_queue_commitments[idx] = instance->proving_key->op_queue->ultra_ops_commitments[idx]; - // Compute commitment [t_i^{shift}] directly - shifted_op_wire_commitments[idx] = pcs_commitment_key->commit(right_shifted_op_wires[idx]); - // Compute updated aggregate transcript commitmen as [T_i] = [T_{i-1}] + [t_i^{shift}] - aggregate_op_queue_commitments[idx] = - prev_aggregate_op_queue_commitments[idx] + shifted_op_wire_commitments[idx]; - - std::string suffix = std::to_string(idx + 1); - transcript.send_to_verifier("PREV_AGG_OP_QUEUE_" + suffix, prev_aggregate_op_queue_commitments[idx]); - transcript.send_to_verifier("SHIFTED_OP_WIRE_" + suffix, shifted_op_wire_commitments[idx]); - transcript.send_to_verifier("AGG_OP_QUEUE_" + suffix, aggregate_op_queue_commitments[idx]); - } - - // Store the commitments [T_{i}] (to be used later in subsequent iterations as [T_{i-1}]). - instance->proving_key->op_queue->set_commitment_data(aggregate_op_queue_commitments); - - // Compute evaluations T_i(\kappa), T_{i-1}(\kappa), t_i^{shift}(\kappa), add to transcript. 
For each polynomial - // we add a univariate opening claim {(\kappa, p(\kappa)), p(X)} to the set of claims to be combined in the - // batch univariate polynomial Q in Shplonk. (The other univariate claims come from the output of Gemini). - // TODO(#729): It should be possible to reuse the opening challenge from Gemini rather than generate a new one. - auto kappa = transcript.get_challenge("kappa"); - auto prev_aggregate_ecc_op_transcript = instance->proving_key->op_queue->get_previous_aggregate_transcript(); - auto aggregate_ecc_op_transcript = instance->proving_key->op_queue->get_aggregate_transcript(); - std::array prev_agg_op_queue_evals; - std::array right_shifted_op_wire_evals; - std::array agg_op_queue_evals; - std::array prev_agg_op_queue_polynomials; - std::array agg_op_queue_polynomials; - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - std::string suffix = std::to_string(idx + 1); - - // Compute evaluation T_{i-1}(\kappa) - prev_agg_op_queue_polynomials[idx] = Polynomial(prev_aggregate_ecc_op_transcript[idx]); - prev_agg_op_queue_evals[idx] = prev_agg_op_queue_polynomials[idx].evaluate(kappa); - transcript.send_to_verifier("prev_agg_op_queue_eval_" + suffix, prev_agg_op_queue_evals[idx]); - - // Compute evaluation t_i^{shift}(\kappa) - right_shifted_op_wire_evals[idx] = right_shifted_op_wires[idx].evaluate(kappa); - transcript.send_to_verifier("op_wire_eval_" + suffix, right_shifted_op_wire_evals[idx]); - - // Compute evaluation T_i(\kappa) - agg_op_queue_polynomials[idx] = Polynomial(aggregate_ecc_op_transcript[idx]); - agg_op_queue_evals[idx] = agg_op_queue_polynomials[idx].evaluate(kappa); - transcript.send_to_verifier("agg_op_queue_eval_" + suffix, agg_op_queue_evals[idx]); - } - - // Add univariate opening claims for each polynomial. - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_openings.opening_pairs.emplace_back(OpenPair{ kappa, prev_agg_op_queue_evals[idx] }); - univariate_openings.witnesses.emplace_back(std::move(prev_agg_op_queue_polynomials[idx])); - } - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_openings.opening_pairs.emplace_back(OpenPair{ kappa, right_shifted_op_wire_evals[idx] }); - univariate_openings.witnesses.emplace_back(std::move(right_shifted_op_wires[idx])); - } - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_openings.opening_pairs.emplace_back(OpenPair{ kappa, agg_op_queue_evals[idx] }); - univariate_openings.witnesses.emplace_back(std::move(agg_op_queue_polynomials[idx])); - } - } -} - /** * - Do Fiat-Shamir to get "nu" challenge. 
* - Compute commitment [Q]_1 @@ -346,9 +243,6 @@ template plonk::proof& UltraProver_::construct_proo // Compute Fold evaluations execute_pcs_evaluation_round(); - // ECC op queue transcript aggregation - execute_op_queue_transcript_aggregation_round(); - // Fiat-Shamir: nu // Compute Shplonk batched quotient commitment Q execute_shplonk_batched_quotient_round(); diff --git a/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_verifier.cpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_verifier.cpp index 4d304709c0b..2b98a316108 100644 --- a/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/ultra_verifier.cpp @@ -164,55 +164,6 @@ template bool UltraVerifier_::verify_proof(const plonk batched_commitment_to_be_shifted, transcript); - // Perform ECC op queue transcript aggregation protocol - if constexpr (IsGoblinFlavor) { - // Receive commitments [t_i^{shift}], [T_{i-1}], and [T_i] - std::array prev_agg_op_queue_commitments; - std::array shifted_op_wire_commitments; - std::array agg_op_queue_commitments; - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - prev_agg_op_queue_commitments[idx] = - transcript.template receive_from_prover("PREV_AGG_OP_QUEUE_" + std::to_string(idx + 1)); - shifted_op_wire_commitments[idx] = - transcript.template receive_from_prover("SHIFTED_OP_WIRE_" + std::to_string(idx + 1)); - agg_op_queue_commitments[idx] = - transcript.template receive_from_prover("AGG_OP_QUEUE_" + std::to_string(idx + 1)); - } - - // Receive transcript poly evaluations - FF kappa = transcript.get_challenge("kappa"); - std::array prev_agg_op_queue_evals; - std::array shifted_op_wire_evals; - std::array agg_op_queue_evals; - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - prev_agg_op_queue_evals[idx] = - transcript.template receive_from_prover("prev_agg_op_queue_eval_" + std::to_string(idx + 1)); - shifted_op_wire_evals[idx] = - transcript.template receive_from_prover("op_wire_eval_" + std::to_string(idx + 1)); - agg_op_queue_evals[idx] = - transcript.template receive_from_prover("agg_op_queue_eval_" + std::to_string(idx + 1)); - - // Check the identity T_i(\kappa) = T_{i-1}(\kappa) + t_i^{shift}(\kappa). 
If it fails, return false - if (agg_op_queue_evals[idx] != prev_agg_op_queue_evals[idx] + shifted_op_wire_evals[idx]) { - return false; - } - } - - // Add corresponding univariate opening claims {(\kappa, p(\kappa), [p(X)]} - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_opening_claims.emplace_back(pcs::OpeningClaim{ { kappa, prev_agg_op_queue_evals[idx] }, - prev_agg_op_queue_commitments[idx] }); - } - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_opening_claims.emplace_back( - pcs::OpeningClaim{ { kappa, shifted_op_wire_evals[idx] }, shifted_op_wire_commitments[idx] }); - } - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_opening_claims.emplace_back( - pcs::OpeningClaim{ { kappa, agg_op_queue_evals[idx] }, agg_op_queue_commitments[idx] }); - } - } - // Produce a Shplonk claim: commitment [Q] - [Q_z], evaluation zero (at random challenge z) auto shplonk_claim = Shplonk::reduce_verification(pcs_verification_key, univariate_opening_claims, transcript); diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.cpp index 58da2674ec5..da24f823887 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.cpp @@ -10,9 +10,6 @@ namespace proof_system { template void GoblinUltraCircuitBuilder_::finalize_circuit() { UltraCircuitBuilder_::finalize_circuit(); - - // Set internally the current and previous size of the aggregate op queue transcript - op_queue->set_size_data(); } /** diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp index 4b97dd1c1ab..ac66bc3cf77 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp @@ -30,7 +30,6 @@ std::array UltraRecursiveVerifier_::ve using VerifierCommitments = typename Flavor::VerifierCommitments; using CommitmentLabels = typename Flavor::CommitmentLabels; using RelationParams = ::proof_system::RelationParameters; - using UnivariateClaim = ::proof_system::honk::pcs::OpeningClaim; RelationParams relation_parameters; @@ -184,56 +183,6 @@ std::array UltraRecursiveVerifier_::ve ")"); prev_num_gates = builder->get_num_gates(); - // Perform ECC op queue transcript aggregation protocol - if constexpr (IsGoblinFlavor) { - // Receive commitments [t_i^{shift}], [T_{i-1}], and [T_i] - std::array prev_agg_op_queue_commitments; - std::array shifted_op_wire_commitments; - std::array agg_op_queue_commitments; - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - std::string suffix = std::to_string(idx + 1); - prev_agg_op_queue_commitments[idx] = - transcript.template receive_from_prover("PREV_AGG_OP_QUEUE_" + suffix); - shifted_op_wire_commitments[idx] = - transcript.template receive_from_prover("SHIFTED_OP_WIRE_" + suffix); - agg_op_queue_commitments[idx] = - transcript.template receive_from_prover("AGG_OP_QUEUE_" + suffix); - } - - // Receive claimed evaluations of t_i^{shift}, T_{i-1}, and T_i - FF kappa = transcript.get_challenge("kappa"); - std::array prev_agg_op_queue_evals; - std::array shifted_op_wire_evals; - 
std::array agg_op_queue_evals; - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - std::string suffix = std::to_string(idx + 1); - prev_agg_op_queue_evals[idx] = - transcript.template receive_from_prover("prev_agg_op_queue_eval_" + suffix); - shifted_op_wire_evals[idx] = transcript.template receive_from_prover("op_wire_eval_" + suffix); - agg_op_queue_evals[idx] = transcript.template receive_from_prover("agg_op_queue_eval_" + suffix); - - ASSERT(agg_op_queue_evals[idx].get_value() == - prev_agg_op_queue_evals[idx].get_value() + shifted_op_wire_evals[idx].get_value()); - - // Check the identity T_i(\kappa) = T_{i-1}(\kappa) + t_i^{shift}(\kappa). - agg_op_queue_evals[idx].assert_equal(prev_agg_op_queue_evals[idx] + shifted_op_wire_evals[idx]); - } - - // Add corresponding univariate opening claims {(\kappa, p(\kappa), [p(X)]} - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_opening_claims.emplace_back( - UnivariateClaim{ { kappa, prev_agg_op_queue_evals[idx] }, prev_agg_op_queue_commitments[idx] }); - } - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_opening_claims.emplace_back( - UnivariateClaim{ { kappa, shifted_op_wire_evals[idx] }, shifted_op_wire_commitments[idx] }); - } - for (size_t idx = 0; idx < Flavor::NUM_WIRES; ++idx) { - univariate_opening_claims.emplace_back( - UnivariateClaim{ { kappa, agg_op_queue_evals[idx] }, agg_op_queue_commitments[idx] }); - } - } - // Produce a Shplonk claim: commitment [Q] - [Q_z], evaluation zero (at random challenge z) auto shplonk_claim = Shplonk::reduce_verification(pcs_verification_key, univariate_opening_claims, transcript);
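
Illustrative sketch (not part of the patch): the Merge protocol introduced above demonstrates, for each transcript column j, the relationship T_i^(j) = T_{i-1}^(j) + right_shift(t_i^(j), M_{i-1}) via a Schwartz-Zippel-style evaluation check at a random challenge kappa, with the claimed evaluations then opened via batched KZG. The toy program below is a minimal sketch of that bookkeeping only: it uses plain integer coefficients in place of scalar-field elements and omits commitments, the transcript, and KZG entirely. The names eval, T_prev, t, t_shift and T_current are illustrative and are not barretenberg APIs.

#include <cassert>
#include <cstdint>
#include <vector>

// Toy "polynomial" as a coefficient vector; evaluation via Horner's rule.
static int64_t eval(const std::vector<int64_t>& poly, int64_t x)
{
    int64_t acc = 0;
    for (size_t i = poly.size(); i-- > 0;) {
        acc = acc * x + poly[i];
    }
    return acc;
}

int main()
{
    // Previous aggregate transcript column T_{i-1}, of length M_{i-1}
    std::vector<int64_t> T_prev = { 3, 1, 4, 1, 5 };
    const size_t M_prev = T_prev.size();

    // Present circuit's contribution t_i
    std::vector<int64_t> t = { 9, 2, 6 };

    // Aggregate transcript T_i = T_{i-1} + right_shift(t_i, M_{i-1}):
    // t_i's coefficients are appended after the first M_{i-1} coefficients.
    std::vector<int64_t> T_current = T_prev;
    T_current.insert(T_current.end(), t.begin(), t.end());

    // t_i^{shift} = T_i - T_{i-1}, mirroring how MergeProver_ forms t_shift
    // from the current and previous aggregate transcripts.
    std::vector<int64_t> t_shift(T_current.size(), 0);
    for (size_t i = 0; i < T_current.size(); ++i) {
        t_shift[i] = T_current[i] - (i < M_prev ? T_prev[i] : 0);
    }

    // Schwartz-Zippel-style check at a "challenge" point kappa:
    // T_i(kappa) == T_{i-1}(kappa) + t_i^{shift}(kappa).
    const int64_t kappa = 7;
    assert(eval(T_current, kappa) == eval(T_prev, kappa) + eval(t_shift, kappa));
    return 0;
}

In the actual MergeProver_ above, t_shift is formed the same way (T_current minus T_prev), but the polynomials live over the curve's scalar field, the verifier receives commitments [T_{i-1}], [t_i^{shift}], [T_i], and the individual opening claims at kappa are combined with a batching challenge alpha into a single batched KZG opening.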