From f6c698e20b0284db2230ce49993fea1f4ffac862 Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Wed, 2 Oct 2024 10:19:33 +0200
Subject: [PATCH 01/19] Poly-commitment: improve documentation regarding PolyComm

---
 poly-commitment/src/commitment.rs | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/poly-commitment/src/commitment.rs b/poly-commitment/src/commitment.rs
index 5807e0297d..48e212b490 100644
--- a/poly-commitment/src/commitment.rs
+++ b/poly-commitment/src/commitment.rs
@@ -23,7 +23,17 @@ use serde_with::{
 };
 use std::{iter::Iterator, marker::PhantomData};
 
-/// A polynomial commitment.
+/// Represents a polynomial commitment when the type is instantiated with a
+/// curve.
+///
+/// The structure also handles chunking, i.e. when we aim to handle polynomials
+/// whose degree is higher than the SRS size. For this reason, we use a
+/// vector for the field `elems`.
+///
+/// Note that the parameter `C` is not constrained to be a curve, therefore in
+/// some places in the code, `C` can refer to a scalar field element. For
+/// instance, `PolyComm<G::ScalarField>` is used to represent the evaluation of
+/// the polynomial bound by a specific commitment, at a particular evaluation point.
 #[serde_as]
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(bound = "C: CanonicalDeserialize + CanonicalSerialize")]

From a7b1f5d883281c381ba421f4a8218f6236368776 Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Wed, 2 Oct 2024 10:21:54 +0200
Subject: [PATCH 02/19] Poly-commitment: comment DensePolynomialOrEvaluation

---
 poly-commitment/src/ipa.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs
index 6f874e74b8..9d4582c0b6 100644
--- a/poly-commitment/src/ipa.rs
+++ b/poly-commitment/src/ipa.rs
@@ -37,8 +37,11 @@ use std::{cmp::min, collections::HashMap, iter::Iterator, ops::AddAssign};
 #[derive(Default)]
 struct ScaledChunkedPolynomial<F, P>(Vec<(F, P)>);
 
+/// Represents a polynomial either by its coefficients or by its evaluations
 pub enum DensePolynomialOrEvaluations<'a, F: FftField, D: EvaluationDomain<F>> {
+    /// Polynomial represented by its coefficients
     DensePolynomial(&'a DensePolynomial<F>),
+    /// Polynomial represented by its evaluations over a domain D
     Evaluations(&'a Evaluations<F, D>, D),
 }
 

From 027b586313e313d5db1a8c52252bed2ed69a2af2 Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Wed, 2 Oct 2024 10:44:34 +0200
Subject: [PATCH 03/19] Poly-commitment: additional comment/TODO.

I tested the hypothesis by adding an assert that padding equals zero, and
ran all the tests for kimchi.
---
 poly-commitment/src/ipa.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs
index 9d4582c0b6..2db6340c0d 100644
--- a/poly-commitment/src/ipa.rs
+++ b/poly-commitment/src/ipa.rs
@@ -809,6 +809,9 @@ impl<G: CommitmentCurve> SRS<G> {
         let padded_length = 1 << rounds;
 
         // TODO: Trim this to the degree of the largest polynomial
+        // TODO: In practice, we always assume the SRS size is a power of 2,
+        // so the padding equals zero and this code could be removed. Only one
+        // current test case uses an SRS whose size is not a power of 2.
let padding = padded_length - self.g.len(); let mut g = self.g.clone(); g.extend(vec![G::zero(); padding]); From d829212d726349de8c09e4b84892eddab63ceca0 Mon Sep 17 00:00:00 2001 From: Danny Willems Date: Wed, 2 Oct 2024 10:47:21 +0200 Subject: [PATCH 04/19] Poly-commitment: simply using utf8 greek letters in comment --- poly-commitment/src/ipa.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs index 2db6340c0d..3cf1c05133 100644 --- a/poly-commitment/src/ipa.rs +++ b/poly-commitment/src/ipa.rs @@ -822,8 +822,8 @@ impl SRS { // just the powers of a single point as in the original IPA, but rather // a vector of linearly combined powers with `evalscale` as recombiner. // - // b_init_j = sum_i r^i elm_i^j - // = zeta^j + evalscale * zeta^j omega^j + // b_init_j = Σ_i r^i elm_i^j + // = ζ^j + evalscale * ζ^j ω^j let b_init = { // randomise/scale the eval powers let mut scale = G::ScalarField::one(); From f54031c2864e292c9be1d3748e90b648e838ed6e Mon Sep 17 00:00:00 2001 From: Danny Willems Date: Wed, 2 Oct 2024 10:59:59 +0200 Subject: [PATCH 05/19] Poly-commitment: reword documentation of PolynomialToCombine --- poly-commitment/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/poly-commitment/src/lib.rs b/poly-commitment/src/lib.rs index 1d9eaa14d9..33406782af 100644 --- a/poly-commitment/src/lib.rs +++ b/poly-commitment/src/lib.rs @@ -146,7 +146,8 @@ pub trait SRS: Clone + Sized { } #[allow(type_alias_bounds)] -/// Vector of polynomials with commitment randomness (blinders). +/// Simply an alias to represent a polynomial with its commitment, possibly with +/// a blinder. type PolynomialsToCombine<'a, G: CommitmentCurve, D: EvaluationDomain> = &'a [( DensePolynomialOrEvaluations<'a, G::ScalarField, D>, PolyComm, From 09f73b3d9bec0057d3083eeda7c39dcab179e916 Mon Sep 17 00:00:00 2001 From: Danny Willems Date: Wed, 2 Oct 2024 11:00:23 +0200 Subject: [PATCH 06/19] poly-commitment: add a TODO regarding naming polynomials cc @marc --- poly-commitment/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/poly-commitment/src/lib.rs b/poly-commitment/src/lib.rs index 33406782af..6e9ee9ba8b 100644 --- a/poly-commitment/src/lib.rs +++ b/poly-commitment/src/lib.rs @@ -148,6 +148,7 @@ pub trait SRS: Clone + Sized { #[allow(type_alias_bounds)] /// Simply an alias to represent a polynomial with its commitment, possibly with /// a blinder. +// TODO: add a string to name the polynomial type PolynomialsToCombine<'a, G: CommitmentCurve, D: EvaluationDomain> = &'a [( DensePolynomialOrEvaluations<'a, G::ScalarField, D>, PolyComm, From 2152c116a7af71b243ec8706c6e8bf6e249e73c7 Mon Sep 17 00:00:00 2001 From: Danny Willems Date: Wed, 2 Oct 2024 11:24:13 +0200 Subject: [PATCH 07/19] Poly-commitment: additional doc for ScaledChunkedPolynomial In particular, adding the hypothesis we expect on the semantics of the inhabitant of the second type parameter. --- poly-commitment/src/ipa.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs index 3cf1c05133..892dda39c9 100644 --- a/poly-commitment/src/ipa.rs +++ b/poly-commitment/src/ipa.rs @@ -31,9 +31,13 @@ use serde::{Deserialize, Serialize}; use serde_with::serde_as; use std::{cmp::min, collections::HashMap, iter::Iterator, ops::AddAssign}; -// A formal sum of the form -// `s_0 * p_0 + ... s_n * p_n` -// where each `s_i` is a scalar and each `p_i` is a polynomial. 
+/// A formal sum of the form +/// `s_0 * p_0 + ... s_n * p_n` +/// where each `s_i` is a scalar and each `p_i` is a polynomial. +/// The parameter `P` is expected to be the coefficients of the polynomial +/// `p_i`, even though we could treat it as the evaluations. +/// +/// This hypothesis is important if `to_dense_polynomial` is called. #[derive(Default)] struct ScaledChunkedPolynomial(Vec<(F, P)>); From 666d47691a00c2cace411ba5fc1340b571963ef0 Mon Sep 17 00:00:00 2001 From: Danny Willems Date: Wed, 2 Oct 2024 11:25:09 +0200 Subject: [PATCH 08/19] Poly-commitment: doc + example for to_dense_polynomial method --- poly-commitment/src/ipa.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs index 892dda39c9..b7c4be1f93 100644 --- a/poly-commitment/src/ipa.rs +++ b/poly-commitment/src/ipa.rs @@ -56,7 +56,15 @@ impl ScaledChunkedPolynomial { } impl<'a, F: Field> ScaledChunkedPolynomial { + /// Compute the resulting scaled polynomial. + /// Example: + /// Given the two polynomials `1 + 2X` and `3 + 4X`, and the scaling + /// factors `2` and `3`, the result is the polynomial `11 + 16X`. + /// ```text + /// 2 * [1, 2] + 3 * [3, 4] = [2, 4] + [9, 12] = [11, 16] + /// ``` fn to_dense_polynomial(&self) -> DensePolynomial { + // Note: using a reference to avoid reallocation of the result. let mut res = DensePolynomial::::zero(); let scaled: Vec<_> = self @@ -64,6 +72,9 @@ impl<'a, F: Field> ScaledChunkedPolynomial { .par_iter() .map(|(scale, segment)| { let scale = *scale; + // We simply scale each coefficients. + // It is simply because DensePolynomial doesn't have a method + // `scale`. let v = segment.par_iter().map(|x| scale * *x).collect(); DensePolynomial::from_coefficients_vec(v) }) From 1112bd1483167f858b8cc2ac94faea2d943d941b Mon Sep 17 00:00:00 2001 From: Danny Willems Date: Wed, 2 Oct 2024 11:49:39 +0200 Subject: [PATCH 09/19] Poly-commitment: n is never 0 as we treat only the non-empty case --- poly-commitment/src/ipa.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs index b7c4be1f93..9d750bcffa 100644 --- a/poly-commitment/src/ipa.rs +++ b/poly-commitment/src/ipa.rs @@ -162,11 +162,8 @@ pub fn combine_polys>( if !plnm_evals_part.is_empty() { let n = plnm_evals_part.len(); let max_poly_size = srs_length; - let num_chunks = if n == 0 { - 1 - } else { - n / max_poly_size + if n % max_poly_size == 0 { 0 } else { 1 } - }; + // equiv to divceil, but unstable in rust < 1.73. 
+        let num_chunks = n / max_poly_size + if n % max_poly_size == 0 { 0 } else { 1 };
         plnm += &Evaluations::from_vec_and_domain(plnm_evals_part, D::new(n).unwrap())
             .interpolate()
             .to_chunked_polynomial(num_chunks, max_poly_size)

From 51307776b398381a1b81289726c5e08b77fd4688 Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Wed, 2 Oct 2024 11:51:07 +0200
Subject: [PATCH 10/19] Poly-commitment: additional comments on combine_polys

---
 poly-commitment/src/ipa.rs | 32 ++++++++++++++++++++++++++------
 1 file changed, 26 insertions(+), 6 deletions(-)

diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs
index 9d750bcffa..433725b2b1 100644
--- a/poly-commitment/src/ipa.rs
+++ b/poly-commitment/src/ipa.rs
@@ -98,12 +98,13 @@ pub fn combine_polys<G: CommitmentCurve, D: EvaluationDomain<G::ScalarField>>(
     polyscale: G::ScalarField,
     srs_length: usize,
 ) -> (DensePolynomial<G::ScalarField>, G::ScalarField) {
-    let mut plnm = ScaledChunkedPolynomial::<G::ScalarField, &[G::ScalarField]>::default();
+    // Initialising the output for the combined polynomials in coefficient form
+    let mut plnm_coefficients =
+        ScaledChunkedPolynomial::<G::ScalarField, &[G::ScalarField]>::default();
+    // Initialising the output for the combined polynomials in evaluation form
     let mut plnm_evals_part = {
         // For now just check that all the evaluation polynomials are the same
         // degree so that we can do just a single FFT.
-        // Furthermore we check they have size less than the SRS size so we
-        // don't have to do chunking.
         // If/when we change this, we can add more complicated code to handle
         // different degrees.
         let degree = plnms
@@ -124,9 +125,17 @@ pub fn combine_polys<G: CommitmentCurve, D: EvaluationDomain<G::ScalarField>>(
     let mut omega = G::ScalarField::zero();
     let mut scale = G::ScalarField::one();
 
-    // iterating over polynomials in the batch
+    // Iterating over polynomials in the batch.
+    // Note that the `omegas` are given as a `PolyComm<G::ScalarField>`: they
+    // are the blinders (commitment randomness) associated with each chunk.
+    // We modify two different structures depending on the form of the
+    // polynomial we are currently processing: `plnm_coefficients` and
+    // `plnm_evals_part`. We need to treat both forms separately.
     for (p_i, omegas) in plnms {
         match p_i {
+            // Here we scale the polynomial in evaluation form.
+            // Note that, based on the check above, sub_domain.size() always
+            // gives the same value.
             DensePolynomialOrEvaluations::Evaluations(evals_i, sub_domain) => {
                 let stride = evals_i.evals.len() / sub_domain.size();
                 let evals = &evals_i.evals;
@@ -142,13 +151,14 @@ pub fn combine_polys<G: CommitmentCurve, D: EvaluationDomain<G::ScalarField>>(
                 }
             }
 
+            // Here we scale the polynomial in coefficient form.
             DensePolynomialOrEvaluations::DensePolynomial(p_i) => {
                 let mut offset = 0;
                 // iterating over chunks of the polynomial
                 for j in 0..omegas.elems.len() {
                     let segment = &p_i.coeffs[std::cmp::min(offset, p_i.coeffs.len())
                         ..std::cmp::min(offset + srs_length, p_i.coeffs.len())];
-                    plnm.add_poly(scale, segment);
+                    plnm_coefficients.add_poly(scale, segment);
 
                     omega += &(omegas.elems[j] * scale);
                     scale *= &polyscale;
@@ -158,12 +168,22 @@ pub fn combine_polys<G: CommitmentCurve, D: EvaluationDomain<G::ScalarField>>(
         }
     }
 
-    let mut plnm = plnm.to_dense_polynomial();
+    // Now we combine both the evaluation and coefficient forms.
+
+    // plnm will be our final combined polynomial. We first treat the
+    // polynomials in coefficient form, which simply amounts to scaling the
+    // coefficients and adding them up.
+    let mut plnm = plnm_coefficients.to_dense_polynomial();
+
     if !plnm_evals_part.is_empty() {
+        // n is the number of evaluations points, which is a multiple of the
+        // domain size.
+        // We treat now each chunk.
         let n = plnm_evals_part.len();
         let max_poly_size = srs_length;
         // equiv to divceil, but unstable in rust < 1.73.
        let num_chunks = n / max_poly_size + if n % max_poly_size == 0 { 0 } else { 1 };
+        // Interpolation on the whole domain, i.e. it can be d2, d4, etc.
         plnm += &Evaluations::from_vec_and_domain(plnm_evals_part, D::new(n).unwrap())
             .interpolate()
             .to_chunked_polynomial(num_chunks, max_poly_size)

From 1b8e9264f06d6d32a2db15dbe82f345034d58207 Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Wed, 2 Oct 2024 11:51:29 +0200
Subject: [PATCH 11/19] Poly-commitment: add documentation for combine_polys

---
 poly-commitment/src/ipa.rs | 25 +++++++++++++++++++++----
 1 file changed, 21 insertions(+), 4 deletions(-)

diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs
index 433725b2b1..53f7445428 100644
--- a/poly-commitment/src/ipa.rs
+++ b/poly-commitment/src/ipa.rs
@@ -88,11 +88,28 @@ impl<'a, F: Field> ScaledChunkedPolynomial<F, &'a [F]> {
     }
 }
 
-/// Combine the polynomials using `polyscale`, creating a single unified
-/// polynomial to open.
+/// Combine the polynomials using a scalar (`polyscale`), creating a single
+/// unified polynomial to open. This function also accepts polynomials in
+/// evaluation form. In this case it applies an IFFT and, if necessary,
+/// chunking (i.e. it splits the polynomial into multiple polynomials of
+/// degree less than the SRS size).
 /// Parameters:
-/// - plnms: vector of polynomial with optional degree bound and commitment randomness
-/// - polyscale: scaling factor for polynomials
+/// - plnms: vector of polynomials, either in evaluation or coefficient form.
+///   The polynomials are combined in the order of this vector.
+/// - polyscale: scalar used to combine the polynomials; each polynomial
+///   (or chunk thereof) is scaled by a successive power of it.
+///
+/// Example:
+/// Given three polynomials `p1(X)` and `p3(X)` in coefficient form, a
+/// polynomial `p2(X)` in evaluation form,
+/// and the scaling factor `s`, the result will be the polynomial:
+///
+/// ```text
+/// p1(X) + s * i_fft(chunks(p2))(X) + s^2 p3(X)
+/// ```
+///
+/// Additional complexity is added to handle chunks.
+// TODO: move into utils? It is useful for multiple PCS
 pub fn combine_polys<G: CommitmentCurve, D: EvaluationDomain<G::ScalarField>>(
     plnms: PolynomialsToCombine<G, D>,
     polyscale: G::ScalarField,

From ae5fcaa22d82317bc1c006b3b95545412646fc82 Mon Sep 17 00:00:00 2001
From: marcbeunardeau88
Date: Wed, 2 Oct 2024 14:23:51 +0200
Subject: [PATCH 12/19] Correct typo in doc

---
 poly-commitment/src/commitment.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/poly-commitment/src/commitment.rs b/poly-commitment/src/commitment.rs
index 48e212b490..5400d813d7 100644
--- a/poly-commitment/src/commitment.rs
+++ b/poly-commitment/src/commitment.rs
@@ -170,7 +170,7 @@ impl PolyComm {
 /// |g: G, x: G::ScalarField| g.scale(2*x + 2^n)
 /// ```
 ///
-/// otherwise. So, if we want to actually scale by `s`, we need to apply the
+/// otherwise. So, if we want to actually scale by `x`, we need to apply the
 /// inverse function of `|x| x + 2^n` (or of `|x| 2*x + 2^n` in the other case),
 /// before supplying the scalar to our in-circuit scalar-multiplication
 /// function. This computes that inverse function.
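For reference, the combination described in the `combine_polys` documentation added in [PATCH 11/19] above can be illustrated with a minimal standalone sketch. The snippet below is not part of the patch series: it only covers the coefficient-form case (no evaluation form, no chunking), the helper name `combine_with_polyscale` is hypothetical, and it assumes the arkworks `ark_ff`/`ark_poly` 0.4 API together with the Pasta field `Fp` from `mina_curves`.

```rust
use ark_ff::{One, Zero};
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial, Polynomial};
use mina_curves::pasta::Fp;

/// Hypothetical helper: combine polynomials given in coefficient form using
/// the successive powers of `polyscale`, i.e. p0 + s*p1 + s^2*p2 + ...
fn combine_with_polyscale(polys: &[DensePolynomial<Fp>], polyscale: Fp) -> DensePolynomial<Fp> {
    let mut scale = Fp::one();
    let mut acc = DensePolynomial::<Fp>::zero();
    for p in polys {
        // Scale every coefficient of `p` by the current power of `polyscale`.
        let scaled =
            DensePolynomial::from_coefficients_vec(p.coeffs.iter().map(|c| scale * c).collect());
        acc = &acc + &scaled;
        scale *= polyscale;
    }
    acc
}

fn main() {
    // p0 = 1 + 2X, p1 = 3 + 4X, polyscale = 2 => p0 + 2*p1 = 7 + 10X
    let p0 = DensePolynomial::from_coefficients_vec(vec![Fp::from(1u64), Fp::from(2u64)]);
    let p1 = DensePolynomial::from_coefficients_vec(vec![Fp::from(3u64), Fp::from(4u64)]);
    let combined = combine_with_polyscale(&[p0.clone(), p1.clone()], Fp::from(2u64));
    assert_eq!(combined.coeffs, vec![Fp::from(7u64), Fp::from(10u64)]);
    // The combination is linear, so evaluations combine the same way:
    // combined(z) = p0(z) + polyscale * p1(z).
    let z = Fp::from(5u64);
    assert_eq!(combined.evaluate(&z), p0.evaluate(&z) + Fp::from(2u64) * p1.evaluate(&z));
}
```

The linearity shown in the last assertion is what makes batching work: a single opening of the combined polynomial convinces the verifier about all the combined polynomials at once.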
From 0faab06f0cf0bca60cd0eee3dc9b0dc9245984fd Mon Sep 17 00:00:00 2001 From: marcbeunardeau88 Date: Wed, 2 Oct 2024 14:30:03 +0200 Subject: [PATCH 13/19] PC/IPA : more doc on opening proof --- poly-commitment/src/ipa.rs | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs index 53f7445428..9e424039bb 100644 --- a/poly-commitment/src/ipa.rs +++ b/poly-commitment/src/ipa.rs @@ -828,9 +828,11 @@ impl SRS { /// This function opens polynomial commitments in batch /// - plnms: batch of polynomials to open commitments for with, optionally, max degrees /// - elm: evaluation point vector to open the commitments at - /// - polyscale: polynomial scaling factor for opening commitments in batch - /// - evalscale: eval scaling factor for opening commitments in batch - /// - oracle_params: parameters for the random oracle argument + /// - polyscale: used to combine polynomials for opening commitments in batch + /// (we will open the \sum_i polyscale^i * plnms.[i]) + /// - evalscale: used to combine evaluations to open on only one point + /// - sponge: parameters for the random oracle argument + /// - rng: used for blinders for the zk property /// RETURN: commitment opening proof #[allow(clippy::too_many_arguments)] #[allow(clippy::type_complexity)] @@ -871,8 +873,8 @@ impl SRS { // just the powers of a single point as in the original IPA, but rather // a vector of linearly combined powers with `evalscale` as recombiner. // - // b_init_j = Σ_i r^i elm_i^j - // = ζ^j + evalscale * ζ^j ω^j + // b_init[j] = Σ_i evalscale^i elm_i^j + // = ζ^j + evalscale * ζ^j ω^j (in the specific case of opening) let b_init = { // randomise/scale the eval powers let mut scale = G::ScalarField::one(); @@ -895,6 +897,11 @@ impl SRS { .map(|(a, b)| *a * b) .fold(G::ScalarField::zero(), |acc, x| acc + x); + // Usually, the prover sends `combined_inner_product`` to the verifier + // So we should absorb `combined_inner_product`` + // However it is more efficient in the recursion circuit + // to absorb a slightly modified version of it. + // See the `shift_scalar`` doc. sponge.absorb_fr(&[shift_scalar::(combined_inner_product)]); let t = sponge.challenge_fq(); From c88e9d1aa2c14b08327b8b683d6671dcc8f1b27a Mon Sep 17 00:00:00 2001 From: marcbeunardeau88 Date: Wed, 2 Oct 2024 16:31:30 +0200 Subject: [PATCH 14/19] add clone to eval --- poly-commitment/src/commitment.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/poly-commitment/src/commitment.rs b/poly-commitment/src/commitment.rs index 5400d813d7..b1d69b75c3 100644 --- a/poly-commitment/src/commitment.rs +++ b/poly-commitment/src/commitment.rs @@ -476,6 +476,7 @@ pub fn combined_inner_product( } /// Contains the evaluation of a polynomial commitment at a set of points. 
+#[derive(Clone)] pub struct Evaluation where G: AffineRepr, From fed8fbc8c503f2787bb103d05aff57dad248a73d Mon Sep 17 00:00:00 2001 From: marcbeunardeau88 Date: Wed, 2 Oct 2024 16:31:49 +0200 Subject: [PATCH 15/19] PC/IPA/Test: use test's rng --- poly-commitment/tests/ipa_commitment.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/poly-commitment/tests/ipa_commitment.rs b/poly-commitment/tests/ipa_commitment.rs index 8433f28558..f5b8b7ab2d 100644 --- a/poly-commitment/tests/ipa_commitment.rs +++ b/poly-commitment/tests/ipa_commitment.rs @@ -18,7 +18,7 @@ use poly_commitment::{ ipa::{DensePolynomialOrEvaluations, SRS}, PolyComm, SRS as _, }; -use rand::{rngs::StdRng, SeedableRng}; +use rand::{rngs::StdRng, Rng, SeedableRng}; use std::array; #[test] @@ -242,7 +242,7 @@ fn test_opening_proof() { // create an SRS let srs = SRS::::create(20); - let rng = &mut StdRng::from_seed([0u8; 32]); + let mut rng = &mut o1_utils::tests::make_test_rng(None); // commit the two polynomials let commitment1 = srs.commit(&poly1, 1, rng); From ce2ace11db59b7061706d4e2ea19641d641bf513 Mon Sep 17 00:00:00 2001 From: marcbeunardeau88 Date: Wed, 2 Oct 2024 16:53:05 +0200 Subject: [PATCH 16/19] PC/IPA/test: open at random nb of points --- poly-commitment/tests/ipa_commitment.rs | 77 +++++++++++++------------ 1 file changed, 41 insertions(+), 36 deletions(-) diff --git a/poly-commitment/tests/ipa_commitment.rs b/poly-commitment/tests/ipa_commitment.rs index f5b8b7ab2d..e6b262e987 100644 --- a/poly-commitment/tests/ipa_commitment.rs +++ b/poly-commitment/tests/ipa_commitment.rs @@ -18,7 +18,7 @@ use poly_commitment::{ ipa::{DensePolynomialOrEvaluations, SRS}, PolyComm, SRS as _, }; -use rand::{rngs::StdRng, Rng, SeedableRng}; +use rand::Rng; use std::array; #[test] @@ -266,38 +266,41 @@ fn test_opening_proof() { commitment2.blinders, ), ]; - let elm = vec![Fp::rand(rng), Fp::rand(rng)]; - + // Generate a random number of evaluation point + let nb_elem: u32 = rng.gen_range(1..7); + let elm: Vec = (0..nb_elem).map(|_| Fp::rand(&mut rng)).collect(); let opening_proof = srs.open(&group_map, &polys, &elm, v, u, sponge.clone(), rng); - // evaluate the polynomials at these two points - let poly1_chunked_evals = vec![ - poly1 - .to_chunked_polynomial(1, srs.g.len()) - .evaluate_chunks(elm[0]), - poly1 - .to_chunked_polynomial(1, srs.g.len()) - .evaluate_chunks(elm[1]), - ]; + // evaluate the polynomials at the points + let poly1_chunked_evals: Vec> = elm + .iter() + .map(|elmi| { + poly1 + .to_chunked_polynomial(1, srs.g.len()) + .evaluate_chunks(*elmi) + }) + .collect(); fn sum(c: &[Fp]) -> Fp { c.iter().fold(Fp::zero(), |a, &b| a + b) } - assert_eq!(sum(&poly1_chunked_evals[0]), poly1.evaluate(&elm[0])); - assert_eq!(sum(&poly1_chunked_evals[1]), poly1.evaluate(&elm[1])); + for (i, chunks) in poly1_chunked_evals.iter().enumerate() { + assert_eq!(sum(chunks), poly1.evaluate(&elm[i])) + } - let poly2_chunked_evals = vec![ - poly2 - .to_chunked_polynomial(1, srs.g.len()) - .evaluate_chunks(elm[0]), - poly2 - .to_chunked_polynomial(1, srs.g.len()) - .evaluate_chunks(elm[1]), - ]; + let poly2_chunked_evals: Vec> = elm + .iter() + .map(|elmi| { + poly2 + .to_chunked_polynomial(1, srs.g.len()) + .evaluate_chunks(*elmi) + }) + .collect(); - assert_eq!(sum(&poly2_chunked_evals[0]), poly2.evaluate(&elm[0])); - assert_eq!(sum(&poly2_chunked_evals[1]), poly2.evaluate(&elm[1])); + for (i, chunks) in poly2_chunked_evals.iter().enumerate() { + assert_eq!(sum(chunks), poly2.evaluate(&elm[i])) + } let evaluations = 
vec![ Evaluation { @@ -318,16 +321,18 @@ fn test_opening_proof() { combined_inner_product(&v, &u, &es) }; - // verify the proof - let mut batch = vec![BatchEvaluationProof { - sponge, - evaluation_points: elm.clone(), - polyscale: v, - evalscale: u, - evaluations, - opening: &opening_proof, - combined_inner_product, - }]; - - assert!(srs.verify(&group_map, &mut batch, rng)); + { + // create the proof + let mut batch = vec![BatchEvaluationProof { + sponge, + evaluation_points: elm, + polyscale: v, + evalscale: u, + evaluations, + opening: &opening_proof, + combined_inner_product, + }]; + + assert!(srs.verify(&group_map, &mut batch, rng)); + } } From 271e3af128e80b12f29e18991fce295e6ec4b3e7 Mon Sep 17 00:00:00 2001 From: marcbeunardeau88 Date: Wed, 2 Oct 2024 17:03:07 +0200 Subject: [PATCH 17/19] PC/IPA:fix doc --- poly-commitment/src/ipa.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs index 9e424039bb..77e2af4c63 100644 --- a/poly-commitment/src/ipa.rs +++ b/poly-commitment/src/ipa.rs @@ -825,15 +825,16 @@ where } impl SRS { - /// This function opens polynomial commitments in batch - /// - plnms: batch of polynomials to open commitments for with, optionally, max degrees + /// This function opens polynomials in batch at several points + /// - plnms: batch of polynomials to open commitments for /// - elm: evaluation point vector to open the commitments at /// - polyscale: used to combine polynomials for opening commitments in batch - /// (we will open the \sum_i polyscale^i * plnms.[i]) + /// (we will open the \sum_i polyscale^i * plnms.(i)) /// - evalscale: used to combine evaluations to open on only one point /// - sponge: parameters for the random oracle argument /// - rng: used for blinders for the zk property - /// RETURN: commitment opening proof + /// A slight modification to the original protocol is done + /// when absorbing the first prover message. #[allow(clippy::too_many_arguments)] #[allow(clippy::type_complexity)] #[allow(clippy::many_single_char_names)] @@ -901,6 +902,8 @@ impl SRS { // So we should absorb `combined_inner_product`` // However it is more efficient in the recursion circuit // to absorb a slightly modified version of it. + // As a reminder, in a recursive seeting, the challenges are given as a public input + // and verified in the next iteration. // See the `shift_scalar`` doc. sponge.absorb_fr(&[shift_scalar::(combined_inner_product)]); From 403fff7cbd51b827f82325201f464d2574c88d49 Mon Sep 17 00:00:00 2001 From: Danny Willems Date: Wed, 2 Oct 2024 18:14:38 +0200 Subject: [PATCH 18/19] Update poly-commitment/src/ipa.rs --- poly-commitment/src/ipa.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs index 77e2af4c63..28d895979a 100644 --- a/poly-commitment/src/ipa.rs +++ b/poly-commitment/src/ipa.rs @@ -902,7 +902,7 @@ impl SRS { // So we should absorb `combined_inner_product`` // However it is more efficient in the recursion circuit // to absorb a slightly modified version of it. - // As a reminder, in a recursive seeting, the challenges are given as a public input + // As a reminder, in a recursive setting, the challenges are given as a public input // and verified in the next iteration. // See the `shift_scalar`` doc. 
        sponge.absorb_fr(&[shift_scalar::<G>(combined_inner_product)]);

From dd341e8b17bc88109345419dbe759926807b610a Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Wed, 2 Oct 2024 18:14:45 +0200
Subject: [PATCH 19/19] Update poly-commitment/src/ipa.rs

---
 poly-commitment/src/ipa.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/poly-commitment/src/ipa.rs b/poly-commitment/src/ipa.rs
index 28d895979a..7608850980 100644
--- a/poly-commitment/src/ipa.rs
+++ b/poly-commitment/src/ipa.rs
@@ -193,7 +193,7 @@ pub fn combine_polys<G: CommitmentCurve, D: EvaluationDomain<G::ScalarField>>(
     let mut plnm = plnm_coefficients.to_dense_polynomial();
 
     if !plnm_evals_part.is_empty() {
-        // n is the number of evaluations points, which is a multiple of the
+        // n is the number of evaluations, which is a multiple of the
         // domain size.
         // We treat now each chunk.
         let n = plnm_evals_part.len();
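As a closing illustration of the `b_init` comment touched by [PATCH 04/19] and [PATCH 13/19] above, the sketch below computes the vector `b_init[j] = Σ_i evalscale^i * elm[i]^j`; with the two evaluation points `ζ` and `ζω` this is exactly `ζ^j + evalscale * (ζω)^j`. It is not part of the patch series: the function name `b_init` is only illustrative, and it assumes the Pasta field `Fp` from `mina_curves` together with the arkworks `ark_ff` API.

```rust
use ark_ff::{One, Zero};
use mina_curves::pasta::Fp;

/// Illustrative only: b_init[j] = Σ_i evalscale^i * elm[i]^j for j in 0..padded_length.
fn b_init(elm: &[Fp], evalscale: Fp, padded_length: usize) -> Vec<Fp> {
    let mut res = vec![Fp::zero(); padded_length];
    // scale = evalscale^i for the current evaluation point elm[i]
    let mut scale = Fp::one();
    for e in elm {
        // pow = elm[i]^j, starting at j = 0
        let mut pow = Fp::one();
        for r in res.iter_mut() {
            *r += scale * pow;
            pow *= e;
        }
        scale *= evalscale;
    }
    res
}

fn main() {
    // Two evaluation points, as when opening at ζ and ζω.
    let (zeta, zeta_omega, evalscale) = (Fp::from(3u64), Fp::from(6u64), Fp::from(2u64));
    let b = b_init(&[zeta, zeta_omega], evalscale, 4);
    // b[2] = ζ^2 + evalscale * (ζω)^2 = 9 + 2 * 36 = 81
    assert_eq!(b[2], Fp::from(81u64));
}
```

In `open`, this recombined vector plays the role of the plain powers of a single point in the textbook IPA, so that one argument covers all the evaluation points at once.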