poly_commitment/kzg.rs

//! This module implements the KZG protocol described in the paper
//! [Constant-Size Commitments to Polynomials and Their
//! Applications](https://www.iacr.org/archive/asiacrypt2010/6477178/6477178.pdf)
//! by Kate, Zaverucha and Goldberg, often referred to as the KZG10 paper.
//!
//! The protocol requires a structured reference string (SRS) that contains
//! powers of a generator of a group, and a pairing-friendly curve.
//!
//! The pairing-friendly curve requirement is hidden in the `Pairing` trait
//! parameter.
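//!
//! As a rough sketch of the intended flow (the names `toxic_waste`,
//! `polynomials`, `evaluations`, `elm`, `srs_size` and `polyscale` below are
//! placeholders, not items defined in this module):
//!
//! ```text
//! // Trusted setup: the toxic waste must be discarded after this step.
//! let srs = PairingSRS::<Pair>::create_trusted_setup(toxic_waste, srs_size);
//! // Prover: open a batch of (chunked) polynomials at the points in `elm`.
//! let proof = KZGProof::create(&srs, polynomials, &elm, polyscale).unwrap();
//! // Verifier: check the opening against the commitments and claimed evaluations.
//! assert!(proof.verify(&srs, &evaluations, polyscale, &elm));
//! ```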

use crate::{
    commitment::*, ipa::SRS, utils::combine_polys, CommitmentError, PolynomialsToCombine,
    SRS as SRSTrait,
};

use ark_ec::{pairing::Pairing, AffineRepr, VariableBaseMSM};
use ark_ff::{One, PrimeField, Zero};
use ark_poly::{
    univariate::{DenseOrSparsePolynomial, DensePolynomial},
    DenseUVPolynomial, EvaluationDomain, Evaluations, Polynomial, Radix2EvaluationDomain as D,
};
use mina_poseidon::FqSponge;
use rand::thread_rng;
use rand_core::{CryptoRng, RngCore};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use std::ops::Neg;

/// Combine the (chunked) evaluations of multiple polynomials.
/// This function returns the accumulation of the evaluations, scaled by
/// `polyscale`.
/// If no evaluation is given, the function returns an empty vector.
/// It also assumes that the number of evaluation points is the same for every
/// evaluation. This is not constrained yet in the interface, but it should be.
/// If one list does not have the same size, it is shrunk to the size of the
/// first element of the list.
/// For instance, if we have 3 polynomials P1, P2, P3 evaluated at the points
/// ζ and ζω (like in vanilla PlonK), and for each polynomial, we have two
/// chunks, i.e. we have
/// ```text
///         2 chunks of P1
///        /---------------\
/// E1 = [(P1_1(ζ), P1_2(ζ)), (P1_1(ζω), P1_2(ζω))]
/// E2 = [(P2_1(ζ), P2_2(ζ)), (P2_1(ζω), P2_2(ζω))]
/// E3 = [(P3_1(ζ), P3_2(ζ)), (P3_1(ζω), P3_2(ζω))]
/// ```
/// then the output is a list of 2 elements (one per evaluation point), equal to:
/// ```text
/// P1_1(ζ) + P1_2(ζ) * polyscale + P2_1(ζ) * polyscale^2 + P2_2(ζ) * polyscale^3 + P3_1(ζ) * polyscale^4 + P3_2(ζ) * polyscale^5
/// P1_1(ζω) + P1_2(ζω) * polyscale + P2_1(ζω) * polyscale^2 + P2_2(ζω) * polyscale^3 + P3_1(ζω) * polyscale^4 + P3_2(ζω) * polyscale^5
/// ```
pub fn combine_evaluations<G: CommitmentCurve>(
    evaluations: &[Evaluation<G>],
    polyscale: G::ScalarField,
) -> Vec<G::ScalarField> {
    let mut polyscale_i = G::ScalarField::one();
    let mut acc = {
        let num_evals = if !evaluations.is_empty() {
            evaluations[0].evaluations.len()
        } else {
            0
        };
        vec![G::ScalarField::zero(); num_evals]
    };

    for Evaluation { evaluations, .. } in evaluations.iter().filter(|x| !x.commitment.is_empty()) {
        // IMPROVEME: we could have a single flat array containing all the
        // evaluations and all the chunks. It would avoid the extra memory
        // indirection when fetching the values.
        // Iterating over the polynomial segments.
        for chunk_idx in 0..evaluations[0].len() {
            // Supposes that all evaluations have the same number of evaluation points.
            for eval_pt_idx in 0..evaluations.len() {
                acc[eval_pt_idx] += evaluations[eval_pt_idx][chunk_idx] * polyscale_i;
            }
            polyscale_i *= polyscale;
        }
    }

    acc
}

#[serde_as]
#[derive(Debug, Serialize, Deserialize)]
#[serde(
    bound = "Pair::G1Affine: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize"
)]
pub struct KZGProof<Pair: Pairing> {
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    /// The commitment to the quotient polynomial.
    pub quotient: Pair::G1Affine,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    /// A blinding factor used to hide the polynomial, if necessary.
    pub blinding: <Pair::G1Affine as AffineRepr>::ScalarField,
}

impl<Pair: Pairing> Default for KZGProof<Pair> {
    fn default() -> Self {
        Self {
            quotient: Pair::G1Affine::generator(),
            blinding: <Pair::G1Affine as AffineRepr>::ScalarField::zero(),
        }
    }
}

impl<Pair: Pairing> Clone for KZGProof<Pair> {
    fn clone(&self) -> Self {
        Self {
            quotient: self.quotient,
            blinding: self.blinding,
        }
    }
}

#[derive(Debug, PartialEq, Serialize, Deserialize)]
/// Define a structured reference string (i.e. SRS) for the KZG protocol.
/// The SRS consists of the powers `g^(x^i)` of a generator `g`, for some toxic
/// waste `x`.
///
/// The SRS is formed using what we call a "trusted setup". For now, the setup
/// is created using the method `create_trusted_setup`.
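///
/// As an informal sketch of the contents (the exact layout is that of the
/// inner [`SRS`] type, with `g2` the generator of G2):
/// ```text
/// full_srs.g     = [g, g^x, g^(x^2), ..., g^(x^(depth - 1))]   (in G1)
/// verifier_srs.g = [g2, g2^x, g2^(x^2)]                        (in G2)
/// ```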
pub struct PairingSRS<Pair: Pairing> {
    /// The full SRS is the one used by the prover. Can be seen as the "proving
    /// key"/"secret key".
    pub full_srs: SRS<Pair::G1Affine>,
    /// SRS to be used by the verifier. Can be seen as the "verification
    /// key"/"public key".
    pub verifier_srs: SRS<Pair::G2Affine>,
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
    > PairingSRS<Pair>
{
    /// Create a trusted setup for the KZG protocol.
    /// The setup is created using a toxic waste `toxic_waste` and a depth
    /// `depth`.
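    ///
    /// A minimal usage sketch (sampling the toxic waste and choosing the depth
    /// are up to the caller, not part of this method):
    ///
    /// ```text
    /// let toxic_waste = F::rand(&mut rng);
    /// let srs = PairingSRS::<Pair>::create_trusted_setup(toxic_waste, 1 << 10);
    /// // `toxic_waste` must be destroyed after this point.
    /// ```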
    pub fn create_trusted_setup(toxic_waste: F, depth: usize) -> Self {
        let full_srs = unsafe { SRS::create_trusted_setup(toxic_waste, depth) };
        // The verifier SRS only needs 3 powers in G2: committing to the
        // degree-2 divisor polynomial (X - ζ)(X - ζω) requires [1], [x] and
        // [x^2] in G2.
        let verifier_srs = unsafe { SRS::create_trusted_setup(toxic_waste, 3) };
        Self {
            full_srs,
            verifier_srs,
        }
    }
}

impl<Pair: Pairing> Default for PairingSRS<Pair> {
    fn default() -> Self {
        Self {
            full_srs: SRS::default(),
            verifier_srs: SRS::default(),
        }
    }
}

impl<Pair: Pairing> Clone for PairingSRS<Pair> {
    fn clone(&self) -> Self {
        Self {
            full_srs: self.full_srs.clone(),
            verifier_srs: self.verifier_srs.clone(),
        }
    }
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
    > crate::OpenProof<G> for KZGProof<Pair>
{
    type SRS = PairingSRS<Pair>;

    /// Parameters:
    /// - `srs`: the structured reference string
    /// - `plnms`: vector of polynomials with optional degree bound and
    ///   commitment randomness
    /// - `elm`: vector of evaluation points
    /// - `polyscale`: scaling factor for the polynomials
    ///
    /// The parameters `group_map`, `sponge`, `rng` and `evalscale` are not
    /// used. They are kept to fit the trait and to be used generically.
    fn open<EFqSponge, RNG, D: EvaluationDomain<F>>(
        srs: &Self::SRS,
        _group_map: &<G as CommitmentCurve>::Map,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[<G as AffineRepr>::ScalarField],
        polyscale: <G as AffineRepr>::ScalarField,
        _evalscale: <G as AffineRepr>::ScalarField,
        _sponge: EFqSponge,
        _rng: &mut RNG,
    ) -> Self
    where
        EFqSponge: Clone + FqSponge<<G as AffineRepr>::BaseField, G, F>,
        RNG: RngCore + CryptoRng,
    {
        KZGProof::create(srs, plnms, elm, polyscale).unwrap()
    }

    fn verify<EFqSponge, RNG>(
        srs: &Self::SRS,
        _group_map: &G::Map,
        batch: &mut [BatchEvaluationProof<G, EFqSponge, Self>],
        _rng: &mut RNG,
    ) -> bool
    where
        EFqSponge: FqSponge<G::BaseField, G, F>,
        RNG: RngCore + CryptoRng,
    {
        for BatchEvaluationProof {
            sponge: _,
            evaluations,
            evaluation_points,
            polyscale,
            evalscale: _,
            opening,
            combined_inner_product: _,
        } in batch.iter()
        {
            if !opening.verify(srs, evaluations, *polyscale, evaluation_points) {
                return false;
            }
        }
        true
    }
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
    > SRSTrait<G> for PairingSRS<Pair>
{
    fn max_poly_size(&self) -> usize {
        self.full_srs.max_poly_size()
    }

    fn get_lagrange_basis(&self, domain: D<G::ScalarField>) -> &Vec<PolyComm<G>> {
        self.full_srs.get_lagrange_basis(domain)
    }

    fn get_lagrange_basis_from_domain_size(&self, domain_size: usize) -> &Vec<PolyComm<G>> {
        self.full_srs
            .get_lagrange_basis_from_domain_size(domain_size)
    }

    fn blinding_commitment(&self) -> G {
        self.full_srs.blinding_commitment()
    }

    fn mask_custom(
        &self,
        com: PolyComm<G>,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.full_srs.mask_custom(com, blinders)
    }

    fn mask(
        &self,
        comm: PolyComm<G>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.full_srs.mask(comm, rng)
    }

    fn commit(
        &self,
        plnm: &DensePolynomial<F>,
        num_chunks: usize,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.full_srs.commit(plnm, num_chunks, rng)
    }

    fn commit_non_hiding(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
    ) -> PolyComm<G> {
        self.full_srs.commit_non_hiding(plnm, num_chunks)
    }

    fn commit_custom(
        &self,
        plnm: &DensePolynomial<<G>::ScalarField>,
        num_chunks: usize,
        blinders: &PolyComm<<G>::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.full_srs.commit_custom(plnm, num_chunks, blinders)
    }

    fn commit_evaluations_non_hiding(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
    ) -> PolyComm<G> {
        self.full_srs.commit_evaluations_non_hiding(domain, plnm)
    }

    fn commit_evaluations(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.full_srs.commit_evaluations(domain, plnm, rng)
    }

    fn commit_evaluations_custom(
        &self,
        domain: D<<G>::ScalarField>,
        plnm: &Evaluations<<G>::ScalarField, D<<G>::ScalarField>>,
        blinders: &PolyComm<<G>::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.full_srs
            .commit_evaluations_custom(domain, plnm, blinders)
    }

    fn create(depth: usize) -> Self {
        // The toxic waste is sampled locally, so the resulting SRS is not the
        // output of a real trusted setup ceremony: whoever runs this knows the
        // secret.
        let mut rng = thread_rng();
        let toxic_waste = G::ScalarField::rand(&mut rng);
        Self::create_trusted_setup(toxic_waste, depth)
    }

    fn size(&self) -> usize {
        self.full_srs.g.len()
    }
}

/// The polynomial that evaluates to each of `evals` for the respective `elm`s.
/// For now, it only works for 2 evaluation points.
/// `elm` is the vector of evaluation points and `evals` is the vector of
/// evaluations at those points.
fn eval_polynomial<F: PrimeField>(elm: &[F], evals: &[F]) -> DensePolynomial<F> {
    assert_eq!(elm.len(), evals.len());
    let (zeta, zeta_omega) = if elm.len() == 2 {
        (elm[0], elm[1])
    } else {
        todo!()
    };
    let (eval_zeta, eval_zeta_omega) = if evals.len() == 2 {
        (evals[0], evals[1])
    } else {
        todo!()
    };

    // The polynomial that evaluates to `p(ζ)` at `ζ` and `p(ζω)` at
    // `ζω`.
    // We write `p(x) = a + bx`, which gives
    // ```text
    // p(ζ) = a + b * ζ
    // p(ζω) = a + b * ζω
    // ```
    // and so
    // ```text
    // b = (p(ζω) - p(ζ)) / (ζω - ζ)
    // a = p(ζ) - b * ζ
    // ```
    let b = (eval_zeta_omega - eval_zeta) / (zeta_omega - zeta);
    let a = eval_zeta - b * zeta;
    DensePolynomial::from_coefficients_slice(&[a, b])
}

/// The polynomial that evaluates to `0` at the evaluation points.
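/// For instance, for two evaluation points `ζ` and `ζω`, this returns the
/// vanishing polynomial
/// ```text
/// (X - ζ) * (X - ζω) = X^2 - (ζ + ζω) * X + ζ * ζω
/// ```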
fn divisor_polynomial<F: PrimeField>(elm: &[F]) -> DensePolynomial<F> {
    elm.iter()
        .map(|value| DensePolynomial::from_coefficients_slice(&[-(*value), F::one()]))
        .reduce(|poly1, poly2| &poly1 * &poly2)
        .unwrap()
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
    > KZGProof<Pair>
{
    /// Create a KZG proof.
    /// Parameters:
    /// - `srs`: the structured reference string used to commit
    ///   to the polynomials
    /// - `plnms`: the list of polynomials to open.
    ///   The type is simply an alias to handle the polynomials in evaluation or
    ///   coefficient form.
    /// - `elm`: vector of evaluation points. Note that it only works for two
    ///   elements for now.
    /// - `polyscale`: a challenge to batch the polynomials.
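    ///
    /// In a nutshell (informal sketch; `P` and `L` are notation introduced
    /// here, not identifiers from the code): the polynomials are first
    /// combined with `polyscale` into a single polynomial `P`, and the proof
    /// is a commitment to the quotient
    /// ```text
    /// Q(X) = (P(X) - L(X)) / ((X - ζ) * (X - ζω))
    /// ```
    /// where `L` is the line interpolating the evaluations of `P` at `ζ` and
    /// `ζω`. The division is exact precisely when `P` evaluates to the claimed
    /// values, which is what the verifier later checks with a pairing equation.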
    pub fn create<D: EvaluationDomain<F>>(
        srs: &PairingSRS<Pair>,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[F],
        polyscale: F,
    ) -> Option<Self> {
        let (p, blinding_factor) = combine_polys::<G, D>(plnms, polyscale, srs.full_srs.g.len());
        let evals: Vec<_> = elm.iter().map(|pt| p.evaluate(pt)).collect();

        let quotient_poly = {
            // This is where the condition on two points is enforced.
            let eval_polynomial = eval_polynomial(elm, &evals);
            let divisor_polynomial = divisor_polynomial(elm);
            let numerator_polynomial = &p - &eval_polynomial;
            let (quotient, remainder) = DenseOrSparsePolynomial::divide_with_q_and_r(
                &numerator_polynomial.into(),
                &divisor_polynomial.into(),
            )?;
            if !remainder.is_zero() {
                return None;
            }
            quotient
        };

        let quotient = srs
            .full_srs
            .commit_non_hiding(&quotient_poly, 1)
            .get_first_chunk();

        Some(KZGProof {
            quotient,
            blinding: blinding_factor,
        })
    }

    /// Verify a proof. Note that it only works for two evaluation points for
    /// now, i.e. `elm` must be of size 2.
    /// Also, chunking is not supported.
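    ///
    /// Informally, with `C` the combined commitment, `E` the commitment to the
    /// interpolation of the claimed evaluations, `H` the blinding base and `V`
    /// the divisor polynomial, the check performed is the pairing equation
    /// ```text
    /// e(C - E - blinding * H, [1]_2) = e(quotient, [V(x)]_2)
    /// ```
    /// computed as a single multi-pairing (the quotient term is negated so the
    /// whole product must equal the identity).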
    pub fn verify(
        &self,
        srs: &PairingSRS<Pair>,        // SRS
        evaluations: &[Evaluation<G>], // commitments to the polynomials
        polyscale: F,                  // scaling factor for the polynomials
        elm: &[F],                     // vector of evaluation points
    ) -> bool {
        let poly_commitment: G::Group = {
            let mut scalars: Vec<F> = Vec::new();
            let mut points = Vec::new();
            combine_commitments(
                evaluations,
                &mut scalars,
                &mut points,
                polyscale,
                F::one(), /* TODO: This is inefficient */
            );
            let scalars: Vec<_> = scalars.iter().map(|x| x.into_bigint()).collect();

            G::Group::msm_bigint(&points, &scalars)
        };

        // IMPROVEME: we could have a single flat array for all evaluations, see
        // the same comment in combine_evaluations.
        let evals = combine_evaluations(evaluations, polyscale);
        let blinding_commitment = srs.full_srs.h.mul(self.blinding);
        // Taking the first element of the commitment, i.e. no support for chunking.
        let divisor_commitment = srs
            .verifier_srs
            .commit_non_hiding(&divisor_polynomial(elm), 1)
            .get_first_chunk();
        // Taking the first element of the commitment, i.e. no support for chunking.
        let eval_commitment = srs
            .full_srs
            .commit_non_hiding(&eval_polynomial(elm, &evals), 1)
            .get_first_chunk()
            .into_group();
        let numerator_commitment = { poly_commitment - eval_commitment - blinding_commitment };
        // We multiply the results of the two Miller loops so that only one
        // final exponentiation is needed.
        let to_loop_left = [
            ark_ec::pairing::prepare_g1::<Pair>(numerator_commitment),
            // Note that we negate the quotient here, to put everything on the
            // same side of the pairing equation.
            ark_ec::pairing::prepare_g1::<Pair>(self.quotient.into_group().neg()),
        ];
        let to_loop_right = [
            ark_ec::pairing::prepare_g2::<Pair>(Pair::G2Affine::generator()),
            ark_ec::pairing::prepare_g2::<Pair>(divisor_commitment),
        ];
        // In additive notation, the result is
        // e(numerator_commitment, 1) - e(quotient, divisor_commitment),
        // which must be zero for the proof to be accepted.
        let res = Pair::multi_pairing(to_loop_left, to_loop_right);

        res.is_zero()
    }
}