
poly_commitment/kzg.rs

//! This module implements the KZG protocol described in the paper
//! [Constant-Size Commitments to Polynomials and Their
//! Applications](https://www.iacr.org/archive/asiacrypt2010/6477178/6477178.pdf)
//! by Kate, Zaverucha and Goldberg, often referred to as the KZG10 paper.
//!
//! The protocol requires a structured reference string (SRS) that contains
//! powers of a generator of a group, and a pairing-friendly curve.
//!
//! The pairing-friendly curve requirement is hidden in the `Pairing` trait
//! parameter.
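//!
//! Informally, to open a polynomial `f` at a point `z`, the prover computes
//! the quotient `q(X) = (f(X) - f(z)) / (X - z)` and commits to it. Writing
//! `[a]_1` and `[a]_2` for the group elements `a * G1` and `a * G2`, the
//! verifier then checks the pairing equation
//!
//! ```text
//! e([f(τ)]_1 - [f(z)]_1, [1]_2) = e([q(τ)]_1, [τ - z]_2)
//! ```
//!
//! where `τ` is the toxic waste of the trusted setup. The `verify` method of
//! [KZGProof] implements a batched variant of this check, combining several
//! polynomials and two evaluation points using a `polyscale` challenge.
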
use crate::{
    commitment::*, ipa::SRS, utils::combine_polys, CommitmentError, PolynomialsToCombine,
    SRS as SRSTrait,
};

use ark_ec::{pairing::Pairing, AffineRepr, VariableBaseMSM};
use ark_ff::{One, PrimeField, Zero};
use ark_poly::{
    univariate::{DenseOrSparsePolynomial, DensePolynomial},
    DenseUVPolynomial, EvaluationDomain, Evaluations, Polynomial, Radix2EvaluationDomain as D,
};
use mina_poseidon::FqSponge;
use rand::thread_rng;
use rand_core::{CryptoRng, RngCore};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use std::ops::Neg;

/// Combine the (chunked) evaluations of multiple polynomials.
/// This function returns the accumulation of the evaluations, scaled by
/// `polyscale`.
/// If no evaluation is given, the function returns an empty vector.
/// It also assumes that each evaluation contains the same number of
/// evaluation points. This is not enforced by the interface yet, but it
/// should be: if one list has a different size, it is shrunk to the size of
/// the first element of the list.
/// For instance, if we have 3 polynomials P1, P2, P3 evaluated at the points
/// ζ and ζω (like in vanilla PlonK), and for each polynomial, we have two
/// chunks, i.e. we have
/// ```text
///         2 chunks of P1
///        /---------------\
/// E1 = [(P1_1(ζ), P1_2(ζ)), (P1_1(ζω), P1_2(ζω))]
/// E2 = [(P2_1(ζ), P2_2(ζ)), (P2_1(ζω), P2_2(ζω))]
/// E3 = [(P3_1(ζ), P3_2(ζ)), (P3_1(ζω), P3_2(ζω))]
/// ```
/// then the output is a list of 2 elements, one per evaluation point, equal
/// to:
/// ```text
/// P1_1(ζ) + P1_2(ζ) * polyscale + P2_1(ζ) * polyscale^2 + P2_2(ζ) * polyscale^3 + P3_1(ζ) * polyscale^4 + P3_2(ζ) * polyscale^5
/// P1_1(ζω) + P1_2(ζω) * polyscale + P2_1(ζω) * polyscale^2 + P2_2(ζω) * polyscale^3 + P3_1(ζω) * polyscale^4 + P3_2(ζω) * polyscale^5
/// ```
pub fn combine_evaluations<G: CommitmentCurve>(
    evaluations: &[Evaluation<G>],
    polyscale: G::ScalarField,
) -> Vec<G::ScalarField> {
    let mut polyscale_i = G::ScalarField::one();
    let mut acc = {
        let num_evals = if !evaluations.is_empty() {
            evaluations[0].evaluations.len()
        } else {
            0
        };
        vec![G::ScalarField::zero(); num_evals]
    };

    for Evaluation { evaluations, .. } in evaluations.iter().filter(|x| !x.commitment.is_empty()) {
        // IMPROVEME: we could use a single flat array containing all the
        // evaluations and all the chunks; it would avoid the extra memory
        // indirection.
        // Iterating over the polynomial chunks. Note that `polyscale_i` keeps
        // increasing across polynomials, so each (polynomial, chunk) pair gets
        // its own power of `polyscale`.
        for chunk_idx in 0..evaluations[0].len() {
            // Assumes that all evaluations have the same number of points.
            for eval_pt_idx in 0..evaluations.len() {
                acc[eval_pt_idx] += evaluations[eval_pt_idx][chunk_idx] * polyscale_i;
            }
            polyscale_i *= polyscale;
        }
    }

    acc
}
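
// A minimal usage sketch: the helper below is hypothetical (not part of the
// public API) and only illustrates how `Evaluation` values are assembled
// before calling `combine_evaluations`. With a single unchunked polynomial
// evaluated at two points, and assuming the commitment is non-empty
// (otherwise `combine_evaluations` skips it), the result is simply
// `[eval_at_zeta, eval_at_zeta_omega]`, since only `polyscale^0 = 1` is used.
#[allow(dead_code)]
fn combine_single_poly_evaluations<G: CommitmentCurve>(
    commitment: PolyComm<G>,
    eval_at_zeta: G::ScalarField,
    eval_at_zeta_omega: G::ScalarField,
    polyscale: G::ScalarField,
) -> Vec<G::ScalarField> {
    let evaluations = vec![Evaluation {
        commitment,
        // Outer vector: evaluation points; inner vectors: chunks.
        evaluations: vec![vec![eval_at_zeta], vec![eval_at_zeta_omega]],
    }];
    combine_evaluations::<G>(&evaluations, polyscale)
}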

#[serde_as]
#[derive(Debug, Serialize, Deserialize)]
#[serde(
    bound = "Pair::G1Affine: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize"
)]
pub struct KZGProof<Pair: Pairing> {
    /// The commitment to the quotient polynomial.
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub quotient: Pair::G1Affine,
    /// A blinding factor used to hide the polynomial, if necessary.
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub blinding: <Pair::G1Affine as AffineRepr>::ScalarField,
}

impl<Pair: Pairing> Default for KZGProof<Pair> {
    fn default() -> Self {
        Self {
            quotient: Pair::G1Affine::generator(),
            blinding: <Pair::G1Affine as AffineRepr>::ScalarField::zero(),
        }
    }
}

impl<Pair: Pairing> Clone for KZGProof<Pair> {
    fn clone(&self) -> Self {
        Self {
            quotient: self.quotient,
            blinding: self.blinding,
        }
    }
}

#[derive(Debug, PartialEq, Serialize, Deserialize)]
/// Define a structured reference string (i.e. SRS) for the KZG protocol.
/// The SRS consists of the group elements `g, g^x, g^(x^2), ...` for a group
/// generator `g` and some toxic waste `x`.
///
/// The SRS is formed using what we call a "trusted setup". For now, the setup
/// is created using the method `create_trusted_setup`.
pub struct PairingSRS<Pair: Pairing> {
    /// The full SRS is the one used by the prover. Can be seen as the "proving
    /// key"/"secret key".
    pub full_srs: SRS<Pair::G1Affine>,
    /// SRS to be used by the verifier. Can be seen as the "verification
    /// key"/"public key".
    pub verifier_srs: SRS<Pair::G2Affine>,
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
    > PairingSRS<Pair>
{
    /// Create a trusted setup for the KZG protocol.
    /// The setup is created using a toxic waste `toxic_waste` and a depth
    /// `depth`.
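    ///
    /// A minimal usage sketch (illustrative only; the toxic waste must be
    /// discarded immediately after the setup):
    ///
    /// ```ignore
    /// let toxic_waste = F::rand(&mut rng);
    /// let srs = PairingSRS::<Pair>::create_trusted_setup(toxic_waste, 1 << 10);
    /// ```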
    pub fn create_trusted_setup(toxic_waste: F, depth: usize) -> Self {
        let full_srs = unsafe { SRS::create_trusted_setup(toxic_waste, depth) };
        let verifier_srs = unsafe { SRS::create_trusted_setup(toxic_waste, 3) };
        Self {
            full_srs,
            verifier_srs,
        }
    }
}

impl<Pair: Pairing> Default for PairingSRS<Pair> {
    fn default() -> Self {
        Self {
            full_srs: SRS::default(),
            verifier_srs: SRS::default(),
        }
    }
}

impl<Pair: Pairing> Clone for PairingSRS<Pair> {
    fn clone(&self) -> Self {
        Self {
            full_srs: self.full_srs.clone(),
            verifier_srs: self.verifier_srs.clone(),
        }
    }
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
        const FULL_ROUNDS: usize,
    > crate::OpenProof<G, FULL_ROUNDS> for KZGProof<Pair>
{
    type SRS = PairingSRS<Pair>;

    /// Parameters:
    /// - `srs`: the structured reference string
    /// - `plnms`: vector of polynomials with optional degree bound and
    ///   commitment randomness
    /// - `elm`: vector of evaluation points
    /// - `polyscale`: scaling factor for the polynomials
    ///
    /// `group_map`, `sponge`, `rng` and `evalscale` are not used; they are
    /// kept to fit the trait and to be used generically.
    fn open<EFqSponge, RNG, D: EvaluationDomain<F>>(
        srs: &Self::SRS,
        _group_map: &<G as CommitmentCurve>::Map,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[<G as AffineRepr>::ScalarField],
        polyscale: <G as AffineRepr>::ScalarField,
        _evalscale: <G as AffineRepr>::ScalarField,
        _sponge: EFqSponge,
        _rng: &mut RNG,
    ) -> Self
    where
        EFqSponge: Clone + FqSponge<<G as AffineRepr>::BaseField, G, F, FULL_ROUNDS>,
        RNG: RngCore + CryptoRng,
    {
        KZGProof::create(srs, plnms, elm, polyscale).unwrap()
    }

    fn verify<EFqSponge, RNG>(
        srs: &Self::SRS,
        _group_map: &G::Map,
        batch: &mut [BatchEvaluationProof<G, EFqSponge, Self, FULL_ROUNDS>],
        _rng: &mut RNG,
    ) -> bool
    where
        EFqSponge: FqSponge<G::BaseField, G, F, FULL_ROUNDS>,
        RNG: RngCore + CryptoRng,
    {
        for BatchEvaluationProof {
            sponge: _,
            evaluations,
            evaluation_points,
            polyscale,
            evalscale: _,
            opening,
            combined_inner_product: _,
        } in batch.iter()
        {
            if !opening.verify(srs, evaluations, *polyscale, evaluation_points) {
                return false;
            }
        }
        true
    }
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
    > SRSTrait<G> for PairingSRS<Pair>
{
    fn max_poly_size(&self) -> usize {
        self.full_srs.max_poly_size()
    }

    fn get_lagrange_basis(&self, domain: D<G::ScalarField>) -> &Vec<PolyComm<G>> {
        self.full_srs.get_lagrange_basis(domain)
    }

    fn get_lagrange_basis_from_domain_size(&self, domain_size: usize) -> &Vec<PolyComm<G>> {
        self.full_srs
            .get_lagrange_basis_from_domain_size(domain_size)
    }

    fn blinding_commitment(&self) -> G {
        self.full_srs.blinding_commitment()
    }

    fn mask_custom(
        &self,
        com: PolyComm<G>,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.full_srs.mask_custom(com, blinders)
    }

    fn mask(
        &self,
        comm: PolyComm<G>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.full_srs.mask(comm, rng)
    }

    fn commit(
        &self,
        plnm: &DensePolynomial<F>,
        num_chunks: usize,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.full_srs.commit(plnm, num_chunks, rng)
    }

    fn commit_non_hiding(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
    ) -> PolyComm<G> {
        self.full_srs.commit_non_hiding(plnm, num_chunks)
    }

    fn commit_custom(
        &self,
        plnm: &DensePolynomial<<G>::ScalarField>,
        num_chunks: usize,
        blinders: &PolyComm<<G>::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.full_srs.commit_custom(plnm, num_chunks, blinders)
    }

    fn commit_evaluations_non_hiding(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
    ) -> PolyComm<G> {
        self.full_srs.commit_evaluations_non_hiding(domain, plnm)
    }

    fn commit_evaluations(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.full_srs.commit_evaluations(domain, plnm, rng)
    }

    fn commit_evaluations_custom(
        &self,
        domain: D<<G>::ScalarField>,
        plnm: &Evaluations<<G>::ScalarField, D<<G>::ScalarField>>,
        blinders: &PolyComm<<G>::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.full_srs
            .commit_evaluations_custom(domain, plnm, blinders)
    }

    fn create(depth: usize) -> Self {
        let mut rng = thread_rng();
        let toxic_waste = G::ScalarField::rand(&mut rng);
        Self::create_trusted_setup(toxic_waste, depth)
    }

    fn size(&self) -> usize {
        self.full_srs.g.len()
    }
}

/// The polynomial that evaluates to each of `evals` for the respective `elm`s.
/// For now, it only works for 2 evaluation points.
/// `elm` is the vector of evaluation points and `evals` is the vector of
/// evaluations at those points.
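///
/// Worked example (with small integers standing in for field elements): for
/// `elm = [2, 3]` and `evals = [5, 7]`, we get `b = (7 - 5) / (3 - 2) = 2` and
/// `a = 5 - 2 * 2 = 1`, i.e. the polynomial `1 + 2 * x`, which indeed
/// evaluates to 5 at 2 and to 7 at 3.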
fn eval_polynomial<F: PrimeField>(elm: &[F], evals: &[F]) -> DensePolynomial<F> {
    assert_eq!(elm.len(), evals.len());
    let (zeta, zeta_omega) = if elm.len() == 2 {
        (elm[0], elm[1])
    } else {
        todo!()
    };
    let (eval_zeta, eval_zeta_omega) = if evals.len() == 2 {
        (evals[0], evals[1])
    } else {
        todo!()
    };

    // The polynomial that evaluates to `p(ζ)` at `ζ` and `p(ζω)` at
    // `ζω`.
    // We write `p(x) = a + bx`, which gives
    // ```text
    // p(ζ) = a + b * ζ
    // p(ζω) = a + b * ζω
    // ```
    // and so
    // ```text
    // b = (p(ζω) - p(ζ)) / (ζω - ζ)
    // a = p(ζ) - b * ζ
    // ```
    let b = (eval_zeta_omega - eval_zeta) / (zeta_omega - zeta);
    let a = eval_zeta - b * zeta;
    DensePolynomial::from_coefficients_slice(&[a, b])
}

/// The polynomial that evaluates to `0` at the evaluation points.
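/// For example, with two evaluation points `ζ` and `ζω`, this is
/// `(X - ζ) * (X - ζω) = X^2 - (ζ + ζω) * X + ζ * ζω`.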
fn divisor_polynomial<F: PrimeField>(elm: &[F]) -> DensePolynomial<F> {
    elm.iter()
        .map(|value| DensePolynomial::from_coefficients_slice(&[-(*value), F::one()]))
        .reduce(|poly1, poly2| &poly1 * &poly2)
        .unwrap()
}

impl<
        F: PrimeField,
        G: CommitmentCurve<ScalarField = F>,
        G2: CommitmentCurve<ScalarField = F>,
        Pair: Pairing<G1Affine = G, G2Affine = G2>,
    > KZGProof<Pair>
{
    /// Create a KZG proof.
    /// Parameters:
    /// - `srs`: the structured reference string used to commit
    ///   to the polynomials
    /// - `plnms`: the list of polynomials to open.
    ///   The type is simply an alias to handle the polynomials in evaluation
    ///   or coefficient form.
    /// - `elm`: vector of evaluation points. Note that it only works for two
    ///   elements for now.
    /// - `polyscale`: a challenge to batch the polynomials.
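    ///
    /// A minimal usage sketch (illustrative only; the polynomials, blinders
    /// and evaluation points are assumed to be built by the caller):
    ///
    /// ```ignore
    /// let proof = KZGProof::create(&srs, plnms, &[zeta, zeta_omega], polyscale)
    ///     .expect("the division by the divisor polynomial must be exact");
    /// ```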
    pub fn create<D: EvaluationDomain<F>>(
        srs: &PairingSRS<Pair>,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[F],
        polyscale: F,
    ) -> Option<Self> {
        let (p, blinding_factor) = combine_polys::<G, D>(plnms, polyscale, srs.full_srs.g.len());
        let evals: Vec<_> = elm.iter().map(|pt| p.evaluate(pt)).collect();

        let quotient_poly = {
            // This is where the condition on two points is enforced.
            let eval_polynomial = eval_polynomial(elm, &evals);
            let divisor_polynomial = divisor_polynomial(elm);
            let numerator_polynomial = &p - &eval_polynomial;
            let (quotient, remainder) = DenseOrSparsePolynomial::divide_with_q_and_r(
                &numerator_polynomial.into(),
                &divisor_polynomial.into(),
            )?;
            if !remainder.is_zero() {
                return None;
            }
            quotient
        };

        let quotient = srs
            .full_srs
            .commit_non_hiding(&quotient_poly, 1)
            .get_first_chunk();

        Some(KZGProof {
            quotient,
            blinding: blinding_factor,
        })
    }

    /// Verify a proof. Note that it only works for two elements for now, i.e.
    /// `elm` must be of size 2.
    /// Also, chunking is not supported.
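    ///
    /// A minimal usage sketch (illustrative only), checking a proof produced
    /// by [KZGProof::create] against the same SRS, evaluation points and
    /// batching challenge:
    ///
    /// ```ignore
    /// assert!(proof.verify(&srs, &evaluations, polyscale, &[zeta, zeta_omega]));
    /// ```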
    pub fn verify(
        &self,
        srs: &PairingSRS<Pair>,
        evaluations: &[Evaluation<G>], // commitments to the polynomials
        polyscale: F,                  // scaling factor for the polynomials
        elm: &[F],                     // vector of evaluation points
    ) -> bool {
        let poly_commitment: G::Group = {
            let mut scalars: Vec<F> = Vec::new();
            let mut points = Vec::new();
            combine_commitments(
                evaluations,
                &mut scalars,
                &mut points,
                polyscale,
                F::one(), /* TODO: This is inefficient */
            );
            let scalars: Vec<_> = scalars.iter().map(|x| x.into_bigint()).collect();

            G::Group::msm_bigint(&points, &scalars)
        };

        // IMPROVEME: we could have a single flat array for all evaluations, see
        // the same comment in `combine_evaluations`.
        let evals = combine_evaluations(evaluations, polyscale);
        let blinding_commitment = srs.full_srs.h.mul(self.blinding);
        // Taking the first element of the commitment, i.e. no support for
        // chunking.
        let divisor_commitment = srs
            .verifier_srs
            .commit_non_hiding(&divisor_polynomial(elm), 1)
            .get_first_chunk();
        // Taking the first element of the commitment, i.e. no support for
        // chunking.
        let eval_commitment = srs
            .full_srs
            .commit_non_hiding(&eval_polynomial(elm, &evals), 1)
            .get_first_chunk()
            .into_group();
        let numerator_commitment = { poly_commitment - eval_commitment - blinding_commitment };
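        // In other words, writing `C` for `poly_commitment`, `E` for
        // `eval_commitment`, `B` for `blinding_commitment`, `π` for the
        // quotient and `d` for the divisor polynomial of the evaluation
        // points, the check below is
        //   e(C - E - B, [1]_2) * e(-π, [d(τ)]_2) == 1,
        // i.e. C - E - B == d(τ) * π in G1.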
        // We compute the product of the two Miller loops so that only one
        // final exponentiation is applied.
        let to_loop_left = [
            ark_ec::pairing::prepare_g1::<Pair>(numerator_commitment),
            // Note that we negate the quotient here, to put everything on the
            // same side of the equation.
            ark_ec::pairing::prepare_g1::<Pair>(self.quotient.into_group().neg()),
        ];
        let to_loop_right = [
            ark_ec::pairing::prepare_g2::<Pair>(Pair::G2Affine::generator()),
            ark_ec::pairing::prepare_g2::<Pair>(divisor_commitment),
        ];
        // In additive notation, the result is
        // `numerator_commitment * 1 - quotient * divisor_commitment`,
        // which must be zero for the proof to be accepted.
        let res = Pair::multi_pairing(to_loop_left, to_loop_right);

        res.is_zero()
    }
}