kimchi_msm/verifier.rs

#![allow(clippy::type_complexity)]
#![allow(clippy::boxed_local)]

use crate::logup::LookupTableID;
use ark_ff::{Field, Zero};
use ark_poly::{
    univariate::DensePolynomial, EvaluationDomain, Evaluations, Polynomial,
    Radix2EvaluationDomain as R2D,
};
use rand::thread_rng;
use rayon::iter::{IntoParallelIterator, ParallelIterator};

use kimchi::{
    circuits::{
        berkeley_columns::BerkeleyChallenges,
        domains::EvaluationDomains,
        expr::{Constants, Expr, PolishToken},
    },
    curve::KimchiCurve,
    groupmap::GroupMap,
    plonk_sponge::FrSponge,
    proof::PointEvaluations,
};
use mina_poseidon::{sponge::ScalarChallenge, FqSponge};
use poly_commitment::{
    commitment::{
        absorb_commitment, combined_inner_product, BatchEvaluationProof, Evaluation, PolyComm,
    },
    OpenProof, SRS,
};

use crate::{expr::E, proof::Proof, witness::Witness};

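/// Verify a proof against the given constraints, fixed selectors and public
/// inputs. The verifier re-derives all challenges with the Fq- and Fr-sponges,
/// evaluates the combined constraint expression at ζ, and returns the result
/// of the batched opening proof verification.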
pub fn verify<
    G: KimchiCurve,
    OpeningProof: OpenProof<G>,
    EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
    EFrSponge: FrSponge<G::ScalarField>,
    const N_WIT: usize,
    const N_REL: usize,
    const N_DSEL: usize,
    const N_FSEL: usize,
    const NPUB: usize,
    ID: LookupTableID,
>(
    domain: EvaluationDomains<G::ScalarField>,
    srs: &OpeningProof::SRS,
    constraints: &[E<G::ScalarField>],
    fixed_selectors: Box<[Vec<G::ScalarField>; N_FSEL]>,
    proof: &Proof<N_WIT, N_REL, N_DSEL, N_FSEL, G, OpeningProof, ID>,
    public_inputs: Witness<NPUB, Vec<G::ScalarField>>,
) -> bool
where
    OpeningProof::SRS: Sync,
{
    let Proof {
        proof_comms,
        proof_evals,
        opening_proof,
    } = proof;

    ////////////////////////////////////////////////////////////////////////////
    // Re-evaluating public inputs
    ////////////////////////////////////////////////////////////////////////////
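    // The fixed selectors and the public inputs are known to the verifier, so
    // their commitments are recomputed here from scratch instead of being
    // taken from the proof.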

    let fixed_selectors_evals_d1: Box<[Evaluations<G::ScalarField, R2D<G::ScalarField>>; N_FSEL]> = {
        o1_utils::array::vec_to_boxed_array(
            fixed_selectors
                .into_par_iter()
                .map(|evals| Evaluations::from_vec_and_domain(evals, domain.d1))
                .collect(),
        )
    };

    let fixed_selectors_polys: Box<[DensePolynomial<G::ScalarField>; N_FSEL]> = {
        o1_utils::array::vec_to_boxed_array(
            fixed_selectors_evals_d1
                .into_par_iter()
                .map(|evals| evals.interpolate())
                .collect(),
        )
    };

    let fixed_selectors_comms: Box<[PolyComm<G>; N_FSEL]> = {
        let comm = |poly: &DensePolynomial<G::ScalarField>| srs.commit_non_hiding(poly, 1);
        o1_utils::array::vec_to_boxed_array(
            fixed_selectors_polys
                .as_ref()
                .into_par_iter()
                .map(comm)
                .collect(),
        )
    };

    // Build evaluations of the public input columns over d1; they are interpolated below.
    let public_input_evals_d1: Witness<NPUB, Evaluations<G::ScalarField, R2D<G::ScalarField>>> =
        public_inputs
            .into_par_iter()
            .map(|evals| {
                Evaluations::<G::ScalarField, R2D<G::ScalarField>>::from_vec_and_domain(
                    evals, domain.d1,
                )
            })
            .collect::<Witness<NPUB, Evaluations<G::ScalarField, R2D<G::ScalarField>>>>();

    let public_input_polys: Witness<NPUB, DensePolynomial<G::ScalarField>> = {
        let interpolate =
            |evals: Evaluations<G::ScalarField, R2D<G::ScalarField>>| evals.interpolate();
        public_input_evals_d1
            .into_par_iter()
            .map(interpolate)
            .collect::<Witness<NPUB, DensePolynomial<G::ScalarField>>>()
    };

    let public_input_comms: Witness<NPUB, PolyComm<G>> = {
        let comm = |poly: &DensePolynomial<G::ScalarField>| srs.commit_non_hiding(poly, 1);
        (&public_input_polys)
            .into_par_iter()
            .map(comm)
            .collect::<Witness<NPUB, PolyComm<G>>>()
    };

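    // The first NPUB witness columns must carry the public inputs: their
    // commitments in the proof have to match the commitments recomputed above.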
    assert!(
        NPUB <= N_WIT,
        "Number of public inputs exceeds number of witness columns"
    );
    for i in 0..NPUB {
        assert!(public_input_comms.cols[i] == proof_comms.witness_comms.cols[i]);
    }

    ////////////////////////////////////////////////////////////////////////////
    // Absorbing all the commitments to the columns
    ////////////////////////////////////////////////////////////////////////////
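    // Start of the Fiat-Shamir transcript: every commitment is absorbed into
    // the Fq-sponge before any challenge is sampled.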

    let mut fq_sponge = EFqSponge::new(G::other_curve_sponge_params());

    fixed_selectors_comms
        .as_ref()
        .iter()
        .chain(&proof_comms.witness_comms)
        .for_each(|comm| absorb_commitment(&mut fq_sponge, comm));

    ////////////////////////////////////////////////////////////////////////////
    // Logup
    ////////////////////////////////////////////////////////////////////////////

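    // When lookups are used, the absorption order mirrors the prover: the
    // multiplicity commitments first, then the joint_combiner and beta
    // challenges, followed by the h polynomials, the fixed tables and the
    // aggregation column. Without lookups, no challenge is sampled and beta
    // defaults to zero.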
    let (joint_combiner, beta) = {
        if let Some(logup_comms) = &proof_comms.logup_comms {
            // First, we absorb the multiplicity polynomials
            logup_comms.m.values().for_each(|comms| {
                comms
                    .iter()
                    .for_each(|comm| absorb_commitment(&mut fq_sponge, comm))
            });

            // FIXME @volhovm it seems that the verifier does not
            // actually check that the fixed tables used in the proof
            // are the fixed tables defined in the code. In other
            // words, all the currently used "fixed" tables are
            // runtime and can be chosen freely by the prover.

            // To generate the challenges
            let joint_combiner = fq_sponge.challenge();
            let beta = fq_sponge.challenge();

            // And now, we absorb the commitments to the other polynomials
            logup_comms.h.values().for_each(|comms| {
                comms
                    .iter()
                    .for_each(|comm| absorb_commitment(&mut fq_sponge, comm))
            });

            logup_comms
                .fixed_tables
                .values()
                .for_each(|comm| absorb_commitment(&mut fq_sponge, comm));

            // And at the end, the aggregation
            absorb_commitment(&mut fq_sponge, &logup_comms.sum);
            (Some(joint_combiner), beta)
        } else {
            (None, G::ScalarField::zero())
        }
    };

    // Sample α with the Fq-Sponge.
    let alpha = fq_sponge.challenge();

    ////////////////////////////////////////////////////////////////////////////
    // Quotient polynomial
    ////////////////////////////////////////////////////////////////////////////

    absorb_commitment(&mut fq_sponge, &proof_comms.t_comm);

    // -- Preparing for opening proof verification
    let zeta_chal = ScalarChallenge(fq_sponge.challenge());
    let (_, endo_r) = G::endos();
    let zeta: G::ScalarField = zeta_chal.to_field(endo_r);
    let omega = domain.d1.group_gen;
    let zeta_omega = zeta * omega;

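    // Every committed column is claimed to be evaluated at the two points ζ and
    // ζω; each commitment is paired with its claimed evaluations at both points
    // for the batched opening check.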
    let mut coms_and_evaluations: Vec<Evaluation<_>> = vec![];

    coms_and_evaluations.extend(
        (&proof_comms.witness_comms)
            .into_iter()
            .zip(&proof_evals.witness_evals)
            .map(|(commitment, point_eval)| Evaluation {
                commitment: commitment.clone(),
                evaluations: vec![vec![point_eval.zeta], vec![point_eval.zeta_omega]],
            }),
    );

    coms_and_evaluations.extend(
        (fixed_selectors_comms)
            .into_iter()
            .zip(proof_evals.fixed_selectors_evals.iter())
            .map(|(commitment, point_eval)| Evaluation {
                commitment: commitment.clone(),
                evaluations: vec![vec![point_eval.zeta], vec![point_eval.zeta_omega]],
            }),
    );

    if let Some(logup_comms) = &proof_comms.logup_comms {
        coms_and_evaluations.extend(
            logup_comms
                .into_iter()
                .zip(proof_evals.logup_evals.as_ref().unwrap())
                .map(|(commitment, point_eval)| Evaluation {
                    commitment: commitment.clone(),
                    evaluations: vec![vec![point_eval.zeta], vec![point_eval.zeta_omega]],
                })
                .collect::<Vec<_>>(),
        );
    }

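    // The Fr-sponge absorbs the digest of the Fq-sponge and then every claimed
    // evaluation, so the batching challenges sampled below depend on all of them.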
    // -- Absorb all coms_and_evaluations
    let fq_sponge_before_coms_and_evaluations = fq_sponge.clone();
    let mut fr_sponge = EFrSponge::new(G::sponge_params());
    fr_sponge.absorb(&fq_sponge.digest());

    for PointEvaluations { zeta, zeta_omega } in (&proof_evals.witness_evals).into_iter() {
        fr_sponge.absorb(zeta);
        fr_sponge.absorb(zeta_omega);
    }

    for PointEvaluations { zeta, zeta_omega } in proof_evals.fixed_selectors_evals.as_ref().iter() {
        fr_sponge.absorb(zeta);
        fr_sponge.absorb(zeta_omega);
    }

    if proof_comms.logup_comms.is_some() {
        // Logup FS
        for PointEvaluations { zeta, zeta_omega } in
            proof_evals.logup_evals.as_ref().unwrap().into_iter()
        {
            fr_sponge.absorb(zeta);
            fr_sponge.absorb(zeta_omega);
        }
    };

    // Compute [ft(X)] = \
    //   (1 - ζ^n) \
    //    ([t_0(X)] + ζ^n [t_1(X)] + ... + ζ^{kn} [t_{k}(X)])
    let ft_comm = {
        let evaluation_point_to_domain_size = zeta.pow([domain.d1.size]);
        let chunked_t_comm = proof_comms
            .t_comm
            .chunk_commitment(evaluation_point_to_domain_size);
        // (1 - ζ^n)
        let minus_vanishing_poly_at_zeta = -domain.d1.vanishing_polynomial().evaluate(&zeta);
        chunked_t_comm.scale(minus_vanishing_poly_at_zeta)
    };

    let challenges = BerkeleyChallenges::<G::ScalarField> {
        alpha,
        beta,
        gamma: G::ScalarField::zero(),
        joint_combiner: joint_combiner.unwrap_or(G::ScalarField::zero()),
    };

    let constants = Constants {
        endo_coefficient: *endo_r,
        mds: &G::sponge_params().mds,
        zk_rows: 0,
    };

    let combined_expr =
        Expr::combine_constraints(0..(constraints.len() as u32), constraints.to_vec());
    // Note the minus sign: ft evaluated at ζ (ft_eval0) is minus the evaluation of the combined expression.
    let ft_eval0 = -PolishToken::evaluate(
        combined_expr.to_polish().as_slice(),
        domain.d1,
        zeta,
        proof_evals,
        &constants,
        &challenges,
    )
    .unwrap();

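    // Add [ft] to the batch with its claimed evaluations ft_eval0 at ζ and
    // ft_eval1 at ζω. A successful opening of [ft] = (1 - ζ^n) [t] at ζ to
    // ft_eval0 = -E(ζ) enforces E(ζ) = t(ζ) (ζ^n - 1), i.e. the combined
    // constraint expression is divisible by the vanishing polynomial of d1.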
    coms_and_evaluations.push(Evaluation {
        commitment: ft_comm,
        evaluations: vec![vec![ft_eval0], vec![proof_evals.ft_eval1]],
    });

    fr_sponge.absorb(&proof_evals.ft_eval1);
    // -- End absorb all coms_and_evaluations

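    // Sample the batching challenges from the Fr-sponge: v combines the
    // different polynomials (polyscale) and u combines the two evaluation
    // points ζ and ζω (evalscale).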
    let v_chal = fr_sponge.challenge();
    let v = v_chal.to_field(endo_r);
    let u_chal = fr_sponge.challenge();
    let u = u_chal.to_field(endo_r);

    let combined_inner_product = {
        let es: Vec<_> = coms_and_evaluations
            .iter()
            .map(|Evaluation { evaluations, .. }| evaluations.clone())
            .collect();

        combined_inner_product(&v, &u, es.as_slice())
    };

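    // Assemble the batched opening claim and delegate the final check to the
    // opening proof verifier of the polynomial commitment scheme.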
    let batch = BatchEvaluationProof {
        sponge: fq_sponge_before_coms_and_evaluations,
        evaluations: coms_and_evaluations,
        evaluation_points: vec![zeta, zeta_omega],
        polyscale: v,
        evalscale: u,
        opening: opening_proof,
        combined_inner_product,
    };

    let group_map = G::Map::setup();
    OpeningProof::verify(srs, &group_map, &mut [batch], &mut thread_rng())
}