kimchi/prover.rs

//! This module implements the prover's zk-proof primitive.

use crate::{
    circuits::{
        argument::{Argument, ArgumentType},
        berkeley_columns::{BerkeleyChallenges, Environment, LookupEnvironment},
        constraints::zk_rows_strict_lower_bound,
        expr::{self, l0_1, Constants},
        gate::GateType,
        lookup::{self, runtime_tables::RuntimeTable, tables::combine_table_entry},
        polynomials::{
            complete_add::CompleteAdd,
            endomul_scalar::EndomulScalar,
            endosclmul::EndosclMul,
            foreign_field_add::circuitgates::ForeignFieldAdd,
            foreign_field_mul::{self, circuitgates::ForeignFieldMul},
            generic, permutation,
            poseidon::Poseidon,
            range_check::circuitgates::{RangeCheck0, RangeCheck1},
            rot::Rot64,
            varbasemul::VarbaseMul,
            xor::Xor16,
        },
        wires::{COLUMNS, PERMUTS},
    },
    curve::KimchiCurve,
    error::ProverError,
    lagrange_basis_evaluations::LagrangeBasisEvaluations,
    plonk_sponge::FrSponge,
    proof::{
        LookupCommitments, PointEvaluations, ProofEvaluations, ProverCommitments, ProverProof,
        RecursionChallenge,
    },
    prover_index::ProverIndex,
    verifier_index::VerifierIndex,
};
use ark_ff::{FftField, Field, One, PrimeField, UniformRand, Zero};
use ark_poly::{
    univariate::DensePolynomial, DenseUVPolynomial, EvaluationDomain, Evaluations, Polynomial,
    Radix2EvaluationDomain as D,
};
use core::array;
use itertools::Itertools;
use mina_poseidon::{sponge::ScalarChallenge, FqSponge};
use o1_utils::ExtendedDensePolynomial as _;
use poly_commitment::{
    commitment::{
        absorb_commitment, b_poly_coefficients, BlindedCommitment, CommitmentCurve, PolyComm,
    },
    utils::DensePolynomialOrEvaluations,
    OpenProof, SRS as _,
};
use rand_core::{CryptoRng, RngCore};
use rayon::prelude::*;
use std::collections::HashMap;

/// The result of a proof creation or verification.
type Result<T> = core::result::Result<T, ProverError>;

/// Helper to quickly test if a witness satisfies a constraint
macro_rules! check_constraint {
    ($index:expr, $evaluation:expr) => {{
        check_constraint!($index, stringify!($evaluation), $evaluation);
    }};
    ($index:expr, $label:expr, $evaluation:expr) => {{
        if cfg!(debug_assertions) {
            let (_, res) = $evaluation
                .interpolate_by_ref()
                .divide_by_vanishing_poly($index.cs.domain.d1)
                .unwrap();
            if !res.is_zero() {
                panic!("couldn't divide by vanishing polynomial: {}", $label);
            }
        }
    }};
}

/// Contains variables needed for lookup in the prover algorithm.
#[derive(Default)]
struct LookupContext<G, F>
where
    G: CommitmentCurve,
    F: FftField,
{
    /// The joint combiner used to join the columns of lookup tables
    joint_combiner: Option<F>,

    /// The power of the joint_combiner that can be used to add a table_id column
    /// to the concatenated lookup tables.
    table_id_combiner: Option<F>,

    /// The combined lookup entry that can be used as a dummy value
    dummy_lookup_value: Option<F>,

    /// The combined lookup table
    joint_lookup_table: Option<DensePolynomial<F>>,
    joint_lookup_table_d8: Option<Evaluations<F, D<F>>>,

    /// The sorted polynomials `s` in different forms
    sorted: Option<Vec<Evaluations<F, D<F>>>>,
    sorted_coeffs: Option<Vec<DensePolynomial<F>>>,
    sorted_comms: Option<Vec<BlindedCommitment<G>>>,
    sorted8: Option<Vec<Evaluations<F, D<F>>>>,

    /// The aggregation polynomial in different forms
    aggreg_coeffs: Option<DensePolynomial<F>>,
    aggreg_comm: Option<BlindedCommitment<G>>,
    aggreg8: Option<Evaluations<F, D<F>>>,

    // lookup-related evaluations
    /// evaluation of lookup aggregation polynomial
    pub lookup_aggregation_eval: Option<PointEvaluations<Vec<F>>>,
    /// evaluation of lookup table polynomial
    pub lookup_table_eval: Option<PointEvaluations<Vec<F>>>,
    /// evaluation of lookup sorted polynomials
    pub lookup_sorted_eval: [Option<PointEvaluations<Vec<F>>>; 5],
    /// evaluation of runtime lookup table polynomial
    pub runtime_lookup_table_eval: Option<PointEvaluations<Vec<F>>>,

    /// Runtime table
    runtime_table: Option<DensePolynomial<F>>,
    runtime_table_d8: Option<Evaluations<F, D<F>>>,
    runtime_table_comm: Option<BlindedCommitment<G>>,
    runtime_second_col_d8: Option<Evaluations<F, D<F>>>,
}

impl<G: KimchiCurve, OpeningProof: OpenProof<G>> ProverProof<G, OpeningProof>
where
    G::BaseField: PrimeField,
{
    /// This function constructs the prover's zk-proof from the witness and the `ProverIndex` against an SRS instance.
    ///
    /// # Errors
    ///
    /// Will return an error if the `create_recursive` process fails.
    pub fn create<
        EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
        EFrSponge: FrSponge<G::ScalarField>,
        RNG: RngCore + CryptoRng,
    >(
        groupmap: &G::Map,
        witness: [Vec<G::ScalarField>; COLUMNS],
        runtime_tables: &[RuntimeTable<G::ScalarField>],
        index: &ProverIndex<G, OpeningProof>,
        rng: &mut RNG,
    ) -> Result<Self>
    where
        VerifierIndex<G, OpeningProof>: Clone,
    {
        Self::create_recursive::<EFqSponge, EFrSponge, RNG>(
            groupmap,
            witness,
            runtime_tables,
            index,
            Vec::new(),
            None,
            rng,
        )
    }

    /// This function constructs the prover's recursive zk-proof from the witness and
    /// the `ProverIndex` against an SRS instance.
    ///
    /// # Errors
    ///
    /// Will return an error if inputs (like `lookup_context.joint_lookup_table_d8`)
    /// are `None`.
    ///
    /// # Panics
    ///
    /// Will panic if `lookup_context.joint_lookup_table_d8` is `None`.
    pub fn create_recursive<
        EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
        EFrSponge: FrSponge<G::ScalarField>,
        RNG: RngCore + CryptoRng,
    >(
        group_map: &G::Map,
        mut witness: [Vec<G::ScalarField>; COLUMNS],
        runtime_tables: &[RuntimeTable<G::ScalarField>],
        index: &ProverIndex<G, OpeningProof>,
        prev_challenges: Vec<RecursionChallenge<G>>,
        blinders: Option<[Option<PolyComm<G::ScalarField>>; COLUMNS]>,
        rng: &mut RNG,
    ) -> Result<Self>
    where
        VerifierIndex<G, OpeningProof>: Clone,
    {
        internal_tracing::checkpoint!(internal_traces; create_recursive);
        let d1_size = index.cs.domain.d1.size();

        let (_, endo_r) = G::endos();

        let num_chunks = if d1_size < index.max_poly_size {
            1
        } else {
            d1_size / index.max_poly_size
        };
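        // Note: `num_chunks` is the number of pieces each polynomial is split into when
        // the circuit domain is larger than the SRS size `max_poly_size`; commitments
        // and evaluations below are then produced chunk by chunk.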

        // Verify the circuit satisfiability by the computed witness (barring plookup constraints).
        // Catch mistakes before proof generation.
        if cfg!(debug_assertions) && !index.cs.disable_gates_checks {
            let public = witness[0][0..index.cs.public].to_vec();
            index.verify(&witness, &public).expect("incorrect witness");
        }

        //~ 1. Ensure we have room in the witness for the zero-knowledge rows.
        //~    We currently expect the witness not to be of the same length as the domain,
        //~    but instead be of the length of the (smaller) circuit.
        //~    If we cannot add `zk_rows` rows to the columns of the witness before reaching
        //~    the size of the domain, abort.
        let length_witness = witness[0].len();
        let length_padding = d1_size
            .checked_sub(length_witness)
            .ok_or(ProverError::NoRoomForZkInWitness)?;

        let zero_knowledge_limit = zk_rows_strict_lower_bound(num_chunks);
        // Because the lower bound is strict, the result of the function above
        // is not a sufficient number of zero knowledge rows, so the error must
        // be raised anytime the number of zero knowledge rows is not greater
        // than the strict lower bound.
        // Example:
        //   for 1 chunk, `zero_knowledge_limit` is 2, and we need at least 3,
        //   thus the error should be raised and the message should say that the
        //   expected number of zero knowledge rows is 3 (hence the + 1).
        if (index.cs.zk_rows as usize) <= zero_knowledge_limit {
            return Err(ProverError::NotZeroKnowledge(
                zero_knowledge_limit + 1,
                index.cs.zk_rows as usize,
            ));
        }

        if length_padding < index.cs.zk_rows as usize {
            return Err(ProverError::NoRoomForZkInWitness);
        }

        //~ 1. Pad the witness columns with Zero gates to make them the same length as the domain.
        //~    Then, randomize the last `zk_rows` rows of each column.
        internal_tracing::checkpoint!(internal_traces; pad_witness);
        for w in &mut witness {
            if w.len() != length_witness {
                return Err(ProverError::WitnessCsInconsistent);
            }

            // padding
            w.extend(core::iter::repeat(G::ScalarField::zero()).take(length_padding));

            // zk-rows
            for row in w.iter_mut().rev().take(index.cs.zk_rows as usize) {
                *row = <G::ScalarField as UniformRand>::rand(rng);
            }
        }

        //~ 1. Set up the Fq-Sponge.
        internal_tracing::checkpoint!(internal_traces; set_up_fq_sponge);
        let mut fq_sponge = EFqSponge::new(G::other_curve_sponge_params());

        //~ 1. Absorb the digest of the VerifierIndex.
        let verifier_index_digest = index.verifier_index_digest::<EFqSponge>();
        fq_sponge.absorb_fq(&[verifier_index_digest]);

        //~ 1. Absorb the commitments of the previous challenges with the Fq-sponge.
        for RecursionChallenge { comm, .. } in &prev_challenges {
            absorb_commitment(&mut fq_sponge, comm)
        }

        //~ 1. Compute the negated public input polynomial as
        //~    the polynomial that evaluates to $-p_i$ for the first `public_input_size` values of the domain,
        //~    and $0$ for the rest.
        let public = witness[0][0..index.cs.public].to_vec();
        let public_poly = -Evaluations::<G::ScalarField, D<G::ScalarField>>::from_vec_and_domain(
            public,
            index.cs.domain.d1,
        )
        .interpolate();

        //~ 1. Commit (non-hiding) to the negated public input polynomial.
        let public_comm = index.srs.commit_non_hiding(&public_poly, num_chunks);
        let public_comm = {
            index
                .srs
                .mask_custom(
                    public_comm.clone(),
                    &public_comm.map(|_| G::ScalarField::one()),
                )
                .unwrap()
                .commitment
        };
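        // Note: the public input commitment is masked with a fixed blinder of one rather
        // than a random one, so it stays deterministic and the verifier can derive the
        // same masked commitment on its side.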

        //~ 1. Absorb the commitment to the public polynomial with the Fq-Sponge.
        //~
        //~    Note: unlike the original PLONK protocol,
        //~    the prover also provides evaluations of the public polynomial to help the verifier circuit.
        //~    This is why we need to absorb the commitment to the public polynomial at this point.
        absorb_commitment(&mut fq_sponge, &public_comm);

        //~ 1. Commit to the witness columns by creating `COLUMNS` hiding commitments.
        //~
        //~    Note: since the witness is in evaluation form,
        //~    we can use the `commit_evaluation` optimization.
        internal_tracing::checkpoint!(internal_traces; commit_to_witness_columns);
        // generate blinders if not given externally
        let blinders_final: Vec<PolyComm<G::ScalarField>> = match blinders {
            None => (0..COLUMNS)
                .map(|_| PolyComm::new(vec![UniformRand::rand(rng); num_chunks]))
                .collect(),
            Some(blinders_arr) => blinders_arr
                .into_iter()
                .map(|blinder_el| match blinder_el {
                    None => PolyComm::new(vec![UniformRand::rand(rng); num_chunks]),
                    Some(blinder_el_some) => blinder_el_some,
                })
                .collect(),
        };
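        // Each witness column gets `num_chunks` blinding scalars: either the ones passed
        // in by the caller, or freshly sampled ones when none were provided.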
        let w_comm_opt_res: Vec<Result<_>> = witness
            .clone()
            .into_par_iter()
            .zip(blinders_final.into_par_iter())
            .map(|(witness, blinder)| {
                let witness_eval =
                    Evaluations::<G::ScalarField, D<G::ScalarField>>::from_vec_and_domain(
                        witness,
                        index.cs.domain.d1,
                    );

                // TODO: make this a function instead? mask_with_custom()
                let witness_com = index
                    .srs
                    .commit_evaluations_non_hiding(index.cs.domain.d1, &witness_eval);
                let com = index
                    .srs
                    .mask_custom(witness_com, &blinder)
                    .map_err(ProverError::WrongBlinders)?;

                Ok(com)
            })
            .collect();

        let w_comm_res: Result<Vec<BlindedCommitment<G>>> = w_comm_opt_res.into_iter().collect();

        let w_comm = w_comm_res?;

        let w_comm: [BlindedCommitment<G>; COLUMNS] = w_comm
            .try_into()
            .expect("previous loop is of the correct length");

        //~ 1. Absorb the witness commitments with the Fq-Sponge.
        w_comm
            .iter()
            .for_each(|c| absorb_commitment(&mut fq_sponge, &c.commitment));

        //~ 1. Compute the witness polynomials by interpolating each of the `COLUMNS` witness columns.
        //~    As mentioned above, we commit using the evaluations form rather than the coefficients
        //~    form so we can take advantage of the sparsity of the evaluations (i.e., there are many
        //~    0 entries and entries that have less-than-full-size field elements).
        let witness_poly: [DensePolynomial<G::ScalarField>; COLUMNS] = (0..COLUMNS)
            .into_par_iter()
            .map(|i| {
                Evaluations::<G::ScalarField, D<G::ScalarField>>::from_vec_and_domain(
                    witness[i].clone(),
                    index.cs.domain.d1,
                )
                .interpolate()
            })
            .collect::<Vec<_>>()
            .try_into()
            .unwrap();

        let mut lookup_context = LookupContext::default();

        //~ 1. If using lookup:
        let lookup_constraint_system = index
            .cs
            .lookup_constraint_system
            .try_get_or_err()
            .map_err(ProverError::from)?;
        if let Some(lcs) = lookup_constraint_system {
            internal_tracing::checkpoint!(internal_traces; use_lookup, {
                "uses_lookup": true,
                "uses_runtime_tables": lcs.runtime_tables.is_some(),
            });
            //~~ * if using runtime tables:
            if let Some(cfg_runtime_tables) = &lcs.runtime_tables {
                //~~~ * check that all the provided runtime tables have length and IDs that match the runtime table configuration of the index
                //~~~   we expect the given runtime tables to be sorted as configured; this makes it easier afterwards
                let expected_runtime: Vec<_> = cfg_runtime_tables
                    .iter()
                    .map(|rt| (rt.id, rt.len))
                    .collect();
                let runtime: Vec<_> = runtime_tables
                    .iter()
                    .map(|rt| (rt.id, rt.data.len()))
                    .collect();
                if expected_runtime != runtime {
                    return Err(ProverError::RuntimeTablesInconsistent);
                }

                //~~~ * calculate the contribution to the second column of the lookup table
                //~~~   (the runtime vector)
                let (runtime_table_contribution, runtime_table_contribution_d8) = {
                    let mut offset = lcs
                        .runtime_table_offset
                        .expect("runtime configuration missing offset");

                    let mut evals = vec![G::ScalarField::zero(); d1_size];
                    for rt in runtime_tables {
                        let range = offset..(offset + rt.data.len());
                        evals[range].copy_from_slice(&rt.data);
                        offset += rt.data.len();
                    }

                    // zero-knowledge
                    for e in evals.iter_mut().rev().take(index.cs.zk_rows as usize) {
                        *e = <G::ScalarField as UniformRand>::rand(rng);
                    }

                    // get coeff and evaluation form
                    let runtime_table_contribution =
                        Evaluations::from_vec_and_domain(evals, index.cs.domain.d1).interpolate();

                    let runtime_table_contribution_d8 =
                        runtime_table_contribution.evaluate_over_domain_by_ref(index.cs.domain.d8);

                    (runtime_table_contribution, runtime_table_contribution_d8)
                };

                // commit the runtime polynomial
                // (and save it to the proof)
                let runtime_table_comm =
                    index
                        .srs
                        .commit(&runtime_table_contribution, num_chunks, rng);

                // absorb the commitment
                absorb_commitment(&mut fq_sponge, &runtime_table_comm.commitment);

                // pre-compute the updated second column of the lookup table
                let mut second_column_d8 = runtime_table_contribution_d8.clone();
                second_column_d8
                    .evals
                    .par_iter_mut()
                    .enumerate()
                    .for_each(|(row, e)| {
                        *e += lcs.lookup_table8[1][row];
                    });

                lookup_context.runtime_table = Some(runtime_table_contribution);
                lookup_context.runtime_table_d8 = Some(runtime_table_contribution_d8);
                lookup_context.runtime_table_comm = Some(runtime_table_comm);
                lookup_context.runtime_second_col_d8 = Some(second_column_d8);
            }

            //~~ * If queries involve a lookup table with multiple columns
            //~~   then squeeze the Fq-Sponge to obtain the joint combiner challenge $j'$,
            //~~   otherwise set the joint combiner challenge $j'$ to $0$.
            let joint_combiner = if lcs.configuration.lookup_info.features.joint_lookup_used {
                fq_sponge.challenge()
            } else {
                G::ScalarField::zero()
            };

            //~~ * Derive the scalar joint combiner $j$ from $j'$ using the endomorphism (TODO: specify)
            let joint_combiner: G::ScalarField = ScalarChallenge(joint_combiner).to_field(endo_r);

            //~~ * If multiple lookup tables are involved,
            //~~   set the `table_id_combiner` as the $j^i$ with $i$ the maximum width of any used table.
            //~~   Essentially, this is to add a last column of table ids to the concatenated lookup tables.
            let table_id_combiner: G::ScalarField = if lcs.table_ids8.as_ref().is_some() {
                joint_combiner.pow([lcs.configuration.lookup_info.max_joint_size as u64])
            } else {
                // TODO: just set this to None in case multiple tables are not used
                G::ScalarField::zero()
            };
            lookup_context.table_id_combiner = Some(table_id_combiner);

            //~~ * Compute the dummy lookup value as the combination of the last entry of the XOR table (so `(0, 0, 0)`).
            //~~   Warning: This assumes that we always use the XOR table when using lookups.
            let dummy_lookup_value = lcs
                .configuration
                .dummy_lookup
                .evaluate(&joint_combiner, &table_id_combiner);
            lookup_context.dummy_lookup_value = Some(dummy_lookup_value);

            //~~ * Compute the lookup table values as the combination of the lookup table entries.
            let joint_lookup_table_d8 = {
                let mut evals = Vec::with_capacity(d1_size);

                for idx in 0..(d1_size * 8) {
                    let table_id = match lcs.table_ids8.as_ref() {
                        Some(table_ids8) => table_ids8.evals[idx],
                        None =>
                        // If there is no `table_ids8` in the constraint system,
                        // every table ID is identically 0.
                        {
                            G::ScalarField::zero()
                        }
                    };

                    let combined_entry =
                        if !lcs.configuration.lookup_info.features.uses_runtime_tables {
                            let table_row = lcs.lookup_table8.iter().map(|e| &e.evals[idx]);

                            combine_table_entry(
                                &joint_combiner,
                                &table_id_combiner,
                                table_row,
                                &table_id,
                            )
                        } else {
                            // if runtime tables are used, the second column is modified
                            let second_col = lookup_context.runtime_second_col_d8.as_ref().unwrap();

                            let table_row = lcs.lookup_table8.iter().enumerate().map(|(col, e)| {
                                if col == 1 {
                                    &second_col.evals[idx]
                                } else {
                                    &e.evals[idx]
                                }
                            });

                            combine_table_entry(
                                &joint_combiner,
                                &table_id_combiner,
                                table_row,
                                &table_id,
                            )
                        };
                    evals.push(combined_entry);
                }

                Evaluations::from_vec_and_domain(evals, index.cs.domain.d8)
            };
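            // In effect, each table row `(t_0, ..., t_{k-1})` with identifier `id` is
            // collapsed by `combine_table_entry` into a single field element, roughly
            //   sum_i joint_combiner^i * t_i + table_id_combiner * id.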

            // TODO: This interpolation is avoidable.
            let joint_lookup_table = joint_lookup_table_d8.interpolate_by_ref();

            //~~ * Compute the sorted evaluations.
            // TODO: Once we switch to committing using lagrange commitments,
            // `witness` will be consumed when we interpolate, so interpolation will
            // have to be moved below this.
            let sorted: Vec<_> = lookup::constraints::sorted(
                dummy_lookup_value,
                &joint_lookup_table_d8,
                index.cs.domain.d1,
                &index.cs.gates,
                &witness,
                joint_combiner,
                table_id_combiner,
                &lcs.configuration.lookup_info,
                index.cs.zk_rows as usize,
            )?;

            //~~ * Randomize the last `zk_rows` rows in each of the sorted polynomials
            //~~   in order to add zero-knowledge to the protocol.
            let sorted: Vec<_> = sorted
                .into_iter()
                .map(|chunk| {
                    lookup::constraints::zk_patch(
                        chunk,
                        index.cs.domain.d1,
                        index.cs.zk_rows as usize,
                        rng,
                    )
                })
                .collect();

            //~~ * Commit each of the sorted polynomials.
            let sorted_comms: Vec<_> = sorted
                .iter()
                .map(|v| index.srs.commit_evaluations(index.cs.domain.d1, v, rng))
                .collect();

            //~~ * Absorb each commitment to the sorted polynomials.
            sorted_comms
                .iter()
                .for_each(|c| absorb_commitment(&mut fq_sponge, &c.commitment));

            // precompute different forms of the sorted polynomials for later
            // TODO: We can avoid storing these coefficients.
            let sorted_coeffs: Vec<_> = sorted.iter().map(|e| e.clone().interpolate()).collect();
            let sorted8: Vec<_> = sorted_coeffs
                .iter()
                .map(|v| v.evaluate_over_domain_by_ref(index.cs.domain.d8))
                .collect();

            lookup_context.joint_combiner = Some(joint_combiner);
            lookup_context.sorted = Some(sorted);
            lookup_context.sorted_coeffs = Some(sorted_coeffs);
            lookup_context.sorted_comms = Some(sorted_comms);
            lookup_context.sorted8 = Some(sorted8);
            lookup_context.joint_lookup_table_d8 = Some(joint_lookup_table_d8);
            lookup_context.joint_lookup_table = Some(joint_lookup_table);
        }

        //~ 1. Sample $\beta$ with the Fq-Sponge.
        let beta = fq_sponge.challenge();

        //~ 1. Sample $\gamma$ with the Fq-Sponge.
        let gamma = fq_sponge.challenge();

        //~ 1. If using lookup:
        if let Some(lcs) = lookup_constraint_system {
            //~~ * Compute the lookup aggregation polynomial.
            let joint_lookup_table_d8 = lookup_context.joint_lookup_table_d8.as_ref().unwrap();

            let aggreg = lookup::constraints::aggregation::<_, G::ScalarField>(
                lookup_context.dummy_lookup_value.unwrap(),
                joint_lookup_table_d8,
                index.cs.domain.d1,
                &index.cs.gates,
                &witness,
                &lookup_context.joint_combiner.unwrap(),
                &lookup_context.table_id_combiner.unwrap(),
                beta,
                gamma,
                lookup_context.sorted.as_ref().unwrap(),
                rng,
                &lcs.configuration.lookup_info,
                index.cs.zk_rows as usize,
            )?;

            //~~ * Commit to the aggregation polynomial.
            let aggreg_comm = index
                .srs
                .commit_evaluations(index.cs.domain.d1, &aggreg, rng);

            //~~ * Absorb the commitment to the aggregation polynomial with the Fq-Sponge.
            absorb_commitment(&mut fq_sponge, &aggreg_comm.commitment);

            // precompute different forms of the aggregation polynomial for later
            let aggreg_coeffs = aggreg.interpolate();
            // TODO: There's probably a clever way to expand the domain without
            // interpolating
            let aggreg8 = aggreg_coeffs.evaluate_over_domain_by_ref(index.cs.domain.d8);

            lookup_context.aggreg_comm = Some(aggreg_comm);
            lookup_context.aggreg_coeffs = Some(aggreg_coeffs);
            lookup_context.aggreg8 = Some(aggreg8);
        }

        let column_evaluations = index.column_evaluations.get();

        //~ 1. Compute the permutation aggregation polynomial $z$.
        internal_tracing::checkpoint!(internal_traces; z_permutation_aggregation_polynomial);
        let z_poly = index.perm_aggreg(&witness, &beta, &gamma, rng)?;

        //~ 1. Commit (hiding) to the permutation aggregation polynomial $z$.
        let z_comm = index.srs.commit(&z_poly, num_chunks, rng);

        //~ 1. Absorb the permutation aggregation polynomial $z$ with the Fq-Sponge.
        absorb_commitment(&mut fq_sponge, &z_comm.commitment);

        //~ 1. Sample $\alpha'$ with the Fq-Sponge.
        let alpha_chal = ScalarChallenge(fq_sponge.challenge());

        //~ 1. Derive $\alpha$ from $\alpha'$ using the endomorphism (TODO: details)
        let alpha: G::ScalarField = alpha_chal.to_field(endo_r);

        //~ 1. TODO: instantiate alpha?
        let mut all_alphas = index.powers_of_alpha.clone();
        all_alphas.instantiate(alpha);

        //~ 1. Compute the quotient polynomial (the $t$ in $f = Z_H \cdot t$).
        //~    The quotient polynomial is computed by adding all these polynomials together:
        //~~ * the combined constraints for all the gates
        //~~ * the combined constraints for the permutation
        //~~ * TODO: lookup
        //~~ * the negated public polynomial
        //~    and by then dividing the resulting polynomial with the vanishing polynomial $Z_H$.
        //~    TODO: specify the split of the permutation polynomial into perm and bnd?
        let lookup_env = if let Some(lcs) = lookup_constraint_system {
            let joint_lookup_table_d8 = lookup_context.joint_lookup_table_d8.as_ref().unwrap();

            Some(LookupEnvironment {
                aggreg: lookup_context.aggreg8.as_ref().unwrap(),
                sorted: lookup_context.sorted8.as_ref().unwrap(),
                selectors: &lcs.lookup_selectors,
                table: joint_lookup_table_d8,
                runtime_selector: lcs.runtime_selector.as_ref(),
                runtime_table: lookup_context.runtime_table_d8.as_ref(),
            })
        } else {
            None
        };

        internal_tracing::checkpoint!(internal_traces; eval_witness_polynomials_over_domains);
        let lagrange = index.cs.evaluate(&witness_poly, &z_poly);
        internal_tracing::checkpoint!(internal_traces; compute_index_evals);
        let env = {
            let mut index_evals = HashMap::new();
            use GateType::*;
            index_evals.insert(Generic, &column_evaluations.generic_selector4);
            index_evals.insert(Poseidon, &column_evaluations.poseidon_selector8);
            index_evals.insert(CompleteAdd, &column_evaluations.complete_add_selector4);
            index_evals.insert(VarBaseMul, &column_evaluations.mul_selector8);
            index_evals.insert(EndoMul, &column_evaluations.emul_selector8);
            index_evals.insert(EndoMulScalar, &column_evaluations.endomul_scalar_selector8);

            if let Some(selector) = &column_evaluations.range_check0_selector8 {
                index_evals.insert(GateType::RangeCheck0, selector);
            }

            if let Some(selector) = &column_evaluations.range_check1_selector8 {
                index_evals.insert(GateType::RangeCheck1, selector);
            }

            if let Some(selector) = &column_evaluations.foreign_field_add_selector8 {
                index_evals.insert(GateType::ForeignFieldAdd, selector);
            }

            if let Some(selector) = &column_evaluations.foreign_field_mul_selector8 {
                index_evals.extend(
                    foreign_field_mul::gadget::circuit_gates()
                        .iter()
                        .map(|gate_type| (*gate_type, selector)),
                );
            }

            if let Some(selector) = &column_evaluations.xor_selector8 {
                index_evals.insert(GateType::Xor16, selector);
            }

            if let Some(selector) = &column_evaluations.rot_selector8 {
                index_evals.insert(GateType::Rot64, selector);
            }

            let mds = &G::sponge_params().mds;
            Environment {
                constants: Constants {
                    endo_coefficient: index.cs.endo,
                    mds,
                    zk_rows: index.cs.zk_rows,
                },
                challenges: BerkeleyChallenges {
                    alpha,
                    beta,
                    gamma,
                    joint_combiner: lookup_context
                        .joint_combiner
                        .unwrap_or(G::ScalarField::zero()),
                },
                witness: &lagrange.d8.this.w,
                coefficient: &column_evaluations.coefficients8,
                vanishes_on_zero_knowledge_and_previous_rows: &index
                    .cs
                    .precomputations()
                    .vanishes_on_zero_knowledge_and_previous_rows,
                z: &lagrange.d8.this.z,
                l0_1: l0_1(index.cs.domain.d1),
                domain: index.cs.domain,
                index: index_evals,
                lookup: lookup_env,
            }
        };

        let mut cache = expr::Cache::default();

        internal_tracing::checkpoint!(internal_traces; compute_quotient_poly);

        let quotient_poly = {
            // generic
            let mut t4 = {
                let generic_constraint =
                    generic::Generic::combined_constraints(&all_alphas, &mut cache);
                let generic4 = generic_constraint.evaluations(&env);

                if cfg!(debug_assertions) {
                    let p4 = public_poly.evaluate_over_domain_by_ref(index.cs.domain.d4);
                    let gen_minus_pub = &generic4 + &p4;

                    check_constraint!(index, gen_minus_pub);
                }

                generic4
            };

            // permutation
            let (mut t8, bnd) = {
                let alphas =
                    all_alphas.get_alphas(ArgumentType::Permutation, permutation::CONSTRAINTS);
                let (perm, bnd) = index.perm_quot(&lagrange, beta, gamma, &z_poly, alphas)?;

                check_constraint!(index, perm);

                (perm, bnd)
            };

            {
                use crate::circuits::argument::DynArgument;

                let range_check0_enabled = column_evaluations.range_check0_selector8.is_some();
                let range_check1_enabled = column_evaluations.range_check1_selector8.is_some();
                let foreign_field_addition_enabled =
                    column_evaluations.foreign_field_add_selector8.is_some();
                let foreign_field_multiplication_enabled =
                    column_evaluations.foreign_field_mul_selector8.is_some();
                let xor_enabled = column_evaluations.xor_selector8.is_some();
                let rot_enabled = column_evaluations.rot_selector8.is_some();

                for gate in [
                    (
                        (&CompleteAdd::default() as &dyn DynArgument<G::ScalarField>),
                        true,
                    ),
                    (&VarbaseMul::default(), true),
                    (&EndosclMul::default(), true),
                    (&EndomulScalar::default(), true),
                    (&Poseidon::default(), true),
                    // Range check gates
                    (&RangeCheck0::default(), range_check0_enabled),
                    (&RangeCheck1::default(), range_check1_enabled),
                    // Foreign field addition gate
                    (&ForeignFieldAdd::default(), foreign_field_addition_enabled),
                    // Foreign field multiplication gate
                    (
                        &ForeignFieldMul::default(),
                        foreign_field_multiplication_enabled,
                    ),
                    // Xor gate
                    (&Xor16::default(), xor_enabled),
                    // Rot gate
                    (&Rot64::default(), rot_enabled),
                ]
                .into_iter()
                .filter_map(|(gate, is_enabled)| if is_enabled { Some(gate) } else { None })
                {
                    let constraint = gate.combined_constraints(&all_alphas, &mut cache);
                    let eval = constraint.evaluations(&env);
                    if eval.domain().size == t4.domain().size {
                        t4 += &eval;
                    } else if eval.domain().size == t8.domain().size {
                        t8 += &eval;
                    } else {
                        panic!("Bad evaluation")
                    }
                    check_constraint!(index, format!("{:?}", gate.argument_type()), eval);
                }
            };

            // lookup
            {
                if let Some(lcs) = lookup_constraint_system {
                    let constraints = lookup::constraints::constraints(&lcs.configuration, false);
                    let constraints_len = u32::try_from(constraints.len())
                        .expect("not expecting a large amount of constraints");
                    let lookup_alphas =
                        all_alphas.get_alphas(ArgumentType::Lookup, constraints_len);

                    // as lookup constraints are computed with the expression framework,
                    // each of them can result in Evaluations of different domains
                    for (ii, (constraint, alpha_pow)) in
                        constraints.into_iter().zip_eq(lookup_alphas).enumerate()
                    {
                        let mut eval = constraint.evaluations(&env);
                        eval.evals.par_iter_mut().for_each(|x| *x *= alpha_pow);

                        if eval.domain().size == t4.domain().size {
                            t4 += &eval;
                        } else if eval.domain().size == t8.domain().size {
                            t8 += &eval;
                        } else if eval.evals.iter().all(|x| x.is_zero()) {
                            // Skip any 0-valued evaluations
                        } else {
                            panic!("Bad evaluation")
                        }

                        check_constraint!(index, format!("lookup constraint #{ii}"), eval);
                    }
                }
            }

            // public polynomial
            let mut f = t4.interpolate() + t8.interpolate();
            f += &public_poly;

            // divide contributions with vanishing polynomial
            let (mut quotient, res) = f
                .divide_by_vanishing_poly(index.cs.domain.d1)
                .ok_or(ProverError::Prover("division by vanishing polynomial"))?;
            if !res.is_zero() {
                return Err(ProverError::Prover(
                    "rest of division by vanishing polynomial",
                ));
            }

            quotient += &bnd; // already divided by Z_H
            quotient
        };

        //~ 1. commit (hiding) to the quotient polynomial $t$
        let t_comm = { index.srs.commit(&quotient_poly, 7 * num_chunks, rng) };
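        // The constraints above are evaluated over domains of size up to 8 * |d1|, so after
        // dividing by the vanishing polynomial the quotient `t` has degree up to roughly
        // 7 * |d1|, hence the `7 * num_chunks` chunks in its commitment.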

        //~ 1. Absorb the commitment of the quotient polynomial with the Fq-Sponge.
        absorb_commitment(&mut fq_sponge, &t_comm.commitment);

        //~ 1. Sample $\zeta'$ with the Fq-Sponge.
        let zeta_chal = ScalarChallenge(fq_sponge.challenge());

        //~ 1. Derive $\zeta$ from $\zeta'$ using the endomorphism (TODO: specify)
        let zeta = zeta_chal.to_field(endo_r);

        let omega = index.cs.domain.d1.group_gen;
        let zeta_omega = zeta * omega;

        //~ 1. If lookup is used, evaluate the following polynomials at $\zeta$ and $\zeta \omega$:
        if lookup_constraint_system.is_some() {
            //~~ * the aggregation polynomial
            let aggreg = lookup_context
                .aggreg_coeffs
                .as_ref()
                .unwrap()
                .to_chunked_polynomial(num_chunks, index.max_poly_size);

            //~~ * the sorted polynomials
            let sorted = lookup_context
                .sorted_coeffs
                .as_ref()
                .unwrap()
                .iter()
                .map(|c| c.to_chunked_polynomial(num_chunks, index.max_poly_size))
                .collect::<Vec<_>>();

            //~~ * the table polynomial
            let joint_table = lookup_context.joint_lookup_table.as_ref().unwrap();
            let joint_table = joint_table.to_chunked_polynomial(num_chunks, index.max_poly_size);

            lookup_context.lookup_aggregation_eval = Some(PointEvaluations {
                zeta: aggreg.evaluate_chunks(zeta),
                zeta_omega: aggreg.evaluate_chunks(zeta_omega),
            });
            lookup_context.lookup_table_eval = Some(PointEvaluations {
                zeta: joint_table.evaluate_chunks(zeta),
                zeta_omega: joint_table.evaluate_chunks(zeta_omega),
            });
            lookup_context.lookup_sorted_eval = array::from_fn(|i| {
                if i < sorted.len() {
                    let sorted = &sorted[i];
                    Some(PointEvaluations {
                        zeta: sorted.evaluate_chunks(zeta),
                        zeta_omega: sorted.evaluate_chunks(zeta_omega),
                    })
                } else {
                    None
                }
            });
            lookup_context.runtime_lookup_table_eval =
                lookup_context.runtime_table.as_ref().map(|runtime_table| {
                    let runtime_table =
                        runtime_table.to_chunked_polynomial(num_chunks, index.max_poly_size);
                    PointEvaluations {
                        zeta: runtime_table.evaluate_chunks(zeta),
                        zeta_omega: runtime_table.evaluate_chunks(zeta_omega),
                    }
                });
        }

        //~ 1. Chunk evaluate the following polynomials at both $\zeta$ and $\zeta \omega$:
        //~~ * $s_i$
        //~~ * $w_i$
        //~~ * $z$
        //~~ * lookup (TODO, see [this issue](https://github.com/MinaProtocol/mina/issues/13886))
        //~~ * generic selector
        //~~ * poseidon selector
        //~
        //~    By "chunk evaluate" we mean that the evaluation of each polynomial can potentially be a vector of values.
        //~    This is because the index's `max_poly_size` parameter dictates the maximum size of a polynomial in the protocol.
        //~    If a polynomial $f$ exceeds this size, it must be split into several polynomials like so:
        //~    $$f(x) = f_0(x) + x^n f_1(x) + x^{2n} f_2(x) + \cdots$$
        //~
        //~    And the evaluation of such a polynomial is the following list for $x \in {\zeta, \zeta\omega}$:
        //~
        //~    $$(f_0(x), f_1(x), f_2(x), \ldots)$$
        //~
        //~    TODO: do we want to specify more on that? It seems unnecessary except for the t polynomial (or if for some reason someone sets that to a low value)
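        // For example, with `max_poly_size = n` a polynomial of degree < 2n is split into
        // two chunks, f(x) = f_0(x) + x^n f_1(x); its chunked evaluation at zeta is the
        // pair (f_0(zeta), f_1(zeta)), and the single value f(zeta) is recovered as
        // f_0(zeta) + zeta^n f_1(zeta) (see the call to `combine` below).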

        internal_tracing::checkpoint!(internal_traces; lagrange_basis_eval_zeta_poly);
        let zeta_evals =
            LagrangeBasisEvaluations::new(index.max_poly_size, index.cs.domain.d1, zeta);
        internal_tracing::checkpoint!(internal_traces; lagrange_basis_eval_zeta_omega_poly);
        let zeta_omega_evals =
            LagrangeBasisEvaluations::new(index.max_poly_size, index.cs.domain.d1, zeta_omega);

        let chunked_evals_for_selector =
            |p: &Evaluations<G::ScalarField, D<G::ScalarField>>| PointEvaluations {
                zeta: zeta_evals.evaluate_boolean(p),
                zeta_omega: zeta_omega_evals.evaluate_boolean(p),
            };

        let chunked_evals_for_evaluations =
            |p: &Evaluations<G::ScalarField, D<G::ScalarField>>| PointEvaluations {
                zeta: zeta_evals.evaluate(p),
                zeta_omega: zeta_omega_evals.evaluate(p),
            };

        internal_tracing::checkpoint!(internal_traces; chunk_eval_zeta_omega_poly);
        let chunked_evals = ProofEvaluations::<PointEvaluations<Vec<G::ScalarField>>> {
            public: {
                let chunked = public_poly.to_chunked_polynomial(num_chunks, index.max_poly_size);
                Some(PointEvaluations {
                    zeta: chunked.evaluate_chunks(zeta),
                    zeta_omega: chunked.evaluate_chunks(zeta_omega),
                })
            },
            s: array::from_fn(|i| {
                chunked_evals_for_evaluations(&column_evaluations.permutation_coefficients8[i])
            }),
            coefficients: array::from_fn(|i| {
                chunked_evals_for_evaluations(&column_evaluations.coefficients8[i])
            }),
            w: array::from_fn(|i| {
                let chunked =
                    witness_poly[i].to_chunked_polynomial(num_chunks, index.max_poly_size);
                PointEvaluations {
                    zeta: chunked.evaluate_chunks(zeta),
                    zeta_omega: chunked.evaluate_chunks(zeta_omega),
                }
            }),

            z: {
                let chunked = z_poly.to_chunked_polynomial(num_chunks, index.max_poly_size);
                PointEvaluations {
                    zeta: chunked.evaluate_chunks(zeta),
                    zeta_omega: chunked.evaluate_chunks(zeta_omega),
                }
            },

            lookup_aggregation: lookup_context.lookup_aggregation_eval.take(),
            lookup_table: lookup_context.lookup_table_eval.take(),
            lookup_sorted: array::from_fn(|i| lookup_context.lookup_sorted_eval[i].take()),
            runtime_lookup_table: lookup_context.runtime_lookup_table_eval.take(),
            generic_selector: chunked_evals_for_selector(&column_evaluations.generic_selector4),
            poseidon_selector: chunked_evals_for_selector(&column_evaluations.poseidon_selector8),
            complete_add_selector: chunked_evals_for_selector(
                &column_evaluations.complete_add_selector4,
            ),
            mul_selector: chunked_evals_for_selector(&column_evaluations.mul_selector8),
            emul_selector: chunked_evals_for_selector(&column_evaluations.emul_selector8),
            endomul_scalar_selector: chunked_evals_for_selector(
                &column_evaluations.endomul_scalar_selector8,
            ),

            range_check0_selector: column_evaluations
                .range_check0_selector8
                .as_ref()
                .map(chunked_evals_for_selector),
            range_check1_selector: column_evaluations
                .range_check1_selector8
                .as_ref()
                .map(chunked_evals_for_selector),
            foreign_field_add_selector: column_evaluations
                .foreign_field_add_selector8
                .as_ref()
                .map(chunked_evals_for_selector),
            foreign_field_mul_selector: column_evaluations
                .foreign_field_mul_selector8
                .as_ref()
                .map(chunked_evals_for_selector),
            xor_selector: column_evaluations
                .xor_selector8
                .as_ref()
                .map(chunked_evals_for_selector),
            rot_selector: column_evaluations
                .rot_selector8
                .as_ref()
                .map(chunked_evals_for_selector),

            runtime_lookup_table_selector: lookup_constraint_system.as_ref().and_then(|lcs| {
                lcs.runtime_selector
                    .as_ref()
                    .map(chunked_evals_for_selector)
            }),
            xor_lookup_selector: lookup_constraint_system.as_ref().and_then(|lcs| {
                lcs.lookup_selectors
                    .xor
                    .as_ref()
                    .map(chunked_evals_for_selector)
            }),
            lookup_gate_lookup_selector: lookup_constraint_system.as_ref().and_then(|lcs| {
                lcs.lookup_selectors
                    .lookup
                    .as_ref()
                    .map(chunked_evals_for_selector)
            }),
            range_check_lookup_selector: lookup_constraint_system.as_ref().and_then(|lcs| {
                lcs.lookup_selectors
                    .range_check
                    .as_ref()
                    .map(chunked_evals_for_selector)
            }),
            foreign_field_mul_lookup_selector: lookup_constraint_system.as_ref().and_then(|lcs| {
                lcs.lookup_selectors
                    .ffmul
                    .as_ref()
                    .map(chunked_evals_for_selector)
            }),
        };

        let zeta_to_srs_len = zeta.pow([index.max_poly_size as u64]);
        let zeta_omega_to_srs_len = zeta_omega.pow([index.max_poly_size as u64]);
        let zeta_to_domain_size = zeta.pow([d1_size as u64]);
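        // zeta^max_poly_size is used to recombine chunked evaluations into single values,
        // while zeta^|d1| appears in Z_H(zeta) = zeta^|d1| - 1, the vanishing polynomial
        // evaluated at zeta, used for the ft polynomial below.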

        //~ 1. Evaluate the same polynomials without chunking them
        //~    (so that each polynomial should correspond to a single value this time).
        let evals: ProofEvaluations<PointEvaluations<G::ScalarField>> = {
            let powers_of_eval_points_for_chunks = PointEvaluations {
                zeta: zeta_to_srs_len,
                zeta_omega: zeta_omega_to_srs_len,
            };
            chunked_evals.combine(&powers_of_eval_points_for_chunks)
        };

        //~ 1. Compute the ft polynomial.
        //~    This is to implement [Maller's optimization](https://o1-labs.github.io/proof-systems/kimchi/maller_15.html).
        internal_tracing::checkpoint!(internal_traces; compute_ft_poly);
        let ft: DensePolynomial<G::ScalarField> = {
            let f_chunked = {
                // TODO: compute the linearization polynomial in evaluation form so
                // that we can drop the coefficient forms of the index polynomials from
                // the constraint system struct

                // permutation (not part of linearization yet)
                let alphas =
                    all_alphas.get_alphas(ArgumentType::Permutation, permutation::CONSTRAINTS);
                let f = index.perm_lnrz(&evals, zeta, beta, gamma, alphas);

                // the circuit polynomial
                let f = {
                    let (_lin_constant, mut lin) =
                        index.linearization.to_polynomial(&env, zeta, &evals);
                    lin += &f;
                    lin.interpolate()
                };

                drop(env);

                // see https://o1-labs.github.io/proof-systems/kimchi/maller_15.html#the-prover-side
                f.to_chunked_polynomial(num_chunks, index.max_poly_size)
                    .linearize(zeta_to_srs_len)
            };

            let t_chunked = quotient_poly
                .to_chunked_polynomial(7 * num_chunks, index.max_poly_size)
                .linearize(zeta_to_srs_len);

            &f_chunked - &t_chunked.scale(zeta_to_domain_size - G::ScalarField::one())
        };
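        // In other words, ft = f_chunked - Z_H(zeta) * t_chunked: opening this single
        // polynomial replaces separate openings of the linearized circuit polynomial f and
        // the quotient t (Maller's optimization, see the linked documentation).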

        //~ 1. construct the blinding part of the ft polynomial commitment
        //~    [see this section](https://o1-labs.github.io/proof-systems/kimchi/maller_15.html#evaluation-proof-and-blinding-factors)
        let blinding_ft = {
            let blinding_t = t_comm.blinders.chunk_blinding(zeta_to_srs_len);
            let blinding_f = G::ScalarField::zero();

            PolyComm {
                // blinding_f - Z_H(zeta) * blinding_t
                chunks: vec![
                    blinding_f - (zeta_to_domain_size - G::ScalarField::one()) * blinding_t,
                ],
            }
        };

        //~ 1. Evaluate the ft polynomial at $\zeta\omega$ only.
        internal_tracing::checkpoint!(internal_traces; ft_eval_zeta_omega);
        let ft_eval1 = ft.evaluate(&zeta_omega);

        //~ 1. Set up the Fr-Sponge.
        let fq_sponge_before_evaluations = fq_sponge.clone();
        let mut fr_sponge = EFrSponge::new(G::sponge_params());

        //~ 1. Squeeze the Fq-sponge and absorb the result with the Fr-Sponge.
        fr_sponge.absorb(&fq_sponge.digest());

        //~ 1. Absorb the previous recursion challenges.
        let prev_challenge_digest = {
            // Note: we absorb in a new sponge here to limit the scope in which we need the
            // more-expensive 'optional sponge'.
            let mut fr_sponge = EFrSponge::new(G::sponge_params());
            for RecursionChallenge { chals, .. } in &prev_challenges {
                fr_sponge.absorb_multiple(chals);
            }
            fr_sponge.digest()
        };
        fr_sponge.absorb(&prev_challenge_digest);

        //~ 1. Compute evaluations for the previous recursion challenges.
        internal_tracing::checkpoint!(internal_traces; build_polynomials);
        let polys = prev_challenges
            .iter()
            .map(|RecursionChallenge { chals, comm }| {
                (
                    DensePolynomial::from_coefficients_vec(b_poly_coefficients(chals)),
                    comm.len(),
                )
            })
            .collect::<Vec<_>>();

        //~ 1. Absorb the unique evaluation of ft: $ft(\zeta\omega)$.
        fr_sponge.absorb(&ft_eval1);

        //~ 1. Absorb all the polynomial evaluations in $\zeta$ and $\zeta\omega$:
        //~~ * the public polynomial
        //~~ * z
        //~~ * generic selector
        //~~ * poseidon selector
        //~~ * the 15 register/witness columns
        //~~ * the 6 sigma evaluations (the last one is not evaluated)
        fr_sponge.absorb_multiple(&chunked_evals.public.as_ref().unwrap().zeta);
        fr_sponge.absorb_multiple(&chunked_evals.public.as_ref().unwrap().zeta_omega);
        fr_sponge.absorb_evaluations(&chunked_evals);

        //~ 1. Sample $v'$ with the Fr-Sponge
        let v_chal = fr_sponge.challenge();

        //~ 1. Derive $v$ from $v'$ using the endomorphism (TODO: specify)
        let v = v_chal.to_field(endo_r);

        //~ 1. Sample $u'$ with the Fr-Sponge
        let u_chal = fr_sponge.challenge();

        //~ 1. Derive $u$ from $u'$ using the endomorphism (TODO: specify)
        let u = u_chal.to_field(endo_r);

        //~ 1. Create a list of all polynomials that will require evaluations
        //~    (and evaluation proofs) in the protocol.
        //~    First, include the previous challenges, in case we are in a recursive prover.
        let non_hiding = |n_chunks: usize| PolyComm {
            chunks: vec![G::ScalarField::zero(); n_chunks],
        };

        let fixed_hiding = |n_chunks: usize| PolyComm {
            chunks: vec![G::ScalarField::one(); n_chunks],
        };
1232
1233        let coefficients_form = DensePolynomialOrEvaluations::DensePolynomial;
1234        let evaluations_form = |e| DensePolynomialOrEvaluations::Evaluations(e, index.cs.domain.d1);
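            // The helpers above: `non_hiding(n)` and `fixed_hiding(n)` build per-chunk
            // blinders of 0 and 1 respectively (matching how the corresponding commitments
            // were, or were not, blinded), while `coefficients_form` and `evaluations_form`
            // record which representation each polynomial is stored in so that the opening
            // can consume it directly.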
1235
1236        let mut polynomials = polys
1237            .iter()
1238            .map(|(p, n_chunks)| (coefficients_form(p), non_hiding(*n_chunks)))
1239            .collect::<Vec<_>>();
1240
1241        //~ 1. Then, include:
1242        //~~ * the negated public polynomial
1243        //~~ * the ft polynomial
1244        //~~ * the permutation aggregation polynomial z
1245        //~~ * the generic selector
1246        //~~ * the poseidon selector
1247        //~~ * the 15 register/witness columns
1248        //~~ * the first 6 of the 7 sigma polynomials
1249        polynomials.push((coefficients_form(&public_poly), fixed_hiding(num_chunks)));
1250        polynomials.push((coefficients_form(&ft), blinding_ft));
1251        polynomials.push((coefficients_form(&z_poly), z_comm.blinders));
1252        polynomials.push((
1253            evaluations_form(&column_evaluations.generic_selector4),
1254            fixed_hiding(num_chunks),
1255        ));
1256        polynomials.push((
1257            evaluations_form(&column_evaluations.poseidon_selector8),
1258            fixed_hiding(num_chunks),
1259        ));
1260        polynomials.push((
1261            evaluations_form(&column_evaluations.complete_add_selector4),
1262            fixed_hiding(num_chunks),
1263        ));
1264        polynomials.push((
1265            evaluations_form(&column_evaluations.mul_selector8),
1266            fixed_hiding(num_chunks),
1267        ));
1268        polynomials.push((
1269            evaluations_form(&column_evaluations.emul_selector8),
1270            fixed_hiding(num_chunks),
1271        ));
1272        polynomials.push((
1273            evaluations_form(&column_evaluations.endomul_scalar_selector8),
1274            fixed_hiding(num_chunks),
1275        ));
1276        polynomials.extend(
1277            witness_poly
1278                .iter()
1279                .zip(w_comm.iter())
1280                .map(|(w, c)| (coefficients_form(w), c.blinders.clone()))
1281                .collect::<Vec<_>>(),
1282        );
1283        polynomials.extend(
1284            column_evaluations
1285                .coefficients8
1286                .iter()
1287                .map(|coefficient| (evaluations_form(coefficient), non_hiding(num_chunks)))
1288                .collect::<Vec<_>>(),
1289        );
1290        polynomials.extend(
1291            column_evaluations.permutation_coefficients8[0..PERMUTS - 1]
1292                .iter()
1293                .map(|w| (evaluations_form(w), non_hiding(num_chunks)))
1294                .collect::<Vec<_>>(),
1295        );
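            // Note that only the first PERMUTS - 1 = 6 permutation (sigma) polynomials are
            // opened; the last one is not part of the evaluation proof.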
1296
1297        //~~ * the optional gate selectors
1298        if let Some(range_check0_selector8) = &column_evaluations.range_check0_selector8 {
1299            polynomials.push((
1300                evaluations_form(range_check0_selector8),
1301                non_hiding(num_chunks),
1302            ));
1303        }
1304        if let Some(range_check1_selector8) = &column_evaluations.range_check1_selector8 {
1305            polynomials.push((
1306                evaluations_form(range_check1_selector8),
1307                non_hiding(num_chunks),
1308            ));
1309        }
1310        if let Some(foreign_field_add_selector8) = &column_evaluations.foreign_field_add_selector8 {
1311            polynomials.push((
1312                evaluations_form(foreign_field_add_selector8),
1313                non_hiding(num_chunks),
1314            ));
1315        }
1316        if let Some(foreign_field_mul_selector8) = &column_evaluations.foreign_field_mul_selector8 {
1317            polynomials.push((
1318                evaluations_form(foreign_field_mul_selector8),
1319                non_hiding(num_chunks),
1320            ));
1321        }
1322        if let Some(xor_selector8) = &column_evaluations.xor_selector8 {
1323            polynomials.push((evaluations_form(xor_selector8), non_hiding(num_chunks)));
1324        }
1325        if let Some(rot_selector8) = &column_evaluations.rot_selector8 {
1326            polynomials.push((evaluations_form(rot_selector8), non_hiding(num_chunks)));
1327        }
1328
1329        //~~ * optionally, the runtime table (see the lookup case below)
1330        //~ 1. if using lookup:
1331        if let Some(lcs) = lookup_constraint_system {
1332            //~~ * add the lookup sorted polynomials
1333            let sorted_poly = lookup_context.sorted_coeffs.as_ref().unwrap();
1334            let sorted_comms = lookup_context.sorted_comms.as_ref().unwrap();
1335
1336            for (poly, comm) in sorted_poly.iter().zip(sorted_comms) {
1337                polynomials.push((coefficients_form(poly), comm.blinders.clone()));
1338            }
1339
1340            //~~ * add the lookup aggregation polynomial
1341            let aggreg_poly = lookup_context.aggreg_coeffs.as_ref().unwrap();
1342            let aggreg_comm = lookup_context.aggreg_comm.as_ref().unwrap();
1343            polynomials.push((coefficients_form(aggreg_poly), aggreg_comm.blinders.clone()));
1344
1345            //~~ * add the combined table polynomial
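                // The combined table is a linear combination of its columns (powers of the
                // joint combiner, plus the table-id combiner for the table-id column), so its
                // blinder is the matching combination of the per-column blinders: a unit
                // blinder for each fixed-table column and for the table-id column, plus the
                // runtime table's own blinders scaled by the joint combiner when present.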
1346            let table_blinding = {
1347                let joint_combiner = lookup_context.joint_combiner.as_ref().unwrap();
1348                let table_id_combiner = lookup_context.table_id_combiner.as_ref().unwrap();
1349                let max_fixed_lookup_table_size = {
1350                    // CAUTION: This is not `lcs.configuration.lookup_info.max_joint_size`, because
1351                    // the fixed lookup table may be strictly narrower and will then not contribute
1352                    // the associated blinders.
1353                    // For example, when using a runtime table with the lookup gate (width 2) but
1354                    // only width-1 fixed tables (e.g. range check), it would be incorrect to use
1355                    // the wider width (2), since no commitments contribute blinders at that width.
1356                    // Note that lookup_table8 is a list of polynomials, one per fixed-table column.
1357                    lcs.lookup_table8.len()
1358                };
1359                let base_blinding = {
1360                    let fixed_table_blinding = if max_fixed_lookup_table_size == 0 {
1361                        G::ScalarField::zero()
1362                    } else {
1363                        (1..max_fixed_lookup_table_size).fold(G::ScalarField::one(), |acc, _| {
1364                            G::ScalarField::one() + *joint_combiner * acc
1365                        })
1366                    };
1367                    fixed_table_blinding + *table_id_combiner
1368                };
1369                if lcs.runtime_selector.is_some() {
1370                    let runtime_comm = lookup_context.runtime_table_comm.as_ref().unwrap();
1371
1372                    let chunks = runtime_comm
1373                        .blinders
1374                        .into_iter()
1375                        .map(|blinding| *joint_combiner * *blinding + base_blinding)
1376                        .collect();
1377
1378                    PolyComm::new(chunks)
1379                } else {
1380                    let chunks = vec![base_blinding; num_chunks];
1381                    PolyComm::new(chunks)
1382                }
1383            };
1384
1385            let joint_lookup_table = lookup_context.joint_lookup_table.as_ref().unwrap();
1386
1387            polynomials.push((coefficients_form(joint_lookup_table), table_blinding));
1388
1389            //~~ * if present, add the runtime table polynomial
1390            if lcs.runtime_selector.is_some() {
1391                let runtime_table_comm = lookup_context.runtime_table_comm.as_ref().unwrap();
1392                let runtime_table = lookup_context.runtime_table.as_ref().unwrap();
1393
1394                polynomials.push((
1395                    coefficients_form(runtime_table),
1396                    runtime_table_comm.blinders.clone(),
1397                ));
1398            }
1399
1400            //~~ * the lookup selectors
1401
1402            if let Some(runtime_lookup_table_selector) = &lcs.runtime_selector {
1403                polynomials.push((
1404                    evaluations_form(runtime_lookup_table_selector),
1405                    non_hiding(1),
1406                ))
1407            }
1408            if let Some(xor_lookup_selector) = &lcs.lookup_selectors.xor {
1409                polynomials.push((evaluations_form(xor_lookup_selector), non_hiding(1)))
1410            }
1411            if let Some(lookup_gate_selector) = &lcs.lookup_selectors.lookup {
1412                polynomials.push((evaluations_form(lookup_gate_selector), non_hiding(1)))
1413            }
1414            if let Some(range_check_lookup_selector) = &lcs.lookup_selectors.range_check {
1415                polynomials.push((evaluations_form(range_check_lookup_selector), non_hiding(1)))
1416            }
1417            if let Some(foreign_field_mul_lookup_selector) = &lcs.lookup_selectors.ffmul {
1418                polynomials.push((
1419                    evaluations_form(foreign_field_mul_lookup_selector),
1420                    non_hiding(1),
1421                ))
1422            }
1423        }
1424
1425        //~ 1. Create an aggregated evaluation proof for all of these polynomials at $\zeta$ and $\zeta\omega$ using $u$ and $v$.
1426        internal_tracing::checkpoint!(internal_traces; create_aggregated_ipa);
1427        let proof = OpenProof::open(
1428            &*index.srs,
1429            group_map,
1430            &polynomials,
1431            &[zeta, zeta_omega],
1432            v,
1433            u,
1434            fq_sponge_before_evaluations,
1435            rng,
1436        );
1437
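            // Lookup commitments are only attached when the lookup argument was actually used:
            // if either the aggregation or the sorted commitments are absent, `zip` yields
            // `None` and the proof carries no lookup section.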
1438        let lookup = lookup_context
1439            .aggreg_comm
1440            .zip(lookup_context.sorted_comms)
1441            .map(|(a, s)| LookupCommitments {
1442                aggreg: a.commitment,
1443                sorted: s.iter().map(|c| c.commitment.clone()).collect(),
1444                runtime: lookup_context.runtime_table_comm.map(|x| x.commitment),
1445            });
1446
1447        let proof = Self {
1448            commitments: ProverCommitments {
1449                w_comm: array::from_fn(|i| w_comm[i].commitment.clone()),
1450                z_comm: z_comm.commitment,
1451                t_comm: t_comm.commitment,
1452                lookup,
1453            },
1454            proof,
1455            evals: chunked_evals,
1456            ft_eval1,
1457            prev_challenges,
1458        };
1459
1460        internal_tracing::checkpoint!(internal_traces; create_recursive_done);
1461
1462        Ok(proof)
1463    }
1464}
1465
1466internal_tracing::decl_traces!(internal_traces;
1467    pasta_fp_plonk_proof_create,
1468    pasta_fq_plonk_proof_create,
1469    create_recursive,
1470    pad_witness,
1471    set_up_fq_sponge,
1472    commit_to_witness_columns,
1473    use_lookup,
1474    z_permutation_aggregation_polynomial,
1475    eval_witness_polynomials_over_domains,
1476    compute_index_evals,
1477    compute_quotient_poly,
1478    lagrange_basis_eval_zeta_poly,
1479    lagrange_basis_eval_zeta_omega_poly,
1480    chunk_eval_zeta_omega_poly,
1481    compute_ft_poly,
1482    ft_eval_zeta_omega,
1483    build_polynomials,
1484    create_aggregated_ipa,
1485    create_recursive_done);
1486
1487#[cfg(feature = "ocaml_types")]
1488pub mod caml {
1489    use super::*;
1490    use crate::proof::caml::{CamlProofEvaluations, CamlRecursionChallenge};
1491    use ark_ec::AffineRepr;
1492    use poly_commitment::{
1493        commitment::caml::CamlPolyComm,
1494        ipa::{caml::CamlOpeningProof, OpeningProof},
1495    };
1496
1497    #[cfg(feature = "internal_tracing")]
1498    pub use internal_traces::caml::CamlTraces as CamlProverTraces;
1499
1500    #[derive(ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)]
1501    pub struct CamlProofWithPublic<CamlG, CamlF> {
1502        pub public_evals: Option<PointEvaluations<Vec<CamlF>>>,
1503        pub proof: CamlProverProof<CamlG, CamlF>,
1504    }
1505
1506    //
1507    // CamlProverProof<CamlG, CamlF>
1508    //
1509
1510    #[derive(ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)]
1511    pub struct CamlProverProof<CamlG, CamlF> {
1512        pub commitments: CamlProverCommitments<CamlG>,
1513        pub proof: CamlOpeningProof<CamlG, CamlF>,
1514        // OCaml doesn't have sized arrays, so we have to convert to a tuple.
1515        pub evals: CamlProofEvaluations<CamlF>,
1516        pub ft_eval1: CamlF,
1517        pub public: Vec<CamlF>,
1518        //Vec<(Vec<CamlF>, CamlPolyComm<CamlG>)>,
1519        pub prev_challenges: Vec<CamlRecursionChallenge<CamlG, CamlF>>,
1520    }
1521
1522    //
1523    // CamlProverCommitments<CamlG>
1524    //
1525
1526    #[derive(Clone, ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)]
1527    pub struct CamlLookupCommitments<CamlG> {
1528        pub sorted: Vec<CamlPolyComm<CamlG>>,
1529        pub aggreg: CamlPolyComm<CamlG>,
1530        pub runtime: Option<CamlPolyComm<CamlG>>,
1531    }
1532
1533    #[allow(clippy::type_complexity)]
1534    #[derive(Clone, ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)]
1535    pub struct CamlProverCommitments<CamlG> {
1536        // polynomial commitments
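            // The COLUMNS = 15 witness-column commitments, flattened into a tuple for OCaml
            // (see the note on sized arrays above).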
1537        pub w_comm: (
1538            CamlPolyComm<CamlG>,
1539            CamlPolyComm<CamlG>,
1540            CamlPolyComm<CamlG>,
1541            CamlPolyComm<CamlG>,
1542            CamlPolyComm<CamlG>,
1543            CamlPolyComm<CamlG>,
1544            CamlPolyComm<CamlG>,
1545            CamlPolyComm<CamlG>,
1546            CamlPolyComm<CamlG>,
1547            CamlPolyComm<CamlG>,
1548            CamlPolyComm<CamlG>,
1549            CamlPolyComm<CamlG>,
1550            CamlPolyComm<CamlG>,
1551            CamlPolyComm<CamlG>,
1552            CamlPolyComm<CamlG>,
1553        ),
1554        pub z_comm: CamlPolyComm<CamlG>,
1555        pub t_comm: CamlPolyComm<CamlG>,
1556        pub lookup: Option<CamlLookupCommitments<CamlG>>,
1557    }
1558
1559    // These implementations are handy for conversions such as:
1560    // InternalType <-> ocaml::Value
1561    //
1562    // They do this by hiding the required intermediate conversion step:
1563    // InternalType <-> CamlInternalType <-> ocaml::Value
1564    //
1565    // Note that some conversions cannot be shortened this way,
1566    // because we don't always know how to convert the inner types.
1567    // For example, to implement the conversion
1568    // ProverCommitments<G> -> CamlProverCommitments<CamlG>
1569    // we would need to know how to convert G to CamlG.
1570    // We don't have that information unless we implement some trait (e.g. ToCaml).
1571    // We could do that, but instead we implemented the From trait for the reverse
1572    // operation (From<G> for CamlG).
1573    // This reduces the complexity, but forces us to do the conversion in two
1574    // phases instead of one.
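        // For instance, given the `From` impls below, converting prover commitments for OCaml
        // is a two-phase conversion (illustrative sketch, not a snippet from this crate):
        //
        //     let caml: CamlProverCommitments<CamlG> = prover_commitments.into();
        //     // ...then the derived ocaml::IntoValue impl turns `caml` into an ocaml::Value.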
1575
1576    //
1577    // CamlLookupCommitments<CamlG> <-> LookupCommitments<G>
1578    //
1579
1580    impl<G, CamlG> From<LookupCommitments<G>> for CamlLookupCommitments<CamlG>
1581    where
1582        G: AffineRepr,
1583        CamlPolyComm<CamlG>: From<PolyComm<G>>,
1584    {
1585        fn from(
1586            LookupCommitments {
1587                aggreg,
1588                sorted,
1589                runtime,
1590            }: LookupCommitments<G>,
1591        ) -> Self {
1592            Self {
1593                aggreg: aggreg.into(),
1594                sorted: sorted.into_iter().map(Into::into).collect(),
1595                runtime: runtime.map(Into::into),
1596            }
1597        }
1598    }
1599
1600    impl<G, CamlG> From<CamlLookupCommitments<CamlG>> for LookupCommitments<G>
1601    where
1602        G: AffineRepr,
1603        PolyComm<G>: From<CamlPolyComm<CamlG>>,
1604    {
1605        fn from(
1606            CamlLookupCommitments {
1607                aggreg,
1608                sorted,
1609                runtime,
1610            }: CamlLookupCommitments<CamlG>,
1611        ) -> LookupCommitments<G> {
1612            LookupCommitments {
1613                aggreg: aggreg.into(),
1614                sorted: sorted.into_iter().map(Into::into).collect(),
1615                runtime: runtime.map(Into::into),
1616            }
1617        }
1618    }
1619
1620    //
1621    // CamlProverCommitments<CamlG> <-> ProverCommitments<G>
1622    //
1623
1624    impl<G, CamlG> From<ProverCommitments<G>> for CamlProverCommitments<CamlG>
1625    where
1626        G: AffineRepr,
1627        CamlPolyComm<CamlG>: From<PolyComm<G>>,
1628    {
1629        fn from(prover_comm: ProverCommitments<G>) -> Self {
1630            let [w_comm0, w_comm1, w_comm2, w_comm3, w_comm4, w_comm5, w_comm6, w_comm7, w_comm8, w_comm9, w_comm10, w_comm11, w_comm12, w_comm13, w_comm14] =
1631                prover_comm.w_comm;
1632            Self {
1633                w_comm: (
1634                    w_comm0.into(),
1635                    w_comm1.into(),
1636                    w_comm2.into(),
1637                    w_comm3.into(),
1638                    w_comm4.into(),
1639                    w_comm5.into(),
1640                    w_comm6.into(),
1641                    w_comm7.into(),
1642                    w_comm8.into(),
1643                    w_comm9.into(),
1644                    w_comm10.into(),
1645                    w_comm11.into(),
1646                    w_comm12.into(),
1647                    w_comm13.into(),
1648                    w_comm14.into(),
1649                ),
1650                z_comm: prover_comm.z_comm.into(),
1651                t_comm: prover_comm.t_comm.into(),
1652                lookup: prover_comm.lookup.map(Into::into),
1653            }
1654        }
1655    }
1656
1657    impl<G, CamlG> From<CamlProverCommitments<CamlG>> for ProverCommitments<G>
1658    where
1659        G: AffineRepr,
1660        PolyComm<G>: From<CamlPolyComm<CamlG>>,
1661    {
1662        fn from(caml_prover_comm: CamlProverCommitments<CamlG>) -> ProverCommitments<G> {
1663            let (
1664                w_comm0,
1665                w_comm1,
1666                w_comm2,
1667                w_comm3,
1668                w_comm4,
1669                w_comm5,
1670                w_comm6,
1671                w_comm7,
1672                w_comm8,
1673                w_comm9,
1674                w_comm10,
1675                w_comm11,
1676                w_comm12,
1677                w_comm13,
1678                w_comm14,
1679            ) = caml_prover_comm.w_comm;
1680            ProverCommitments {
1681                w_comm: [
1682                    w_comm0.into(),
1683                    w_comm1.into(),
1684                    w_comm2.into(),
1685                    w_comm3.into(),
1686                    w_comm4.into(),
1687                    w_comm5.into(),
1688                    w_comm6.into(),
1689                    w_comm7.into(),
1690                    w_comm8.into(),
1691                    w_comm9.into(),
1692                    w_comm10.into(),
1693                    w_comm11.into(),
1694                    w_comm12.into(),
1695                    w_comm13.into(),
1696                    w_comm14.into(),
1697                ],
1698                z_comm: caml_prover_comm.z_comm.into(),
1699                t_comm: caml_prover_comm.t_comm.into(),
1700                lookup: caml_prover_comm.lookup.map(Into::into),
1701            }
1702        }
1703    }
1704
1705    //
1706    // ProverProof<G> <-> CamlProofWithPublic<CamlG, CamlF>
1707    //
1708
1709    impl<G, CamlG, CamlF> From<(ProverProof<G, OpeningProof<G>>, Vec<G::ScalarField>)>
1710        for CamlProofWithPublic<CamlG, CamlF>
1711    where
1712        G: AffineRepr,
1713        CamlG: From<G>,
1714        CamlF: From<G::ScalarField>,
1715    {
1716        fn from(pp: (ProverProof<G, OpeningProof<G>>, Vec<G::ScalarField>)) -> Self {
1717            let (public_evals, evals) = pp.0.evals.into();
1718            CamlProofWithPublic {
1719                public_evals,
1720                proof: CamlProverProof {
1721                    commitments: pp.0.commitments.into(),
1722                    proof: pp.0.proof.into(),
1723                    evals,
1724                    ft_eval1: pp.0.ft_eval1.into(),
1725                    public: pp.1.into_iter().map(Into::into).collect(),
1726                    prev_challenges: pp.0.prev_challenges.into_iter().map(Into::into).collect(),
1727                },
1728            }
1729        }
1730    }
1731
1732    impl<G, CamlG, CamlF> From<CamlProofWithPublic<CamlG, CamlF>>
1733        for (ProverProof<G, OpeningProof<G>>, Vec<G::ScalarField>)
1734    where
1735        CamlF: Clone,
1736        G: AffineRepr + From<CamlG>,
1737        G::ScalarField: From<CamlF>,
1738    {
1739        fn from(
1740            caml_pp: CamlProofWithPublic<CamlG, CamlF>,
1741        ) -> (ProverProof<G, OpeningProof<G>>, Vec<G::ScalarField>) {
1742            let CamlProofWithPublic {
1743                public_evals,
1744                proof: caml_pp,
1745            } = caml_pp;
1746            let proof = ProverProof {
1747                commitments: caml_pp.commitments.into(),
1748                proof: caml_pp.proof.into(),
1749                evals: (public_evals, caml_pp.evals).into(),
1750                ft_eval1: caml_pp.ft_eval1.into(),
1751                prev_challenges: caml_pp
1752                    .prev_challenges
1753                    .into_iter()
1754                    .map(Into::into)
1755                    .collect(),
1756            };
1757
1758            (proof, caml_pp.public.into_iter().map(Into::into).collect())
1759        }
1760    }
1761}