// ivc/plonkish_lang.rs

//! Provides definition of plonkish language related instance,
//! witness, and tools to work with them. The IVC is specialized for
//! exactly the plonkish language.
4use ark_ff::{FftField, Field, One};
5use ark_poly::{Evaluations, Radix2EvaluationDomain as R2D};
6use core::ops::Index;
7use folding::{instance_witness::Foldable, Alphas, Instance, Witness};
8use itertools::Itertools;
9use kimchi::{self, circuits::berkeley_columns::BerkeleyChallengeTerm};
10use kimchi_msm::{columns::Column, witness::Witness as GenericWitness};
11use mina_poseidon::FqSponge;
12use poly_commitment::{
13    commitment::{absorb_commitment, CommitmentCurve},
14    PolyComm, SRS,
15};
16use rayon::iter::{IntoParallelIterator as _, ParallelIterator as _};
17use strum_macros::{EnumCount as EnumCountMacro, EnumIter};
18
/// Vector field over F. Something like a vector.
///
/// Abstracts over containers of field elements (implemented below for
/// `Vec<F>` and `Evaluations<F, R2D<F>>`) so folding code can read and
/// mutate columns uniformly through slices.
pub trait CombinableEvals<F: Field>: PartialEq {
    /// Borrow the underlying evaluations as an immutable slice.
    fn e_as_slice(&self) -> &[F];
    /// Borrow the underlying evaluations as a mutable slice.
    fn e_as_mut_slice(&mut self) -> &mut [F];
}
24
25impl<F: FftField> CombinableEvals<F> for Evaluations<F, R2D<F>> {
26    fn e_as_slice(&self) -> &[F] {
27        self.evals.as_slice()
28    }
29    fn e_as_mut_slice(&mut self) -> &mut [F] {
30        self.evals.as_mut_slice()
31    }
32}
33
34impl<F: FftField> CombinableEvals<F> for Vec<F> {
35    fn e_as_slice(&self) -> &[F] {
36        self.as_slice()
37    }
38    fn e_as_mut_slice(&mut self) -> &mut [F] {
39        self.as_mut_slice()
40    }
41}
42
/// Witness of a plonkish relation, generic over the column storage `Evals`.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct PlonkishWitnessGeneric<const N_COL: usize, const N_FSEL: usize, F: Field, Evals> {
    /// The `N_COL` witness columns.
    pub witness: GenericWitness<N_COL, Evals>,
    // This does not have to be part of the witness... can be a static
    // precompiled object.
    /// The `N_FSEL` fixed selector columns. Not folded: `Foldable::combine`
    /// asserts both sides carry equal selectors.
    pub fixed_selectors: GenericWitness<N_FSEL, Evals>,
    /// Ties the otherwise-unused scalar field parameter `F` to the type.
    pub phantom: core::marker::PhantomData<F>,
}

/// [`PlonkishWitnessGeneric`] with columns stored as polynomial
/// evaluations over a radix-2 domain.
pub type PlonkishWitness<const N_COL: usize, const N_FSEL: usize, F> =
    PlonkishWitnessGeneric<N_COL, N_FSEL, F, Evaluations<F, R2D<F>>>;
54
55impl<const N_COL: usize, const N_FSEL: usize, F: Field, Evals: CombinableEvals<F>> Foldable<F>
56    for PlonkishWitnessGeneric<N_COL, N_FSEL, F, Evals>
57{
58    fn combine(mut a: Self, b: Self, challenge: F) -> Self {
59        for (a, b) in (*a.witness.cols).iter_mut().zip(*(b.witness.cols)) {
60            for (a, b) in (a.e_as_mut_slice()).iter_mut().zip(b.e_as_slice()) {
61                *a += *b * challenge;
62            }
63        }
64        assert!(a.fixed_selectors == b.fixed_selectors);
65        a
66    }
67}
68
// Marker impl: registers `PlonkishWitnessGeneric` as a folding `Witness`.
// The body is empty — presumably all required behavior comes from the
// `Foldable` impl above; confirm against the `folding` crate's trait
// definition.
impl<
        const N_COL: usize,
        const N_FSEL: usize,
        Curve: CommitmentCurve,
        Evals: CombinableEvals<Curve::ScalarField>,
    > Witness<Curve> for PlonkishWitnessGeneric<N_COL, N_FSEL, Curve::ScalarField, Evals>
{
}
77
78impl<const N_COL: usize, const N_FSEL: usize, F: FftField, Evals: CombinableEvals<F>>
79    Index<Column<usize>> for PlonkishWitnessGeneric<N_COL, N_FSEL, F, Evals>
80{
81    type Output = [F];
82
83    /// Map a column alias to the corresponding witness column.
84    fn index(&self, index: Column<usize>) -> &Self::Output {
85        match index {
86            Column::Relation(i) => self.witness.cols[i].e_as_slice(),
87            Column::FixedSelector(i) => self.fixed_selectors[i].e_as_slice(),
88            other => panic!("Invalid column index: {other:?}"),
89        }
90    }
91}
92
// for selectors, () in this case as we have none: the `()` selector type
// has no inhabitable queries, so this `Index` impl exists only to satisfy
// the trait bound and can never be legitimately called.
impl<const N_COL: usize, const N_FSEL: usize, F: FftField> Index<()>
    for PlonkishWitness<N_COL, N_FSEL, F>
{
    type Output = [F];

    fn index(&self, _index: ()) -> &Self::Output {
        unreachable!()
    }
}
103
/// Instance of a plonkish relation: per-column commitments plus the
/// transcript-derived challenges.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct PlonkishInstance<
    G: CommitmentCurve,
    const N_COL: usize,
    const N_CHALS: usize,
    const N_ALPHAS: usize,
> {
    /// One commitment per witness column.
    pub commitments: [G; N_COL],
    /// Scalar challenges; for `N_CHALS = 3` these are
    /// `[beta, gamma, joint_combiner]` (see `from_witness`).
    pub challenges: [G::ScalarField; N_CHALS],
    /// Powers of the constraint-combination challenge alpha.
    pub alphas: Alphas<G::ScalarField>,
    /// Commitment blinding factor; `from_witness` fixes it to one and
    /// `verify_from_witness` enforces that.
    pub blinder: G::ScalarField,
}
116
117impl<G: CommitmentCurve, const N_COL: usize, const N_CHALS: usize, const N_ALPHAS: usize>
118    Foldable<G::ScalarField> for PlonkishInstance<G, N_COL, N_CHALS, N_ALPHAS>
119{
120    fn combine(a: Self, b: Self, challenge: G::ScalarField) -> Self {
121        Self {
122            commitments: core::array::from_fn(|i| {
123                (a.commitments[i] + b.commitments[i].mul(challenge)).into()
124            }),
125            challenges: core::array::from_fn(|i| a.challenges[i] + challenge * b.challenges[i]),
126            alphas: Alphas::combine(a.alphas, b.alphas, challenge),
127            blinder: a.blinder + challenge * b.blinder,
128        }
129    }
130}
131
132impl<G: CommitmentCurve, const N_COL: usize, const N_CHALS: usize, const N_ALPHAS: usize>
133    Instance<G> for PlonkishInstance<G, N_COL, N_CHALS, N_ALPHAS>
134{
135    fn to_absorb(&self) -> (Vec<G::ScalarField>, Vec<G>) {
136        // FIXME: check!!!!
137        let mut scalars = Vec::new();
138        let mut points = Vec::new();
139        points.extend(self.commitments);
140        scalars.extend(self.challenges);
141        scalars.extend(self.alphas.clone().powers());
142        (scalars, points)
143    }
144
145    fn get_alphas(&self) -> &Alphas<G::ScalarField> {
146        &self.alphas
147    }
148
149    fn get_blinder(&self) -> G::ScalarField {
150        self.blinder
151    }
152}
153
// Implementation for 3 challenges; only for now.
impl<G: CommitmentCurve, const N_COL: usize, const N_ALPHAS: usize>
    PlonkishInstance<G, N_COL, 3, N_ALPHAS>
{
    /// Builds an instance from a witness: commits to every column with a
    /// blinder of one, absorbs the commitments into the Fq-sponge, then
    /// squeezes the three challenges (beta, gamma, joint_combiner) and
    /// finally alpha, in that exact transcript order.
    ///
    /// `verify_from_witness` replays the same transcript; the
    /// absorb/squeeze order of the two methods must stay in sync.
    pub fn from_witness<
        EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>,
        Srs: SRS<G> + core::marker::Sync,
    >(
        w: &GenericWitness<N_COL, Evaluations<G::ScalarField, R2D<G::ScalarField>>>,
        fq_sponge: &mut EFqSponge,
        srs: &Srs,
        domain: R2D<G::ScalarField>,
    ) -> Self {
        // Blinding factor fixed to one; `verify_from_witness` checks this.
        let blinder = G::ScalarField::one();

        // Commit to each column in parallel, then apply the fixed blinder.
        let commitments: GenericWitness<N_COL, PolyComm<G>> = w
            .into_par_iter()
            .map(|w| {
                let blinder = PolyComm::new(vec![blinder; 1]);
                let unblinded = srs.commit_evaluations_non_hiding(domain, w);
                srs.mask_custom(unblinded, &blinder).unwrap().commitment
            })
            .collect();

        // Absorbing commitments
        (&commitments).into_iter().for_each(|c| {
            assert!(c.len() == 1);
            absorb_commitment(fq_sponge, c)
        });

        // Each commitment is a single chunk (asserted above), so collapse
        // the `PolyComm`s into a plain array of curve points.
        let commitments: [G; N_COL] = commitments
            .into_iter()
            .map(|c| c.get_first_chunk())
            .collect_vec()
            .try_into()
            .unwrap();

        // Squeeze order matters: beta, gamma, joint_combiner, then alpha.
        let beta = fq_sponge.challenge();
        let gamma = fq_sponge.challenge();
        let joint_combiner = fq_sponge.challenge();
        let challenges = [beta, gamma, joint_combiner];

        let alpha = fq_sponge.challenge();
        let alphas = Alphas::new_sized(alpha, N_ALPHAS);

        Self {
            commitments,
            challenges,
            alphas,
            blinder,
        }
    }

    /// Verifier-side replay of `from_witness`'s transcript: re-absorbs
    /// the stored commitments and checks that the blinder, challenges,
    /// and alphas match what the sponge produces.
    ///
    /// # Errors
    ///
    /// Returns a descriptive message if the blinder is not one, or if any
    /// challenge/alpha disagrees with the recomputed transcript.
    pub fn verify_from_witness<EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>>(
        &self,
        fq_sponge: &mut EFqSponge,
    ) -> Result<(), String> {
        (self.blinder == G::ScalarField::one())
            .then_some(())
            .ok_or("Blinder must be one")?;

        // Absorbing commitments
        self.commitments
            .iter()
            .for_each(|c| absorb_commitment(fq_sponge, &PolyComm { chunks: vec![*c] }));

        // Same squeeze order as `from_witness`.
        let beta = fq_sponge.challenge();
        let gamma = fq_sponge.challenge();
        let joint_combiner = fq_sponge.challenge();

        (self.challenges == [beta, gamma, joint_combiner])
            .then_some(())
            .ok_or("Challenges do not match the expected result")?;

        let alpha = fq_sponge.challenge();

        (self.alphas == Alphas::new_sized(alpha, N_ALPHAS))
            .then_some(())
            .ok_or("Alphas do not match the expected result")?;

        Ok(())
    }
}
237
/// Challenges available inside plonkish folding expressions. Alpha is
/// deliberately excluded — it is handled through [`Alphas`], and the
/// `From<BerkeleyChallengeTerm>` impl below panics on it.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, EnumIter, EnumCountMacro)]
pub enum PlonkishChallenge {
    Beta,
    Gamma,
    JointCombiner,
}
244
245impl From<BerkeleyChallengeTerm> for PlonkishChallenge {
246    fn from(chal: BerkeleyChallengeTerm) -> Self {
247        match chal {
248            BerkeleyChallengeTerm::Beta => PlonkishChallenge::Beta,
249            BerkeleyChallengeTerm::Gamma => PlonkishChallenge::Gamma,
250            BerkeleyChallengeTerm::JointCombiner => PlonkishChallenge::JointCombiner,
251            BerkeleyChallengeTerm::Alpha => panic!("Alpha not allowed in folding expressions"),
252        }
253    }
254}
255
256impl<G: CommitmentCurve, const N_COL: usize, const N_ALPHAS: usize> Index<PlonkishChallenge>
257    for PlonkishInstance<G, N_COL, 3, N_ALPHAS>
258{
259    type Output = G::ScalarField;
260
261    fn index(&self, index: PlonkishChallenge) -> &Self::Output {
262        match index {
263            PlonkishChallenge::Beta => &self.challenges[0],
264            PlonkishChallenge::Gamma => &self.challenges[1],
265            PlonkishChallenge::JointCombiner => &self.challenges[2],
266        }
267    }
268}