use crate::{
    commitment::{
        b_poly, b_poly_coefficients, combine_commitments, shift_scalar, squeeze_challenge,
        squeeze_prechallenge, BatchEvaluationProof, CommitmentCurve, EndoCurve,
    },
    error::CommitmentError,
    hash_map_cache::HashMapCache,
    utils::combine_polys,
    BlindedCommitment, PolyComm, PolynomialsToCombine, SRS as SRSTrait,
};
use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::{BigInteger, Field, One, PrimeField, UniformRand, Zero};
use ark_poly::{
    univariate::DensePolynomial, EvaluationDomain, Evaluations, Radix2EvaluationDomain as D,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use blake2::{Blake2b512, Digest};
use groupmap::GroupMap;
use mina_poseidon::{sponge::ScalarChallenge, FqSponge};
use o1_utils::{
    field_helpers::{inner_prod, pows},
    math,
};
use rand::{CryptoRng, RngCore};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use std::{cmp::min, iter::Iterator, ops::AddAssign};

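/// A structured reference string (SRS) for the inner product argument:
/// `g` is the vector of curve points used to commit to polynomial
/// coefficients, `h` is a separate generator used for blinding, and
/// `lagrange_bases` caches commitments to the Lagrange basis per domain size.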
#[serde_as]
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(bound = "G: CanonicalDeserialize + CanonicalSerialize")]
pub struct SRS<G> {
    #[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
    pub g: Vec<G>,

    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub h: G,

    #[serde(skip)]
    pub lagrange_bases: HashMapCache<usize, Vec<PolyComm<G>>>,
}

impl<G> PartialEq for SRS<G>
where
    G: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.g == other.g && self.h == other.h
    }
}

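/// Returns the pair of endomorphism coefficients for the curve `G`:
/// `endo_q` scales x-coordinates in the base field and `endo_r` is the
/// matching scalar-field eigenvalue, selected by checking the action of the
/// endomorphism on the curve generator.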
pub fn endos<G: CommitmentCurve>() -> (G::BaseField, G::ScalarField)
where
    G::BaseField: PrimeField,
{
    let endo_q: G::BaseField = mina_poseidon::sponge::endo_coefficient();
    let endo_r = {
        let potential_endo_r: G::ScalarField = mina_poseidon::sponge::endo_coefficient();
        let t = G::generator();
        let (x, y) = t.to_coordinates().unwrap();
        let phi_t = G::of_coordinates(x * endo_q, y);
        if t.mul(potential_endo_r) == phi_t.into_group() {
            potential_endo_r
        } else {
            potential_endo_r * potential_endo_r
        }
    };
    (endo_q, endo_r)
}

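/// Deterministically maps a byte string to a curve point: the bytes are packed
/// into base-field elements (31 bytes per base prime field element), mapped to
/// the curve with `map`, and multiplied by the cofactor so the result lies in
/// the prime-order subgroup.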
fn point_of_random_bytes<G: CommitmentCurve>(map: &G::Map, random_bytes: &[u8]) -> G
where
    G::BaseField: Field,
{
    const N: usize = 31;
    let extension_degree = G::BaseField::extension_degree() as usize;

    let mut base_fields = Vec::with_capacity(N * extension_degree);

    for base_count in 0..extension_degree {
        let mut bits = [false; 8 * N];
        let offset = base_count * N;
        for i in 0..N {
            for j in 0..8 {
                bits[8 * i + j] = (random_bytes[offset + i] >> j) & 1 == 1;
            }
        }

        let n =
            <<G::BaseField as Field>::BasePrimeField as PrimeField>::BigInt::from_bits_be(&bits);
        let t = <<G::BaseField as Field>::BasePrimeField as PrimeField>::from_bigint(n)
            .expect("packing code has a bug");
        base_fields.push(t)
    }

    let t = G::BaseField::from_base_prime_field_elems(base_fields).unwrap();

    let (x, y) = map.to_group(t);
    G::of_coordinates(x, y).mul_by_cofactor()
}

impl<G: CommitmentCurve> SRS<G> {
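    /// Verifies a batch of opening proofs. All checks are folded into a single
    /// multi-scalar multiplication whose scalars are randomized per proof, so
    /// the MSM evaluates to zero only if (with overwhelming probability) every
    /// proof in the batch is valid.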
    pub fn verify<EFqSponge, RNG>(
        &self,
        group_map: &G::Map,
        batch: &mut [BatchEvaluationProof<G, EFqSponge, OpeningProof<G>>],
        rng: &mut RNG,
    ) -> bool
    where
        EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>,
        RNG: RngCore + CryptoRng,
        G::BaseField: PrimeField,
    {
        let nonzero_length = self.g.len();

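        // The SRS is padded to the next power of two, matching the number of
        // folding rounds in the opening proofs being verified.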
        let max_rounds = math::ceil_log2(nonzero_length);

        let padded_length = 1 << max_rounds;

        let (_, endo_r) = endos::<G>();

        let padding = padded_length - nonzero_length;
        let mut points = vec![self.h];
        points.extend(self.g.clone());
        points.extend(vec![G::zero(); padding]);

        let mut scalars = vec![G::ScalarField::zero(); padded_length + 1];
        assert_eq!(scalars.len(), points.len());

        let rand_base = G::ScalarField::rand(rng);
        let sg_rand_base = G::ScalarField::rand(rng);

        let mut rand_base_i = G::ScalarField::one();
        let mut sg_rand_base_i = G::ScalarField::one();

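        // For each proof in the batch, accumulate the scalars and points of the
        // single randomized MSM that must vanish for the whole batch to verify.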
        for BatchEvaluationProof {
            sponge,
            evaluation_points,
            polyscale,
            evalscale,
            evaluations,
            opening,
            combined_inner_product,
        } in batch.iter_mut()
        {
            sponge.absorb_fr(&[shift_scalar::<G>(*combined_inner_product)]);

            let u_base: G = {
                let t = sponge.challenge_fq();
                let (x, y) = group_map.to_group(t);
                G::of_coordinates(x, y)
            };

            let Challenges { chal, chal_inv } = opening.challenges::<EFqSponge>(&endo_r, sponge);

            sponge.absorb_g(&[opening.delta]);
            let c = ScalarChallenge(sponge.challenge()).to_field(&endo_r);

            let b0 = {
                let mut scale = G::ScalarField::one();
                let mut res = G::ScalarField::zero();
                for &e in evaluation_points.iter() {
                    let term = b_poly(&chal, e);
                    res += &(scale * term);
                    scale *= *evalscale;
                }
                res
            };

            let s = b_poly_coefficients(&chal);

            let neg_rand_base_i = -rand_base_i;

            points.push(opening.sg);
            scalars.push(neg_rand_base_i * opening.z1 - sg_rand_base_i);

            {
                let terms: Vec<_> = s.par_iter().map(|s| sg_rand_base_i * s).collect();

                for (i, term) in terms.iter().enumerate() {
                    scalars[i + 1] += term;
                }
            }

            scalars[0] -= &(rand_base_i * opening.z2);

            scalars.push(neg_rand_base_i * (opening.z1 * b0));
            points.push(u_base);

            let rand_base_i_c_i = c * rand_base_i;
            for ((l, r), (u_inv, u)) in opening.lr.iter().zip(chal_inv.iter().zip(chal.iter())) {
                points.push(*l);
                scalars.push(rand_base_i_c_i * u_inv);

                points.push(*r);
                scalars.push(rand_base_i_c_i * u);
            }

            combine_commitments(
                evaluations,
                &mut scalars,
                &mut points,
                *polyscale,
                rand_base_i_c_i,
            );

            scalars.push(rand_base_i_c_i * *combined_inner_product);
            points.push(u_base);

            scalars.push(rand_base_i);
            points.push(opening.delta);

            rand_base_i *= &rand_base;
            sg_rand_base_i *= &sg_rand_base;
        }

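        // Split the accumulated points and scalars into two halves and compute
        // the two MSMs in parallel, then add the partial results.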
        let chunk_size = points.len() / 2;
        let msm_res = points
            .into_par_iter()
            .chunks(chunk_size)
            .zip(scalars.into_par_iter().chunks(chunk_size))
            .map(|(bases, coeffs)| {
                let coeffs_bigint = coeffs
                    .into_iter()
                    .map(|c| c.into_bigint())
                    .collect::<Vec<_>>();
                G::Group::msm_bigint(&bases, &coeffs_bigint)
            })
            .reduce(G::Group::zero, |mut l, r| {
                l += r;
                l
            });

        msm_res == G::Group::zero()
    }

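    /// Creates an SRS from the secret scalar `x`, with `g[i] = x^i` times the
    /// curve generator ("trusted setup" style). Intended for testing only:
    /// whoever knows `x` can break the binding property of the commitments,
    /// which is why this constructor is marked `unsafe`.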
    pub unsafe fn create_trusted_setup(x: G::ScalarField, depth: usize) -> Self {
        let m = G::Map::setup();

        let mut x_pow = G::ScalarField::one();
        let g: Vec<_> = (0..depth)
            .map(|_| {
                let res = G::generator().mul(x_pow);
                x_pow *= x;
                res.into_affine()
            })
            .collect();

        let h = {
            let mut h = Blake2b512::new();
            h.update("srs_misc".as_bytes());
            h.update(0_u32.to_be_bytes());
            point_of_random_bytes(&m, &h.finalize())
        };

        Self {
            g,
            h,
            lagrange_bases: HashMapCache::new(),
        }
    }
}

impl<G: CommitmentCurve> SRS<G>
where
    <G as CommitmentCurve>::Map: Sync,
    G::BaseField: PrimeField,
{
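    /// Same as `create`, but computes the `depth` points of `g` in parallel
    /// (one Blake2b-seeded point per index, via rayon).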
    pub fn create_parallel(depth: usize) -> Self {
        let m = G::Map::setup();

        let g: Vec<_> = (0..depth)
            .into_par_iter()
            .map(|i| {
                let mut h = Blake2b512::new();
                h.update((i as u32).to_be_bytes());
                point_of_random_bytes(&m, &h.finalize())
            })
            .collect();

        let h = {
            let mut h = Blake2b512::new();
            h.update("srs_misc".as_bytes());
            h.update(0_u32.to_be_bytes());
            point_of_random_bytes(&m, &h.finalize())
        };

        Self {
            g,
            h,
            lagrange_bases: HashMapCache::new(),
        }
    }
}

impl<G> SRSTrait<G> for SRS<G>
where
    G: CommitmentCurve,
{
    fn max_poly_size(&self) -> usize {
        self.g.len()
    }

    fn blinding_commitment(&self) -> G {
        self.h
    }

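    /// Turns a non-hiding commitment into a hiding one by adding a freshly
    /// sampled multiple of `h` to every chunk; returns the masked commitment
    /// together with the blinders that were used.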
    fn mask(
        &self,
        comm: PolyComm<G>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        let blinders = comm.map(|_| G::ScalarField::rand(rng));
        self.mask_custom(comm, &blinders).unwrap()
    }

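    /// Like `mask`, but with caller-provided blinders. Fails with
    /// `CommitmentError::BlindersDontMatch` when the number of blinders does
    /// not match the number of chunks of the commitment.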
    fn mask_custom(
        &self,
        com: PolyComm<G>,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        let commitment = com
            .zip(blinders)
            .ok_or_else(|| CommitmentError::BlindersDontMatch(blinders.len(), com.len()))?
            .map(|(g, b)| {
                let mut g_masked = self.h.mul(b);
                g_masked.add_assign(&g);
                g_masked.into_affine()
            });
        Ok(BlindedCommitment {
            commitment,
            blinders: blinders.clone(),
        })
    }

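    /// Commits to `plnm` without hiding. The coefficients are committed in
    /// chunks of at most `self.g.len()` coefficients, each chunk via an MSM
    /// against `g`; the result is padded with the point at infinity up to
    /// `num_chunks` chunks.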
    fn commit_non_hiding(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
    ) -> PolyComm<G> {
        let is_zero = plnm.is_zero();

        let mut chunks: Vec<_> = if is_zero {
            vec![G::zero()]
        } else if plnm.len() < self.g.len() {
            vec![G::Group::msm(&self.g[..plnm.len()], &plnm.coeffs)
                .unwrap()
                .into_affine()]
        } else if plnm.len() == self.g.len() {
            let n = self.g.len();
            let (r1, r2) = rayon::join(
                || G::Group::msm(&self.g[..n / 2], &plnm.coeffs[..n / 2]).unwrap(),
                || G::Group::msm(&self.g[n / 2..n], &plnm.coeffs[n / 2..n]).unwrap(),
            );

            vec![(r1 + r2).into_affine()]
        } else {
            plnm.into_par_iter()
                .chunks(self.g.len())
                .map(|chunk| {
                    let chunk_coeffs = chunk
                        .into_iter()
                        .map(|c| c.into_bigint())
                        .collect::<Vec<_>>();
                    let chunk_res = G::Group::msm_bigint(&self.g, &chunk_coeffs);
                    chunk_res.into_affine()
                })
                .collect()
        };

        for _ in chunks.len()..num_chunks {
            chunks.push(G::zero());
        }

        PolyComm::<G>::new(chunks)
    }

    fn commit(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.mask(self.commit_non_hiding(plnm, num_chunks), rng)
    }

    fn commit_custom(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.mask_custom(self.commit_non_hiding(plnm, num_chunks), blinders)
    }

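    /// Commits to a polynomial given in evaluation form over `plnm.domain()`,
    /// using the cached Lagrange-basis commitments for `domain`. If the
    /// evaluation domain is larger than `domain` the evaluations are
    /// subsampled; if it is smaller, this panics.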
    fn commit_evaluations_non_hiding(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
    ) -> PolyComm<G> {
        let basis = self.get_lagrange_basis(domain);
        let commit_evaluations = |evals: &Vec<G::ScalarField>, basis: &Vec<PolyComm<G>>| {
            PolyComm::<G>::multi_scalar_mul(&basis.iter().collect::<Vec<_>>()[..], &evals[..])
        };
        match domain.size.cmp(&plnm.domain().size) {
            std::cmp::Ordering::Less => {
                let s = (plnm.domain().size / domain.size) as usize;
                let v: Vec<_> = (0..(domain.size())).map(|i| plnm.evals[s * i]).collect();
                commit_evaluations(&v, basis)
            }
            std::cmp::Ordering::Equal => commit_evaluations(&plnm.evals, basis),
            std::cmp::Ordering::Greater => {
                panic!("desired commitment domain size ({}) is greater than the evaluations' domain size ({})", domain.size, plnm.domain().size)
            }
        }
    }

    fn commit_evaluations(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.mask(self.commit_evaluations_non_hiding(domain, plnm), rng)
    }

    fn commit_evaluations_custom(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.mask_custom(self.commit_evaluations_non_hiding(domain, plnm), blinders)
    }

    fn create(depth: usize) -> Self {
        let m = G::Map::setup();

        let g: Vec<_> = (0..depth)
            .map(|i| {
                let mut h = Blake2b512::new();
                h.update((i as u32).to_be_bytes());
                point_of_random_bytes(&m, &h.finalize())
            })
            .collect();

        let h = {
            let mut h = Blake2b512::new();
            h.update("srs_misc".as_bytes());
            h.update(0_u32.to_be_bytes());
            point_of_random_bytes(&m, &h.finalize())
        };

        Self {
            g,
            h,
            lagrange_bases: HashMapCache::new(),
        }
    }

    fn get_lagrange_basis_from_domain_size(&self, domain_size: usize) -> &Vec<PolyComm<G>> {
        self.lagrange_bases.get_or_generate(domain_size, || {
            self.lagrange_basis(D::new(domain_size).unwrap())
        })
    }

    fn get_lagrange_basis(&self, domain: D<G::ScalarField>) -> &Vec<PolyComm<G>> {
        self.lagrange_bases
            .get_or_generate(domain.size(), || self.lagrange_basis(domain))
    }

    fn size(&self) -> usize {
        self.g.len()
    }
}

impl<G: CommitmentCurve> SRS<G> {
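    /// Creates an opening proof for the polynomials in `plnms` at the points
    /// `elm`, combining polynomials with `polyscale` and evaluation points with
    /// `evalscale`. This is the prover side of the inner product argument: the
    /// combined polynomial and evaluation vector are folded over
    /// `ceil_log2(srs_size)` rounds, blinding each round, before producing the
    /// final Schnorr-style opening values.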
    #[allow(clippy::type_complexity)]
    #[allow(clippy::many_single_char_names)]
    pub fn open<EFqSponge, RNG, D: EvaluationDomain<G::ScalarField>>(
        &self,
        group_map: &G::Map,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[G::ScalarField],
        polyscale: G::ScalarField,
        evalscale: G::ScalarField,
        mut sponge: EFqSponge,
        rng: &mut RNG,
    ) -> OpeningProof<G>
    where
        EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
        RNG: RngCore + CryptoRng,
        G::BaseField: PrimeField,
        G: EndoCurve,
    {
        let (endo_q, endo_r) = endos::<G>();

        let rounds = math::ceil_log2(self.g.len());
        let padded_length = 1 << rounds;

        let padding = padded_length - self.g.len();
        let mut g = self.g.clone();
        g.extend(vec![G::zero(); padding]);

        let (p, blinding_factor) = combine_polys::<G, D>(plnms, polyscale, self.g.len());

        let b_init = {
            let mut scale = G::ScalarField::one();
            let mut res: Vec<G::ScalarField> =
                (0..padded_length).map(|_| G::ScalarField::zero()).collect();
            for e in elm {
                for (i, t) in pows(padded_length, *e).iter().enumerate() {
                    res[i] += &(scale * t);
                }
                scale *= &evalscale;
            }
            res
        };

        let combined_inner_product = p
            .coeffs
            .iter()
            .zip(b_init.iter())
            .map(|(a, b)| *a * b)
            .fold(G::ScalarField::zero(), |acc, x| acc + x);

        sponge.absorb_fr(&[shift_scalar::<G>(combined_inner_product)]);

        let u_base: G = {
            let t = sponge.challenge_fq();
            let (x, y) = group_map.to_group(t);
            G::of_coordinates(x, y)
        };

        let mut a = p.coeffs;
        assert!(padded_length >= a.len());
        a.extend(vec![G::ScalarField::zero(); padded_length - a.len()]);

        let mut b = b_init;

        let mut lr = vec![];

        let mut blinders = vec![];

        let mut chals = vec![];
        let mut chal_invs = vec![];

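        // Core IPA folding: each round halves `a` (coefficients), `b`
        // (evaluation-point powers) and `g` (generators), absorbing the L/R
        // cross-term commitments into the sponge to obtain the round challenge.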
        for _ in 0..rounds {
            let n = g.len() / 2;
            let (g_lo, g_hi) = (&g[0..n], &g[n..]);
            let (a_lo, a_hi) = (&a[0..n], &a[n..]);
            let (b_lo, b_hi) = (&b[0..n], &b[n..]);

            let rand_l = <G::ScalarField as UniformRand>::rand(rng);
            let rand_r = <G::ScalarField as UniformRand>::rand(rng);

            let l = G::Group::msm_bigint(
                &[g_lo, &[self.h, u_base]].concat(),
                &[a_hi, &[rand_l, inner_prod(a_hi, b_lo)]]
                    .concat()
                    .iter()
                    .map(|x| x.into_bigint())
                    .collect::<Vec<_>>(),
            )
            .into_affine();

            let r = G::Group::msm_bigint(
                &[g_hi, &[self.h, u_base]].concat(),
                &[a_lo, &[rand_r, inner_prod(a_lo, b_hi)]]
                    .concat()
                    .iter()
                    .map(|x| x.into_bigint())
                    .collect::<Vec<_>>(),
            )
            .into_affine();

            lr.push((l, r));
            blinders.push((rand_l, rand_r));

            sponge.absorb_g(&[l]);
            sponge.absorb_g(&[r]);

            let u_pre = squeeze_prechallenge(&mut sponge);
            let u = u_pre.to_field(&endo_r);
            let u_inv = u.inverse().unwrap();

            chals.push(u);
            chal_invs.push(u_inv);

            a = a_hi
                .par_iter()
                .zip(a_lo)
                .map(|(&hi, &lo)| {
                    let mut res = hi;
                    res *= u_inv;
                    res += &lo;
                    res
                })
                .collect();

            b = b_lo
                .par_iter()
                .zip(b_hi)
                .map(|(&lo, &hi)| {
                    let mut res = hi;
                    res *= u;
                    res += &lo;
                    res
                })
                .collect();

            g = G::combine_one_endo(endo_r, endo_q, g_lo, g_hi, u_pre);
        }

        assert!(
            g.len() == 1 && a.len() == 1 && b.len() == 1,
            "IPA commitment folding must produce single elements after log rounds"
        );
        let a0 = a[0];
        let b0 = b[0];
        let g0 = g[0];
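
        // Fold the per-round blinders with the round challenges to recover the
        // blinding factor of the folded commitment.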
771 let r_prime = blinders
781 .iter()
782 .zip(chals.iter().zip(chal_invs.iter()))
783 .map(|((rand_l, rand_r), (u, u_inv))| ((*rand_l) * u_inv) + (*rand_r * u))
784 .fold(blinding_factor, |acc, x| acc + x);
785
786 let d = <G::ScalarField as UniformRand>::rand(rng);
787 let r_delta = <G::ScalarField as UniformRand>::rand(rng);
788
789 let delta = ((g0.into_group() + (u_base.mul(b0))).into_affine().mul(d)
794 + self.h.mul(r_delta))
795 .into_affine();
796
797 sponge.absorb_g(&[delta]);
798 let c = ScalarChallenge(sponge.challenge()).to_field(&endo_r);
799
800 let z1 = a0 * c + d;
802 let z2 = r_prime * c + r_delta;
803
804 OpeningProof {
805 delta,
806 lr,
807 z1,
808 z2,
809 sg: g0,
810 }
811 }
812
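    /// Computes the commitments to all `n` Lagrange basis polynomials of
    /// `domain` at once, by applying an inverse FFT directly to the vector of
    /// SRS generators (one pass per SRS-sized chunk when `n` exceeds the SRS
    /// size) instead of committing to each basis polynomial separately.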
    fn lagrange_basis(&self, domain: D<G::ScalarField>) -> Vec<PolyComm<G>> {
        let n = domain.size();

        let srs_size = self.g.len();
        let num_elems = (n + srs_size - 1) / srs_size;
        let mut chunks = Vec::with_capacity(num_elems);

        for i in 0..num_elems {
            let mut lg: Vec<<G as AffineRepr>::Group> = vec![<G as AffineRepr>::Group::zero(); n];
            let start_offset = i * srs_size;
            let num_terms = min((i + 1) * srs_size, n) - start_offset;
            for j in 0..num_terms {
                lg[start_offset + j] = self.g[j].into_group()
            }
            domain.ifft_in_place(&mut lg);
            chunks.push(<G as AffineRepr>::Group::normalize_batch(lg.as_mut_slice()));
        }

        (0..n)
            .map(|i| PolyComm {
                chunks: chunks.iter().map(|v| v[i]).collect(),
            })
            .collect()
    }
}

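/// An opening proof produced by the inner product argument: the `lr` pairs are
/// the cross-term commitments of each folding round, `delta`, `z1` and `z2` are
/// the final Schnorr-style values, and `sg` is the single group element left
/// after folding the SRS generators with the round challenges.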
#[serde_as]
#[derive(Clone, Debug, Serialize, Deserialize, Default, PartialEq)]
#[serde(bound = "G: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize")]
pub struct OpeningProof<G: AffineRepr> {
    #[serde_as(as = "Vec<(o1_utils::serialization::SerdeAs, o1_utils::serialization::SerdeAs)>")]
    pub lr: Vec<(G, G)>,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub delta: G,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub z1: G::ScalarField,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub z2: G::ScalarField,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub sg: G,
}

impl<BaseField: PrimeField, G: AffineRepr<BaseField = BaseField> + CommitmentCurve + EndoCurve>
    crate::OpenProof<G> for OpeningProof<G>
{
    type SRS = SRS<G>;

    fn open<EFqSponge, RNG, D: EvaluationDomain<<G as AffineRepr>::ScalarField>>(
        srs: &Self::SRS,
        group_map: &<G as CommitmentCurve>::Map,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[<G as AffineRepr>::ScalarField],
        polyscale: <G as AffineRepr>::ScalarField,
        evalscale: <G as AffineRepr>::ScalarField,
        sponge: EFqSponge,
        rng: &mut RNG,
    ) -> Self
    where
        EFqSponge:
            Clone + FqSponge<<G as AffineRepr>::BaseField, G, <G as AffineRepr>::ScalarField>,
        RNG: RngCore + CryptoRng,
    {
        srs.open(group_map, plnms, elm, polyscale, evalscale, sponge, rng)
    }

    fn verify<EFqSponge, RNG>(
        srs: &Self::SRS,
        group_map: &G::Map,
        batch: &mut [BatchEvaluationProof<G, EFqSponge, Self>],
        rng: &mut RNG,
    ) -> bool
    where
        EFqSponge: FqSponge<<G as AffineRepr>::BaseField, G, <G as AffineRepr>::ScalarField>,
        RNG: RngCore + CryptoRng,
    {
        srs.verify(group_map, batch, rng)
    }
}

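/// The round challenges of an opening proof and their inverses, as recomputed
/// by the verifier from the proof transcript.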
pub struct Challenges<F> {
    pub chal: Vec<F>,
    pub chal_inv: Vec<F>,
}

impl<G: AffineRepr> OpeningProof<G> {
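    /// Replays the `lr` pairs into the sponge (after squeezing and discarding
    /// one base-field challenge, to stay in sync with the verifier transcript)
    /// and returns the raw, pre-endomorphism round challenges.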
    pub fn prechallenges<EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>>(
        &self,
        sponge: &mut EFqSponge,
    ) -> Vec<ScalarChallenge<G::ScalarField>> {
        let _t = sponge.challenge_fq();
        self.lr
            .iter()
            .map(|(l, r)| {
                sponge.absorb_g(&[*l]);
                sponge.absorb_g(&[*r]);
                squeeze_prechallenge(sponge)
            })
            .collect()
    }

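    /// Recomputes the round challenges from the proof transcript, mapping each
    /// prechallenge to the scalar field through the endomorphism coefficient
    /// `endo_r`, and returns them together with their (batch-inverted) inverses.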
    pub fn challenges<EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>>(
        &self,
        endo_r: &G::ScalarField,
        sponge: &mut EFqSponge,
    ) -> Challenges<G::ScalarField> {
        let chal: Vec<_> = self
            .lr
            .iter()
            .map(|(l, r)| {
                sponge.absorb_g(&[*l]);
                sponge.absorb_g(&[*r]);
                squeeze_challenge(endo_r, sponge)
            })
            .collect();

        let chal_inv = {
            let mut cs = chal.clone();
            ark_ff::batch_inversion(&mut cs);
            cs
        };

        Challenges { chal, chal_inv }
    }
}

#[cfg(feature = "ocaml_types")]
pub mod caml {
    use super::OpeningProof;
    use ark_ec::AffineRepr;
    use ocaml;

    #[derive(ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)]
    pub struct CamlOpeningProof<G, F> {
        pub lr: Vec<(G, G)>,
        pub delta: G,
        pub z1: F,
        pub z2: F,
        pub sg: G,
    }

    impl<G, CamlF, CamlG> From<OpeningProof<G>> for CamlOpeningProof<CamlG, CamlF>
    where
        G: AffineRepr,
        CamlG: From<G>,
        CamlF: From<G::ScalarField>,
    {
        fn from(opening_proof: OpeningProof<G>) -> Self {
            Self {
                lr: opening_proof
                    .lr
                    .into_iter()
                    .map(|(g1, g2)| (CamlG::from(g1), CamlG::from(g2)))
                    .collect(),
                delta: CamlG::from(opening_proof.delta),
                z1: opening_proof.z1.into(),
                z2: opening_proof.z2.into(),
                sg: CamlG::from(opening_proof.sg),
            }
        }
    }

    impl<G, CamlF, CamlG> From<CamlOpeningProof<CamlG, CamlF>> for OpeningProof<G>
    where
        G: AffineRepr,
        CamlG: Into<G>,
        CamlF: Into<G::ScalarField>,
    {
        fn from(caml: CamlOpeningProof<CamlG, CamlF>) -> Self {
            Self {
                lr: caml
                    .lr
                    .into_iter()
                    .map(|(g1, g2)| (g1.into(), g2.into()))
                    .collect(),
                delta: caml.delta.into(),
                z1: caml.z1.into(),
                z2: caml.z2.into(),
                sg: caml.sg.into(),
            }
        }
    }
}