use crate::{
    commitment::{
        b_poly, b_poly_coefficients, combine_commitments, shift_scalar, squeeze_challenge,
        squeeze_prechallenge, BatchEvaluationProof, CommitmentCurve, EndoCurve,
    },
    error::CommitmentError,
    hash_map_cache::HashMapCache,
    utils::combine_polys,
    BlindedCommitment, PolyComm, PolynomialsToCombine, SRS as SRSTrait,
};
use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::{BigInteger, Field, One, PrimeField, UniformRand, Zero};
use ark_poly::{
    univariate::DensePolynomial, EvaluationDomain, Evaluations, Radix2EvaluationDomain as D,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use blake2::{Blake2b512, Digest};
use groupmap::GroupMap;
use mina_poseidon::{sponge::ScalarChallenge, FqSponge};
use o1_utils::{
    field_helpers::{inner_prod, pows},
    math,
};
use rand::{CryptoRng, RngCore};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use std::{cmp::min, iter::Iterator, ops::AddAssign};

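/// A structured reference string (SRS) for the commitment scheme based on the inner product
/// argument: a vector of commitment bases `g`, a separate blinding generator `h`, and a cache
/// of Lagrange-basis commitments computed on demand.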
#[serde_as]
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(bound = "G: CanonicalDeserialize + CanonicalSerialize")]
pub struct SRS<G> {
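    /// The group elements used as bases when committing to a polynomial's coefficients.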
    #[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
    pub g: Vec<G>,

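    /// The group element used as the base for blinding factors.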
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub h: G,

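    /// Cache of Lagrange-basis commitments, keyed by domain size. Computed on demand and not
    /// (de)serialized.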
    #[serde(skip)]
    pub lagrange_bases: HashMapCache<usize, Vec<PolyComm<G>>>,
}

// Equality only compares the group elements; the Lagrange-basis cache is derived data and is
// ignored.
impl<G> PartialEq for SRS<G>
where
    G: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.g == other.g && self.h == other.h
    }
}

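/// Returns the endomorphism coefficients for the curve: `endo_q` acts on the base field
/// (scaling the x-coordinate) and `endo_r` is the matching scalar-field coefficient.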
pub fn endos<G: CommitmentCurve>() -> (G::BaseField, G::ScalarField)
where
    G::BaseField: PrimeField,
{
    let endo_q: G::BaseField = mina_poseidon::sponge::endo_coefficient();
    let endo_r = {
        let potential_endo_r: G::ScalarField = mina_poseidon::sponge::endo_coefficient();
        let t = G::generator();
        let (x, y) = t.to_coordinates().unwrap();
        let phi_t = G::of_coordinates(x * endo_q, y);
        // Pick whichever of the two candidate scalars matches the action of the endomorphism
        // on the generator.
        if t.mul(potential_endo_r) == phi_t.into_group() {
            potential_endo_r
        } else {
            potential_endo_r * potential_endo_r
        }
    };
    (endo_q, endo_r)
}

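/// Deterministically maps a byte string to a curve point: the bytes are packed into base-field
/// element(s), mapped to the curve via the group map, and multiplied by the cofactor.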
fn point_of_random_bytes<G: CommitmentCurve>(map: &G::Map, random_bytes: &[u8]) -> G
where
    G::BaseField: Field,
{
    // Pack 31 bytes per base prime field element so the value stays below the modulus.
    const N: usize = 31;
    let extension_degree = G::BaseField::extension_degree() as usize;

    let mut base_fields = Vec::with_capacity(N * extension_degree);

    for base_count in 0..extension_degree {
        let mut bits = [false; 8 * N];
        let offset = base_count * N;
        for i in 0..N {
            for j in 0..8 {
                bits[8 * i + j] = (random_bytes[offset + i] >> j) & 1 == 1;
            }
        }

        let n =
            <<G::BaseField as Field>::BasePrimeField as PrimeField>::BigInt::from_bits_be(&bits);
        let t = <<G::BaseField as Field>::BasePrimeField as PrimeField>::from_bigint(n)
            .expect("packing code has a bug");
        base_fields.push(t)
    }

    let t = G::BaseField::from_base_prime_field_elems(&base_fields).unwrap();

    let (x, y) = map.to_group(t);
    G::of_coordinates(x, y).mul_by_cofactor()
}


impl<G: CommitmentCurve> SRS<G> {
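    /// Verifies a batch of opening proofs by folding every individual check, weighted by fresh
    /// random scalars, into a single multi-scalar multiplication and testing that the result is
    /// the identity. Returns `false` if any proof in the batch is invalid.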
    pub fn verify<EFqSponge, RNG>(
        &self,
        group_map: &G::Map,
        batch: &mut [BatchEvaluationProof<G, EFqSponge, OpeningProof<G>>],
        rng: &mut RNG,
    ) -> bool
    where
        EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>,
        RNG: RngCore + CryptoRng,
        G::BaseField: PrimeField,
    {
        let nonzero_length = self.g.len();

        let max_rounds = math::ceil_log2(nonzero_length);

        let padded_length = 1 << max_rounds;

        let (_, endo_r) = endos::<G>();

        // Pad the SRS bases up to the next power of two; the first point is the blinding base.
        let padding = padded_length - nonzero_length;
        let mut points = vec![self.h];
        points.extend(self.g.clone());
        points.extend(vec![G::zero(); padding]);

        let mut scalars = vec![G::ScalarField::zero(); padded_length + 1];
        assert_eq!(scalars.len(), points.len());

        // Random weights used to combine all proofs of the batch into a single check.
        let rand_base = G::ScalarField::rand(rng);
        let sg_rand_base = G::ScalarField::rand(rng);

        let mut rand_base_i = G::ScalarField::one();
        let mut sg_rand_base_i = G::ScalarField::one();

        for BatchEvaluationProof {
            sponge,
            evaluation_points,
            polyscale,
            evalscale,
            evaluations,
            opening,
            combined_inner_product,
        } in batch.iter_mut()
        {
            sponge.absorb_fr(&[shift_scalar::<G>(*combined_inner_product)]);

            let u_base: G = {
                let t = sponge.challenge_fq();
                let (x, y) = group_map.to_group(t);
                G::of_coordinates(x, y)
            };

            let Challenges { chal, chal_inv } = opening.challenges::<EFqSponge>(&endo_r, sponge);

            sponge.absorb_g(&[opening.delta]);
            let c = ScalarChallenge(sponge.challenge()).to_field(&endo_r);

            // b0 = sum_i evalscale^i * b_poly(chal, evaluation_points[i])
            let b0 = {
                let mut scale = G::ScalarField::one();
                let mut res = G::ScalarField::zero();
                for &e in evaluation_points.iter() {
                    let term = b_poly(&chal, e);
                    res += &(scale * term);
                    scale *= *evalscale;
                }
                res
            };

            let s = b_poly_coefficients(&chal);

            let neg_rand_base_i = -rand_base_i;

            points.push(opening.sg);
            scalars.push(neg_rand_base_i * opening.z1 - sg_rand_base_i);

            {
                // Accumulate sg_rand_base_i * s onto the scalars of the SRS bases.
                let terms: Vec<_> = s.par_iter().map(|s| sg_rand_base_i * s).collect();

                for (i, term) in terms.iter().enumerate() {
                    scalars[i + 1] += term;
                }
            }

            scalars[0] -= &(rand_base_i * opening.z2);

            scalars.push(neg_rand_base_i * (opening.z1 * b0));
            points.push(u_base);

            let rand_base_i_c_i = c * rand_base_i;
            for ((l, r), (u_inv, u)) in opening.lr.iter().zip(chal_inv.iter().zip(chal.iter())) {
                points.push(*l);
                scalars.push(rand_base_i_c_i * u_inv);

                points.push(*r);
                scalars.push(rand_base_i_c_i * u);
            }

            combine_commitments(
                evaluations,
                &mut scalars,
                &mut points,
                *polyscale,
                rand_base_i_c_i,
            );

            scalars.push(rand_base_i_c_i * *combined_inner_product);
            points.push(u_base);

            scalars.push(rand_base_i);
            points.push(opening.delta);

            rand_base_i *= &rand_base;
            sg_rand_base_i *= &sg_rand_base;
        }

        // Evaluate the combined check as one MSM, split into two chunks so they can run in
        // parallel; the batch verifies iff the result is the identity.
        let chunk_size = points.len() / 2;
        let msm_res = points
            .into_par_iter()
            .chunks(chunk_size)
            .zip(scalars.into_par_iter().chunks(chunk_size))
            .map(|(bases, coeffs)| {
                let coeffs_bigint = coeffs
                    .into_iter()
                    .map(|c| c.into_bigint())
                    .collect::<Vec<_>>();
                G::Group::msm_bigint(&bases, &coeffs_bigint)
            })
            .reduce(G::Group::zero, |mut l, r| {
                l += r;
                l
            });

        msm_res == G::Group::zero()
    }
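    /// Creates an SRS whose bases are the successive powers of a secret scalar `x`. This is a
    /// trusted setup: whoever knows `x` can break the binding of the commitment scheme, which
    /// is why the function is marked `unsafe` and should only be used for testing.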
    pub unsafe fn create_trusted_setup(x: G::ScalarField, depth: usize) -> Self {
        let m = G::Map::setup();

        let mut x_pow = G::ScalarField::one();
        let g: Vec<_> = (0..depth)
            .map(|_| {
                let res = G::generator().mul(x_pow);
                x_pow *= x;
                res.into_affine()
            })
            .collect();

        // The blinding generator is sampled independently, by hashing to the curve.
        let h = {
            let mut h = Blake2b512::new();
            h.update("srs_misc".as_bytes());
            h.update(0_u32.to_be_bytes());
            point_of_random_bytes(&m, &h.finalize())
        };

        Self {
            g,
            h,
            lagrange_bases: HashMapCache::new(),
        }
    }
}

impl<G: CommitmentCurve> SRS<G>
where
    <G as CommitmentCurve>::Map: Sync,
    G::BaseField: PrimeField,
{
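    /// Creates an SRS of the given size, hashing each index to a curve point in parallel. The
    /// construction is deterministic and requires no trusted setup.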
    pub fn create_parallel(depth: usize) -> Self {
        let m = G::Map::setup();

        // Derive each basis point by hashing its index to the curve.
        let g: Vec<_> = (0..depth)
            .into_par_iter()
            .map(|i| {
                let mut h = Blake2b512::new();
                h.update((i as u32).to_be_bytes());
                point_of_random_bytes(&m, &h.finalize())
            })
            .collect();

        let h = {
            let mut h = Blake2b512::new();
            h.update("srs_misc".as_bytes());
            h.update(0_u32.to_be_bytes());
            point_of_random_bytes(&m, &h.finalize())
        };

        Self {
            g,
            h,
            lagrange_bases: HashMapCache::new(),
        }
    }
}

impl<G> SRSTrait<G> for SRS<G>
where
    G: CommitmentCurve,
{
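    /// The maximum number of coefficients that fit in a single commitment chunk, i.e. the
    /// number of bases in the SRS.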
    fn max_poly_size(&self) -> usize {
        self.g.len()
    }

    /// The generator used for blinding commitments.
    fn blinding_commitment(&self) -> G {
        self.h
    }

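    /// Turns a non-hiding commitment into a hiding one by sampling fresh random blinders.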
    fn mask(
        &self,
        comm: PolyComm<G>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        let blinders = comm.map(|_| G::ScalarField::rand(rng));
        self.mask_custom(comm, &blinders).unwrap()
    }
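    /// Blinds a commitment with caller-supplied blinders: each chunk `g` becomes `g + b * h`.
    /// Fails if the number of blinders does not match the number of chunks.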
    fn mask_custom(
        &self,
        com: PolyComm<G>,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        let commitment = com
            .zip(blinders)
            .ok_or_else(|| CommitmentError::BlindersDontMatch(blinders.len(), com.len()))?
            .map(|(g, b)| {
                let mut g_masked = self.h.mul(b);
                g_masked.add_assign(&g);
                g_masked.into_affine()
            });
        Ok(BlindedCommitment {
            commitment,
            blinders: blinders.clone(),
        })
    }

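    /// Commits to a polynomial without hiding. Polynomials larger than the SRS are split into
    /// chunks of `self.g.len()` coefficients, with one commitment per chunk; the result is
    /// padded with the identity up to `num_chunks`.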
    fn commit_non_hiding(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
    ) -> PolyComm<G> {
        let is_zero = plnm.is_zero();

        let mut chunks: Vec<_> = if is_zero {
            vec![G::zero()]
        } else if plnm.len() < self.g.len() {
            // The polynomial fits in a single chunk: one MSM over a prefix of the bases.
            vec![G::Group::msm(&self.g[..plnm.len()], &plnm.coeffs)
                .unwrap()
                .into_affine()]
        } else if plnm.len() == self.g.len() {
            // Exactly one full chunk: split the MSM in two halves and run them in parallel.
            let n = self.g.len();
            let (r1, r2) = rayon::join(
                || G::Group::msm(&self.g[..n / 2], &plnm.coeffs[..n / 2]).unwrap(),
                || G::Group::msm(&self.g[n / 2..n], &plnm.coeffs[n / 2..n]).unwrap(),
            );

            vec![(r1 + r2).into_affine()]
        } else {
            // More coefficients than bases: commit chunk by chunk.
            plnm.into_par_iter()
                .chunks(self.g.len())
                .map(|chunk| {
                    let chunk_coeffs = chunk
                        .into_iter()
                        .map(|c| c.into_bigint())
                        .collect::<Vec<_>>();
                    let chunk_res = G::Group::msm_bigint(&self.g, &chunk_coeffs);
                    chunk_res.into_affine()
                })
                .collect()
        };

        for _ in chunks.len()..num_chunks {
            chunks.push(G::zero());
        }

        PolyComm::<G>::new(chunks)
    }

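    /// Commits to a polynomial, hiding it with freshly sampled random blinders.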
    fn commit(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.mask(self.commit_non_hiding(plnm, num_chunks), rng)
    }

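    /// Commits to a polynomial and hides it with the caller-supplied blinders.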
    fn commit_custom(
        &self,
        plnm: &DensePolynomial<G::ScalarField>,
        num_chunks: usize,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.mask_custom(self.commit_non_hiding(plnm, num_chunks), blinders)
    }

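    /// Commits to a polynomial given in evaluation form, using the cached Lagrange-basis
    /// commitments. Evaluations on a larger domain are subsampled down to `domain`; a
    /// commitment domain larger than the evaluation domain is a panic.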
    fn commit_evaluations_non_hiding(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
    ) -> PolyComm<G> {
        let basis = self.get_lagrange_basis(domain);
        let commit_evaluations = |evals: &Vec<G::ScalarField>, basis: &Vec<PolyComm<G>>| {
            PolyComm::<G>::multi_scalar_mul(&basis.iter().collect::<Vec<_>>()[..], &evals[..])
        };
        match domain.size.cmp(&plnm.domain().size) {
            std::cmp::Ordering::Less => {
                // The evaluations live on a larger domain: subsample them down to `domain`.
                let s = (plnm.domain().size / domain.size) as usize;
                let v: Vec<_> = (0..(domain.size())).map(|i| plnm.evals[s * i]).collect();
                commit_evaluations(&v, basis)
            }
            std::cmp::Ordering::Equal => commit_evaluations(&plnm.evals, basis),
            std::cmp::Ordering::Greater => {
                panic!(
                    "desired commitment domain size ({}) greater than evaluations' domain size ({})",
                    domain.size,
                    plnm.domain().size
                )
            }
        }
    }

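    /// Same as `commit_evaluations_non_hiding`, but hiding the result with random blinders.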
    fn commit_evaluations(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
        rng: &mut (impl RngCore + CryptoRng),
    ) -> BlindedCommitment<G> {
        self.mask(self.commit_evaluations_non_hiding(domain, plnm), rng)
    }

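    /// Same as `commit_evaluations_non_hiding`, but hiding with the caller-supplied blinders.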
    fn commit_evaluations_custom(
        &self,
        domain: D<G::ScalarField>,
        plnm: &Evaluations<G::ScalarField, D<G::ScalarField>>,
        blinders: &PolyComm<G::ScalarField>,
    ) -> Result<BlindedCommitment<G>, CommitmentError> {
        self.mask_custom(self.commit_evaluations_non_hiding(domain, plnm), blinders)
    }

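    /// Creates an SRS of the given size by hashing each index to a curve point. Deterministic,
    /// and no trusted setup is involved.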
    fn create(depth: usize) -> Self {
        let m = G::Map::setup();

        let g: Vec<_> = (0..depth)
            .map(|i| {
                let mut h = Blake2b512::new();
                h.update((i as u32).to_be_bytes());
                point_of_random_bytes(&m, &h.finalize())
            })
            .collect();

        let h = {
            let mut h = Blake2b512::new();
            h.update("srs_misc".as_bytes());
            h.update(0_u32.to_be_bytes());
            point_of_random_bytes(&m, &h.finalize())
        };

        Self {
            g,
            h,
            lagrange_bases: HashMapCache::new(),
        }
    }

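    /// Returns the Lagrange-basis commitments for the domain of the given size, computing and
    /// caching them on first use.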
    fn get_lagrange_basis_from_domain_size(&self, domain_size: usize) -> &Vec<PolyComm<G>> {
        self.lagrange_bases.get_or_generate(domain_size, || {
            self.lagrange_basis(D::new(domain_size).unwrap())
        })
    }

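    /// Same as `get_lagrange_basis_from_domain_size`, taking the domain itself.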
    fn get_lagrange_basis(&self, domain: D<G::ScalarField>) -> &Vec<PolyComm<G>> {
        self.lagrange_bases
            .get_or_generate(domain.size(), || self.lagrange_basis(domain))
    }

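    /// The number of bases in the SRS.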
    fn size(&self) -> usize {
        self.g.len()
    }
}

impl<G: CommitmentCurve> SRS<G> {
    #[allow(clippy::type_complexity)]
    #[allow(clippy::many_single_char_names)]
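    /// Produces an inner-product-argument opening proof for the given batch of polynomials,
    /// combined with powers of `polyscale`, at the evaluation points `elm` combined with
    /// powers of `evalscale`.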
    pub fn open<EFqSponge, RNG, D: EvaluationDomain<G::ScalarField>>(
        &self,
        group_map: &G::Map,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[G::ScalarField],
        polyscale: G::ScalarField,
        evalscale: G::ScalarField,
        mut sponge: EFqSponge,
        rng: &mut RNG,
    ) -> OpeningProof<G>
    where
        EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
        RNG: RngCore + CryptoRng,
        G::BaseField: PrimeField,
        G: EndoCurve,
    {
        let (endo_q, endo_r) = endos::<G>();

        let rounds = math::ceil_log2(self.g.len());
        let padded_length = 1 << rounds;

        // Pad the SRS bases up to the next power of two.
        let padding = padded_length - self.g.len();
        let mut g = self.g.clone();
        g.extend(vec![G::zero(); padding]);

        // Combine all polynomials into a single one, weighted by powers of `polyscale`.
        let (p, blinding_factor) = combine_polys::<G, D>(plnms, polyscale, self.g.len());

        // b_init[j] = sum_i evalscale^i * elm[i]^j, so that <p, b_init> is the combined
        // inner product of the evaluations.
        let b_init = {
            let mut scale = G::ScalarField::one();
            let mut res: Vec<G::ScalarField> =
                (0..padded_length).map(|_| G::ScalarField::zero()).collect();
            for e in elm {
                for (i, t) in pows(padded_length, *e).iter().enumerate() {
                    res[i] += &(scale * t);
                }
                scale *= &evalscale;
            }
            res
        };

        let combined_inner_product = p
            .coeffs
            .iter()
            .zip(b_init.iter())
            .map(|(a, b)| *a * b)
            .fold(G::ScalarField::zero(), |acc, x| acc + x);

        sponge.absorb_fr(&[shift_scalar::<G>(combined_inner_product)]);

        // Sample the base point used to commit to the inner product.
        let u_base: G = {
            let t = sponge.challenge_fq();
            let (x, y) = group_map.to_group(t);
            G::of_coordinates(x, y)
        };

        let mut a = p.coeffs;
        assert!(padded_length >= a.len());
        a.extend(vec![G::ScalarField::zero(); padded_length - a.len()]);

        let mut b = b_init;

        let mut lr = vec![];

        let mut blinders = vec![];

        let mut chals = vec![];
        let mut chal_invs = vec![];

        // Fold the vectors in half at each round: absorb the cross terms `l` and `r`, squeeze a
        // challenge `u`, and recombine the halves with `u` and its inverse.
        for _ in 0..rounds {
            let n = g.len() / 2;
            let (g_lo, g_hi) = (&g[0..n], &g[n..]);
            let (a_lo, a_hi) = (&a[0..n], &a[n..]);
            let (b_lo, b_hi) = (&b[0..n], &b[n..]);

            let rand_l = <G::ScalarField as UniformRand>::rand(rng);
            let rand_r = <G::ScalarField as UniformRand>::rand(rng);

            let l = G::Group::msm_bigint(
                &[g_lo, &[self.h, u_base]].concat(),
                &[a_hi, &[rand_l, inner_prod(a_hi, b_lo)]]
                    .concat()
                    .iter()
                    .map(|x| x.into_bigint())
                    .collect::<Vec<_>>(),
            )
            .into_affine();

            let r = G::Group::msm_bigint(
                &[g_hi, &[self.h, u_base]].concat(),
                &[a_lo, &[rand_r, inner_prod(a_lo, b_hi)]]
                    .concat()
                    .iter()
                    .map(|x| x.into_bigint())
                    .collect::<Vec<_>>(),
            )
            .into_affine();

            lr.push((l, r));
            blinders.push((rand_l, rand_r));

            sponge.absorb_g(&[l]);
            sponge.absorb_g(&[r]);

            let u_pre = squeeze_prechallenge(&mut sponge);
            let u = u_pre.to_field(&endo_r);
            let u_inv = u.inverse().unwrap();

            chals.push(u);
            chal_invs.push(u_inv);

            // a <- a_lo + u^{-1} * a_hi
            a = a_hi
                .par_iter()
                .zip(a_lo)
                .map(|(&hi, &lo)| {
                    let mut res = hi;
                    res *= u_inv;
                    res += &lo;
                    res
                })
                .collect();

            // b <- b_lo + u * b_hi
            b = b_lo
                .par_iter()
                .zip(b_hi)
                .map(|(&lo, &hi)| {
                    let mut res = hi;
                    res *= u;
                    res += &lo;
                    res
                })
                .collect();

            g = G::combine_one_endo(endo_r, endo_q, g_lo, g_hi, u_pre);
        }

        assert!(
            g.len() == 1 && a.len() == 1 && b.len() == 1,
            "IPA commitment folding must produce single elements after log rounds"
        );
        let a0 = a[0];
        let b0 = b[0];
        let g0 = g[0];

        // Combine the per-round blinders with the blinding factor of the combined polynomial.
        let r_prime = blinders
            .iter()
            .zip(chals.iter().zip(chal_invs.iter()))
            .map(|((rand_l, rand_r), (u, u_inv))| ((*rand_l) * u_inv) + (*rand_r * u))
            .fold(blinding_factor, |acc, x| acc + x);

        let d = <G::ScalarField as UniformRand>::rand(rng);
        let r_delta = <G::ScalarField as UniformRand>::rand(rng);

        // Schnorr-style argument of knowledge of `a0` and of the blinding factor.
        let delta = ((g0.into_group() + (u_base.mul(b0))).into_affine().mul(d)
            + self.h.mul(r_delta))
        .into_affine();

        sponge.absorb_g(&[delta]);
        let c = ScalarChallenge(sponge.challenge()).to_field(&endo_r);

        let z1 = a0 * c + d;
        let z2 = r_prime * c + r_delta;

        OpeningProof {
            delta,
            lr,
            z1,
            z2,
            sg: g0,
        }
    }
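    /// Computes the commitments to the Lagrange basis polynomials of `domain`. Applying an
    /// inverse FFT over the group to the SRS bases yields, in one pass, the commitment of every
    /// Lagrange polynomial; when the domain is larger than the SRS this is done chunk by chunk.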
    fn lagrange_basis(&self, domain: D<G::ScalarField>) -> Vec<PolyComm<G>> {
        let n = domain.size();

        let srs_size = self.g.len();
        let num_elems = (n + srs_size - 1) / srs_size;
        let mut chunks = Vec::with_capacity(num_elems);

        // For each chunk of the SRS, place its bases at the corresponding coefficient positions
        // and apply an inverse FFT over the group: the result is this chunk's contribution to
        // every Lagrange-basis commitment.
        for i in 0..num_elems {
            let mut lg: Vec<<G as AffineRepr>::Group> = vec![<G as AffineRepr>::Group::zero(); n];
            let start_offset = i * srs_size;
            let num_terms = min((i + 1) * srs_size, n) - start_offset;
            for j in 0..num_terms {
                lg[start_offset + j] = self.g[j].into_group()
            }
            domain.ifft_in_place(&mut lg);
            chunks.push(<G as AffineRepr>::Group::normalize_batch(lg.as_mut_slice()));
        }

        (0..n)
            .map(|i| PolyComm {
                chunks: chunks.iter().map(|v| v[i]).collect(),
            })
            .collect()
    }
}

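/// An inner-product-argument opening proof: one `(l, r)` pair of cross-term commitments per
/// folding round, the final folded base `sg`, and a Schnorr-style response (`delta`, `z1`,
/// `z2`).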
#[serde_as]
#[derive(Clone, Debug, Serialize, Deserialize, Default, PartialEq)]
#[serde(bound = "G: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize")]
pub struct OpeningProof<G: AffineRepr> {
    /// One `(l, r)` pair of cross-term commitments per folding round.
    #[serde_as(as = "Vec<(o1_utils::serialization::SerdeAs, o1_utils::serialization::SerdeAs)>")]
    pub lr: Vec<(G, G)>,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub delta: G,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub z1: G::ScalarField,
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub z2: G::ScalarField,
    /// The fully folded commitment base.
    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
    pub sg: G,
}

impl<BaseField: PrimeField, G: AffineRepr<BaseField = BaseField> + CommitmentCurve + EndoCurve>
    crate::OpenProof<G> for OpeningProof<G>
{
    type SRS = SRS<G>;

    fn open<EFqSponge, RNG, D: EvaluationDomain<<G as AffineRepr>::ScalarField>>(
        srs: &Self::SRS,
        group_map: &<G as CommitmentCurve>::Map,
        plnms: PolynomialsToCombine<G, D>,
        elm: &[<G as AffineRepr>::ScalarField],
        polyscale: <G as AffineRepr>::ScalarField,
        evalscale: <G as AffineRepr>::ScalarField,
        sponge: EFqSponge,
        rng: &mut RNG,
    ) -> Self
    where
        EFqSponge:
            Clone + FqSponge<<G as AffineRepr>::BaseField, G, <G as AffineRepr>::ScalarField>,
        RNG: RngCore + CryptoRng,
    {
        srs.open(group_map, plnms, elm, polyscale, evalscale, sponge, rng)
    }

    fn verify<EFqSponge, RNG>(
        srs: &Self::SRS,
        group_map: &G::Map,
        batch: &mut [BatchEvaluationProof<G, EFqSponge, Self>],
        rng: &mut RNG,
    ) -> bool
    where
        EFqSponge: FqSponge<<G as AffineRepr>::BaseField, G, <G as AffineRepr>::ScalarField>,
        RNG: RngCore + CryptoRng,
    {
        srs.verify(group_map, batch, rng)
    }
}

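/// The round challenges of an IPA opening together with their inverses.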
pub struct Challenges<F> {
    pub chal: Vec<F>,
    pub chal_inv: Vec<F>,
}

impl<G: AffineRepr> OpeningProof<G> {
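    /// Replays the sponge over the `(l, r)` pairs and returns the raw (pre-endomorphism) round
    /// challenges, first squeezing and discarding the base-field challenge used for the `u`
    /// base point.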
    pub fn prechallenges<EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>>(
        &self,
        sponge: &mut EFqSponge,
    ) -> Vec<ScalarChallenge<G::ScalarField>> {
        let _t = sponge.challenge_fq();
        self.lr
            .iter()
            .map(|(l, r)| {
                sponge.absorb_g(&[*l]);
                sponge.absorb_g(&[*r]);
                squeeze_prechallenge(sponge)
            })
            .collect()
    }

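    /// Replays the sponge over the `(l, r)` pairs and returns the round challenges, mapped to
    /// field elements via the endomorphism coefficient `endo_r`, together with their inverses
    /// (computed by batch inversion).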
    pub fn challenges<EFqSponge: FqSponge<G::BaseField, G, G::ScalarField>>(
        &self,
        endo_r: &G::ScalarField,
        sponge: &mut EFqSponge,
    ) -> Challenges<G::ScalarField> {
        let chal: Vec<_> = self
            .lr
            .iter()
            .map(|(l, r)| {
                sponge.absorb_g(&[*l]);
                sponge.absorb_g(&[*r]);
                squeeze_challenge(endo_r, sponge)
            })
            .collect();

        let chal_inv = {
            let mut cs = chal.clone();
            ark_ff::batch_inversion(&mut cs);
            cs
        };

        Challenges { chal, chal_inv }
    }
}

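/// Conversions to and from the OCaml representation of an opening proof, available when the
/// `ocaml_types` feature is enabled.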
#[cfg(feature = "ocaml_types")]
pub mod caml {
    use super::OpeningProof;
    use ark_ec::AffineRepr;
    use ocaml;

    #[derive(ocaml::IntoValue, ocaml::FromValue, ocaml_gen::Struct)]
    pub struct CamlOpeningProof<G, F> {
        pub lr: Vec<(G, G)>,
        pub delta: G,
        pub z1: F,
        pub z2: F,
        pub sg: G,
    }

    impl<G, CamlF, CamlG> From<OpeningProof<G>> for CamlOpeningProof<CamlG, CamlF>
    where
        G: AffineRepr,
        CamlG: From<G>,
        CamlF: From<G::ScalarField>,
    {
        fn from(opening_proof: OpeningProof<G>) -> Self {
            Self {
                lr: opening_proof
                    .lr
                    .into_iter()
                    .map(|(g1, g2)| (CamlG::from(g1), CamlG::from(g2)))
                    .collect(),
                delta: CamlG::from(opening_proof.delta),
                z1: opening_proof.z1.into(),
                z2: opening_proof.z2.into(),
                sg: CamlG::from(opening_proof.sg),
            }
        }
    }

    impl<G, CamlF, CamlG> From<CamlOpeningProof<CamlG, CamlF>> for OpeningProof<G>
    where
        G: AffineRepr,
        CamlG: Into<G>,
        CamlF: Into<G::ScalarField>,
    {
        fn from(caml: CamlOpeningProof<CamlG, CamlF>) -> Self {
            Self {
                lr: caml
                    .lr
                    .into_iter()
                    .map(|(g1, g2)| (g1.into(), g2.into()))
                    .collect(),
                delta: caml.delta.into(),
                z1: caml.z1.into(),
                z2: caml.z2.into(),
                sg: caml.sg.into(),
            }
        }
    }
}