//! Benchmarks for cross-term computations on sparse multivariate polynomials
//! (`mvpoly`), including a simulated elliptic-curve-addition constraint system.

1use ark_ff::{One, UniformRand, Zero};
2use kimchi::circuits::{
3    berkeley_columns::BerkeleyChallengeTerm,
4    expr::{ConstantExpr, Expr, ExprInner, Variable},
5    gate::CurrOrNext,
6};
7use mina_curves::pasta::Fp;
8use mvpoly::{monomials::Sparse, MVPoly};
9use std::time::Instant;
10
11fn bench_sparse_cross_terms_computation_scaled() {
12    let mut rng = o1_utils::tests::make_test_rng(None);
13    let p1: Sparse<Fp, 10, 7> = unsafe { Sparse::random(&mut rng, None) };
14    let eval_left: [Fp; 10] = std::array::from_fn(|_| Fp::rand(&mut rng));
15    let eval_right: [Fp; 10] = std::array::from_fn(|_| Fp::rand(&mut rng));
16    let u1 = Fp::rand(&mut rng);
17    let u2 = Fp::rand(&mut rng);
18    let a1 = Fp::rand(&mut rng);
19    let a2 = Fp::rand(&mut rng);
20    let start_timer = Instant::now();
21    p1.compute_cross_terms_scaled(&eval_left, &eval_right, u1, u2, a1, a2);
22    let elapsed = start_timer.elapsed();
23    println!("sparse cross terms computation scaled: {:?}", elapsed);
24}
25
/// Benchmarks the combined cross-terms computation over the three constraints
/// of an (affine) elliptic curve addition circuit, after lifting them to a
/// larger column/degree setting, printing result and elapsed time.
fn bench_sparse_cross_terms_computation_ec_addition() {
    // Simulate a real usecase of the sparse cross terms computation
    // The bench is related to a case we might encounter in the context of
    // Arrabbiata, i.e. a setup with 15 private columns, 15 public inputs, and
    // 15 columns for the "next row".
    // The following lines/design look similar to the ones we use in
    // o1vm/arrabbiata
    #[derive(Clone, Copy, PartialEq)]
    enum Column {
        X(usize),
    }

    impl From<Column> for usize {
        fn from(val: Column) -> usize {
            match val {
                Column::X(i) => i,
            }
        }
    }

    // Monotonic column allocator: `idx` is the next free column index.
    struct Constraint {
        idx: usize,
    }

    trait Interpreter {
        type Position: Clone + Copy;

        type Variable: Clone
            + std::ops::Add<Self::Variable, Output = Self::Variable>
            + std::ops::Sub<Self::Variable, Output = Self::Variable>
            + std::ops::Mul<Self::Variable, Output = Self::Variable>;

        // Reserve the next free position (column) for a variable.
        fn allocate(&mut self) -> Self::Position;

        // Simulate fetching/reading a value from outside
        // In the case of the witness, it will be getting a value from the
        // environment
        fn fetch(&self, pos: Self::Position) -> Self::Variable;
    }

    impl Interpreter for Constraint {
        type Position = Column;

        type Variable = Expr<ConstantExpr<Fp, BerkeleyChallengeTerm>, Column>;

        fn allocate(&mut self) -> Self::Position {
            let col = Column::X(self.idx);
            self.idx += 1;
            col
        }

        // In the constraint interpreter, "fetching" builds the symbolic cell
        // expression referring to the column on the current row.
        fn fetch(&self, col: Self::Position) -> Self::Variable {
            Expr::Atom(ExprInner::Cell(Variable {
                col,
                row: CurrOrNext::Curr,
            }))
        }
    }

    impl Constraint {
        fn new() -> Self {
            Self { idx: 0 }
        }
    }

    let mut interpreter = Constraint::new();
    // Constraints for elliptic curve addition, without handling the case of the
    // point at infinity or double
    // Allocate the 7 columns used by the constraints, in order:
    // λ, X1, X2, Y1, Y2, X3, Y3.
    let lambda = {
        let pos = interpreter.allocate();
        interpreter.fetch(pos)
    };
    let x1 = {
        let pos = interpreter.allocate();
        interpreter.fetch(pos)
    };
    let x2 = {
        let pos = interpreter.allocate();
        interpreter.fetch(pos)
    };

    let y1 = {
        let pos = interpreter.allocate();
        interpreter.fetch(pos)
    };
    let y2 = {
        let pos = interpreter.allocate();
        interpreter.fetch(pos)
    };

    let x3 = {
        let pos = interpreter.allocate();
        interpreter.fetch(pos)
    };
    let y3 = {
        let pos = interpreter.allocate();
        interpreter.fetch(pos)
    };
    // - Constraint 1: λ (X1 - X2) - Y1 + Y2 = 0
    let p1 = {
        let expr = lambda.clone() * (x1.clone() - x2.clone()) - (y1.clone() - y2.clone());
        Sparse::<Fp, 7, 2>::from_expr::<Column, BerkeleyChallengeTerm>(expr, None)
    };

    // - Constraint 2: X3 + X1 + X2 - λ^2 = 0
    let p2 = {
        let expr = x3.clone() + x1.clone() + x2.clone() - lambda.clone() * lambda.clone();
        Sparse::<Fp, 7, 2>::from_expr::<Column, BerkeleyChallengeTerm>(expr, None)
    };

    // - Constraint 3: Y3 - λ (X1 - X3) + Y1 = 0
    let p3 = {
        let expr = y3.clone() - lambda.clone() * (x1.clone() - x3.clone()) + y1.clone();
        Sparse::<Fp, 7, 2>::from_expr::<Column, BerkeleyChallengeTerm>(expr, None)
    };

    let circuits = [p1, p2, p3];
    // - simulate the evaluation of the circuits, and increase artificially the degree to 5.
    // - add some public values, i.e. random columns.
    // - Add 8 random columns, plus 15 public inputs + another 15 columns for
    // the "next row".
    // - Only the first 8 columns are filled with random values
    // for the eval right.
    // - For the eval left, we have random values for everything.
    // NOTE(review): the comment above says "8 columns" but the code below
    // randomizes only indices 0..7 (7 columns, matching the 7 allocated
    // columns) — confirm whether the comment or the bound is the intent.
    // First lift the degree from 2 to 5 (same 7 columns)…
    let circuits: Vec<Sparse<Fp, 7, 5>> = circuits
        .into_iter()
        .map(|c| {
            let res: Result<Sparse<Fp, 7, 5>, _> = c.into();
            res.unwrap()
        })
        .collect();
    // …then widen from 7 to 45 columns (15 private + 15 public + 15 next-row).
    let circuits: Vec<Sparse<Fp, 45, 5>> = circuits
        .into_iter()
        .map(|c| {
            let res: Result<Sparse<Fp, 45, 5>, _> = c.into();
            res.unwrap()
        })
        .collect();

    let mut rng = o1_utils::tests::make_test_rng(None);
    let eval_left: [Fp; 45] = std::array::from_fn(|_| Fp::rand(&mut rng));
    let eval_right: [Fp; 45] = std::array::from_fn(|i| {
        if i < 7 {
            Fp::rand(&mut rng)
        } else {
            Fp::zero()
        }
    });
    let u1 = Fp::rand(&mut rng);
    // The right u is always one as we suppose the constraints are not
    // "relaxed".
    let u2 = Fp::one();
    let combiner1 = Fp::rand(&mut rng);
    let combiner2 = Fp::rand(&mut rng);

    let start_timer = Instant::now();
    let res = mvpoly::compute_combined_cross_terms(
        circuits, eval_left, eval_right, u1, u2, combiner1, combiner2,
    );
    let elapsed = start_timer.elapsed();
    // Only printing to be sure that the compiler does not optimize the code and
    // remove the computation.
    // We know how compilers can be annoying sometimes.
    println!("res: {:?}", res);
    println!("Sparse cross terms computation ec addition: {:?}", elapsed);
}
192
193fn main() {
194    bench_sparse_cross_terms_computation_scaled();
195    bench_sparse_cross_terms_computation_ec_addition();
196}