use crate::{
auto_clone, auto_clone_array,
circuits::{
argument::{Argument, ArgumentEnv, ArgumentType},
berkeley_columns::BerkeleyChallengeTerm,
expr::{
constraints::{boolean, ExprOps},
Cache,
},
gate::GateType,
polynomials::keccak::{constants::*, OFF},
},
grid,
};
use ark_ff::PrimeField;
use std::marker::PhantomData;
/// Recomposes a 64-bit word from its four 16-bit quarters, read through an
/// accessor identifier: quarter `i` is weighted by `2^(16*i)` via `T::two_pow`.
/// One arm for single-index accessors and one for double-index accessors;
/// a type `T` providing `two_pow` must be in scope at the expansion site.
#[macro_export]
macro_rules! from_quarters {
    ($get:ident, $col:ident) => {
        $get($col, 0)
            + T::two_pow(16) * $get($col, 1)
            + T::two_pow(32) * $get($col, 2)
            + T::two_pow(48) * $get($col, 3)
    };
    ($get:ident, $row:ident, $col:ident) => {
        $get($row, $col, 0)
            + T::two_pow(16) * $get($row, $col, 1)
            + T::two_pow(32) * $get($row, $col, 2)
            + T::two_pow(48) * $get($row, $col, 3)
    };
}
/// Recomposes a value from its four sparse "shift" components, weighting
/// component `k` by `2^k` via `T::two_pow`. Three arms:
/// - flat accessor: component `k` lives at index `100 * k + i`;
/// - 2- and 3-index accessors: component `k` is the leading index.
/// A type `T` providing `two_pow` must be in scope at the expansion site.
#[macro_export]
macro_rules! from_shifts {
    ($get:ident, $i:ident) => {
        $get($i)
            + T::two_pow(1) * $get(100 + $i)
            + T::two_pow(2) * $get(200 + $i)
            + T::two_pow(3) * $get(300 + $i)
    };
    ($get:ident, $col:ident, $q:ident) => {
        $get(0, $col, $q)
            + T::two_pow(1) * $get(1, $col, $q)
            + T::two_pow(2) * $get(2, $col, $q)
            + T::two_pow(3) * $get(3, $col, $q)
    };
    ($get:ident, $row:ident, $col:ident, $q:ident) => {
        $get(0, $row, $col, $q)
            + T::two_pow(1) * $get(1, $row, $col, $q)
            + T::two_pow(2) * $get(2, $row, $col, $q)
            + T::two_pow(3) * $get(3, $row, $col, $q)
    };
}
/// Marker type binding the Keccak permutation-round gate constraints
/// (see the `Argument` impl below) to a proof field `F`.
#[derive(Default)]
pub struct KeccakRound<F>(PhantomData<F>);
impl<F> Argument<F> for KeccakRound<F>
where
F: PrimeField,
{
const ARGUMENT_TYPE: ArgumentType = ArgumentType::Gate(GateType::KeccakRound);
const CONSTRAINTS: u32 = 389;
fn constraint_checks<T: ExprOps<F, BerkeleyChallengeTerm>>(
env: &ArgumentEnv<F, T>,
_cache: &mut Cache,
) -> Vec<T> {
let mut constraints = vec![];
let rc = [env.coeff(0), env.coeff(1), env.coeff(2), env.coeff(3)];
let state_a = grid!(
100,
env.witness_curr_chunk(THETA_STATE_A_OFF, THETA_SHIFTS_C_OFF)
);
let shifts_c = grid!(
80,
env.witness_curr_chunk(THETA_SHIFTS_C_OFF, THETA_DENSE_C_OFF)
);
let dense_c = grid!(
20,
env.witness_curr_chunk(THETA_DENSE_C_OFF, THETA_QUOTIENT_C_OFF)
);
let quotient_c = grid!(
5,
env.witness_curr_chunk(THETA_QUOTIENT_C_OFF, THETA_REMAINDER_C_OFF)
);
let remainder_c = grid!(
20,
env.witness_curr_chunk(THETA_REMAINDER_C_OFF, THETA_DENSE_ROT_C_OFF)
);
let dense_rot_c = grid!(
20,
env.witness_curr_chunk(THETA_DENSE_ROT_C_OFF, THETA_EXPAND_ROT_C_OFF)
);
let expand_rot_c = grid!(
20,
env.witness_curr_chunk(THETA_EXPAND_ROT_C_OFF, PIRHO_DENSE_E_OFF)
);
let shifts_e = grid!(
400,
env.witness_curr_chunk(PIRHO_SHIFTS_E_OFF, PIRHO_DENSE_E_OFF)
);
let dense_e = grid!(
100,
env.witness_curr_chunk(PIRHO_DENSE_E_OFF, PIRHO_QUOTIENT_E_OFF)
);
let quotient_e = grid!(
100,
env.witness_curr_chunk(PIRHO_QUOTIENT_E_OFF, PIRHO_REMAINDER_E_OFF)
);
let remainder_e = grid!(
100,
env.witness_curr_chunk(PIRHO_REMAINDER_E_OFF, PIRHO_DENSE_ROT_E_OFF)
);
let dense_rot_e = grid!(
100,
env.witness_curr_chunk(PIRHO_DENSE_ROT_E_OFF, PIRHO_EXPAND_ROT_E_OFF)
);
let expand_rot_e = grid!(
100,
env.witness_curr_chunk(PIRHO_EXPAND_ROT_E_OFF, CHI_SHIFTS_B_OFF)
);
let shifts_b = grid!(
400,
env.witness_curr_chunk(CHI_SHIFTS_B_OFF, CHI_SHIFTS_SUM_OFF)
);
let shifts_sum = grid!(
400,
env.witness_curr_chunk(CHI_SHIFTS_SUM_OFF, IOTA_STATE_G_OFF)
);
let state_g = grid!(100, env.witness_next_chunk(0, IOTA_STATE_G_LEN));
let mut state_c: Vec<Vec<T>> = vec![vec![T::zero(); QUARTERS]; DIM];
let mut state_d: Vec<Vec<T>> = vec![vec![T::zero(); QUARTERS]; DIM];
let mut state_e: Vec<Vec<Vec<T>>> = vec![vec![vec![T::zero(); QUARTERS]; DIM]; DIM];
let mut state_b: Vec<Vec<Vec<T>>> = vec![vec![vec![T::zero(); QUARTERS]; DIM]; DIM];
let mut state_f: Vec<Vec<Vec<T>>> = vec![vec![vec![T::zero(); QUARTERS]; DIM]; DIM];
for x in 0..DIM {
let word_c = from_quarters!(dense_c, x);
let rem_c = from_quarters!(remainder_c, x);
let rot_c = from_quarters!(dense_rot_c, x);
constraints
.push(word_c * T::two_pow(1) - (quotient_c(x) * T::two_pow(64) + rem_c.clone()));
constraints.push(rot_c - (quotient_c(x) + rem_c));
constraints.push(boolean("ient_c(x)));
for q in 0..QUARTERS {
state_c[x][q] = state_a(0, x, q)
+ state_a(1, x, q)
+ state_a(2, x, q)
+ state_a(3, x, q)
+ state_a(4, x, q);
constraints.push(state_c[x][q].clone() - from_shifts!(shifts_c, x, q));
state_d[x][q] =
shifts_c(0, (x + DIM - 1) % DIM, q) + expand_rot_c((x + 1) % DIM, q);
for (y, column_e) in state_e.iter_mut().enumerate() {
column_e[x][q] = state_a(y, x, q) + state_d[x][q].clone();
}
}
} for (y, col) in OFF.iter().enumerate() {
for (x, off) in col.iter().enumerate() {
let word_e = from_quarters!(dense_e, y, x);
let quo_e = from_quarters!(quotient_e, y, x);
let rem_e = from_quarters!(remainder_e, y, x);
let rot_e = from_quarters!(dense_rot_e, y, x);
constraints.push(
word_e * T::two_pow(*off) - (quo_e.clone() * T::two_pow(64) + rem_e.clone()),
);
constraints.push(rot_e - (quo_e.clone() + rem_e));
for q in 0..QUARTERS {
constraints.push(state_e[y][x][q].clone() - from_shifts!(shifts_e, y, x, q));
state_b[(2 * x + 3 * y) % DIM][y][q] = expand_rot_e(y, x, q);
}
}
} for q in 0..QUARTERS {
for x in 0..DIM {
for y in 0..DIM {
let not = T::literal(F::from(0x1111111111111111u64))
- shifts_b(0, y, (x + 1) % DIM, q);
let sum = not + shifts_b(0, y, (x + 2) % DIM, q);
let and = shifts_sum(1, y, x, q);
constraints.push(state_b[y][x][q].clone() - from_shifts!(shifts_b, y, x, q));
constraints.push(sum - from_shifts!(shifts_sum, y, x, q));
state_f[y][x][q] = shifts_b(0, y, x, q) + and;
}
}
} for (q, c) in rc.iter().enumerate() {
constraints.push(state_g(0, 0, q) - (state_f[0][0][q].clone() + c.clone()));
} constraints
}
}
/// Marker type binding the Keccak sponge gate constraints
/// (see the `Argument` impl below) to a proof field `F`.
#[derive(Default)]
pub struct KeccakSponge<F>(PhantomData<F>);
// Keccak sponge gate: one row handles an absorb, squeeze, or root step,
// selected by coefficient flags. Constraint order is part of the gate's
// observable layout, so the emission order below must not change.
impl<F> Argument<F> for KeccakSponge<F>
where
    F: PrimeField,
{
    const ARGUMENT_TYPE: ArgumentType = ArgumentType::Gate(GateType::KeccakSponge);
    // Sum of the loop counts emitted below (zeros + 3*STATE_LEN + 64 + RATE_IN_BYTES).
    const CONSTRAINTS: u32 = 532;
    // Builds the sponge constraints from the current row (old/new state,
    // byte decomposition, sparse shifts) and the next row (xor'ed state).
    fn constraint_checks<T: ExprOps<F, BerkeleyChallengeTerm>>(
        env: &ArgumentEnv<F, T>,
        _cache: &mut Cache,
    ) -> Vec<T> {
        let mut constraints = vec![];
        // Witness chunks of the current row.
        let old_state = env.witness_curr_chunk(SPONGE_OLD_STATE_OFF, SPONGE_NEW_STATE_OFF);
        let new_state = env.witness_curr_chunk(SPONGE_NEW_STATE_OFF, SPONGE_BYTES_OFF);
        // `zeros` shares its end bound with `new_state` — presumably the tail
        // of the new-state region that must be zero when absorbing; confirm
        // SPONGE_ZEROS_OFF against the constants module.
        let zeros = env.witness_curr_chunk(SPONGE_ZEROS_OFF, SPONGE_BYTES_OFF);
        // The xor'ed state is stored on the next row.
        let xor_state = env.witness_next_chunk(0, SPONGE_XOR_STATE_LEN);
        let bytes = env.witness_curr_chunk(SPONGE_BYTES_OFF, SPONGE_SHIFTS_OFF);
        let shifts =
            env.witness_curr_chunk(SPONGE_SHIFTS_OFF, SPONGE_SHIFTS_OFF + SPONGE_SHIFTS_LEN);
        // Wrap the chunks in cloning accessor closures of the same names.
        auto_clone_array!(old_state);
        auto_clone_array!(new_state);
        auto_clone_array!(xor_state);
        auto_clone_array!(bytes);
        auto_clone_array!(shifts);
        // Step-selector flags and padding data come from the coefficients.
        let absorb = env.coeff(0);
        let squeeze = env.coeff(1);
        let root = env.coeff(2);
        let flags = env.coeff_chunk(4, 140);
        let pad = env.coeff_chunk(200, 336);
        auto_clone!(root);
        auto_clone!(absorb);
        auto_clone!(squeeze);
        auto_clone_array!(flags);
        auto_clone_array!(pad);
        // Absorb rows: every entry of the `zeros` region must be zero.
        for z in zeros {
            constraints.push(absorb() * z);
        }
        for i in 0..STATE_LEN {
            // Root rows: the old state starts out at zero.
            constraints.push(root() * old_state(i));
            // Absorb rows: xor state = old state + new state
            // (NOTE(review): addition standing in for XOR relies on the
            // sparse representation keeping digits disjoint — confirm).
            constraints.push(absorb() * (xor_state(i) - (old_state(i) + new_state(i))));
            // Absorb rows: new state recomposes from its sparse shifts.
            constraints.push(absorb() * (new_state(i) - from_shifts!(shifts, i)));
        }
        // Squeeze rows: the first 64 quarters of the old state recompose from
        // the shifts (64 presumably equals the digest size in quarters —
        // TODO confirm against the constants module).
        for i in 0..64 {
            constraints.push(squeeze() * (old_state(i) - from_shifts!(shifts, i)));
        }
        // Padding: wherever a flag is set, the witness byte equals the
        // corresponding pad byte from the coefficients.
        for i in 0..RATE_IN_BYTES {
            constraints.push(flags(i) * (pad(i) - bytes(i)));
        }
        constraints
    }
}