use crate::{
interpreters::mips::{
column::{
ColumnAlias as MIPSColumn, MIPS_BYTE_COUNTER_OFF, MIPS_CHUNK_BYTES_LEN,
MIPS_END_OF_PREIMAGE_OFF, MIPS_HASH_COUNTER_OFF, MIPS_HAS_N_BYTES_OFF,
MIPS_LENGTH_BYTES_OFF, MIPS_NUM_BYTES_READ_OFF, MIPS_PREIMAGE_BYTES_OFF,
MIPS_PREIMAGE_CHUNK_OFF, MIPS_PREIMAGE_KEY, N_MIPS_REL_COLS,
},
interpreter::{interpret_instruction, InterpreterEnv},
Instruction,
},
lookups::{Lookup, LookupTableIDs},
E,
};
use ark_ff::{Field, One};
use kimchi::circuits::{
expr::{ConstantTerm::Literal, Expr, ExprInner, Operations, Variable},
gate::CurrOrNext,
};
use kimchi_msm::columns::ColumnIndexer as _;
use std::array;
use strum::IntoEnumIterator;
use super::column::N_MIPS_SEL_COLS;
/// Constraint-side interpreter environment for a single MIPS instruction.
///
/// Unlike a witness environment, this one never computes concrete values: it
/// only accumulates symbolic constraints and lookups while an instruction is
/// interpreted. It is reset (see `InterpreterEnv::reset`) between
/// instructions.
pub struct Env<Fp> {
    // Next free scratch-state column index (see `alloc_scratch`).
    scratch_state_idx: usize,
    // Next free scratch column reserved for inverses (see `alloc_scratch_inverse`).
    scratch_state_idx_inverse: usize,
    // Constraints gathered for the instruction currently being interpreted.
    constraints: Vec<E<Fp>>,
    // Lookups gathered for the instruction currently being interpreted.
    lookups: Vec<Lookup<E<Fp>>>,
    // Selector expression of the active instruction; `None` until
    // `activate_selector` is called.
    selector: Option<E<Fp>>,
}
impl<Fp: Field> Default for Env<Fp> {
fn default() -> Self {
Self {
scratch_state_idx: 0,
scratch_state_idx_inverse: 0,
constraints: Vec::new(),
lookups: Vec::new(),
selector: None,
}
}
}
impl<Fp: Field> InterpreterEnv for Env<Fp> {
    /// Positions are addressed through the MIPS column aliases.
    type Position = MIPSColumn;

    /// Allocate the next free scratch-state column and advance the cursor.
    fn alloc_scratch(&mut self) -> Self::Position {
        let scratch_idx = self.scratch_state_idx;
        self.scratch_state_idx += 1;
        MIPSColumn::ScratchState(scratch_idx)
    }

    /// Allocate the next scratch column reserved for inverse values
    /// (used by `is_zero` below).
    fn alloc_scratch_inverse(&mut self) -> Self::Position {
        let scratch_idx = self.scratch_state_idx_inverse;
        self.scratch_state_idx_inverse += 1;
        MIPSColumn::ScratchStateInverse(scratch_idx)
    }

    /// Variables are symbolic expressions over the circuit columns.
    type Variable = E<Fp>;

    /// Build the expression referring to `column` on the current row.
    fn variable(&self, column: Self::Position) -> Self::Variable {
        Expr::Atom(ExprInner::Cell(Variable {
            col: column.to_column(),
            row: CurrOrNext::Curr,
        }))
    }

    /// Record the selector column of the instruction being interpreted.
    /// At most one selector may be active; `reset` must be called before
    /// starting a new instruction.
    fn activate_selector(&mut self, selector: Instruction) {
        assert!(self.selector.is_none(), "A selector has been already activated. You might need to reset the environment if you want to start a new instruction.");
        // Selector columns are laid out after the relation columns, hence the
        // offset by N_MIPS_REL_COLS.
        let n = usize::from(selector) - N_MIPS_REL_COLS;
        self.selector = Some(self.variable(MIPSColumn::Selector(n)))
    }

    /// Accumulate a constraint asserting that the given expression is zero.
    fn add_constraint(&mut self, assert_equals_zero: Self::Variable) {
        self.constraints.push(assert_equals_zero)
    }

    /// No-op: runtime value checks only make sense on the witness side;
    /// the constraint environment expresses them via `add_constraint`.
    fn check_is_zero(_assert_equals_zero: &Self::Variable) {
    }

    /// No-op (witness-side check only; see `check_is_zero`).
    fn check_equal(_x: &Self::Variable, _y: &Self::Variable) {
    }

    /// No-op (witness-side check only; see `check_is_zero`).
    fn check_boolean(_x: &Self::Variable) {
    }

    /// Accumulate a lookup for the instruction currently being interpreted.
    fn add_lookup(&mut self, lookup: Lookup<Self::Variable>) {
        self.lookups.push(lookup);
    }

    /// Expression for the instruction-counter column.
    fn instruction_counter(&self) -> Self::Variable {
        self.variable(MIPSColumn::InstructionCounter)
    }

    /// No-op on the constraint side: the counter value is only maintained by
    /// the witness environment.
    fn increase_instruction_counter(&mut self) {
    }

    // The `fetch_*`/`push_*` families below are witness-only operations: in
    // the constraint environment they merely expose (or ignore) the target
    // column, while the concrete values are produced by the witness
    // environment.

    unsafe fn fetch_register(
        &mut self,
        _idx: &Self::Variable,
        output: Self::Position,
    ) -> Self::Variable {
        self.variable(output)
    }

    unsafe fn push_register_if(
        &mut self,
        _idx: &Self::Variable,
        _value: Self::Variable,
        _if_is_true: &Self::Variable,
    ) {
    }

    unsafe fn fetch_register_access(
        &mut self,
        _idx: &Self::Variable,
        output: Self::Position,
    ) -> Self::Variable {
        self.variable(output)
    }

    unsafe fn push_register_access_if(
        &mut self,
        _idx: &Self::Variable,
        _value: Self::Variable,
        _if_is_true: &Self::Variable,
    ) {
    }

    unsafe fn fetch_memory(
        &mut self,
        _addr: &Self::Variable,
        output: Self::Position,
    ) -> Self::Variable {
        self.variable(output)
    }

    unsafe fn push_memory(&mut self, _addr: &Self::Variable, _value: Self::Variable) {
    }

    unsafe fn fetch_memory_access(
        &mut self,
        _addr: &Self::Variable,
        output: Self::Position,
    ) -> Self::Variable {
        self.variable(output)
    }

    unsafe fn push_memory_access(&mut self, _addr: &Self::Variable, _value: Self::Variable) {
    }

    /// Lift a `u32` into a constant (literal) expression.
    fn constant(x: u32) -> Self::Variable {
        Self::Variable::constant(Operations::from(Literal(Fp::from(x))))
    }

    // The `unsafe` helpers below expose an unconstrained hint column: the
    // constraint side just returns the column expression, and the caller is
    // responsible for adding the constraints that tie the hint to its inputs
    // (hence `unsafe` in the trait contract).

    unsafe fn bitmask(
        &mut self,
        _x: &Self::Variable,
        _highest_bit: u32,
        _lowest_bit: u32,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn shift_left(
        &mut self,
        _x: &Self::Variable,
        _by: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn shift_right(
        &mut self,
        _x: &Self::Variable,
        _by: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn shift_right_arithmetic(
        &mut self,
        _x: &Self::Variable,
        _by: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn test_zero(
        &mut self,
        _x: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    /// Constrained is-zero test using the standard inverse trick:
    /// with `res` the claimed result and `x_inv_or_zero` a hint column,
    ///   x * x_inv_or_zero + res - 1 = 0
    ///   x * res                     = 0
    /// together force `res = 1` iff `x = 0`, and `res = 0` otherwise.
    fn is_zero(&mut self, x: &Self::Variable) -> Self::Variable {
        let res = {
            let pos = self.alloc_scratch();
            unsafe { self.test_zero(x, pos) }
        };
        // Hint column: the witness side fills it with x^{-1} (or 0 if x = 0).
        let x_inv_or_zero = {
            let pos = self.alloc_scratch_inverse();
            self.variable(pos)
        };
        // x * x_inv_or_zero + res - 1 = 0
        self.add_constraint(x.clone() * x_inv_or_zero.clone() + res.clone() - Self::constant(1));
        // x * res = 0
        self.add_constraint(x.clone() * res.clone());
        res
    }

    /// Constrained equality test, implemented as `is_zero(x - y)`.
    fn equal(&mut self, x: &Self::Variable, y: &Self::Variable) -> Self::Variable {
        self.is_zero(&(x.clone() - y.clone()))
    }

    unsafe fn test_less_than(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn test_less_than_signed(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn and_witness(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn nor_witness(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn or_witness(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn xor_witness(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn add_witness(
        &mut self,
        _y: &Self::Variable,
        _x: &Self::Variable,
        out_position: Self::Position,
        overflow_position: Self::Position,
    ) -> (Self::Variable, Self::Variable) {
        // (sum, overflow-bit) hint pair.
        (
            self.variable(out_position),
            self.variable(overflow_position),
        )
    }

    unsafe fn sub_witness(
        &mut self,
        _y: &Self::Variable,
        _x: &Self::Variable,
        out_position: Self::Position,
        underflow_position: Self::Position,
    ) -> (Self::Variable, Self::Variable) {
        // (difference, underflow-bit) hint pair.
        (
            self.variable(out_position),
            self.variable(underflow_position),
        )
    }

    unsafe fn mul_signed_witness(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn mul_hi_lo_signed(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position_hi: Self::Position,
        position_lo: Self::Position,
    ) -> (Self::Variable, Self::Variable) {
        // (high word, low word) hint pair of the product.
        (self.variable(position_hi), self.variable(position_lo))
    }

    unsafe fn mul_hi_lo(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position_hi: Self::Position,
        position_lo: Self::Position,
    ) -> (Self::Variable, Self::Variable) {
        (self.variable(position_hi), self.variable(position_lo))
    }

    unsafe fn divmod_signed(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position_quotient: Self::Position,
        position_remainder: Self::Position,
    ) -> (Self::Variable, Self::Variable) {
        // (quotient, remainder) hint pair.
        (
            self.variable(position_quotient),
            self.variable(position_remainder),
        )
    }

    unsafe fn divmod(
        &mut self,
        _x: &Self::Variable,
        _y: &Self::Variable,
        position_quotient: Self::Position,
        position_remainder: Self::Position,
    ) -> (Self::Variable, Self::Variable) {
        (
            self.variable(position_quotient),
            self.variable(position_remainder),
        )
    }

    unsafe fn count_leading_zeros(
        &mut self,
        _x: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    unsafe fn count_leading_ones(
        &mut self,
        _x: &Self::Variable,
        position: Self::Position,
    ) -> Self::Variable {
        self.variable(position)
    }

    /// Copy `x` into `position`, constraining the destination column to be
    /// equal to `x` (x - res = 0).
    fn copy(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable {
        let res = self.variable(position);
        self.constraints.push(x.clone() - res.clone());
        res
    }

    /// No-op on the constraint side; halting is tracked by the witness
    /// environment.
    fn set_halted(&mut self, _flag: Self::Variable) {
    }

    /// No-op on the constraint side.
    fn report_exit(&mut self, _exit_code: &Self::Variable) {}

    /// Constrain the `read_preimage` syscall: up to `MIPS_CHUNK_BYTES_LEN`
    /// (4) bytes of the preimage are consumed per call. Returns the
    /// expression (column `pos`) for the number of bytes effectively read.
    ///
    /// NOTE(review): the precise meaning of the SyscallLookup entries
    /// (communication with the hash/oracle side) is defined by the witness
    /// environment and lookup tables; comments below describe only what the
    /// constraints themselves enforce.
    fn request_preimage_write(
        &mut self,
        _addr: &Self::Variable,
        len: &Self::Variable,
        pos: Self::Position,
    ) -> Self::Variable {
        // Fixed scratch columns holding the preimage-reading state.
        let hash_counter = self.variable(Self::Position::ScratchState(MIPS_HASH_COUNTER_OFF));
        let byte_counter = self.variable(Self::Position::ScratchState(MIPS_BYTE_COUNTER_OFF));
        let end_of_preimage = self.variable(Self::Position::ScratchState(MIPS_END_OF_PREIMAGE_OFF));
        let num_preimage_bytes_read =
            self.variable(Self::Position::ScratchState(MIPS_NUM_BYTES_READ_OFF));
        let this_chunk = self.variable(Self::Position::ScratchState(MIPS_PREIMAGE_CHUNK_OFF));
        let preimage_key = self.variable(Self::Position::ScratchState(MIPS_PREIMAGE_KEY));
        // The (up to 4) preimage bytes read in this call.
        let bytes: [_; MIPS_CHUNK_BYTES_LEN] = array::from_fn(|i| {
            self.variable(Self::Position::ScratchState(MIPS_PREIMAGE_BYTES_OFF + i))
        });
        // Bytes of the preimage length prefix, if consumed in this call.
        let length_bytes: [_; MIPS_CHUNK_BYTES_LEN] = array::from_fn(|i| {
            self.variable(Self::Position::ScratchState(MIPS_LENGTH_BYTES_OFF + i))
        });
        // has_n_bytes[i] flags that at least i+1 preimage bytes were read.
        let has_n_bytes: [_; MIPS_CHUNK_BYTES_LEN] = array::from_fn(|i| {
            self.variable(Self::Position::ScratchState(MIPS_HAS_N_BYTES_OFF + i))
        });
        // Output column: total bytes written for this syscall.
        let actual_read_bytes = self.variable(pos);
        {
            // Booleanity of the flag columns.
            for var in has_n_bytes.iter() {
                self.assert_boolean(var.clone());
            }
            self.assert_boolean(end_of_preimage.clone());
        }
        {
            // Lagrange-style (non-normalized) selectors on
            // num_preimage_bytes_read over {0,1,2,3,4}:
            // preimage_k vanishes unless exactly k preimage bytes were read.
            let preimage_1 = (num_preimage_bytes_read.clone())
                * (num_preimage_bytes_read.clone() - Expr::from(2))
                * (num_preimage_bytes_read.clone() - Expr::from(3))
                * (num_preimage_bytes_read.clone() - Expr::from(4));
            let preimage_2 = (num_preimage_bytes_read.clone())
                * (num_preimage_bytes_read.clone() - Expr::from(1))
                * (num_preimage_bytes_read.clone() - Expr::from(3))
                * (num_preimage_bytes_read.clone() - Expr::from(4));
            let preimage_3 = (num_preimage_bytes_read.clone())
                * (num_preimage_bytes_read.clone() - Expr::from(1))
                * (num_preimage_bytes_read.clone() - Expr::from(2))
                * (num_preimage_bytes_read.clone() - Expr::from(4));
            let preimage_4 = (num_preimage_bytes_read.clone())
                * (num_preimage_bytes_read.clone() - Expr::from(1))
                * (num_preimage_bytes_read.clone() - Expr::from(2))
                * (num_preimage_bytes_read.clone() - Expr::from(3));
            {
                // When k bytes were read, the chunk must equal the
                // big-endian combination of the first k byte columns.
                // k = 1: chunk = bytes[0]
                self.add_constraint(preimage_1 * (this_chunk.clone() - bytes[0].clone()));
                // k = 2: chunk = bytes[0] * 2^8 + bytes[1]
                self.add_constraint(
                    preimage_2
                        * (this_chunk.clone()
                            - (bytes[0].clone() * Expr::from(2u64.pow(8)) + bytes[1].clone())),
                );
                // k = 3: chunk = bytes[0] * 2^16 + bytes[1] * 2^8 + bytes[2]
                self.add_constraint(
                    preimage_3
                        * (this_chunk.clone()
                            - (bytes[0].clone() * Expr::from(2u64.pow(16))
                                + bytes[1].clone() * Expr::from(2u64.pow(8))
                                + bytes[2].clone())),
                );
                // k = 4: chunk = bytes[0] * 2^24 + ... + bytes[3]
                self.add_constraint(
                    preimage_4
                        * (this_chunk.clone()
                            - (bytes[0].clone() * Expr::from(2u64.pow(24))
                                + bytes[1].clone() * Expr::from(2u64.pow(16))
                                + bytes[2].clone() * Expr::from(2u64.pow(8))
                                + bytes[3].clone())),
                );
            }
            {
                // has_n_bytes[i] may only be set when at least i+1 bytes were
                // read: the product vanishes for num_preimage_bytes_read in
                // {i+1, ..., 4}, so a non-zero flag forces the counter into
                // that range.
                self.add_constraint(
                    has_n_bytes[0].clone()
                        * (num_preimage_bytes_read.clone() - Expr::from(1))
                        * (num_preimage_bytes_read.clone() - Expr::from(2))
                        * (num_preimage_bytes_read.clone() - Expr::from(3))
                        * (num_preimage_bytes_read.clone() - Expr::from(4)),
                );
                self.add_constraint(
                    has_n_bytes[1].clone()
                        * (num_preimage_bytes_read.clone() - Expr::from(2))
                        * (num_preimage_bytes_read.clone() - Expr::from(3))
                        * (num_preimage_bytes_read.clone() - Expr::from(4)),
                );
                self.add_constraint(
                    has_n_bytes[2].clone()
                        * (num_preimage_bytes_read.clone() - Expr::from(3))
                        * (num_preimage_bytes_read.clone() - Expr::from(4)),
                );
                self.add_constraint(
                    has_n_bytes[3].clone() * (num_preimage_bytes_read.clone() - Expr::from(4)),
                );
            }
        }
        // Range-check every preimage byte column to 8 bits.
        for byte in bytes.iter() {
            self.add_lookup(Lookup::read_one(
                LookupTableIDs::ByteLookup,
                vec![byte.clone()],
            ));
        }
        // Range-check every length-prefix byte column to 8 bits.
        for b in length_bytes.iter() {
            self.add_lookup(Lookup::read_one(
                LookupTableIDs::ByteLookup,
                vec![b.clone()],
            ));
        }
        // Small-range checks (via the trait-provided lookup_2bits helper) on
        // the byte counters and their pairwise differences; the differences
        // being in range enforces
        //   num_preimage_bytes_read <= actual_read_bytes <= len.
        self.lookup_2bits(len);
        self.lookup_2bits(&actual_read_bytes);
        self.lookup_2bits(&num_preimage_bytes_read);
        self.lookup_2bits(&(len.clone() - actual_read_bytes.clone()));
        self.lookup_2bits(&(actual_read_bytes.clone() - num_preimage_bytes_read.clone()));
        // For each byte actually read, record (hash counter, absolute byte
        // position, byte value) in the syscall table.
        for i in 0..MIPS_CHUNK_BYTES_LEN {
            self.add_lookup(Lookup::write_if(
                has_n_bytes[i].clone(),
                LookupTableIDs::SyscallLookup,
                vec![
                    hash_counter.clone(),
                    byte_counter.clone() + Expr::from(i as u64),
                    bytes[i].clone(),
                ],
            ));
        }
        // When the preimage has been fully consumed, tie this hash counter to
        // the preimage key via the syscall table.
        self.add_lookup(Lookup::read_if(
            end_of_preimage,
            LookupTableIDs::SyscallLookup,
            vec![hash_counter.clone(), preimage_key],
        ));
        actual_read_bytes
    }

    /// No-op: hint writes produce no constraints.
    fn request_hint_write(&mut self, _addr: &Self::Variable, _len: &Self::Variable) {
    }

    /// Clear all per-instruction state so the environment can interpret the
    /// next instruction from scratch.
    fn reset(&mut self) {
        self.scratch_state_idx = 0;
        self.scratch_state_idx_inverse = 0;
        self.constraints.clear();
        self.lookups.clear();
        self.selector = None;
    }
}
impl<Fp: Field> Env<Fp> {
    /// Constraints forcing the selector columns to encode exactly one active
    /// instruction: every selector must be boolean, and the selectors must
    /// sum to one.
    pub fn get_selector_constraints(&self) -> Vec<E<Fp>> {
        let one = <Self as InterpreterEnv>::Variable::one();
        let mut enforce_bool: Vec<E<Fp>> = (0..N_MIPS_SEL_COLS)
            .map(|i| {
                let var = self.variable(MIPSColumn::Selector(i));
                // (var - 1) * var == 0  <=>  var is boolean.
                (var.clone() - one.clone()) * var
            })
            .collect();
        // 1 - sum(selectors) == 0  <=>  exactly one selector is active
        // (given that each is boolean).
        let enforce_one_activation = (0..N_MIPS_SEL_COLS).fold(E::<Fp>::one(), |res, i| {
            let var = self.variable(MIPSColumn::Selector(i));
            res - var
        });
        enforce_bool.push(enforce_one_activation);
        enforce_bool
    }

    /// Returns the selector of the instruction currently being interpreted.
    ///
    /// # Panics
    /// Panics if no selector has been activated yet
    /// (see `InterpreterEnv::activate_selector`).
    pub fn get_selector(&self) -> E<Fp> {
        // `.expect` is the idiomatic form of `unwrap_or_else(|| panic!(..))`
        // for a fixed message.
        self.selector.clone().expect("Selector is not set")
    }

    /// Returns a copy of the constraints accumulated so far.
    pub fn get_constraints(&self) -> Vec<E<Fp>> {
        self.constraints.clone()
    }

    /// Returns a copy of the lookups accumulated so far.
    pub fn get_lookups(&self) -> Vec<Lookup<E<Fp>>> {
        self.lookups.clone()
    }
}
/// Collect the constraints of every MIPS instruction, each multiplied by its
/// own selector, followed by the selector well-formedness constraints.
pub fn get_all_constraints<Fp: Field>() -> Vec<E<Fp>> {
    let mut mips_con_env = Env::<Fp>::default();
    let mut constraints: Vec<E<Fp>> = Vec::new();
    // `Instruction` variants carry sub-instructions, hence the nested
    // iteration over each instruction family.
    for instr in Instruction::iter().flat_map(|instr_typ| instr_typ.into_iter()) {
        interpret_instruction(&mut mips_con_env, instr);
        let selector = mips_con_env.get_selector();
        // Gate each constraint by the selector of its instruction.
        constraints.extend(
            mips_con_env
                .get_constraints()
                .into_iter()
                .map(|c| selector.clone() * c),
        );
        // Start from a clean environment for the next instruction.
        mips_con_env.reset();
    }
    // Finally, enforce that exactly one selector is active per row.
    constraints.extend(mips_con_env.get_selector_constraints());
    constraints
}