o1vm/interpreters/mips/witness.rs

1use super::column::{N_MIPS_SEL_COLS, SCRATCH_SIZE, SCRATCH_SIZE_INVERSE};
2use crate::{
3    cannon::{
4        Hint, Meta, Page, Start, State, StepFrequency, VmConfiguration, PAGE_ADDRESS_MASK,
5        PAGE_ADDRESS_SIZE, PAGE_SIZE,
6    },
7    interpreters::{
8        keccak::environment::KeccakEnv,
9        mips::{
10            column::{
11                ColumnAlias as Column, MIPS_BYTE_COUNTER_OFF, MIPS_CHUNK_BYTES_LEN,
12                MIPS_END_OF_PREIMAGE_OFF, MIPS_HASH_COUNTER_OFF, MIPS_HAS_N_BYTES_OFF,
13                MIPS_LENGTH_BYTES_OFF, MIPS_NUM_BYTES_READ_OFF, MIPS_PREIMAGE_BYTES_OFF,
14                MIPS_PREIMAGE_CHUNK_OFF, MIPS_PREIMAGE_KEY,
15            },
16            interpreter::{
17                self, ITypeInstruction, Instruction, InterpreterEnv, JTypeInstruction,
18                RTypeInstruction,
19            },
20            registers::Registers,
21        },
22    },
23    lookups::{Lookup, LookupTableIDs},
24    preimage_oracle::PreImageOracleT,
25    ramlookup::LookupMode,
26    utils::memory_size,
27};
28use ark_ff::{Field, PrimeField};
29use core::panic;
30use kimchi::o1_utils::Two;
31use kimchi_msm::LogupTableID;
32use log::{debug, info};
33use std::{
34    array,
35    fs::File,
36    io::{BufWriter, Write},
37};
38
39// TODO: do we want to be more restrictive and refer to the number of accesses
40//       to the SAME register/memory address?
41
42/// Maximum number of register accesses per instruction (based on demo)
43pub const MAX_NB_REG_ACC: u64 = 7;
44/// Maximum number of memory accesses per instruction (based on demo)
45pub const MAX_NB_MEM_ACC: u64 = 12;
46/// Maximum number of memory or register accesses per instruction
47pub const MAX_ACC: u64 = MAX_NB_REG_ACC + MAX_NB_MEM_ACC;
48
49pub const NUM_GLOBAL_LOOKUP_TERMS: usize = 1;
50pub const NUM_DECODING_LOOKUP_TERMS: usize = 2;
51pub const NUM_INSTRUCTION_LOOKUP_TERMS: usize = 5;
52pub const NUM_LOOKUP_TERMS: usize =
53    NUM_GLOBAL_LOOKUP_TERMS + NUM_DECODING_LOOKUP_TERMS + NUM_INSTRUCTION_LOOKUP_TERMS;
54// TODO: Delete and use a vector instead
55
56#[derive(Clone, Default)]
57pub struct SyscallEnv {
58    pub last_hint: Option<Vec<u8>>,
59}
60
61impl SyscallEnv {
62    pub fn create(state: &State) -> Self {
63        SyscallEnv {
64            last_hint: state.last_hint.clone(),
65        }
66    }
67}
68
69pub struct LookupMultiplicities {
70    pub pad_lookup: Vec<u64>,
71    pub round_constants_lookup: Vec<u64>,
72    pub at_most_4_lookup: Vec<u64>,
73    pub byte_lookup: Vec<u64>,
74    pub range_check_16_lookup: Vec<u64>,
75    pub sparse_lookup: Vec<u64>,
76    pub reset_lookup: Vec<u64>,
77}
78
79impl LookupMultiplicities {
80    pub fn new() -> Self {
81        LookupMultiplicities {
82            pad_lookup: vec![0; LookupTableIDs::PadLookup.length()],
83            round_constants_lookup: vec![0; LookupTableIDs::RoundConstantsLookup.length()],
84            at_most_4_lookup: vec![0; LookupTableIDs::AtMost4Lookup.length()],
85            byte_lookup: vec![0; LookupTableIDs::ByteLookup.length()],
86            range_check_16_lookup: vec![0; LookupTableIDs::RangeCheck16Lookup.length()],
87            sparse_lookup: vec![0; LookupTableIDs::SparseLookup.length()],
88            reset_lookup: vec![0; LookupTableIDs::ResetLookup.length()],
89        }
90    }
91}
92
93impl Default for LookupMultiplicities {
94    fn default() -> Self {
95        Self::new()
96    }
97}
98
99/// This structure represents the environment the virtual machine uses to
100/// transition its state. It is used by the interpreter. The virtual
101/// machine has access to its internal state and to some external memory. In
102/// addition, it has access to the environment of the Keccak interpreter,
103/// which is used to verify the preimages requested during execution.
104pub struct Env<Fp, PreImageOracle: PreImageOracleT> {
105    pub instruction_counter: u64,
106    pub memory: Vec<(u32, Vec<u8>)>,
107    pub last_memory_accesses: [usize; 3],
108    pub memory_write_index: Vec<(u32, Vec<u64>)>,
109    pub last_memory_write_index_accesses: [usize; 3],
110    pub registers: Registers<u32>,
111    pub registers_write_index: Registers<u64>,
112    pub scratch_state_idx: usize,
113    pub scratch_state_idx_inverse: usize,
114    pub scratch_state: [Fp; SCRATCH_SIZE],
115    pub scratch_state_inverse: [Fp; SCRATCH_SIZE_INVERSE],
116    pub lookup_state_idx: usize,
117    pub lookup_state: Vec<Fp>,
118    // tracks the arity of every lookup
119    // [1,1,3] means that the lookup state is of size 5,
120    // containing two lookups of arity one and one of arity three.
121    pub lookup_arity: Vec<usize>,
122    pub halt: bool,
123    pub syscall_env: SyscallEnv,
124    pub selector: usize,
125    pub preimage_oracle: PreImageOracle,
126    pub preimage: Option<Vec<u8>>,
127    pub preimage_bytes_read: u64,
128    pub preimage_key: Option<[u8; 32]>,
129    pub keccak_env: Option<KeccakEnv<Fp>>,
130    pub hash_counter: u64,
131    pub lookup_multiplicities: LookupMultiplicities,
132}
133
134fn fresh_scratch_state<Fp: Field, const N: usize>() -> [Fp; N] {
135    array::from_fn(|_| Fp::zero())
136}
137
138impl<Fp: PrimeField, PreImageOracle: PreImageOracleT> InterpreterEnv for Env<Fp, PreImageOracle> {
139    type Position = Column;
140
141    fn alloc_scratch(&mut self) -> Self::Position {
142        let scratch_idx = self.scratch_state_idx;
143        self.scratch_state_idx += 1;
144        Column::ScratchState(scratch_idx)
145    }
146
147    fn alloc_scratch_inverse(&mut self) -> Self::Position {
148        let scratch_idx = self.scratch_state_idx_inverse;
149        self.scratch_state_idx_inverse += 1;
150        Column::ScratchStateInverse(scratch_idx)
151    }
152
153    type Variable = u64;
154
155    fn variable(&self, _column: Self::Position) -> Self::Variable {
156        todo!()
157    }
158
159    fn add_constraint(&mut self, _assert_equals_zero: Self::Variable) {
160        // No-op for witness
161        // Do not assert that _assert_equals_zero is zero here!
162        // Some variables may have placeholders that do not faithfully
163        // represent the underlying values.
164    }
165
166    fn activate_selector(&mut self, instruction: Instruction) {
167        self.selector = instruction.into();
168    }
169
170    fn check_is_zero(assert_equals_zero: &Self::Variable) {
171        assert_eq!(*assert_equals_zero, 0);
172    }
173
174    fn check_equal(x: &Self::Variable, y: &Self::Variable) {
175        assert_eq!(*x, *y);
176    }
177
178    fn check_boolean(x: &Self::Variable) {
179        if !(*x == 0 || *x == 1) {
180            panic!("The value {} is not a boolean", *x);
181        }
182    }
183
184    fn add_lookup(&mut self, lookup: Lookup<Self::Variable>) {
185        let mut arity_counter = 0;
186        let mut add_value = |x: Fp| {
187            self.lookup_state_idx += 1;
188            self.lookup_state.push(x);
189            arity_counter += 1;
190        };
191        let Lookup {
192            table_id,
193            magnitude: numerator,
194            mode,
195            value: values,
196        } = lookup;
197        let values: Vec<_> = values.into_iter().map(|x| Fp::from(x)).collect();
198
199        // Add lookup numerator
200        match mode {
201            LookupMode::Write => add_value(Fp::from(numerator)),
202            LookupMode::Read => add_value(-Fp::from(numerator)),
203        };
204        // Add lookup table ID
205        add_value(Fp::from(table_id.to_u32()));
206        // Add values
207        for value in values.iter() {
208            add_value(*value);
209        }
210        // Update multiplicities
211        if let Some(idx) = table_id.ix_by_value(values.as_slice()) {
212            match table_id {
213                LookupTableIDs::PadLookup => self.lookup_multiplicities.pad_lookup[idx] += 1,
214                LookupTableIDs::RoundConstantsLookup => {
215                    self.lookup_multiplicities.round_constants_lookup[idx] += 1
216                }
217                LookupTableIDs::AtMost4Lookup => {
218                    self.lookup_multiplicities.at_most_4_lookup[idx] += 1
219                }
220                LookupTableIDs::ByteLookup => self.lookup_multiplicities.byte_lookup[idx] += 1,
221                LookupTableIDs::RangeCheck16Lookup => {
222                    self.lookup_multiplicities.range_check_16_lookup[idx] += 1
223                }
224                LookupTableIDs::SparseLookup => self.lookup_multiplicities.sparse_lookup[idx] += 1,
225                LookupTableIDs::ResetLookup => self.lookup_multiplicities.reset_lookup[idx] += 1,
226                // RAM ones, no multiplicities
227                LookupTableIDs::MemoryLookup => (),
228                LookupTableIDs::RegisterLookup => (),
229                LookupTableIDs::SyscallLookup => (),
230                LookupTableIDs::KeccakStepLookup => (),
231            }
232        }
233        // Update arity
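        // E.g. (illustrative): a `Read` lookup into `ByteLookup` carrying a
        // single value v pushes [-numerator, table_id, v] above, so an arity
        // of 3 is recorded for it here.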
234        self.lookup_arity.push(arity_counter);
235    }
236
237    fn instruction_counter(&self) -> Self::Variable {
238        self.instruction_counter
239    }
240
241    fn increase_instruction_counter(&mut self) {
242        self.instruction_counter += 1;
243    }
244
245    unsafe fn fetch_register(
246        &mut self,
247        idx: &Self::Variable,
248        output: Self::Position,
249    ) -> Self::Variable {
250        let res = self.registers[*idx as usize] as u64;
251        self.write_column(output, res);
252        res
253    }
254
255    unsafe fn push_register_if(
256        &mut self,
257        idx: &Self::Variable,
258        value: Self::Variable,
259        if_is_true: &Self::Variable,
260    ) {
261        let value: u32 = value.try_into().unwrap();
262        if *if_is_true == 1 {
263            self.registers[*idx as usize] = value
264        } else if *if_is_true == 0 {
265            // No-op
266        } else {
267            panic!("Bad value for flag in push_register: {}", *if_is_true);
268        }
269    }
270
271    unsafe fn fetch_register_access(
272        &mut self,
273        idx: &Self::Variable,
274        output: Self::Position,
275    ) -> Self::Variable {
276        let res = self.registers_write_index[*idx as usize];
277        self.write_column(output, res);
278        res
279    }
280
281    unsafe fn push_register_access_if(
282        &mut self,
283        idx: &Self::Variable,
284        value: Self::Variable,
285        if_is_true: &Self::Variable,
286    ) {
287        if *if_is_true == 1 {
288            self.registers_write_index[*idx as usize] = value
289        } else if *if_is_true == 0 {
290            // No-op
291        } else {
292            panic!("Bad value for flag in push_register: {}", *if_is_true);
293        }
294    }
295
296    unsafe fn fetch_memory(
297        &mut self,
298        addr: &Self::Variable,
299        output: Self::Position,
300    ) -> Self::Variable {
301        let addr: u32 = (*addr).try_into().unwrap();
302        let page = addr >> PAGE_ADDRESS_SIZE;
303        let page_address = (addr & PAGE_ADDRESS_MASK) as usize;
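        // Illustrative example, assuming the usual cannon 4 KiB pages
        // (PAGE_ADDRESS_SIZE = 12, PAGE_ADDRESS_MASK = 0xFFF): addr 0x0040_0004
        // splits into page 0x400 and offset 4 within that page.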
304        let memory_page_idx = self.get_memory_page_index(page);
305        let value = self.memory[memory_page_idx].1[page_address];
306        self.write_column(output, value.into());
307        value.into()
308    }
309
310    unsafe fn push_memory(&mut self, addr: &Self::Variable, value: Self::Variable) {
311        let addr: u32 = (*addr).try_into().unwrap();
312        let page = addr >> PAGE_ADDRESS_SIZE;
313        let page_address = (addr & PAGE_ADDRESS_MASK) as usize;
314        let memory_page_idx = self.get_memory_page_index(page);
315        self.memory[memory_page_idx].1[page_address] =
316            value.try_into().expect("push_memory values fit in a u8");
317    }
318
319    unsafe fn fetch_memory_access(
320        &mut self,
321        addr: &Self::Variable,
322        output: Self::Position,
323    ) -> Self::Variable {
324        let addr: u32 = (*addr).try_into().unwrap();
325        let page = addr >> PAGE_ADDRESS_SIZE;
326        let page_address = (addr & PAGE_ADDRESS_MASK) as usize;
327        let memory_write_index_page_idx = self.get_memory_access_page_index(page);
328        let value = self.memory_write_index[memory_write_index_page_idx].1[page_address];
329        self.write_column(output, value);
330        value
331    }
332
333    unsafe fn push_memory_access(&mut self, addr: &Self::Variable, value: Self::Variable) {
334        let addr = *addr as u32;
335        let page = addr >> PAGE_ADDRESS_SIZE;
336        let page_address = (addr & PAGE_ADDRESS_MASK) as usize;
337        let memory_write_index_page_idx = self.get_memory_access_page_index(page);
338        self.memory_write_index[memory_write_index_page_idx].1[page_address] = value;
339    }
340
341    fn constant(x: u32) -> Self::Variable {
342        x as u64
343    }
344
345    unsafe fn bitmask(
346        &mut self,
347        x: &Self::Variable,
348        highest_bit: u32,
349        lowest_bit: u32,
350        position: Self::Position,
351    ) -> Self::Variable {
352        let x: u32 = (*x).try_into().unwrap();
353        let res = (x >> lowest_bit) & ((1 << (highest_bit - lowest_bit)) - 1);
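        // E.g. highest_bit = 16, lowest_bit = 0 keeps the 16 least significant
        // bits of x (illustrative values; callers pick the actual ranges).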
354        let res = res as u64;
355        self.write_column(position, res);
356        res
357    }
358
359    unsafe fn shift_left(
360        &mut self,
361        x: &Self::Variable,
362        by: &Self::Variable,
363        position: Self::Position,
364    ) -> Self::Variable {
365        let x: u32 = (*x).try_into().unwrap();
366        let by: u32 = (*by).try_into().unwrap();
367        let res = x << by;
368        let res = res as u64;
369        self.write_column(position, res);
370        res
371    }
372
373    unsafe fn shift_right(
374        &mut self,
375        x: &Self::Variable,
376        by: &Self::Variable,
377        position: Self::Position,
378    ) -> Self::Variable {
379        let x: u32 = (*x).try_into().unwrap();
380        let by: u32 = (*by).try_into().unwrap();
381        let res = x >> by;
382        let res = res as u64;
383        self.write_column(position, res);
384        res
385    }
386
387    unsafe fn shift_right_arithmetic(
388        &mut self,
389        x: &Self::Variable,
390        by: &Self::Variable,
391        position: Self::Position,
392    ) -> Self::Variable {
393        let x: u32 = (*x).try_into().unwrap();
394        let by: u32 = (*by).try_into().unwrap();
395        let res = ((x as i32) >> by) as u32;
396        let res = res as u64;
397        self.write_column(position, res);
398        res
399    }
400
401    unsafe fn test_zero(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable {
402        let res = if *x == 0 { 1 } else { 0 };
403        self.write_column(position, res);
404        res
405    }
406
407    fn is_zero(&mut self, x: &Self::Variable) -> Self::Variable {
408        // write the result
409        let res = {
410            let pos = self.alloc_scratch();
411            unsafe { self.test_zero(x, pos) }
412        };
413        // write the non deterministic advice inv_or_zero
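        // (On the constraint side this advice typically enforces the standard
        // pair `res * x = 0` and `x * inv_or_zero = 1 - res`, forcing res = 1
        // exactly when x = 0; this is only a sketch of that convention.)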
414        let pos = self.alloc_scratch_inverse();
415        if *x == 0 {
416            self.write_field_column(pos, Fp::zero());
417        } else {
418            self.write_field_column(pos, Fp::from(*x));
419        };
420        // return the result
421        res
422    }
423
424    fn equal(&mut self, x: &Self::Variable, y: &Self::Variable) -> Self::Variable {
425        // We replicate is_zero(x-y), but working on field elements,
426        // to avoid subtraction overflow in the witness interpreter for u32
427        let to_zero_test = Fp::from(*x) - Fp::from(*y);
428        let res = {
429            let pos = self.alloc_scratch();
430            let is_zero: u64 = if to_zero_test == Fp::zero() { 1 } else { 0 };
431            self.write_column(pos, is_zero);
432            is_zero
433        };
434        let pos = self.alloc_scratch_inverse();
435        if to_zero_test == Fp::zero() {
436            self.write_field_column(pos, Fp::zero());
437        } else {
438            self.write_field_column(pos, to_zero_test);
439        };
440        res
441    }
442
443    unsafe fn test_less_than(
444        &mut self,
445        x: &Self::Variable,
446        y: &Self::Variable,
447        position: Self::Position,
448    ) -> Self::Variable {
449        let x: u32 = (*x).try_into().unwrap();
450        let y: u32 = (*y).try_into().unwrap();
451        let res = if x < y { 1 } else { 0 };
452        let res = res as u64;
453        self.write_column(position, res);
454        res
455    }
456
457    unsafe fn test_less_than_signed(
458        &mut self,
459        x: &Self::Variable,
460        y: &Self::Variable,
461        position: Self::Position,
462    ) -> Self::Variable {
463        let x: u32 = (*x).try_into().unwrap();
464        let y: u32 = (*y).try_into().unwrap();
465        let res = if (x as i32) < (y as i32) { 1 } else { 0 };
466        let res = res as u64;
467        self.write_column(position, res);
468        res
469    }
470
471    unsafe fn and_witness(
472        &mut self,
473        x: &Self::Variable,
474        y: &Self::Variable,
475        position: Self::Position,
476    ) -> Self::Variable {
477        let x: u32 = (*x).try_into().unwrap();
478        let y: u32 = (*y).try_into().unwrap();
479        let res = x & y;
480        let res = res as u64;
481        self.write_column(position, res);
482        res
483    }
484
485    unsafe fn nor_witness(
486        &mut self,
487        x: &Self::Variable,
488        y: &Self::Variable,
489        position: Self::Position,
490    ) -> Self::Variable {
491        let x: u32 = (*x).try_into().unwrap();
492        let y: u32 = (*y).try_into().unwrap();
493        let res = !(x | y);
494        let res = res as u64;
495        self.write_column(position, res);
496        res
497    }
498
499    unsafe fn or_witness(
500        &mut self,
501        x: &Self::Variable,
502        y: &Self::Variable,
503        position: Self::Position,
504    ) -> Self::Variable {
505        let x: u32 = (*x).try_into().unwrap();
506        let y: u32 = (*y).try_into().unwrap();
507        let res = x | y;
508        let res = res as u64;
509        self.write_column(position, res);
510        res
511    }
512
513    unsafe fn xor_witness(
514        &mut self,
515        x: &Self::Variable,
516        y: &Self::Variable,
517        position: Self::Position,
518    ) -> Self::Variable {
519        let x: u32 = (*x).try_into().unwrap();
520        let y: u32 = (*y).try_into().unwrap();
521        let res = x ^ y;
522        let res = res as u64;
523        self.write_column(position, res);
524        res
525    }
526
527    unsafe fn add_witness(
528        &mut self,
529        x: &Self::Variable,
530        y: &Self::Variable,
531        out_position: Self::Position,
532        overflow_position: Self::Position,
533    ) -> (Self::Variable, Self::Variable) {
534        let x: u32 = (*x).try_into().unwrap();
535        let y: u32 = (*y).try_into().unwrap();
536        // https://doc.rust-lang.org/std/primitive.u32.html#method.overflowing_add
537        let res = x.overflowing_add(y);
538        let (res_, overflow) = (res.0 as u64, res.1 as u64);
539        self.write_column(out_position, res_);
540        self.write_column(overflow_position, overflow);
541        (res_, overflow)
542    }
543
544    unsafe fn sub_witness(
545        &mut self,
546        x: &Self::Variable,
547        y: &Self::Variable,
548        out_position: Self::Position,
549        underflow_position: Self::Position,
550    ) -> (Self::Variable, Self::Variable) {
551        let x: u32 = (*x).try_into().unwrap();
552        let y: u32 = (*y).try_into().unwrap();
553        // https://doc.rust-lang.org/std/primitive.u32.html#method.overflowing_sub
554        let res = x.overflowing_sub(y);
555        let (res_, underflow) = (res.0 as u64, res.1 as u64);
556        self.write_column(out_position, res_);
557        self.write_column(underflow_position, underflow);
558        (res_, underflow)
559    }
560
561    unsafe fn mul_signed_witness(
562        &mut self,
563        x: &Self::Variable,
564        y: &Self::Variable,
565        position: Self::Position,
566    ) -> Self::Variable {
567        let x: u32 = (*x).try_into().unwrap();
568        let y: u32 = (*y).try_into().unwrap();
569        let res = ((x as i32) * (y as i32)) as u32;
570        let res = res as u64;
571        self.write_column(position, res);
572        res
573    }
574
575    unsafe fn mul_hi_lo_signed(
576        &mut self,
577        x: &Self::Variable,
578        y: &Self::Variable,
579        position_hi: Self::Position,
580        position_lo: Self::Position,
581    ) -> (Self::Variable, Self::Variable) {
582        let x: u32 = (*x).try_into().unwrap();
583        let y: u32 = (*y).try_into().unwrap();
584        let mul = (((x as i32) as i64) * ((y as i32) as i64)) as u64;
585        let hi = (mul >> 32) as u32;
586        let lo = (mul & ((1 << 32) - 1)) as u32;
587        let hi = hi as u64;
588        let lo = lo as u64;
589        self.write_column(position_hi, hi);
590        self.write_column(position_lo, lo);
591        (hi, lo)
592    }
593
594    unsafe fn mul_hi_lo(
595        &mut self,
596        x: &Self::Variable,
597        y: &Self::Variable,
598        position_hi: Self::Position,
599        position_lo: Self::Position,
600    ) -> (Self::Variable, Self::Variable) {
601        let x: u32 = (*x).try_into().unwrap();
602        let y: u32 = (*y).try_into().unwrap();
603        let mul = (x as u64) * (y as u64);
604        let hi = (mul >> 32) as u32;
605        let lo = (mul & ((1 << 32) - 1)) as u32;
606        let hi = hi as u64;
607        let lo = lo as u64;
608        self.write_column(position_hi, hi);
609        self.write_column(position_lo, lo);
610        (hi, lo)
611    }
612
613    unsafe fn divmod_signed(
614        &mut self,
615        x: &Self::Variable,
616        y: &Self::Variable,
617        position_quotient: Self::Position,
618        position_remainder: Self::Position,
619    ) -> (Self::Variable, Self::Variable) {
620        let x: u32 = (*x).try_into().unwrap();
621        let y: u32 = (*y).try_into().unwrap();
622        let q = ((x as i32) / (y as i32)) as u32;
623        let r = ((x as i32) % (y as i32)) as u32;
624        let q = q as u64;
625        let r = r as u64;
626        self.write_column(position_quotient, q);
627        self.write_column(position_remainder, r);
628        (q, r)
629    }
630
631    unsafe fn divmod(
632        &mut self,
633        x: &Self::Variable,
634        y: &Self::Variable,
635        position_quotient: Self::Position,
636        position_remainder: Self::Position,
637    ) -> (Self::Variable, Self::Variable) {
638        let x: u32 = (*x).try_into().unwrap();
639        let y: u32 = (*y).try_into().unwrap();
640        let q = x / y;
641        let r = x % y;
642        let q = q as u64;
643        let r = r as u64;
644        self.write_column(position_quotient, q);
645        self.write_column(position_remainder, r);
646        (q, r)
647    }
648
649    unsafe fn count_leading_zeros(
650        &mut self,
651        x: &Self::Variable,
652        position: Self::Position,
653    ) -> Self::Variable {
654        let x: u32 = (*x).try_into().unwrap();
655        let res = x.leading_zeros();
656        let res = res as u64;
657        self.write_column(position, res);
658        res
659    }
660
661    unsafe fn count_leading_ones(
662        &mut self,
663        x: &Self::Variable,
664        position: Self::Position,
665    ) -> Self::Variable {
666        let x: u32 = (*x).try_into().unwrap();
667        let res = x.leading_ones();
668        let res = res as u64;
669        self.write_column(position, res);
670        res
671    }
672
673    fn copy(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable {
674        self.write_column(position, *x);
675        *x
676    }
677
678    fn set_halted(&mut self, flag: Self::Variable) {
679        if flag == 0 {
680            self.halt = false
681        } else if flag == 1 {
682            self.halt = true
683        } else {
684            panic!("Bad value for flag in set_halted: {}", flag);
685        }
686    }
687
688    fn report_exit(&mut self, exit_code: &Self::Variable) {
689        println!(
690            "Exited with code {} at step {}",
691            *exit_code,
692            self.normalized_instruction_counter()
693        );
694    }
695
696    fn request_preimage_write(
697        &mut self,
698        addr: &Self::Variable,
699        len: &Self::Variable,
700        pos: Self::Position,
701    ) -> Self::Variable {
702        // The beginning of the syscall
703        if self.registers.preimage_offset == 0 {
704            let mut preimage_key = [0u8; 32];
705            for i in 0..8 {
706                let bytes = u32::to_be_bytes(self.registers.preimage_key[i]);
707                for j in 0..4 {
708                    preimage_key[4 * i + j] = bytes[j]
709                }
710            }
711            let preimage = self.preimage_oracle.get_preimage(preimage_key).get();
712            self.preimage = Some(preimage.clone());
713            self.preimage_key = Some(preimage_key);
714        }
715
716        const LENGTH_SIZE: usize = 8;
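        // Sketch of the data read through this syscall, assuming a 5-byte
        // preimage as an example: the guest first reads the 8-byte big-endian
        // length 0x0000_0000_0000_0005, then the 5 preimage bytes, i.e.
        // preimage_len + LENGTH_SIZE bytes in total.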
717
718        let preimage = self
719            .preimage
720            .as_ref()
721            .expect("to have a preimage if we're requesting it at a non-zero offset");
722        let preimage_len = preimage.len();
723        let preimage_offset = self.registers.preimage_offset as u64;
724
725        let max_read_len =
726            std::cmp::min(preimage_offset + len, (preimage_len + LENGTH_SIZE) as u64)
727                - preimage_offset;
728
729        // We read at most 4 bytes, ensuring that we respect word alignment.
730        // Here, if the address is not aligned, the first call will read fewer than
731        // 4 bytes, but the following calls will read 4 bytes at a time (because the
732        // address is updated with the offset) until reaching the end of the
733        // preimage (where the last call may read fewer than 4 bytes).
734        let actual_read_len = std::cmp::min(max_read_len, 4 - (addr & 3));
735
736        // This variable will contain the number of bytes read that belong to
737        // the actual preimage
738        let mut preimage_read_len = 0;
739        let mut chunk = 0;
740        for i in 0..actual_read_len {
741            let idx = (preimage_offset + i) as usize;
742            // The first 8 bytes of the read preimage are the preimage length,
743            // followed by the body of the preimage
744            if idx < LENGTH_SIZE {
745                // Compute the byte index read from the length
746                let len_i = idx % MIPS_CHUNK_BYTES_LEN;
747
748                let length_byte = u64::to_be_bytes(preimage_len as u64)[idx];
749
750                // Write the individual byte of the length to the witness
751                self.write_column(
752                    Column::ScratchState(MIPS_LENGTH_BYTES_OFF + len_i),
753                    length_byte as u64,
754                );
755
756                // TODO: Probably, the scratch state of MIPS_LENGTH_BYTES_OFF
757                // is redundant with lines below
758                unsafe {
759                    self.push_memory(&(*addr + i), length_byte as u64);
760                    self.push_memory_access(&(*addr + i), self.next_instruction_counter());
761                }
762            } else {
763                // Compute the byte index in the chunk of at most 4 bytes read
764                // from the preimage
765                let byte_i = (idx - LENGTH_SIZE) % MIPS_CHUNK_BYTES_LEN;
766
767                // This should really be handled by the keccak oracle.
768                let preimage_byte = self.preimage.as_ref().unwrap()[idx - LENGTH_SIZE];
769
770                // Write the individual byte of the preimage to the witness
771                self.write_column(
772                    Column::ScratchState(MIPS_PREIMAGE_BYTES_OFF + byte_i),
773                    preimage_byte as u64,
774                );
775
776                // Update the chunk of at most 4 bytes read from the preimage
777                chunk = chunk << 8 | preimage_byte as u64;
778
779                // This will be at most actual_read_len, reached when no length
780                // bytes are read in this call
781                preimage_read_len += 1;
782
783                // TODO: Probably, the scratch state of MIPS_PREIMAGE_BYTES_OFF
784                // is redundant with lines below
785                unsafe {
786                    self.push_memory(&(*addr + i), preimage_byte as u64);
787                    self.push_memory_access(&(*addr + i), self.next_instruction_counter());
788                }
789            }
790        }
791        // Update the chunk of at most 4 bytes read from the preimage
792        // FIXME: this is not linked to the registers content in any way.
793        //        Is there anywhere else where the bytes are stored in the
794        //        scratch state?
795        self.write_column(Column::ScratchState(MIPS_PREIMAGE_CHUNK_OFF), chunk);
796
797        // Update the number of bytes read from the oracle in this step (can
798        // include length bytes and preimage bytes)
799        self.write_column(pos, actual_read_len);
800
801        // Number of preimage bytes processed in this instruction
802        self.write_column(
803            Column::ScratchState(MIPS_NUM_BYTES_READ_OFF),
804            preimage_read_len,
805        );
806
807        // Update the flags indicating that at least i + 1 preimage bytes were read
808        for i in 0..MIPS_CHUNK_BYTES_LEN {
809            if preimage_read_len > i as u64 {
810                // This amount is only nonzero when it has read some preimage
811                // bytes.
812                self.write_column(Column::ScratchState(MIPS_HAS_N_BYTES_OFF + i), 1);
813            }
814        }
815
816        // Update the total number of preimage bytes read so far
817        self.preimage_bytes_read += preimage_read_len;
818        self.write_column(
819            Column::ScratchState(MIPS_BYTE_COUNTER_OFF),
820            self.preimage_bytes_read,
821        );
822
823        // If we've read the entire preimage, trigger Keccak workflow
824        if self.preimage_bytes_read == preimage_len as u64 {
825            self.write_column(Column::ScratchState(MIPS_END_OF_PREIMAGE_OFF), 1);
826
827            // Store the preimage key in the witness as 248 bits, excluding its most
828            // significant byte, for the communication channel with the Keccak environment
829            let bytes31 = (1..32).fold(Fp::zero(), |acc, i| {
830                acc * Fp::two_pow(8) + Fp::from(self.preimage_key.unwrap()[i])
831            });
832            self.write_field_column(Self::Position::ScratchState(MIPS_PREIMAGE_KEY), bytes31);
833
834            debug!("Preimage has been read entirely, triggering Keccak process");
835            self.keccak_env = Some(KeccakEnv::<Fp>::new(
836                self.hash_counter,
837                self.preimage.as_ref().unwrap(),
838            ));
839
840            // COMMUNICATION CHANNEL: only on constraint side
841
842            // Update hash counter column
843            self.write_column(
844                Column::ScratchState(MIPS_HASH_COUNTER_OFF),
845                self.hash_counter,
846            );
847            // Number of preimage bytes left to be read should be zero at this
848            // point
849
850            // Reset environment
851            self.preimage_bytes_read = 0;
852            self.preimage_key = None;
853            self.hash_counter += 1;
854
855            // Resetting the PreimageCounter column will be done in the next call
856        }
857        actual_read_len
858    }
859
860    fn request_hint_write(&mut self, addr: &Self::Variable, len: &Self::Variable) {
861        let mut last_hint = match std::mem::take(&mut self.syscall_env.last_hint) {
862            Some(mut last_hint) => {
863                last_hint.reserve(*len as usize);
864                last_hint
865            }
866            None => Vec::with_capacity(*len as usize),
867        };
868
869        // This should really be handled by the keccak oracle.
870        for i in 0..*len {
871            // Push memory access
872            unsafe { self.push_memory_access(&(*addr + i), self.next_instruction_counter()) };
873            // Fetch the value without allocating witness columns
874            let value = {
875                let addr: u32 = (*addr).try_into().unwrap();
876                let page = addr >> PAGE_ADDRESS_SIZE;
877                let page_address = (addr & PAGE_ADDRESS_MASK) as usize;
878                let memory_page_idx = self.get_memory_page_index(page);
879                self.memory[memory_page_idx].1[page_address]
880            };
881            last_hint.push(value);
882        }
883
884        let len = last_hint.len();
885        let mut idx = 0;
886
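        // The accumulated hint bytes are parsed as length-prefixed records: a
        // 4-byte big-endian length followed by that many payload bytes. E.g.
        // (illustrative) [0, 0, 0, 2, 0xAB, 0xCD] yields one 2-byte hint
        // [0xAB, 0xCD]; an incomplete tail is kept for the next call.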
887        while idx + 4 <= len {
888            let hint_len = u32::from_be_bytes(last_hint[idx..idx + 4].try_into().unwrap()) as usize;
889            idx += 4;
890            if idx + hint_len <= len {
891                let hint = last_hint[idx..idx + hint_len].to_vec();
892                idx += hint_len;
893                self.preimage_oracle.hint(Hint::create(hint));
894            }
895        }
896
897        let remaining = last_hint[idx..len].to_vec();
898
899        self.syscall_env.last_hint = Some(remaining);
900    }
901
902    fn reset(&mut self) {
903        self.scratch_state_idx = 0;
904        self.scratch_state = fresh_scratch_state();
905        self.selector = N_MIPS_SEL_COLS;
906    }
907}
908
909impl<Fp: PrimeField, PreImageOracle: PreImageOracleT> Env<Fp, PreImageOracle> {
910    pub fn create(page_size: usize, state: State, preimage_oracle: PreImageOracle) -> Self {
911        let initial_instruction_pointer = state.pc;
912        let next_instruction_pointer = state.next_pc;
913
914        let selector = N_MIPS_SEL_COLS;
915
916        let syscall_env = SyscallEnv::create(&state);
917
918        let mut initial_memory: Vec<(u32, Vec<u8>)> = state
919            .memory
920            .into_iter()
921            // Check that the conversion from page data is correct
922            .map(|page| (page.index, page.data))
923            .collect();
924
925        for (_address, initial_memory) in initial_memory.iter_mut() {
926            initial_memory.extend((0..(page_size - initial_memory.len())).map(|_| 0u8));
927            assert_eq!(initial_memory.len(), page_size);
928        }
929
930        let memory_offsets = initial_memory
931            .iter()
932            .map(|(offset, _)| *offset)
933            .collect::<Vec<_>>();
934
935        let initial_registers = {
936            let preimage_key = {
937                let mut preimage_key = [0u32; 8];
938                for (i, preimage_key_word) in preimage_key.iter_mut().enumerate() {
939                    *preimage_key_word = u32::from_be_bytes(
940                        state.preimage_key[i * 4..(i + 1) * 4].try_into().unwrap(),
941                    )
942                }
943                preimage_key
944            };
945            Registers {
946                lo: state.lo,
947                hi: state.hi,
948                general_purpose: state.registers,
949                current_instruction_pointer: initial_instruction_pointer,
950                next_instruction_pointer,
951                heap_pointer: state.heap,
952                preimage_key,
953                preimage_offset: state.preimage_offset,
954            }
955        };
956
957        Env {
958            instruction_counter: state.step,
959            memory: initial_memory.clone(),
960            last_memory_accesses: [0usize; 3],
961            memory_write_index: memory_offsets
962                .iter()
963                .map(|offset| (*offset, vec![0u64; page_size]))
964                .collect(),
965            last_memory_write_index_accesses: [0usize; 3],
966            registers: initial_registers.clone(),
967            registers_write_index: Registers::default(),
968            scratch_state_idx: 0,
969            scratch_state_idx_inverse: 0,
970            scratch_state: fresh_scratch_state(),
971            scratch_state_inverse: fresh_scratch_state(),
972            lookup_state_idx: 0,
973            lookup_state: vec![],
974            lookup_arity: vec![],
975            halt: state.exited,
976            syscall_env,
977            selector,
978            preimage_oracle,
979            preimage: state.preimage,
980            preimage_bytes_read: 0,
981            preimage_key: None,
982            keccak_env: None,
983            hash_counter: 0,
984            lookup_multiplicities: LookupMultiplicities::new(),
985        }
986    }
987
988    pub fn reset_scratch_state(&mut self) {
989        self.scratch_state_idx = 0;
990        self.scratch_state = fresh_scratch_state();
991        self.selector = N_MIPS_SEL_COLS;
992    }
993
994    pub fn reset_scratch_state_inverse(&mut self) {
995        self.scratch_state_idx_inverse = 0;
996        self.scratch_state_inverse = fresh_scratch_state();
997    }
998
999    pub fn reset_lookup_state(&mut self) {
1000        self.lookup_state_idx = 0;
1001        self.lookup_state = vec![];
1002    }
1003
1004    pub fn write_column(&mut self, column: Column, value: u64) {
1005        self.write_field_column(column, value.into())
1006    }
1007
1008    pub fn write_field_column(&mut self, column: Column, value: Fp) {
1009        match column {
1010            Column::ScratchState(idx) => self.scratch_state[idx] = value,
1011            Column::ScratchStateInverse(idx) => self.scratch_state_inverse[idx] = value,
1012            Column::InstructionCounter => panic!("Cannot overwrite the column {:?}", column),
1013            Column::Selector(s) => self.selector = s,
1014        }
1015    }
1016
1017    pub fn update_last_memory_access(&mut self, i: usize) {
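        // Keep a small most-recently-used cache of the last three page indices.
        // E.g. (illustrative) starting from [a, b, c], recording index i yields
        // [i, a, b]; the oldest cached index is dropped.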
1018        let [i_0, i_1, _] = self.last_memory_accesses;
1019        self.last_memory_accesses = [i, i_0, i_1]
1020    }
1021
1022    pub fn get_memory_page_index(&mut self, page: u32) -> usize {
1023        for &i in self.last_memory_accesses.iter() {
1024            if self.memory_write_index[i].0 == page {
1025                return i;
1026            }
1027        }
1028        for (i, (page_index, _memory)) in self.memory.iter_mut().enumerate() {
1029            if *page_index == page {
1030                self.update_last_memory_access(i);
1031                return i;
1032            }
1033        }
1034
1035        // Memory not found; dynamically allocate
1036        let memory = vec![0u8; PAGE_SIZE as usize];
1037        self.memory.push((page, memory));
1038        let i = self.memory.len() - 1;
1039        self.update_last_memory_access(i);
1040        i
1041    }
1042
1043    pub fn update_last_memory_write_index_access(&mut self, i: usize) {
1044        let [i_0, i_1, _] = self.last_memory_write_index_accesses;
1045        self.last_memory_write_index_accesses = [i, i_0, i_1]
1046    }
1047
1048    pub fn get_memory_access_page_index(&mut self, page: u32) -> usize {
1049        for &i in self.last_memory_write_index_accesses.iter() {
1050            if self.memory_write_index[i].0 == page {
1051                return i;
1052            }
1053        }
1054        for (i, (page_index, _memory_write_index)) in self.memory_write_index.iter_mut().enumerate()
1055        {
1056            if *page_index == page {
1057                self.update_last_memory_write_index_access(i);
1058                return i;
1059            }
1060        }
1061
1062        // Memory not found; dynamically allocate
1063        let memory_write_index = vec![0u64; PAGE_SIZE as usize];
1064        self.memory_write_index.push((page, memory_write_index));
1065        let i = self.memory_write_index.len() - 1;
1066        self.update_last_memory_write_index_access(i);
1067        i
1068    }
1069
1070    pub fn get_memory_direct(&mut self, addr: u32) -> u8 {
1071        let page = addr >> PAGE_ADDRESS_SIZE;
1072        let page_address = (addr & PAGE_ADDRESS_MASK) as usize;
1073        let memory_idx = self.get_memory_page_index(page);
1074        self.memory[memory_idx].1[page_address]
1075    }
1076
1077    pub fn decode_instruction(&mut self) -> (Instruction, u32) {
1078        let instruction =
1079            ((self.get_memory_direct(self.registers.current_instruction_pointer) as u32) << 24)
1080                | ((self.get_memory_direct(self.registers.current_instruction_pointer + 1) as u32)
1081                    << 16)
1082                | ((self.get_memory_direct(self.registers.current_instruction_pointer + 2) as u32)
1083                    << 8)
1084                | (self.get_memory_direct(self.registers.current_instruction_pointer + 3) as u32);
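        // Illustrative example: the big-endian bytes [0x24, 0x09, 0x00, 0x05]
        // assemble into 0x2409_0005 (`addiu $t1, $zero, 5`), whose top six bits
        // (0x09) select the AddImmediateUnsigned arm below.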
1085        let opcode = {
1086            match instruction >> 26 {
1087                0x00 => match instruction & 0x3F {
1088                    0x00 => {
1089                        if instruction == 0 {
1090                            Instruction::NoOp
1091                        } else {
1092                            Instruction::RType(RTypeInstruction::ShiftLeftLogical)
1093                        }
1094                    }
1095                    0x02 => Instruction::RType(RTypeInstruction::ShiftRightLogical),
1096                    0x03 => Instruction::RType(RTypeInstruction::ShiftRightArithmetic),
1097                    0x04 => Instruction::RType(RTypeInstruction::ShiftLeftLogicalVariable),
1098                    0x06 => Instruction::RType(RTypeInstruction::ShiftRightLogicalVariable),
1099                    0x07 => Instruction::RType(RTypeInstruction::ShiftRightArithmeticVariable),
1100                    0x08 => Instruction::RType(RTypeInstruction::JumpRegister),
1101                    0x09 => Instruction::RType(RTypeInstruction::JumpAndLinkRegister),
1102                    0x0a => Instruction::RType(RTypeInstruction::MoveZero),
1103                    0x0b => Instruction::RType(RTypeInstruction::MoveNonZero),
1104                    0x0c => match self.registers.general_purpose[2] {
1105                        4090 => Instruction::RType(RTypeInstruction::SyscallMmap),
1106                        4045 => {
1107                            // sysBrk
1108                            Instruction::RType(RTypeInstruction::SyscallOther)
1109                        }
1110                        4120 => {
1111                            // sysClone
1112                            Instruction::RType(RTypeInstruction::SyscallOther)
1113                        }
1114                        4246 => Instruction::RType(RTypeInstruction::SyscallExitGroup),
1115                        4003 => match self.registers.general_purpose[4] {
1116                            interpreter::FD_HINT_READ => {
1117                                Instruction::RType(RTypeInstruction::SyscallReadHint)
1118                            }
1119                            interpreter::FD_PREIMAGE_READ => {
1120                                Instruction::RType(RTypeInstruction::SyscallReadPreimage)
1121                            }
1122                            _ => Instruction::RType(RTypeInstruction::SyscallReadOther),
1123                        },
1124                        4004 => match self.registers.general_purpose[4] {
1125                            interpreter::FD_PREIMAGE_WRITE => {
1126                                Instruction::RType(RTypeInstruction::SyscallWritePreimage)
1127                            }
1128                            interpreter::FD_HINT_WRITE => {
1129                                Instruction::RType(RTypeInstruction::SyscallWriteHint)
1130                            }
1131                            _ => Instruction::RType(RTypeInstruction::SyscallWriteOther),
1132                        },
1133                        4055 => Instruction::RType(RTypeInstruction::SyscallFcntl),
1134                        _ => {
1135                            // NB: This has well-defined behavior. Don't panic!
1136                            Instruction::RType(RTypeInstruction::SyscallOther)
1137                        }
1138                    },
1139                    0x0f => Instruction::RType(RTypeInstruction::Sync),
1140                    0x10 => Instruction::RType(RTypeInstruction::MoveFromHi),
1141                    0x11 => Instruction::RType(RTypeInstruction::MoveToHi),
1142                    0x12 => Instruction::RType(RTypeInstruction::MoveFromLo),
1143                    0x13 => Instruction::RType(RTypeInstruction::MoveToLo),
1144                    0x18 => Instruction::RType(RTypeInstruction::Multiply),
1145                    0x19 => Instruction::RType(RTypeInstruction::MultiplyUnsigned),
1146                    0x1a => Instruction::RType(RTypeInstruction::Div),
1147                    0x1b => Instruction::RType(RTypeInstruction::DivUnsigned),
1148                    0x20 => Instruction::RType(RTypeInstruction::Add),
1149                    0x21 => Instruction::RType(RTypeInstruction::AddUnsigned),
1150                    0x22 => Instruction::RType(RTypeInstruction::Sub),
1151                    0x23 => Instruction::RType(RTypeInstruction::SubUnsigned),
1152                    0x24 => Instruction::RType(RTypeInstruction::And),
1153                    0x25 => Instruction::RType(RTypeInstruction::Or),
1154                    0x26 => Instruction::RType(RTypeInstruction::Xor),
1155                    0x27 => Instruction::RType(RTypeInstruction::Nor),
1156                    0x2a => Instruction::RType(RTypeInstruction::SetLessThan),
1157                    0x2b => Instruction::RType(RTypeInstruction::SetLessThanUnsigned),
1158                    _ => {
1159                        panic!("Unhandled instruction {:#X}", instruction)
1160                    }
1161                },
1162                0x01 => {
1163                    // RegImm instructions
1164                    match (instruction >> 16) & 0x1F {
1165                        0x0 => Instruction::IType(ITypeInstruction::BranchLtZero),
1166                        0x1 => Instruction::IType(ITypeInstruction::BranchGeqZero),
1167                        _ => panic!("Unhandled instruction {:#X}", instruction),
1168                    }
1169                }
1170                0x02 => Instruction::JType(JTypeInstruction::Jump),
1171                0x03 => Instruction::JType(JTypeInstruction::JumpAndLink),
1172                0x04 => Instruction::IType(ITypeInstruction::BranchEq),
1173                0x05 => Instruction::IType(ITypeInstruction::BranchNeq),
1174                0x06 => Instruction::IType(ITypeInstruction::BranchLeqZero),
1175                0x07 => Instruction::IType(ITypeInstruction::BranchGtZero),
1176                0x08 => Instruction::IType(ITypeInstruction::AddImmediate),
1177                0x09 => Instruction::IType(ITypeInstruction::AddImmediateUnsigned),
1178                0x0A => Instruction::IType(ITypeInstruction::SetLessThanImmediate),
1179                0x0B => Instruction::IType(ITypeInstruction::SetLessThanImmediateUnsigned),
1180                0x0C => Instruction::IType(ITypeInstruction::AndImmediate),
1181                0x0D => Instruction::IType(ITypeInstruction::OrImmediate),
1182                0x0E => Instruction::IType(ITypeInstruction::XorImmediate),
1183                0x0F => Instruction::IType(ITypeInstruction::LoadUpperImmediate),
1184                0x1C => match instruction & 0x3F {
1185                    0x02 => Instruction::RType(RTypeInstruction::MultiplyToRegister),
1186                    0x20 => Instruction::RType(RTypeInstruction::CountLeadingZeros),
1187                    0x21 => Instruction::RType(RTypeInstruction::CountLeadingOnes),
1188                    _ => panic!("Unhandled instruction {:#X}", instruction),
1189                },
1190                0x20 => Instruction::IType(ITypeInstruction::Load8),
1191                0x21 => Instruction::IType(ITypeInstruction::Load16),
1192                0x22 => Instruction::IType(ITypeInstruction::LoadWordLeft),
1193                0x23 => Instruction::IType(ITypeInstruction::Load32),
1194                0x24 => Instruction::IType(ITypeInstruction::Load8Unsigned),
1195                0x25 => Instruction::IType(ITypeInstruction::Load16Unsigned),
1196                0x26 => Instruction::IType(ITypeInstruction::LoadWordRight),
1197                0x28 => Instruction::IType(ITypeInstruction::Store8),
1198                0x29 => Instruction::IType(ITypeInstruction::Store16),
1199                0x2a => Instruction::IType(ITypeInstruction::StoreWordLeft),
1200                0x2b => Instruction::IType(ITypeInstruction::Store32),
1201                0x2e => Instruction::IType(ITypeInstruction::StoreWordRight),
1202                0x30 => {
1203                    // Note: This is ll (LoadLinked), but we're only simulating
1204                    // a single processor.
1205                    Instruction::IType(ITypeInstruction::Load32)
1206                }
1207                0x38 => {
1208                    // Note: This is sc (StoreConditional), but we're only
1209                    // simulating a single processor.
1210                    Instruction::IType(ITypeInstruction::Store32Conditional)
1211                }
1212                _ => {
1213                    panic!("Unhandled instruction {:#X}", instruction)
1214                }
1215            }
1216        };
1217        (opcode, instruction)
1218    }
1219
1220    /// The actual number of instructions executed results from dividing the
1221    /// instruction counter by MAX_ACC (floor).
1222    ///
1223    /// NOTE: in practice the counter grows by less than MAX_ACC per instruction,
1224    ///       as no single instruction performs all of those accesses.
1225    pub fn normalized_instruction_counter(&self) -> u64 {
1226        self.instruction_counter / MAX_ACC
1227    }
1228
1229    /// Computes the non-normalized next instruction counter, which
1230    /// accounts for the maximum number of register and memory accesses per
1231    /// instruction.
1232    ///
1233    /// Because MAX_NB_REG_ACC = 7 and MAX_NB_MEM_ACC = 12, a single instruction
1234    /// increases the instruction counter by at most MAX_ACC = 19.
1235    ///
1236    /// Then, in order to update the instruction counter, we need to add 1 to
1237    /// the real instruction counter and multiply it by MAX_ACC to have a unique
1238    /// representation of each step (which is helpful for debugging).
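    ///
    /// For example (an illustrative sketch of the arithmetic, not part of the
    /// API): with MAX_ACC = 19, a raw counter of 40 normalizes to 40 / 19 = 2,
    /// so the next raw counter is (2 + 1) * 19 = 57.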
1239    pub fn next_instruction_counter(&self) -> u64 {
1240        (self.normalized_instruction_counter() + 1) * MAX_ACC
1241    }
1242
1243    /// Execute a single step of the MIPS program.
1244    /// Returns the instruction that was executed.
1245    pub fn step(
1246        &mut self,
1247        config: &VmConfiguration,
1248        metadata: &Option<Meta>,
1249        start: &Start,
1250    ) -> Instruction {
1251        self.reset_scratch_state();
1252        self.reset_scratch_state_inverse();
1253        self.reset_lookup_state();
1254        let (opcode, _instruction) = self.decode_instruction();
1255
1256        self.pp_info(&config.info_at, metadata, start);
1257        self.snapshot_state_at(&config.snapshot_state_at);
1258
1259        interpreter::interpret_instruction(self, opcode);
1260
1261        self.instruction_counter = self.next_instruction_counter();
1262
1263        config.halt_address.iter().for_each(|halt_address: &u32| {
1264            if self.registers.current_instruction_pointer == *halt_address {
1265                debug!("Program jumped to halt address: {:#X}", halt_address);
1266                self.halt = true;
1267            }
1268        });
1269
1270        // Force stops at given iteration
1271        if self.should_trigger_at(&config.stop_at) {
1272            self.halt = true;
1273            println!(
1274                "Halted as requested at step={} instruction={:?}",
1275                self.normalized_instruction_counter(),
1276                opcode
1277            );
1278        }
1279
1280        // Integer division by MAX_ACC to obtain the actual instruction count
1281        if self.halt {
1282            debug!(
1283                "Halted at step={} instruction={:?}",
1284                self.normalized_instruction_counter(),
1285                opcode
1286            );
1287        }
1288        opcode
1289    }
1290
1291    fn should_trigger_at(&self, at: &StepFrequency) -> bool {
1292        let m: u64 = self.normalized_instruction_counter();
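        // E.g. `Every(1000)` triggers whenever m % 1000 == 0, and
        // `Range(10, Some(20))` triggers for 10 <= m < 20 (illustrative values).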
1293        match at {
1294            StepFrequency::Never => false,
1295            StepFrequency::Always => true,
1296            StepFrequency::Exactly(n) => *n == m,
1297            StepFrequency::Every(n) => m % *n == 0,
1298            StepFrequency::Range(lo, hi_opt) => {
1299                m >= *lo && (hi_opt.is_none() || m < hi_opt.unwrap())
1300            }
1301        }
1302    }
1303
1304    // Compute memory usage
1305    fn memory_usage(&self) -> String {
1306        let total = self.memory.len() * PAGE_SIZE as usize;
1307        memory_size(total)
1308    }
1309
1310    fn page_address(&self) -> (u32, usize) {
1311        let address = self.registers.current_instruction_pointer;
1312        let page = address >> PAGE_ADDRESS_SIZE;
1313        let page_address = (address & PAGE_ADDRESS_MASK) as usize;
1314        (page, page_address)
1315    }
1316
1317    fn get_opcode(&mut self) -> Option<u32> {
1318        let (page_id, page_address) = self.page_address();
1319        for (page_index, memory) in self.memory.iter() {
1320            if page_id == *page_index {
1321                let memory_slice: [u8; 4] = memory[page_address..page_address + 4]
1322                    .try_into()
1323                    .expect("Couldn't read 4 bytes at given address");
1324                return Some(u32::from_be_bytes(memory_slice));
1325            }
1326        }
1327        None
1328    }
1329
1330    fn snapshot_state_at(&mut self, at: &StepFrequency) {
1331        if self.should_trigger_at(at) {
1332            let filename = format!(
1333                "snapshot-state-{}.json",
1334                self.normalized_instruction_counter()
1335            );
1336            let file = File::create(filename.clone()).expect("Impossible to open file");
1337            let mut writer = BufWriter::new(file);
1338            let mut preimage_key = [0u8; 32];
1339            for i in 0..8 {
1340                let bytes = u32::to_be_bytes(self.registers.preimage_key[i]);
1341                for j in 0..4 {
1342                    preimage_key[4 * i + j] = bytes[j]
1343                }
1344            }
1345            let memory = self
1346                .memory
1347                .clone()
1348                .into_iter()
1349                .map(|(idx, data)| Page { index: idx, data })
1350                .collect();
1351            let s: State = State {
1352                pc: self.registers.current_instruction_pointer,
1353                next_pc: self.registers.next_instruction_pointer,
1354                step: self.normalized_instruction_counter(),
1355                registers: self.registers.general_purpose,
1356                lo: self.registers.lo,
1357                hi: self.registers.hi,
1358                heap: self.registers.heap_pointer,
1359                // FIXME: it should be the exit code. We do not keep it in the
1360                // witness atm
1361                exit: if self.halt { 1 } else { 0 },
1362                last_hint: self.syscall_env.last_hint.clone(),
1363                exited: self.halt,
1364                preimage_offset: self.registers.preimage_offset,
1365                preimage_key,
1366                memory,
1367                preimage: self.preimage.clone(),
1368            };
1369            let _ = serde_json::to_writer(&mut writer, &s);
1370            info!(
1371                "Snapshot state in {}, step {}",
1372                filename,
1373                self.normalized_instruction_counter()
1374            );
1375            writer.flush().expect("Flush writer failing")
1376        }
1377    }
1378
1379    fn pp_info(&mut self, at: &StepFrequency, meta: &Option<Meta>, start: &Start) {
1380        if self.should_trigger_at(at) {
1381            let elapsed = start.time.elapsed();
1382            // Compute the step number removing the MAX_ACC factor
1383            let step = self.normalized_instruction_counter();
1384            let pc = self.registers.current_instruction_pointer;
1385
1386            // Get the 32-bit opcode
1387            let insn = self.get_opcode().unwrap();
1388
1389            // Approximate instructions per second
1390            let how_many_steps = step as usize - start.step;
1391
1392            let ips = how_many_steps as f64 / elapsed.as_secs() as f64;
1393
1394            let pages = self.memory.len();
1395
1396            let mem = self.memory_usage();
1397            let name = meta
1398                .as_ref()
1399                .and_then(|m| m.find_address_symbol(pc))
1400                .unwrap_or("n/a".to_string());
1401
1402            info!(
1403                "processing step={} pc={:010x} insn={:010x} ips={:.2} pages={} mem={} name={}",
1404                step, pc, insn, ips, pages, mem, name
1405            );
1406        }
1407    }
1408}