use crate::cannon::{Page, State, PAGE_SIZE};
use elf::{endian::LittleEndian, section::SectionHeader, ElfBytes};
use log::debug;
use std::{collections::HashMap, path::Path};
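
/// Parses a RISC-V 32i ELF file and builds the initial [`State`] of the
/// virtual machine: the `.text` section is copied into page-aligned memory
/// pages, the program counter is set to the ELF entry point, and all
/// registers are zero-initialized.
///
/// A minimal usage sketch; the path is hypothetical and must point to a
/// RISC-V 32i ELF binary:
///
/// ```ignore
/// let state = parse_riscv32i(Path::new("path/to/program.elf"))
///     .expect("Failed to load the RISC-V 32i ELF file");
/// // The program counter now points to the ELF entry point.
/// assert!(state.pc != 0);
/// ```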
pub fn parse_riscv32i(path: &Path) -> Result<State, String> {
debug!("Start parsing the ELF file to load a RISC-V 32i compatible state");
let file_data = std::fs::read(path).expect("Could not read file.");
let slice = file_data.as_slice();
let file = ElfBytes::<LittleEndian>::minimal_parse(slice).expect("Open ELF file failed.");
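    // e_machine == 243 identifies a RISC-V ELF file (EM_RISCV).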
    assert_eq!(file.ehdr.e_machine, 243);
    let (shdrs_opt, strtab_opt) = file
        .section_headers_with_strtab()
        .expect("shdrs offsets should be valid");
    let (shdrs, strtab) = (
        shdrs_opt.expect("Should have shdrs"),
        strtab_opt.expect("Should have strtab"),
    );
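    // Build a map from section name to section header so that sections can be
    // looked up by name.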
    let sections_by_name: HashMap<&str, SectionHeader> = shdrs
        .iter()
        .map(|shdr| {
            (
                strtab
                    .get(shdr.sh_name as usize)
                    .expect("Failed to get section name"),
                shdr,
            )
        })
        .collect();
debug!("Loading the text section, which contains the executable code.");
let text_section = sections_by_name
.get(".text")
.expect("Should have .text section");
let (text_section_data, _) = file
.section_data(text_section)
.expect("Failed to read data from .text section");
    let code_section_starting_address = text_section.sh_addr as usize;
    let code_section_size = text_section.sh_size as usize;
    let code_section_end_address = code_section_starting_address + code_section_size;
    debug!(
        "The executable code starts at address {}, has size {} bytes, and ends at address {}.",
        code_section_starting_address, code_section_size, code_section_end_address
    );
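    // The code section is not necessarily aligned on the page size, so compute
    // the page-aligned addresses of the first and last pages that contain it.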
    let mut memory: Vec<Page> = vec![];
    let page_size_usize: usize = PAGE_SIZE.try_into().unwrap();
    let start_page_address: usize =
        (code_section_starting_address / page_size_usize) * page_size_usize;
    // Round the end address up to the next page boundary.
    let end_page_address = ((code_section_end_address + page_size_usize - 1) / page_size_usize)
        * page_size_usize;
    let first_page_index = start_page_address / page_size_usize;
    let last_page_index = (end_page_address - 1) / page_size_usize;
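    // Copy the code section into the pages, one page at a time. `data_offset`
    // tracks how many bytes of the section have been copied so far.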
    let mut data_offset = 0;
    (first_page_index..=last_page_index).for_each(|page_index| {
        let mut data = vec![0; page_size_usize];
        if first_page_index == last_page_index {
            // The whole code section fits within a single page.
            let data_length = code_section_end_address - code_section_starting_address;
            let page_offset = code_section_starting_address - start_page_address;
            data[page_offset..page_offset + data_length]
                .copy_from_slice(&text_section_data[0..data_length]);
            data_offset += data_length;
        } else {
            // Offset within the page at which the code starts: non-zero only
            // for the first page when the section is not page-aligned.
            let page_offset = if page_index == first_page_index {
                code_section_starting_address - start_page_address
            } else {
                0
            };
            // Number of bytes of the code section that land in this page.
            let data_length = if page_index == last_page_index {
                code_section_end_address - (page_index * page_size_usize)
            } else {
                page_size_usize - page_offset
            };
            data[page_offset..page_offset + data_length]
                .copy_from_slice(&text_section_data[data_offset..data_offset + data_length]);
            data_offset += data_length;
        }
        let page = Page {
            index: page_index as u32,
            data,
        };
        memory.push(page);
    });
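    // Registers are zero-initialized. The program counter is set to the ELF
    // entry point, and `next_pc` to the instruction immediately after it.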
    let registers: [u32; 32] = [0; 32];
    let preimage_key: [u8; 32] = [0; 32];
    let preimage_offset = 0;
    let pc: u32 = file.ehdr.e_entry as u32;
    assert!(
        pc != 0,
        "Entry point is 0. According to the ELF library documentation, this means the ELF file has no entry point, which is not supported."
    );
    let next_pc: u32 = pc + 4u32;
    let state = State {
        memory,
        preimage_key,
        preimage_offset,
        pc,
        next_pc,
        lo: 0,
        hi: 0,
        heap: 0,
        exit: 0,
        exited: false,
        step: 0,
        registers,
        last_hint: None,
        preimage: None,
    };
    Ok(state)
}