Generate direct control flow, using wasm blocks and loops

Fabian 2020-12-31 19:14:33 -06:00
parent 705b22ee14
commit 37c3d1f83c
12 changed files with 1298 additions and 366 deletions

View file

@@ -232,6 +232,10 @@ function gen_instruction_body_after_fixed_g(encoding, size)
{
instruction_postfix.push("analysis.no_next_instruction = true;");
}
if(encoding.absolute_jump)
{
instruction_postfix.push("analysis.absolute_jump = true;");
}
if(encoding.prefix)
{

View file

@@ -244,8 +244,8 @@ const encodings = [
{ opcode: 0xF2AF, block_boundary: 1, custom: 1, is_string: 1, os: 1, },
{ opcode: 0xF3AF, block_boundary: 1, custom: 1, is_string: 1, os: 1, },
{ opcode: 0xC2, custom: 1, block_boundary: 1, no_next_instruction: 1, os: 1, imm16: 1, skip: 1, }, // ret
{ opcode: 0xC3, custom: 1, block_boundary: 1, no_next_instruction: 1, os: 1, skip: 1, },
{ opcode: 0xC2, custom: 1, block_boundary: 1, no_next_instruction: 1, os: 1, absolute_jump: 1, imm16: 1, skip: 1, }, // ret
{ opcode: 0xC3, custom: 1, block_boundary: 1, no_next_instruction: 1, os: 1, absolute_jump: 1, skip: 1, },
{ opcode: 0xC4, block_boundary: 1, os: 1, e: 1, skip: 1, }, // les
{ opcode: 0xC5, block_boundary: 1, os: 1, e: 1, skip: 1, }, // lds
@@ -401,9 +401,9 @@ const encodings = [
{ opcode: 0xFE, e: 1, fixed_g: 1, custom: 1 },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 0, custom: 1, },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 1, custom: 1, },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 2, custom: 1, block_boundary: 1, skip: 1, },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 2, custom: 1, block_boundary: 1, absolute_jump: 1, skip: 1, },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 3, block_boundary: 1, skip: 1, },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 4, custom: 1, block_boundary: 1, no_next_instruction: 1, skip: 1, },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 4, custom: 1, block_boundary: 1, absolute_jump: 1, no_next_instruction: 1, skip: 1, },
{ opcode: 0xFF, os: 1, e: 1, fixed_g: 5, block_boundary: 1, no_next_instruction: 1, skip: 1, },
{ opcode: 0xFF, custom: 1, os: 1, e: 1, fixed_g: 6, },

View file

@@ -19,9 +19,14 @@ const print_stats = {
"COMPILE_WITH_LOOP_SAFETY",
"COMPILE_PAGE",
"COMPILE_BASIC_BLOCK",
"COMPILE_DUPLICATED_BASIC_BLOCK",
"COMPILE_WASM_BLOCK",
"COMPILE_WASM_LOOP",
"COMPILE_DISPATCHER",
"COMPILE_ENTRY_POINT",
"COMPILE_WASM_TOTAL_BYTES",
"CACHE_MISMATCH",
"JIT_CACHE_OVERRIDE",
"JIT_CACHE_OVERRIDE_DIFFERENT_STATE_FLAGS",
"RUN_INTERPRETED",
"RUN_INTERPRETED_PENDING",
"RUN_INTERPRETED_NEAR_END_OF_PAGE",
@@ -31,6 +36,24 @@ const print_stats = {
"RUN_INTERPRETED_STEPS",
"RUN_FROM_CACHE",
"RUN_FROM_CACHE_STEPS",
"DIRECT_EXIT",
"INDIRECT_JUMP",
"INDIRECT_JUMP_NO_ENTRY",
"NORMAL_PAGE_CHANGE",
"NORMAL_FALLTHRU",
"NORMAL_FALLTHRU_WITH_TARGET_BLOCK",
"NORMAL_BRANCH",
"NORMAL_BRANCH_WITH_TARGET_BLOCK",
"CONDITIONAL_JUMP",
"CONDITIONAL_JUMP_PAGE_CHANGE",
"CONDITIONAL_JUMP_EXIT",
"CONDITIONAL_JUMP_FALLTHRU",
"CONDITIONAL_JUMP_FALLTHRU_WITH_TARGET_BLOCK",
"CONDITIONAL_JUMP_BRANCH",
"CONDITIONAL_JUMP_BRANCH_WITH_TARGET_BLOCK",
"DISPATCHER_SMALL",
"DISPATCHER_LARGE",
"LOOP",
"FAILED_PAGE_CHANGE",
"SAFE_READ_FAST",
"SAFE_READ_SLOW_PAGE_CROSSED",
@@ -93,9 +116,10 @@ const print_stats = {
text += "TLB_ENTRIES=" + tlb_entries + " (" + global_tlb_entries + " global, " + nonglobal_tlb_entries + " non-global)\n";
text += "WASM_TABLE_FREE=" + cpu.wm.exports["jit_get_wasm_table_index_free_list_count"]() + "\n";
text += "JIT_CACHE_SIZE=" + cpu.wm.exports["jit_get_cache_size"]() + "\n";
text += "FLAT_SEGMENTS=" + cpu.wm.exports["has_flat_segmentation"]() + "\n";
text += "do_many_cycles avg: " + do_many_cycles_total / do_many_cycles_count + "\n";
text += "do_many_cycles avg: " + (do_many_cycles_total / do_many_cycles_count || 0) + "\n";
text += "wasm memory size: " + (cpu.wasm_memory.buffer.byteLength >> 20) + "m\n";
return text;

View file

@@ -18,12 +18,14 @@ pub enum AnalysisType {
pub struct Analysis {
pub no_next_instruction: bool,
pub absolute_jump: bool,
pub ty: AnalysisType,
}
pub fn analyze_step(mut cpu: &mut CpuContext) -> Analysis {
let mut analysis = Analysis {
no_next_instruction: false,
absolute_jump: false,
ty: AnalysisType::Normal,
};
cpu.prefixes = 0;

View file

@@ -17,7 +17,7 @@ pub fn gen_add_cs_offset(ctx: &mut JitContext) {
ctx.builder.add_i32();
}
fn gen_get_eip(builder: &mut WasmBuilder) {
pub fn gen_get_eip(builder: &mut WasmBuilder) {
builder.load_fixed_i32(global_pointers::instruction_pointer as u32);
}
@@ -89,36 +89,13 @@ pub fn gen_page_switch_check(
ctx.builder.const_i32(next_block_addr as i32);
ctx.builder.ne_i32();
ctx.builder.if_void();
// TODO: br_if
gen_profiler_stat_increment(ctx.builder, profiler::stat::FAILED_PAGE_CHANGE);
gen_debug_track_jit_exit(ctx.builder, last_instruction_addr);
ctx.builder.br(ctx.exit_label);
ctx.builder.block_end();
}
pub fn gen_absolute_indirect_jump(ctx: &mut JitContext, new_eip: WasmLocal) {
ctx.builder
.const_i32(global_pointers::instruction_pointer as i32);
ctx.builder.get_local(&new_eip);
ctx.builder.store_aligned_i32(0);
gen_get_phys_eip(ctx, &new_eip);
ctx.builder.free_local(new_eip);
ctx.builder
.const_i32(ctx.our_wasm_table_index.to_u16() as i32);
ctx.builder.const_i32(ctx.state_flags.to_u32() as i32);
ctx.builder.call_fn3_ret("jit_find_cache_entry_in_page");
let new_basic_block_index = ctx.builder.tee_new_local();
ctx.builder.const_i32(0);
ctx.builder.ge_i32();
ctx.builder.if_void();
ctx.builder.get_local(&new_basic_block_index);
ctx.builder.set_local(ctx.basic_block_index_local);
ctx.builder.br(ctx.main_loop_label);
ctx.builder.block_end();
ctx.builder.free_local(new_basic_block_index);
}
pub fn gen_increment_timestamp_counter(builder: &mut WasmBuilder, n: i32) {
builder.increment_fixed_i32(global_pointers::timestamp_counter as u32, n)
}
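
For reference, the br(ctx.exit_label) emitted on a failed page-switch check compiles down to a plain relative-depth branch in the generated wasm. A minimal standalone sketch, with hand-picked opcode bytes and raw depths instead of the builder's Label bookkeeping, of branching from inside an if out of an enclosing block:

fn main() {
    // block  if  br 1  end  end: `br 1` skips the innermost label (the if)
    // and exits the enclosing block, the same shape used to reach an exit label.
    let mut body: Vec<u8> = Vec::new();
    body.push(0x02); body.push(0x40); // block (no result)
    body.push(0x41); body.push(0x01); //   i32.const 1 (stand-in condition)
    body.push(0x04); body.push(0x40); //   if (no result)
    body.push(0x0c); body.push(0x01); //     br 1: branch past the end of the outer block
    body.push(0x0b);                  //   end (if)
    body.push(0x0b);                  // end (block)
    assert_eq!(body, [0x02, 0x40, 0x41, 0x01, 0x04, 0x40, 0x0c, 0x01, 0x0b, 0x0b]);
}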

src/rust/control_flow.rs (new file, 407 lines added)
View file

@@ -0,0 +1,407 @@
use std::collections::{HashMap, HashSet};
use std::iter;
use jit::{BasicBlock, BasicBlockType};
use profiler;
const ENTRY_NODE_ID: u32 = 0xffff_ffff;
type Graph = HashMap<u32, HashSet<u32>>;
/// Reverse the direction of all edges in the graph
fn rev_graph_edges(nodes: &Graph) -> Graph {
let mut rev_nodes = Graph::new();
for (from, tos) in nodes {
for to in tos {
rev_nodes
.entry(*to)
.or_insert_with(|| HashSet::new())
.insert(*from);
}
}
rev_nodes
}
pub fn make_graph(basic_blocks: &Vec<BasicBlock>) -> Graph {
let mut nodes = Graph::new();
let mut entry_edges = HashSet::new();
for b in basic_blocks.iter() {
let mut edges = HashSet::new();
match &b.ty {
BasicBlockType::ConditionalJump {
next_block_addr,
next_block_branch_taken_addr,
..
} => {
if let Some(next_block_addr) = next_block_addr {
edges.insert(*next_block_addr);
}
if let Some(next_block_branch_taken_addr) = next_block_branch_taken_addr {
edges.insert(*next_block_branch_taken_addr);
}
},
BasicBlockType::Normal { next_block_addr } => {
edges.insert(*next_block_addr);
},
BasicBlockType::Exit => {},
BasicBlockType::AbsoluteEip => {
// Not necessary: We generate a loop around the outer brtable unconditionally
//edges.insert(ENTRY_NODE_ID);
},
}
nodes.insert(b.addr, edges);
if b.is_entry_block {
entry_edges.insert(b.addr);
}
}
// Entry node that represents the initial basic block of the generated function (must be
// able to reach all entry nodes)
nodes.insert(ENTRY_NODE_ID, entry_edges);
return nodes;
}
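
For a concrete picture (hypothetical addresses, standalone sketch rather than part of control_flow.rs): a page whose single entry block at 0x1000 conditionally jumps to 0x1010 or falls through to 0x1008, which in turn falls through to the exit block 0x1010, produces this graph:

use std::collections::{HashMap, HashSet};

type Graph = HashMap<u32, HashSet<u32>>;
const ENTRY_NODE_ID: u32 = 0xffff_ffff;

fn main() {
    let mut nodes = Graph::new();
    // 0x1000: ConditionalJump with fallthrough 0x1008 and branch target 0x1010
    nodes.insert(0x1000, [0x1008, 0x1010].iter().copied().collect());
    // 0x1008: Normal, falls through to 0x1010
    nodes.insert(0x1008, [0x1010].iter().copied().collect());
    // 0x1010: Exit, no outgoing edges
    nodes.insert(0x1010, HashSet::new());
    // Synthetic entry node pointing at every entry block of the page
    nodes.insert(ENTRY_NODE_ID, [0x1000].iter().copied().collect());

    assert_eq!(nodes.len(), 4);
    assert!(nodes[&ENTRY_NODE_ID].contains(&0x1000));
}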
pub enum WasmStructure {
BasicBlock(u32),
Dispatcher(Vec<u32>),
Loop(Vec<WasmStructure>),
Block(Vec<WasmStructure>),
}
impl WasmStructure {
pub fn print(&self, depth: usize) {
match self {
Self::BasicBlock(addr) => dbg_log!("{} 0x{:x}", " ".repeat(depth), addr),
Self::Dispatcher(entries) => {
dbg_log!("{} Dispatcher entries:", " ".repeat(depth));
for e in entries {
dbg_log!("{} {:x}", " ".repeat(depth), e);
}
},
Self::Loop(elements) => {
dbg_log!("{} loop_void({})", " ".repeat(depth), elements.len());
for e in elements {
e.print(depth + 1)
}
dbg_log!("{} loop_end({})", " ".repeat(depth), elements.len());
},
Self::Block(elements) => {
dbg_log!("{} block_void({})", " ".repeat(depth), elements.len());
for e in elements {
e.print(depth + 1)
}
dbg_log!("{} block_end({})", " ".repeat(depth), elements.len());
},
}
}
fn branches(&self, edges: &Graph) -> HashSet<u32> {
fn handle(block: &WasmStructure, edges: &Graph, result: &mut HashSet<u32>) {
match block {
WasmStructure::BasicBlock(addr) => result.extend(edges.get(&addr).unwrap()),
WasmStructure::Dispatcher(entries) => result.extend(entries),
WasmStructure::Loop(children) | WasmStructure::Block(children) => {
for c in children.iter() {
handle(c, edges, result);
}
},
}
};
let mut result = HashSet::new();
handle(self, edges, &mut result);
result
}
pub fn head(&self) -> Box<dyn iter::Iterator<Item = u32> + '_> {
match self {
Self::BasicBlock(addr) => Box::new(iter::once(*addr)),
Self::Dispatcher(entries) => Box::new(entries.iter().copied()),
Self::Loop(children) => children.first().unwrap().head(),
Self::Block(elements) => elements.first().unwrap().head(),
}
}
}
/// Check:
/// - Dispatcher appears at the beginning of a loop
/// - No two nested blocks at the end
/// - No two nested loops at the beginning
/// - No empty blocks or loops
/// - The entry node block is not present
pub fn assert_invariants(blocks: &Vec<WasmStructure>) {
fn check(node: &WasmStructure, in_tail_block: bool, in_head_loop: bool, is_first: bool) {
match node {
WasmStructure::Block(children) => {
dbg_assert!(!in_tail_block);
dbg_assert!(!children.is_empty());
for (i, c) in children.iter().enumerate() {
let is_first = i == 0;
let is_last = i == children.len() - 1;
check(c, is_last, in_head_loop && is_first, is_first);
}
},
WasmStructure::Loop(children) => {
dbg_assert!(!in_head_loop);
dbg_assert!(!children.is_empty());
for (i, c) in children.iter().enumerate() {
let is_first = i == 0;
let is_last = i == children.len() - 1;
check(c, in_tail_block && is_last, is_first, is_first);
}
},
&WasmStructure::BasicBlock(addr) => dbg_assert!(addr != ENTRY_NODE_ID),
WasmStructure::Dispatcher(_) => {
dbg_assert!(is_first);
//dbg_assert!(in_head_loop); // fails for module dispatcher
},
}
}
for (i, b) in blocks.iter().enumerate() {
check(b, false, false, i == 0);
}
}
/// Strongly connected components via Kosaraju's algorithm
fn scc(edges: &Graph, rev_edges: &Graph) -> Vec<Vec<u32>> {
fn visit(
node: u32,
edges: &Graph,
rev_edges: &Graph,
visited: &mut HashSet<u32>,
l: &mut Vec<u32>,
) {
if visited.contains(&node) {
return;
}
visited.insert(node);
for &next in edges.get(&node).unwrap() {
visit(next, edges, rev_edges, visited, l);
}
l.push(node);
}
let mut l = Vec::new();
let mut visited = HashSet::new();
for &node in edges.keys() {
visit(node, edges, rev_edges, &mut visited, &mut l);
}
fn assign(
node: u32,
edges: &Graph,
rev_edges: &Graph,
assigned: &mut HashSet<u32>,
group: &mut Vec<u32>,
) {
if assigned.contains(&node) {
return;
}
assigned.insert(node);
group.push(node);
if let Some(nexts) = rev_edges.get(&node) {
for &next in nexts {
assign(next, edges, rev_edges, assigned, group);
}
}
}
let mut assigned = HashSet::new();
let mut assignment = Vec::new();
for &node in l.iter().rev() {
let mut group = Vec::new();
assign(node, edges, rev_edges, &mut assigned, &mut group);
if !group.is_empty() {
assignment.push(group);
}
}
assignment
}
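
The same two passes, a post-order walk of the forward graph followed by assignment on the reversed graph, in a minimal standalone form on a toy graph with one cycle (illustrative only, not part of control_flow.rs):

use std::collections::{HashMap, HashSet};

type Graph = HashMap<u32, HashSet<u32>>;

fn visit(node: u32, edges: &Graph, visited: &mut HashSet<u32>, order: &mut Vec<u32>) {
    if !visited.insert(node) { return; }
    for &next in &edges[&node] { visit(next, edges, visited, order); }
    order.push(node); // pushed only after all successors are done
}

fn assign(node: u32, rev: &Graph, assigned: &mut HashSet<u32>, group: &mut Vec<u32>) {
    if !assigned.insert(node) { return; }
    group.push(node);
    if let Some(preds) = rev.get(&node) {
        for &p in preds { assign(p, rev, assigned, group); }
    }
}

fn main() {
    // 1 and 2 form a cycle, 2 -> 3 leaves it
    let mut g = Graph::new();
    g.insert(1, [2].iter().copied().collect());
    g.insert(2, [1, 3].iter().copied().collect());
    g.insert(3, HashSet::new());
    let mut rev = Graph::new();
    for (&from, tos) in &g {
        for &to in tos { rev.entry(to).or_insert_with(HashSet::new).insert(from); }
    }

    let (mut visited, mut order) = (HashSet::new(), Vec::new());
    for &n in g.keys() { visit(n, &g, &mut visited, &mut order); }

    let mut assigned = HashSet::new();
    let mut groups: Vec<Vec<u32>> = Vec::new();
    for &n in order.iter().rev() {
        let mut group = Vec::new();
        assign(n, &rev, &mut assigned, &mut group);
        if !group.is_empty() { groups.push(group); }
    }
    // The cycle {1, 2} is one component, {3} another, and components come out
    // in topological order of the condensed graph.
    assert_eq!(groups.len(), 2);
    assert!(groups[0].contains(&1) && groups[0].contains(&2));
    assert_eq!(groups[1], vec![3u32]);
}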
pub fn loopify(nodes: &Graph) -> Vec<WasmStructure> {
let rev_nodes = rev_graph_edges(nodes);
let groups = scc(nodes, &rev_nodes);
return groups
.iter()
.flat_map(|group| {
dbg_assert!(!group.is_empty());
if group.len() == 1 {
let addr = group[0];
if addr == ENTRY_NODE_ID {
let entries = nodes.get(&ENTRY_NODE_ID).unwrap().iter().copied().collect();
return vec![WasmStructure::Dispatcher(entries)].into_iter();
}
let block = WasmStructure::BasicBlock(addr);
// self-loops
if nodes.get(&group[0]).unwrap().contains(&group[0]) {
return vec![WasmStructure::Loop(vec![block])].into_iter();
}
else {
return vec![block].into_iter();
}
}
let entries_to_group: Vec<u32> = group
.iter()
.filter(|addr| {
// reachable from outside of the group
rev_nodes.get(addr).map_or(false, |x| {
x.iter().any(|incoming| !group.contains(incoming))
})
})
.copied()
.collect();
if entries_to_group.len() != 1 {
dbg_log!(
"Compiling multi-entry loop with {} entries and {} basic blocks",
entries_to_group.len(),
group.len()
);
}
let max_extra_basic_blocks = 100;
if entries_to_group.len() * group.len() > max_extra_basic_blocks {
let mut subgroup_edges: Graph = Graph::new();
for elem in group {
subgroup_edges.insert(
*elem,
nodes
.get(&elem)
.unwrap()
.iter()
.filter(|dest| {
// XXX: This might remove forward edges to other loop entries
// Probably not an issue since it can go through the
// dispatcher
group.contains(dest) && !entries_to_group.contains(dest)
})
.copied()
.collect(),
);
}
let mut loop_nodes = loopify(&subgroup_edges);
if entries_to_group.len() > 1 {
loop_nodes.insert(0, WasmStructure::Dispatcher(entries_to_group));
}
return vec![WasmStructure::Loop(loop_nodes)].into_iter();
}
else {
profiler::stat_increment_by(
profiler::stat::COMPILE_DUPLICATED_BASIC_BLOCK,
((entries_to_group.len() - 1) * group.len()) as u64,
);
let nodes: Vec<WasmStructure> = entries_to_group
.iter()
.map(|&entry| {
let mut subgroup_edges: Graph = Graph::new();
for &elem in group {
subgroup_edges.insert(
elem,
nodes
.get(&elem)
.unwrap()
.iter()
.copied()
.filter(|dest| group.contains(dest) && *dest != entry)
.collect(),
);
}
let loop_nodes = loopify(&subgroup_edges);
WasmStructure::Loop(loop_nodes)
})
.collect();
nodes.into_iter()
}
})
.collect();
}
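
As a hand-traced example (hypothetical addresses, shown as a value rather than executed): an entry block 0x1000 that flows into 0x1008, which either loops back to 0x1000 or leaves to the exit block 0x1010, has the SCCs {ENTRY}, {0x1000, 0x1008} and {0x1010}, and loopify is expected to produce:

// Graph: ENTRY -> {0x1000},  0x1000 -> {0x1008},  0x1008 -> {0x1000, 0x1010},  0x1010 -> {}
// The {0x1000, 0x1008} component has the single entry 0x1000, so the back edge to it
// is dropped and the body is loopified recursively; the ENTRY component becomes the
// dispatcher over the page's entry blocks.
vec![
    WasmStructure::Dispatcher(vec![0x1000]),
    WasmStructure::Loop(vec![
        WasmStructure::BasicBlock(0x1000),
        WasmStructure::BasicBlock(0x1008),
    ]),
    WasmStructure::BasicBlock(0x1010),
]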
pub fn blockify(blocks: &mut Vec<WasmStructure>, edges: &Graph) {
let mut cached_branches: Vec<HashSet<u32>> = Vec::new();
for i in 0..blocks.len() {
cached_branches.push(blocks[i].branches(edges));
}
let mut i = 0;
while i < blocks.len() {
match &mut blocks[i] {
WasmStructure::BasicBlock(_) | WasmStructure::Dispatcher(_) => {},
WasmStructure::Loop(blocks)
// TODO: Might be faster to do this *after* inserting blocks in this block
| WasmStructure::Block(blocks) => blockify(blocks, edges),
}
let source = {
let mut source = None;
for j in 0..i {
if blocks[i].head().any(|bb| cached_branches[j].contains(&bb)) {
source = Some(j);
break;
}
}
match source {
Some(s) => s,
None => {
i += 1;
continue;
},
}
};
// This is optional: Avoid putting a single basic block into a block
if source == i - 1 {
match &blocks[source] {
&WasmStructure::BasicBlock(_) => {
i += 1;
continue;
},
_ => {},
}
}
let replacement = WasmStructure::Block(Vec::new());
let children: Vec<WasmStructure> =
blocks.splice(source..i, iter::once(replacement)).collect();
match &mut blocks[source] {
WasmStructure::Block(c) => c.extend(children),
_ => dbg_assert!(false),
}
match &blocks[source + 1] {
WasmStructure::BasicBlock(_) =>
//dbg_assert!(*b == bbs.next().unwrap())
{}
WasmStructure::Dispatcher(_) => {},
WasmStructure::Loop(_blocks) | WasmStructure::Block(_blocks) => {}, //dbg_assert!(blocks[0].head() == bb),
}
{
let replacement = HashSet::new();
let children: Vec<HashSet<u32>> = cached_branches
.splice(source..i, iter::once(replacement))
.collect();
dbg_assert!(cached_branches[source].len() == 0);
let mut iter = children.into_iter();
cached_branches[source] = iter.next().unwrap();
for c in iter {
cached_branches[source].extend(c);
}
}
// skip the inserted block and this block
i = source + 2;
}
}
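
The splice step above is easiest to see in isolation: when an earlier sibling branches forward to the head of blocks[i], everything from that sibling up to but excluding blocks[i] is wrapped in a Block, so the forward edge can later be emitted as a br to the Block's end. A stripped-down standalone sketch of just that wrapping, with a toy Node type in place of WasmStructure:

use std::iter;

#[derive(Debug, PartialEq)]
enum Node {
    BasicBlock(u32),
    Block(Vec<Node>),
}

fn main() {
    // 0xa branches forward to 0xc; wrap [0xa, 0xb] in a Block so that branch
    // can target the Block's end and land right in front of 0xc.
    let mut blocks = vec![Node::BasicBlock(0xa), Node::BasicBlock(0xb), Node::BasicBlock(0xc)];
    let (source, i) = (0, 2);

    let children: Vec<Node> = blocks
        .splice(source..i, iter::once(Node::Block(Vec::new())))
        .collect();
    match &mut blocks[source] {
        Node::Block(c) => c.extend(children),
        _ => unreachable!(),
    }

    assert_eq!(
        blocks,
        vec![
            Node::Block(vec![Node::BasicBlock(0xa), Node::BasicBlock(0xb)]),
            Node::BasicBlock(0xc),
        ]
    );
}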

File diff suppressed because it is too large

View file

@@ -2702,33 +2702,37 @@ pub fn instr32_E9_jit(ctx: &mut JitContext, imm: u32) {
}
pub fn instr16_C2_jit(ctx: &mut JitContext, imm16: u32) {
ctx.builder.const_i32(0);
codegen::gen_pop16(ctx);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
codegen::gen_adjust_stack_reg(ctx, imm16);
codegen::gen_absolute_indirect_jump(ctx, new_eip);
}
pub fn instr32_C2_jit(ctx: &mut JitContext, imm16: u32) {
ctx.builder.const_i32(0);
codegen::gen_pop32s(ctx);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
codegen::gen_adjust_stack_reg(ctx, imm16);
codegen::gen_absolute_indirect_jump(ctx, new_eip);
}
pub fn instr16_C3_jit(ctx: &mut JitContext) {
ctx.builder.const_i32(0);
codegen::gen_pop16(ctx);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr32_C3_jit(ctx: &mut JitContext) {
ctx.builder.const_i32(0);
codegen::gen_pop32s(ctx);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr16_C9_jit(ctx: &mut JitContext) { codegen::gen_leave(ctx, false); }
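
The recurring const_i32(0) followed by store_aligned_i32(global_pointers::instruction_pointer as u32) in the rewritten handlers writes the computed target into the instruction_pointer cell of linear memory: the 0 is the base address operand of i32.store, and, assuming the builder passes its argument through as the static offset immediate, the global's byte offset selects the cell. A standalone sketch of the resulting bytes, with a hypothetical offset of 96 and the value taken from local 0:

fn main() {
    const IP_OFFSET: u8 = 96; // hypothetical; stands in for global_pointers::instruction_pointer
    let mut body: Vec<u8> = Vec::new();
    body.push(0x41); body.push(0x00); // i32.const 0 (store base address)
    body.push(0x20); body.push(0x00); // local.get 0 (the new absolute eip)
    body.push(0x36);                  // i32.store
    body.push(0x02);                  // alignment = 2, i.e. 4-byte aligned
    body.push(IP_OFFSET);             // static offset selecting the instruction_pointer cell
    assert_eq!(body, [0x41, 0x00, 0x20, 0x00, 0x36, 0x02, 0x60]);
}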
@@ -3782,19 +3786,23 @@ pub fn instr16_FF_2_mem_jit(ctx: &mut JitContext, modrm_byte: ModrmByte) {
codegen::gen_push16(ctx, &value_local);
ctx.builder.free_local(value_local);
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder.const_i32(0);
ctx.builder.get_local(&new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
ctx.builder.free_local(new_eip);
}
pub fn instr16_FF_2_reg_jit(ctx: &mut JitContext, r: u32) {
codegen::gen_get_reg16(ctx, r);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_get_real_eip(ctx);
let value_local = ctx.builder.set_new_local();
codegen::gen_push16(ctx, &value_local);
ctx.builder.free_local(value_local);
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder.const_i32(0);
codegen::gen_get_reg16(ctx, r);
codegen::gen_add_cs_offset(ctx);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr32_FF_2_mem_jit(ctx: &mut JitContext, modrm_byte: ModrmByte) {
codegen::gen_modrm_resolve_safe_read32(ctx, modrm_byte);
@@ -3806,44 +3814,52 @@ pub fn instr32_FF_2_mem_jit(ctx: &mut JitContext, modrm_byte: ModrmByte) {
codegen::gen_push32(ctx, &value_local);
ctx.builder.free_local(value_local);
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder.const_i32(0);
ctx.builder.get_local(&new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
ctx.builder.free_local(new_eip);
}
pub fn instr32_FF_2_reg_jit(ctx: &mut JitContext, r: u32) {
codegen::gen_get_reg32(ctx, r);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_get_real_eip(ctx);
let value_local = ctx.builder.set_new_local();
codegen::gen_push32(ctx, &value_local);
ctx.builder.free_local(value_local);
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder.const_i32(0);
codegen::gen_get_reg32(ctx, r);
codegen::gen_add_cs_offset(ctx);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr16_FF_4_mem_jit(ctx: &mut JitContext, modrm_byte: ModrmByte) {
ctx.builder.const_i32(0);
codegen::gen_modrm_resolve_safe_read16(ctx, modrm_byte);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr16_FF_4_reg_jit(ctx: &mut JitContext, r: u32) {
ctx.builder.const_i32(0);
codegen::gen_get_reg16(ctx, r);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr32_FF_4_mem_jit(ctx: &mut JitContext, modrm_byte: ModrmByte) {
ctx.builder.const_i32(0);
codegen::gen_modrm_resolve_safe_read32(ctx, modrm_byte);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr32_FF_4_reg_jit(ctx: &mut JitContext, r: u32) {
ctx.builder.const_i32(0);
codegen::gen_get_reg32(ctx, r);
codegen::gen_add_cs_offset(ctx);
let new_eip = ctx.builder.set_new_local();
codegen::gen_absolute_indirect_jump(ctx, new_eip);
ctx.builder
.store_aligned_i32(global_pointers::instruction_pointer as u32);
}
pub fn instr16_FF_6_mem_jit(ctx: &mut JitContext, modrm_byte: ModrmByte) {

View file

@@ -21,6 +21,7 @@ pub mod profiler;
mod analysis;
mod codegen;
mod config;
mod control_flow;
mod cpu_context;
mod gen;
mod jit;

View file

@@ -2,15 +2,15 @@ use cpu;
use cpu::global_pointers;
use wasmgen::wasm_builder::WasmBuilder;
struct Instruction {
prefixes: Vec<u8>,
opcode: u8,
fixed_g: u8,
is_mem: bool,
is_0f: bool,
pub struct Instruction {
pub prefixes: Vec<u8>,
pub opcode: u8,
pub fixed_g: u8,
pub is_mem: bool,
pub is_0f: bool,
}
fn decode(mut instruction: u32) -> Instruction {
pub fn decode(mut instruction: u32) -> Instruction {
let mut is_0f = false;
let mut prefixes = vec![];
let mut final_opcode = 0;

View file

@@ -1,4 +1,4 @@
#[allow(dead_code, non_camel_case_types)]
#[allow(non_camel_case_types)]
pub enum stat {
COMPILE,
COMPILE_SUCCESS,
@@ -7,9 +7,15 @@ pub enum stat {
COMPILE_WITH_LOOP_SAFETY,
COMPILE_PAGE,
COMPILE_BASIC_BLOCK,
COMPILE_DUPLICATED_BASIC_BLOCK,
COMPILE_WASM_BLOCK,
COMPILE_WASM_LOOP,
COMPILE_DISPATCHER,
COMPILE_ENTRY_POINT,
COMPILE_WASM_TOTAL_BYTES,
CACHE_MISMATCH,
JIT_CACHE_OVERRIDE,
JIT_CACHE_OVERRIDE_DIFFERENT_STATE_FLAGS,
RUN_INTERPRETED,
RUN_INTERPRETED_PENDING,
@@ -22,6 +28,25 @@ pub enum stat {
RUN_FROM_CACHE,
RUN_FROM_CACHE_STEPS,
DIRECT_EXIT,
INDIRECT_JUMP,
INDIRECT_JUMP_NO_ENTRY,
NORMAL_PAGE_CHANGE,
NORMAL_FALLTHRU,
NORMAL_FALLTHRU_WITH_TARGET_BLOCK,
NORMAL_BRANCH,
NORMAL_BRANCH_WITH_TARGET_BLOCK,
CONDITIONAL_JUMP,
CONDITIONAL_JUMP_PAGE_CHANGE,
CONDITIONAL_JUMP_EXIT,
CONDITIONAL_JUMP_FALLTHRU,
CONDITIONAL_JUMP_FALLTHRU_WITH_TARGET_BLOCK,
CONDITIONAL_JUMP_BRANCH,
CONDITIONAL_JUMP_BRANCH_WITH_TARGET_BLOCK,
DISPATCHER_SMALL,
DISPATCHER_LARGE,
LOOP,
FAILED_PAGE_CHANGE,
SAFE_READ_FAST,

View file

@@ -869,30 +869,31 @@ impl WasmBuilder {
#[allow(dead_code)]
pub fn drop_(&mut self) { self.instruction_body.push(op::OP_DROP); }
// Generate a br_table where an input of [i] will branch to the [i]th outer block,
// where [i] is passed on the wasm stack
pub fn brtable_and_cases(&mut self, cases_count: u32) {
pub fn brtable(
&mut self,
default_case: Label,
cases: &mut dyn std::iter::ExactSizeIterator<Item = &Label>,
) {
self.instruction_body.push(op::OP_BRTABLE);
write_leb_u32(&mut self.instruction_body, cases_count);
for i in 0..(cases_count + 1) {
write_leb_u32(&mut self.instruction_body, i);
write_leb_u32(&mut self.instruction_body, cases.len() as u32);
for case in cases {
self.write_label(*case);
}
self.write_label(default_case);
}
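
The new brtable takes real Labels instead of a bare case count, but the wire format is unchanged: the br_table opcode, a LEB count of non-default targets, one relative depth per target, then the default target's depth. A standalone sketch with raw depths standing in for what write_label derives from label_to_depth:

fn write_leb_u32(out: &mut Vec<u8>, mut value: u32) {
    loop {
        let mut byte = (value & 0x7f) as u8;
        value >>= 7;
        if value != 0 {
            byte |= 0x80;
        }
        out.push(byte);
        if value == 0 {
            break;
        }
    }
}

fn br_table(out: &mut Vec<u8>, case_depths: &[u32], default_depth: u32) {
    out.push(0x0e); // br_table opcode
    write_leb_u32(out, case_depths.len() as u32);
    for &depth in case_depths {
        write_leb_u32(out, depth); // case i branches to the i-th listed depth
    }
    write_leb_u32(out, default_depth); // out-of-range inputs take the default
}

fn main() {
    let mut body = Vec::new();
    br_table(&mut body, &[0, 1, 2], 3);
    assert_eq!(body, [0x0e, 0x03, 0x00, 0x01, 0x02, 0x03]);
}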
pub fn br(&mut self, label: Label) {
let depth = *self.label_to_depth.get(&label).unwrap();
dbg_assert!(depth <= self.label_stack.len());
self.instruction_body.push(op::OP_BR);
write_leb_u32(
&mut self.instruction_body,
(self.label_stack.len() - depth) as u32,
);
self.write_label(label);
}
pub fn br_if(&mut self, label: Label) {
self.instruction_body.push(op::OP_BRIF);
self.write_label(label);
}
fn write_label(&mut self, label: Label) {
let depth = *self.label_to_depth.get(&label).unwrap();
dbg_assert!(depth <= self.label_stack.len());
self.instruction_body.push(op::OP_BRIF);
write_leb_u32(
&mut self.instruction_body,
(self.label_stack.len() - depth) as u32,