From adebb6b320b8133d4a104882a1227353f7c33b20 Mon Sep 17 00:00:00 2001
From: TheCPP
Date: Sat, 13 Jul 2024 20:26:57 +0200
Subject: [PATCH] [X64] adding support for the return instruction to the asm
 generation backend

---
 examples/simple.rs     |   7 +-
 src/IR/ir.rs           |  24 +++++++
 src/Target/mod.rs      |  13 ++--
 src/Target/registry.rs |  70 ++++++++++++++++++-
 src/Target/x64/call.rs |  24 +++++++
 src/Target/x64/ir.rs   | 155 ++++++++++++++++++++++-------------
 src/Target/x64/mod.rs  |   5 ++
 7 files changed, 214 insertions(+), 84 deletions(-)

diff --git a/examples/simple.rs b/examples/simple.rs
index dba74c36..cc17f51a 100644
--- a/examples/simple.rs
+++ b/examples/simple.rs
@@ -1,5 +1,5 @@
 use std::error::Error;
-use Ygen::{prelude::*, PassManager::Passes::PreComputeValue, Target::{initializeX64Target, CallConv}};
+use Ygen::{prelude::*, Target::{initializeX64Target, CallConv}};
 
 pub fn main() -> Result<(), Box<dyn Error>> {
 
@@ -17,11 +17,14 @@ pub fn main() -> Result<(), Box<dyn Error>> {
     let entry = func.addBlock("entry");
     builder.positionAtEnd(entry);
 
-    let val = builder.BuildAdd(ty.arg(0), ty.arg(1));
+    //let val = builder.BuildAdd(ty.arg(0), ty.arg(1));
     //let add2 = builder.BuildAdd(Type::i32(5), Type::i32(5));
 
     //let ret = builder.BuildAdd(val, add2);
 
     //builder.BuildRet( ret );
+
+    builder.BuildRet( Type::i32(5) );
+
     let block = builder.getLastBlock().clone().unwrap().clone();
     let func = func.clone().to_owned().clone();
diff --git a/src/IR/ir.rs b/src/IR/ir.rs
index 4f15d595..b2e09148 100644
--- a/src/IR/ir.rs
+++ b/src/IR/ir.rs
@@ -1,5 +1,6 @@
 use std::{any::Any, fmt::Debug, hash::Hash};
 use super::{FunctionType, IRBuilder, Type, TypeMetadata, Var, VerifyError};
+use crate::Target::TARGETS;
 
 macro_rules! IrTypeWith3 {
     ($name:tt, $param1:tt, $param2:tt, $param3:tt) => {
@@ -118,6 +119,10 @@ impl Ir for Return<Type> {
     fn as_any(&self) -> &dyn Any {
         self
     }
+
+    fn compile(&self) -> Vec<String> {
+        TARGETS.get().unwrap().lock().unwrap().getCompileFuncRetType()(self)
+    }
 }
 
 impl Ir for Return<Var> {
@@ -150,6 +155,10 @@ impl Ir for Return<Var> {
     fn as_any(&self) -> &dyn Any {
         self
     }
+
+    fn compile(&self) -> Vec<String> {
+        TARGETS.get().unwrap().lock().unwrap().getCompileFuncRetVar()(self)
+    }
 }
 
 impl Ir for Add<Type, Type, Var> {
@@ -196,6 +205,10 @@ impl Ir for Add<Type, Type, Var> {
     fn as_any(&self) -> &dyn Any {
         self
     }
+
+    fn compile(&self) -> Vec<String> {
+        TARGETS.get().unwrap().lock().unwrap().getCompileFuncAddTypeType()(self)
+    }
 }
 
 impl Ir for Add<Var, Var, Var> {
@@ -242,6 +255,10 @@ impl Ir for Add<Var, Var, Var> {
     fn as_any(&self) -> &dyn Any {
         self
     }
+
+    fn compile(&self) -> Vec<String> {
+        TARGETS.get().unwrap().lock().unwrap().getCompileFuncAddVarVar()(self)
+    }
 }
 
 impl Ir for ConstAssign<Var, Type> {
@@ -280,6 +297,10 @@ impl Ir for ConstAssign<Var, Type> {
     fn clone_box(&self) -> Box<dyn Ir> {
         Box::new(self.clone())
     }
+
+    fn compile(&self) -> Vec<String> {
+        TARGETS.get().unwrap().lock().unwrap().getCompileFuncConstAssign()(self)
+    }
 }
 
 /// Trait for the return instruction
@@ -359,6 +380,9 @@ pub(crate) trait Ir: Debug + Any {
 
     /// Clones the node into a box of `Box<dyn Ir>`
     fn clone_box(&self) -> Box<dyn Ir>;
+
+    /// Compiles the node based on the initialized TARGETS registry
+    fn compile(&self) -> Vec<String>;
 }
 
 impl Clone for Box<dyn Ir> {
diff --git a/src/Target/mod.rs b/src/Target/mod.rs
index 7ec742e9..c0d38448 100644
--- a/src/Target/mod.rs
+++ b/src/Target/mod.rs
@@ -2,11 +2,12 @@ mod triple;
 mod registry;
 mod x64;
 pub use x64::initializeX64Target;
+pub(crate) use registry::TARGETS;
 
 pub use triple::Triple;
 pub use registry::TargetRegistry;
 
 /// Target architecture
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum Arch {
     /// Unknown Architecture
     Unknown,
@@ -111,7 +112,7 @@ pub enum Arch {
 }
 
 /// Target calling convention
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum CallConv {
     /// Windows standart
     WindowsFastCall,
@@ -120,7 +121,7 @@ pub enum CallConv {
 }
 
 /// Vendor
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum Vendor {
     /// Unknown Vendor
     Unknown,
@@ -157,7 +158,7 @@ pub enum Vendor {
 }
 
 /// Target OS
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum OS {
     /// Unknown OS
     Unknown,
@@ -232,7 +233,7 @@ pub enum OS {
 }
 
 /// Target environment
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum Environment {
     /// Unknown environment
     Unknown,
@@ -275,7 +276,7 @@ pub enum Environment {
 }
 
 /// Target object format
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum ObjFormat {
     /// Unknown
     Unknown,
diff --git a/src/Target/registry.rs b/src/Target/registry.rs
index 6dce45bf..59d51872 100644
--- a/src/Target/registry.rs
+++ b/src/Target/registry.rs
@@ -1,13 +1,38 @@
-use std::sync::Mutex;
+use std::{collections::HashMap, sync::Mutex};
 
 use once_cell::sync::OnceCell;
 
-use super::Arch;
+use crate::prelude::{Return, Type, Var};
+
+use super::{Arch, CallConv};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) struct BackendInfos {
+    pub(crate) varsStorage: HashMap<Var, VarStorage>,
+}
+
+impl BackendInfos {
+    pub(crate) fn insertVar(&mut self, var: Var, store: VarStorage) {
+        self.varsStorage.insert(var, store);
+    }
+}
+
 
-/// The Target Registry: stores if a target was already initialized
 #[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) enum VarStorage {
+    Register(String),
+    Memory(String),
+}
+
+pub(crate) type CompileFunc<T> = fn(&T/*, &mut BackendInfos*/) -> Vec<String>;
+
+/// The Target Registry: stores if a target was already initialized
+#[derive(Debug, Clone)]
 pub struct TargetRegistry {
     pub(crate) inited_targets: Vec<Arch>,
+    funcForRetType: HashMap<Arch, CompileFunc<Return<Type>>>,
+    funcForRetVar: HashMap<Arch, CompileFunc<Return<Var>>>,
+    pub(crate) backend: BackendInfos,
+    pub(crate) call: CallConv,
 }
 
 pub(crate) static TARGETS: OnceCell<Mutex<TargetRegistry>> = OnceCell::new();
@@ -17,9 +42,48 @@ impl TargetRegistry {
     pub fn new() -> Self {
         Self {
             inited_targets: vec![],
+            funcForRetType: HashMap::new(),
+            funcForRetVar: HashMap::new(),
+            call: CallConv::SystemV,
+            backend: BackendInfos { varsStorage: HashMap::new() },
         }
     }
 
+    /// sets the callback for compiling the return ir node into asm
+    pub(crate) fn setCompileFuncForRetType(&mut self, arch: Arch, callback: CompileFunc<Return<Type>>) {
+        if !self.funcForRetType.contains_key(&arch) {
+            self.funcForRetType.insert(arch, callback);
+        }
+    }
+
+    /// gets the callback for compiling the return ir node into asm
+    pub(crate) fn getCompileFuncRetType(&self) -> CompileFunc<Return<Type>> {
+        if let Some(last_arch) = self.inited_targets.last() {
+            if let Some(func) = self.funcForRetType.get(last_arch) {
+                *func
+            } else { todo!() }
+        } else { todo!() }
+    }
+
+
+
+    /// sets the callback for compiling the return ir node into asm
+    pub(crate) fn setCompileFuncForRetVar(&mut self, arch: Arch, callback: CompileFunc<Return<Var>>) {
+        if !self.funcForRetVar.contains_key(&arch) {
+            self.funcForRetVar.insert(arch, callback);
+        }
+    }
+
+    /// gets the callback for compiling the return ir node into asm
+    pub(crate) fn getCompileFuncRetVar(&self) -> CompileFunc<Return<Var>> {
+        if let Some(last_arch) = self.inited_targets.last() {
+            if let Some(func) = self.funcForRetVar.get(last_arch) {
+                *func
+            } else { todo!() }
+        } else { todo!() }
+    }
+
     /// Sets an architecture as initialized
     pub fn set_inited(&mut self, arch: Arch) {
         if !self.inited_targets.contains(&arch) {
diff --git a/src/Target/x64/call.rs b/src/Target/x64/call.rs
index 8c01a862..abe46073 100644
--- a/src/Target/x64/call.rs
+++ b/src/Target/x64/call.rs
@@ -32,4 +32,28 @@ impl CallConv {
             CallConv::WindowsFastCall => vec!["rdx".into(), "rcx".into(), "r8".into(), "r9".into()],
         }
     }
+
+    /// Returns the return register
+    pub fn ret16(&self) -> String {
+        match self {
+            CallConv::WindowsFastCall => "ax".into(),
+            CallConv::SystemV => "ax".into(),
+        }
+    }
+
+    /// Returns the return register
+    pub fn ret32(&self) -> String {
+        match self {
+            CallConv::WindowsFastCall => "eax".into(),
+            CallConv::SystemV => "eax".into(),
+        }
+    }
+
+    /// Returns the return register
+    pub fn ret64(&self) -> String {
+        match self {
+            CallConv::WindowsFastCall => "rax".into(),
+            CallConv::SystemV => "rax".into(),
+        }
+    }
 }
\ No newline at end of file
diff --git a/src/Target/x64/ir.rs b/src/Target/x64/ir.rs
index c7c3fbde..c12f0e47 100644
--- a/src/Target/x64/ir.rs
+++ b/src/Target/x64/ir.rs
@@ -1,92 +1,102 @@
-use std::{any::Any, collections::HashMap};
+use std::collections::HashMap;
 
-use crate::{prelude::{Block, Function, TypeMetadata, Var}, IR::ir::*};
+use crate::{prelude::{Block, Function, Type, TypeMetadata, Var}, Target::{registry::{BackendInfos, VarStorage}, TARGETS}, IR::ir::*};
 use crate::Target::CallConv;
 
-/// Stores compilation infos for ir node compilation
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct x64CompilationInfos {
-    pub(crate) varsStorage: HashMap<Var, VarStorage>,
-}
+pub(crate) fn CompileAddVarVar(add: &Add<Var, Var, Var>) -> Vec<String> {
+    let infos = &mut TARGETS.get().unwrap().lock().unwrap().backend;
+
+    let loc1 = if let Some(loc1) = infos.varsStorage.get(&add.inner1) {
+        loc1.clone()
+    } else {
+        panic!("unknown variable: {:?}", add.inner1)
+    };
+
+    let loc2 = if let Some(loc2) = infos.varsStorage.get(&add.inner2) {
+        loc2.clone()
+
+    } else {
+        panic!("unknown variable: {:?}", add.inner1)
+    };
+
+    let op0 = if let VarStorage::Register(ref reg) = loc1 {
+        reg.to_string()
+    } else if let VarStorage::Memory(ref mem) = loc1 {
+        mem.to_string()
+    } else { panic!() };
+
+    let op1 = if let VarStorage::Register(ref reg) = loc2 {
+        reg.to_string()
+    } else if let VarStorage::Memory(ref mem) = loc2 {
+        mem.to_string()
+    } else { panic!() };
+
+    let ret: String = "rax".into();
+    todo!("implement actual variable storage information");
+
+    infos.insertVar(
+        add.inner3.clone(),
+        VarStorage::Register(ret.clone())
+    );
+
+    if let VarStorage::Register(_) = loc1 {
+        if let VarStorage::Register(_) = loc2 {
+            return vec![format!("lea {}, [{} + {}", ret, op0, op1)];
+        }
+    }
 
-impl x64CompilationInfos {
-    pub(crate) fn insertVar(&mut self, var: Var, store: VarStorage) {
-        self.varsStorage.insert(var, store);
+    if let VarStorage::Memory(_) = loc1 {
+        if let VarStorage::Memory(_) = loc2 {
+            return vec![
+                format!("mov rax, {}", op0),
+                format!("mov rbx, {}", op1),
+                format!("add rax, rbx"),
+                format!("mov rax, {}", ret),
+            ];
+        }
     }
-}
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) enum VarStorage {
-    Register(String),
-    Memory(String),
+    vec![]
 }
 
-/// A trait which is used to implement compilability for ir nodes
-pub(crate) trait Compile: Ir {
-    /// Compiles the node into an asm string
-    fn compile(&self, infos: &mut x64CompilationInfos) -> Vec<String>;
+pub(crate) fn CompileRetType(ret: &Return<Type>) -> Vec<String> {
vec![format!("mov {}, {}", match ret.inner1 { + Type::u16(_) | Type::i16(_) => TARGETS.get().unwrap().lock().unwrap().call.ret16(), + Type::u32(_) | Type::i32(_) => TARGETS.get().unwrap().lock().unwrap().call.ret32(), + Type::u64(_) | Type::i64(_) => TARGETS.get().unwrap().lock().unwrap().call.ret64(), + Type::Void => todo!(), + }, ret.inner1.val())] } -impl Compile for Add { - fn compile(&self, infos: &mut x64CompilationInfos) -> Vec { - let loc1 = if let Some(loc1) = infos.varsStorage.get(&self.inner1) { - loc1.clone() - } else { - panic!("unknown variable: {:?}", self.inner1) - }; - - let loc2 = if let Some(loc2) = infos.varsStorage.get(&self.inner2) { - loc2.clone() - - } else { - panic!("unknown variable: {:?}", self.inner1) - }; - - let op0 = if let VarStorage::Register(ref reg) = loc1 { - reg.to_string() - } else if let VarStorage::Memory(ref mem) = loc1 { - mem.to_string() - } else { panic!() }; - - let op1 = if let VarStorage::Register(ref reg) = loc2 { - reg.to_string() - } else if let VarStorage::Memory(ref mem) = loc2 { - mem.to_string() - } else { panic!() }; - - let ret: String = "rax".into(); - - infos.insertVar( - self.inner3.clone(), - VarStorage::Register(ret.clone()) - ); - - if let VarStorage::Register(_) = loc1 { - if let VarStorage::Register(_) = loc2 { - return vec![format!("lea {}, [{} + {}", ret, op0, op1)]; - } - } - if let VarStorage::Memory(_) = loc1 { - if let VarStorage::Memory(_) = loc2 { - return vec![ - format!("mov rax, {}", op0), - format!("mov rbx, {}", op1), - format!("add rax, rbx"), - format!("mov rax, {}", ret), - ]; - } - } +pub(crate) fn CompileRetVar(ret: &Return) -> Vec { + let target = TARGETS.get().unwrap().lock().unwrap(); + let (var, loc) = if let Some(loc) = target.backend.varsStorage.get_key_value(&ret.inner1) { + loc.clone() + } else { + panic!("unknown variable: {:?}", ret.inner1) + }; - vec![] + if var.ty == TypeMetadata::Void { + return vec![]; } -} + vec![format!("mov {}, {}", match var.ty { + TypeMetadata::u16 | TypeMetadata::i16 => target.call.ret16(), + TypeMetadata::u32 | TypeMetadata::i32 => target.call.ret32(), + TypeMetadata::u64 | TypeMetadata::i64=> target.call.ret64(), + _ => unreachable!(), + }, { + if let VarStorage::Memory(mem) = loc { mem } + else if let VarStorage::Register(reg) = loc { reg } + else { unreachable!() } + })] +} impl Block { /// Builds the block to x86 assembly intel syntax pub fn buildAsmX86(&self, func: &Function, call: &CallConv) -> Vec { - let mut info = x64CompilationInfos { varsStorage: HashMap::new() }; + let mut info = BackendInfos { varsStorage: HashMap::new() }; let mut reg_vars = 0; let mut stack_off = 0; @@ -116,8 +126,7 @@ impl Block { } for node in &self.nodes { - let ty = (node.as_any()).downcast_ref::>().unwrap(); - ty.compile(&mut info); + node.compile(); } vec![] diff --git a/src/Target/x64/mod.rs b/src/Target/x64/mod.rs index 7271bbf6..9a41445a 100644 --- a/src/Target/x64/mod.rs +++ b/src/Target/x64/mod.rs @@ -2,6 +2,8 @@ use std::sync::Mutex; +use ir::*; + use super::{registry::TARGETS, Arch, TargetRegistry}; pub(crate) mod ir; @@ -14,4 +16,7 @@ pub fn initializeX64Target() { }); TARGETS.get().unwrap().lock().unwrap().set_inited(Arch::X86_64); + + TARGETS.get().unwrap().lock().unwrap().setCompileFuncForRetType(Arch::X86_64, CompileRetType); + TARGETS.get().unwrap().lock().unwrap().setCompileFuncForRetVar(Arch::X86_64, CompileRetVar); } \ No newline at end of file