separate crates for compiler-lib, compiler-cli and vm

Michael Sippel 2024-08-14 00:26:18 +02:00
parent 72122bf4fc
commit 184c8f3d50
Signed by: senvas
GPG key ID: F96CF119C34B64A6
19 changed files with 337 additions and 191 deletions


@@ -1,11 +1,6 @@
-[package]
-name = "ltir"
-version = "0.1.0"
-edition = "2021"
-
-[dependencies]
-laddertypes = { path = "../lib-laddertypes" }
-tisc = { path = "../lib-tisc" }
-iterate-text = "0.0.1"
-tiny-ansi = "0.1.0"
+[workspace]
+members = [
+    "lib-ltcore",
+    "ltcc",
+    "ltvm"
+]

lib-ltcore/Cargo.toml Normal file (+10)

@@ -0,0 +1,10 @@
[package]
name = "ltcore"
version = "0.1.0"
edition = "2021"
[dependencies]
laddertypes = { path = "../../lib-laddertypes" }
tisc = { path = "../../lib-tisc" }
serde = { version = "1.0", features = ["derive"] }

lib-ltcore/src/lib.rs Normal file (+8)

@@ -0,0 +1,8 @@
pub mod expr;
pub mod lexer;
pub mod parser;
pub mod procedure_compiler;
pub mod runtime;
pub mod symbols;
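
For orientation, a condensed sketch of how a consumer such as ltcc drives the new ltcore library crate, following the pipeline in ltcc/src/main.rs further down in this diff; `compile_one` is a hypothetical wrapper name, and the comment-token filtering and diagnostic reporting are omitted here, so treat it as an outline rather than a drop-in driver.

use ltcore::{procedure_compiler::ProcedureCompiler, symbols::Scope};

// Hypothetical helper: lex, parse and compile one source file into the linker.
fn compile_one(path: String, linker: &mut tisc::Linker) {
    // runtime scope and type context come from lib-ltcore
    let root_scope = ltcore::runtime::init_runtime(linker);
    let main_scope = Scope::with_parent(&root_scope);
    let typectx = main_scope.read().unwrap().typectx.clone();

    // lex and parse (comment filtering left out in this sketch)
    let chars = iterate_text::file::characters::IterateFileCharacters::new(path.clone());
    let mut tokens = ltcore::lexer::LTIRLexer::from(chars.peekable()).peekable();

    if let Ok(ast) = ltcore::parser::parse_expr(&typectx, &mut tokens) {
        // compile the AST to assembly and register the procedure with the linker
        let (exports, _diagnostics, proc_code) = ProcedureCompiler::new(&main_scope)
            .compile(&ast)
            .into_asm(&path);
        main_scope.write().unwrap().import(exports);
        linker.add_procedure(path.as_str(), proc_code);
    }
}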

ltcc/Cargo.toml Normal file (+13)

@@ -0,0 +1,13 @@
[package]
name = "ltcc"
version = "0.1.0"
edition = "2021"
[dependencies]
ltcore = { path = "../lib-ltcore" }
tisc = { path = "../../lib-tisc" }
clap = { version = "4.5.15", features = ["derive"] }
tiny-ansi = "0.1.0"
iterate-text = "0.0.1"
bincode = "1.3.3"

ltcc/hello.lt Normal file (+8)

@@ -0,0 +1,8 @@
export {
    let star = λ{}
        ↦ emit 42;

    let main = λ{} ↦ {
        print-nullterm 'H''e''l''l''o'' ''W''o''r''l''d''!''\n''\0';
    };
}
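
With the two new binaries, the expected round trip for this sample is to compile it with ltcc and then execute the exported main with ltvm. A small sketch using std::process::Command; the binary names and flags come from the clap definitions in this diff, while passing stdio.lt.o to ltvm (so that print-nullterm resolves) is an assumption.

use std::process::Command;

fn main() {
    // ltcc: positional source files plus --output, as declared in ltcc/src/main.rs
    let compile = Command::new("ltcc")
        .args(["hello.lt", "--output", "hello.lt.o"])
        .status()
        .expect("failed to run ltcc");
    assert!(compile.success());

    // ltvm: defaults to --entry main and --memsize 4096; stdio.lt.o is included
    // here on the assumption that print-nullterm is linked from it.
    let run = Command::new("ltvm")
        .args(["stdio.lt.o", "hello.lt.o"])
        .status()
        .expect("failed to run ltvm");
    assert!(run.success());
}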

ltcc/hello.lt.o Normal file (BIN, binary file not shown)

ltcc/src/diagnostic.rs Normal file (+70)

@@ -0,0 +1,70 @@
use {
    std::collections::HashMap,
    std::sync::{Arc, RwLock},
    std::{boxed::Box, ops::Deref},
    tiny_ansi::TinyAnsi,
    ltcore::{
        lexer::InputRegionTag,
        expr::{LTExpr, Statement},
        procedure_compiler::ProcedureCompiler,
        symbols::Scope,
    }
};

pub fn print_diagnostic(
    path: &str,
    region: InputRegionTag,
    message: String
) {
    let lines = iterate_text::file::lines::IterateFileLines::new(path);

    let mut line_region = InputRegionTag::default();
    let n_before = 3;
    let n_after = 3;

    let mut last_lines = Vec::new();
    let mut next_lines = 0;

    println!("\n{}:", path.green());

    for (i, l) in lines.enumerate() {
        line_region.end += l.chars().count();

        last_lines.push((i+1, l.clone()));
        if last_lines.len() > n_before {
            last_lines.remove(0);
        }

        if region.begin >= line_region.begin &&
           region.begin < line_region.end {
            next_lines = n_after;

            let column_begin = region.begin - line_region.begin;
            let column_end = region.end - line_region.begin;

            // display the source line
            for (j,ll) in last_lines.iter() {
                print!("{}\t{}{}",
                    format!("{}",j).to_string().bright_black(),
                    "|".bright_black().bold(),
                    ll.bright_white());
            }

            print!("\t{}", "|".bright_magenta());
            for _ in 0..column_begin { print!("{}", ".".magenta().bold()); }
            for _ in column_begin..column_end { print!("{}", "^".magenta().bold()); }
            print!("\n");
            print!("{} [{}-{}]: {}\n", "error".bright_red(), column_begin, column_end, message.white());
        }
        else if next_lines > 0 {
            next_lines -= 1;
            print!("{}\t{}{}", format!("{}", i+1).to_string().bright_black(), "|".bright_black().bold(), l.bright_white());
        }

        line_region.begin = line_region.end;
    }
}

ltcc/src/main.rs Normal file (+125)

@@ -0,0 +1,125 @@
use clap::Parser;

use {
    std::collections::HashMap,
    std::sync::{Arc, RwLock},
    std::{boxed::Box, ops::Deref},
    std::io::Write,
    tiny_ansi::TinyAnsi,
    ltcore::{
        lexer::InputRegionTag,
        expr::{LTExpr, Statement},
        procedure_compiler::ProcedureCompiler,
        symbols::Scope,
    }
};

mod diagnostic;

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    /// source files
    sources: Vec< String >,

    /// path to the target bytecode file
    #[arg(short, long)]
    output: String
}

fn main() {
    let args = Args::parse();

    let mut linker = tisc::Linker::new();
    let root_scope = ltcore::runtime::init_runtime(&mut linker);
    let main_scope = Scope::with_parent(&root_scope);
    let typectx = main_scope.read().unwrap().typectx.clone();

    for path in args.sources {
        let iter_chars = iterate_text::file::characters::IterateFileCharacters::new(path.clone());

        /* compile source file
         */
        let mut lexer = ltcore::lexer::LTIRLexer::from( iter_chars.peekable() );
        let mut program_tokens =
            lexer
                .filter(|tok| match tok {
                    (_, Ok(ltcore::lexer::LTIRToken::Comment(_))) => false,
                    _ => true
                })
                .peekable();

        match ltcore::parser::parse_expr( &typectx, &mut program_tokens ) {
            Ok( ast ) => {
                let (exports, diagnostics, proc_code) = ProcedureCompiler::new(&main_scope)
                    .compile(&ast)
                    .into_asm(&path);

                for (region, message) in diagnostics {
                    crate::diagnostic::print_diagnostic(
                        path.as_str(),
                        region,
                        format!("{}", message)
                    );
                }

                eprintln!("{} {}", "Compiled".green(), path.bold());
                for (name, def) in exports.iter() {
                    eprintln!("export {}: {:?}", name.yellow().bold(), def);
                }
                main_scope.write().unwrap().import(
                    exports
                );

                /* link assembly-program to symbols
                 */
                linker.add_procedure(path.as_str(), proc_code);
            }
            Err( (region, parse_error) ) => {
                crate::diagnostic::print_diagnostic(
                    path.as_str(),
                    region,
                    format!("{:?}", parse_error)
                );
                eprintln!("=======\nParse Error: Abort\n");
            }
        }
    }

    eprintln!("write output file {}", args.output);

    let obj_file = tisc::linker::ObjectFile {
        symbols: Arc::into_inner(main_scope).unwrap().into_inner().unwrap()
            .export()
            .into_iter()
            .filter_map(|(symbol, def)| match def {
                ltcore::symbols::SymbolDef::Procedure { in_types:_, out_types:_, link_addr, export } => {
                    if export {
                        match link_addr {
                            tisc::LinkAddr::Absolute(w) => {
                                eprintln!("add symbol {} -> {}", symbol, w);
                                Some(( symbol, w ))
                            }
                            tisc::LinkAddr::Relative{ symbol: b, offset } => {
                                let addr = linker.get_link_addr(&b).unwrap_or(-1);
                                eprintln!("relative symbol {} -> {}({})+{}", symbol, b, addr, offset);
                                Some((symbol, addr + offset ))
                            }
                        }
                    } else {
                        None
                    }
                }
                _ => None
            })
            .collect(),

        code: linker.link_partial().expect("Link error:")
    };

    let mut output = std::io::BufWriter::new(
        std::fs::File::create(args.output).expect("Failed to open file")
    );
    bincode::serialize_into( output, &obj_file )
        .expect("Failed to write object file");
}

ltcc/src/oldmain.rs Normal file (+30)

@@ -0,0 +1,30 @@
use {
    std::collections::HashMap,
    std::sync::{Arc, RwLock},
    std::{boxed::Box, ops::Deref},
    tiny_ansi::TinyAnsi
};

use crate::{
    lexer::InputRegionTag,
    expr::{LTExpr, Statement},
    procedure_compiler::ProcedureCompiler,
    symbols::Scope,
};

/* TODO:
 * - import function symbols
 * - Compiler error reporting
 * - parse float literals
 * - return type annotation
 * - write to address resulting from expression
 * - sized objects
 * - Typecheck for LTExpr::Application
 * - typecheck & inference for rest
 */

fn main() {
    // create virtual machine with 4096 words of memory
    let mut vm = tisc::VM::new(0x1000);
}

ltcc/stdio.lt.o Normal file (BIN, binary file not shown)

ltvm/Cargo.toml Normal file (+12)

@@ -0,0 +1,12 @@
[package]
name = "ltvm"
version = "0.1.0"
edition = "2021"
[dependencies]
ltcore = { path = "../lib-ltcore" }
tisc = { path = "../../lib-tisc" }
clap = { version = "4.5.15", features = ["derive"] }
tiny-ansi = "0.1.0"
bincode = "1.3.3"

ltvm/src/main.rs Normal file (+55)

@@ -0,0 +1,55 @@
use {
    std::io::Read,
    clap::Parser,
    tiny_ansi::TinyAnsi,
};

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    /// source files
    sources: Vec< String >,

    /// entry symbol
    #[arg(short, long, default_value_t = String::from("main"))]
    entry: String,

    /// memory size
    #[arg(short, long, default_value_t = 0x1000)]
    memsize: usize
}

fn main() {
    let args = Args::parse();

    let mut vm = tisc::VM::new( args.memsize );
    let mut linker = tisc::Linker::new();
    let mut symbols = std::collections::HashMap::<String, tisc::LinkAddr>::new();

    for source_path in args.sources.iter() {
        let mut input = std::io::BufReader::new(
            std::fs::File::open(source_path).expect("Failed to open file")
        );
        linker.import( source_path, bincode::deserialize_from( input ).expect("") );
    }

    /*
    let entry_addr = symbols.get(&args.entry)
        .expect(&format!("cant find entry symbol '{}'", args.entry));
    */
    let entry_addr = linker.get_link_addr(&args.entry).unwrap();

    let bytecode = linker.link_total().expect("Link error:");
    eprintln!("{} ({} bytes)", "Loaded bytecode.".green(), bytecode.len());
    eprintln!("================\n");

    vm.load(bytecode);
    vm.execute(entry_addr);

    eprintln!(
        "\n================\nVM execution finished\ndatastack = {:?}\n====",
        vm.data_stack
    );
}


@@ -1,180 +0,0 @@
use {
    std::collections::HashMap,
    std::sync::{Arc, RwLock},
    std::{boxed::Box, ops::Deref},
    tiny_ansi::TinyAnsi
};

mod expr;
mod lexer;
mod parser;
mod procedure_compiler;
mod runtime;
mod symbols;

use crate::{
    lexer::InputRegionTag,
    expr::{LTExpr, Statement},
    procedure_compiler::ProcedureCompiler,
    symbols::Scope,
};

fn print_diagnostic(
    path: &str,
    region: InputRegionTag,
    message: String
) {
    let lines = iterate_text::file::lines::IterateFileLines::new(path);

    let mut line_region = InputRegionTag::default();
    let n_before = 3;
    let n_after = 3;

    let mut last_lines = Vec::new();
    let mut next_lines = 0;

    println!("\n{}:", path.green());

    for (i, l) in lines.enumerate() {
        line_region.end += l.chars().count();

        last_lines.push((i+1, l.clone()));
        if last_lines.len() > n_before {
            last_lines.remove(0);
        }

        if region.begin >= line_region.begin &&
           region.begin < line_region.end {
            next_lines = n_after;

            let column_begin = region.begin - line_region.begin;
            let column_end = region.end - line_region.begin;

            // display the source line
            for (j,ll) in last_lines.iter() {
                print!("{}\t{}{}",
                    format!("{}",j).to_string().bright_black(),
                    "|".bright_black().bold(),
                    ll.bright_white());
            }

            print!("\t{}", "|".bright_magenta());
            for _ in 0..column_begin { print!("{}", ".".magenta().bold()); }
            for _ in column_begin..column_end { print!("{}", "^".magenta().bold()); }
            print!("\n");
            print!("{} [{}-{}]: {}\n", "error".bright_red(), column_begin, column_end, message.white());
        }
        else if next_lines > 0 {
            next_lines -= 1;
            print!("{}\t{}{}", format!("{}", i+1).to_string().bright_black(), "|".bright_black().bold(), l.bright_white());
        }

        line_region.begin = line_region.end;
    }
}

/* TODO:
 * - import function symbols
 * - Compiler error reporting
 * - parse float literals
 * - return type annotation
 * - write to address resulting from expression
 * - sized objects
 * - Typecheck for LTExpr::Application
 * - typecheck & inference for rest
 */

fn main() {
    // create virtual machine with 4096 words of memory
    let mut vm = tisc::VM::new(0x1000);
    let mut linker = tisc::Linker::new();

    let root_scope = crate::runtime::init_runtime(&mut linker);
    let main_scope = Scope::with_parent(&root_scope);
    let typectx = main_scope.read().unwrap().typectx.clone();

    /* open source file
     */
    let args: Vec<String> = std::env::args().collect();
    if args.len() < 2 {
        eprintln!("{}", "No source files specified.".red());
        return;
    }

    let mut args_iter = args.into_iter();
    args_iter.next();

    for path in args_iter {
        let iter_chars = iterate_text::file::characters::IterateFileCharacters::new(path.clone());

        /* compile source file
         */
        let mut lexer = lexer::LTIRLexer::from( iter_chars.peekable() );
        let mut program_tokens = lexer.filter(|tok| match tok {
                (_, Ok(lexer::LTIRToken::Comment(_))) => false,
                _ => true
            })
            .peekable();

        match parser::parse_expr( &typectx, &mut program_tokens ) {
            Ok( ast ) => {
                let (exports, diagnostics, bytecode) = ProcedureCompiler::new(&main_scope)
                    .compile(&ast)
                    .into_asm(&path);

                for (region, message) in diagnostics {
                    print_diagnostic(
                        path.as_str(),
                        region,
                        format!("{}", message)
                    );
                }

                eprintln!("{} {}", "Compiled".green(), path.bold());
                for (name, def) in exports.iter() {
                    eprintln!("export {}: {:?}", name.yellow().bold(), def);
                }
                main_scope.write().unwrap().import(
                    exports
                );

                /* link assembly-program to symbols
                 */
                linker.add_procedure(path.as_str(), bytecode);
            }
            Err( (region, parse_error) ) => {
                print_diagnostic(
                    path.as_str(),
                    region,
                    format!("{:?}", parse_error)
                );
                eprintln!("=======\nParse Error: Abort\n");
            }
        }
    }

    /* load & run compiled bytecode
     */
    let main_addr = linker
        .get_link_addr(&"main.lt".into())
        .expect("'main.lt' not found");

    let bytecode = linker.link_total().expect("Link error:");
    eprintln!("{} ({} bytes)", "Linked bytecode.".green(), bytecode.len());
    eprintln!("================\n");

    vm.load(bytecode);
    vm.execute(main_addr);

    eprintln!(
        "\n================\nVM execution finished\ndatastack = {:?}\n====",
        vm.data_stack
    );
}