Compare commits
72122bf4fc ... b8535aa772
16 commits

SHA1
b8535aa772
c9c42d383f
d295243dd0
2ac69a7b12
1a152670d3
8fd59f04ee
0cbbcd5b24
ce3967c2de
70e5ef734c
7441826f58
bacb3cf519
7cc47f05c5
08f592ad60
f5984e0b08
cf2ea4df78
4e9a4d1204
25 changed files with 1708 additions and 816 deletions
Cargo.toml (17 changed lines)

@@ -1,11 +1,6 @@
-[package]
-name = "ltir"
-version = "0.1.0"
-edition = "2021"
-
-[dependencies]
-laddertypes = { path = "../lib-laddertypes" }
-tisc = { path = "../lib-tisc" }
-iterate-text = "0.0.1"
-tiny-ansi = "0.1.0"
-
+[workspace]
+members = [
+    "lib-ltcore",
+    "ltcc",
+    "ltvm"
+]
lib-ltcore/Cargo.toml (new file, 10 added lines)

@@ -0,0 +1,10 @@
[package]
name = "ltcore"
version = "0.1.0"
edition = "2021"

[dependencies]
laddertypes = { path = "../../lib-laddertypes", features = ["pretty"] }
tisc = { path = "../../lib-tisc" }
serde = { version = "1.0", features = ["derive"] }
tiny-ansi = "0.1.0"
lib-ltcore/src/expr.rs (new file, 215 added lines)

@@ -0,0 +1,215 @@
use {
    std::{
        boxed::Box,
        sync::{Arc, RwLock}
    },
    crate::{
        lexer::InputRegionTag,
        symbols::Scope
    },
    tiny_ansi::TinyAnsi
};

#[derive(Clone, Debug)]
pub enum Statement {
    Assignment {
        name_region: InputRegionTag,
        var_id: String,
        val_expr: LTExpr,
    },
    LetAssign {
        name_region: InputRegionTag,
        typ: Option<laddertypes::TypeTerm>,
        var_id: String,
        val_expr: LTExpr,
    },
    Return(LTExpr),
    Expr(LTExpr),
}

#[derive(Clone, Debug, PartialEq)]
pub enum TypeErrorKind {
    // ParseError(laddertypes::parser::ParseError),
    AssignMismatch {
        expected: laddertypes::TypeTerm,
        received: laddertypes::TypeTerm,
    },
    ArgTypeMismatch {
        expected: laddertypes::TypeTerm,
        received: laddertypes::TypeTerm,
    },
    BranchMismatch {
        if_branch: laddertypes::TypeTerm,
        else_branch: laddertypes::TypeTerm
    },
    SuperfluousArgument,
    NoSymbol,
    Todo
}

#[derive(Clone, Debug)]
pub struct TypeError {
    pub region: InputRegionTag,
    pub kind: TypeErrorKind
}

impl TypeErrorKind {
    pub fn fmt(&self, dict: &mut impl laddertypes::TypeDict) -> String {
        match self {
            TypeErrorKind::BranchMismatch { if_branch, else_branch } => {
                format!("Type Mismatch\nif branch\n:::{}\nelse branch\n:::{}",
                    if_branch.clone().sugar(dict).pretty(dict, 1),
                    else_branch.clone().sugar(dict).pretty(dict, 1)
                )
            },
            TypeErrorKind::AssignMismatch { expected, received } |
            TypeErrorKind::ArgTypeMismatch { expected, received } => {
                format!("Type Mismatch\n{}{}\n{}{}",
                    "expected\n ::: ".green(),
                    expected.clone().sugar(dict).pretty(dict, 1),
                    "received\n ::: ".green(),
                    received.clone().sugar(dict).pretty(dict, 1)
                )
            }
            TypeErrorKind::SuperfluousArgument => {
                format!("Superfluous Argument")
            }
            TypeErrorKind::NoSymbol => {
                format!("Unknown Symbol")
            }
            TypeErrorKind::Todo => {
                format!("TODO")
            }
        }
    }
}

pub type TypeTag = Result< laddertypes::TypeTerm, Vec<TypeError> >;

#[derive(Clone, Debug)]
pub enum LTExpr {
    WordLiteral {
        region: InputRegionTag,
        val: tisc::VM_Word,
    },
    StringLiteral {
        region: InputRegionTag,
        value: String,
    },
    Symbol {
        region: InputRegionTag,
        typ: Option<TypeTag>,
        symbol: String,
    },
    Ascend {
        region: InputRegionTag,
        typ: laddertypes::TypeTerm,
        expr: Box<LTExpr>
    },
    Descend {
        region: InputRegionTag,
        typ: laddertypes::TypeTerm,
        expr: Box<LTExpr>
    },
    Application {
        region: InputRegionTag,
        typ: Option<TypeTag>,
        head: Box<LTExpr>,
        body: Vec<LTExpr>,
    },
    Abstraction {
        region: InputRegionTag,
        scope: Arc<RwLock<Scope>>,
        args: Vec<(InputRegionTag, String, Option<TypeTag>)>,
        body: Box<LTExpr>,
    },
    Branch {
        region: InputRegionTag,
        condition: Box<LTExpr>,
        if_expr: Box<LTExpr>,
        else_expr: Box<LTExpr>,
    },
    WhileLoop {
        region: InputRegionTag,
        condition: Box<LTExpr>,
        body: Box<LTExpr>,
    },
    Block {
        region: InputRegionTag,
        scope: Arc<RwLock<Scope>>,
        statements: Vec<Statement>,
    },
    ExportBlock {
        region: InputRegionTag,
        scope: Arc<RwLock<Scope>>,
        statements: Vec<Statement>,
    }
}

impl LTExpr {
    pub fn get_region(&self) -> InputRegionTag {
        match self {
            LTExpr::WordLiteral{ region, val } => region,
            LTExpr::StringLiteral{ region, value } => region,
            LTExpr::Symbol{ region, typ, symbol } => region,
            LTExpr::Ascend{ region, typ, expr } => region,
            LTExpr::Descend{ region, typ, expr } => region,
            LTExpr::Application{ region, typ, head, body } => region,
            LTExpr::Abstraction{ region, scope, args, body } => region,
            LTExpr::Branch{ region, condition, if_expr, else_expr } => region,
            LTExpr::WhileLoop{ region, condition, body } => region,
            LTExpr::Block{ region, scope, statements } => region,
            LTExpr::ExportBlock{ region, scope, statements } => region
        }.clone()
    }

    pub fn get(&self, addr: Vec<usize>) -> Option<LTExpr> {
        if addr.len() == 0 {
            Some(self.clone())
        } else {
            let mut sub_addr = addr.clone();
            let top_idx = sub_addr.remove(0);
            match self {
                LTExpr::Ascend{ region, typ, expr } => expr.get(addr),
                LTExpr::Descend{ region, typ, expr } => expr.get(addr),
                LTExpr::Application{ region, typ, head, body } => {
                    match top_idx {
                        0 => head.get(sub_addr),
                        i => {
                            if let Some(b) = body.get(i - 1) {
                                b.get(sub_addr)
                            } else {
                                None
                            }
                        }
                    }
                }
                LTExpr::Abstraction{ region, scope, args, body } => {
                    body.get(addr)
                }
                LTExpr::Branch{ region, condition, if_expr, else_expr } => {
                    match top_idx {
                        0 => condition.get(sub_addr),
                        1 => if_expr.get(sub_addr),
                        2 => else_expr.get(sub_addr),
                        _ => None
                    }
                }
                LTExpr::WhileLoop{ region, condition, body } => {
                    match top_idx {
                        0 => condition.get(sub_addr),
                        1 => body.get(sub_addr),
                        _ => None
                    }
                }
                LTExpr::Block{ region, scope, statements } |
                LTExpr::ExportBlock{ region, scope, statements } => {
                    // statements.get(top_idx)?.get(sub_addr)
                    None
                }
                _ => None
            }
        }
    }
}
|
@@ -6,7 +6,7 @@ pub enum LTIRToken {
|
|||
Num(i64),
|
||||
|
||||
// SingleQuote(String),
|
||||
// DoubleQuote(String),
|
||||
DoubleQuote(String),
|
||||
// TripleQuote(String),
|
||||
Lambda,
|
||||
MapsTo,
|
||||
|
@@ -16,6 +16,9 @@ pub enum LTIRToken {
|
|||
ExprOpen,
|
||||
ExprClose,
|
||||
|
||||
Ascend(String),
|
||||
Descend(String),
|
||||
|
||||
BlockOpen,
|
||||
BlockClose,
|
||||
StatementSep,
|
||||
|
@@ -25,6 +28,7 @@ pub enum LTIRToken {
|
|||
pub enum LexError {
|
||||
InvalidDigit,
|
||||
InvalidChar,
|
||||
UnexpectedEnd
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Debug)]
|
||||
|
@@ -35,6 +39,10 @@ pub enum LexerState {
|
|||
Sym(String),
|
||||
Num(i64),
|
||||
Char(Option<char>),
|
||||
DoubleQuote(String),
|
||||
|
||||
Ascend(String),
|
||||
Descend(String)
|
||||
}
|
||||
|
||||
impl LexerState {
|
||||
|
@@ -46,6 +54,9 @@ impl LexerState {
|
|||
LexerState::Sym(s) => Some(LTIRToken::Symbol(s)),
|
||||
LexerState::Num(n) => Some(LTIRToken::Num(n)),
|
||||
LexerState::Char(c) => Some(LTIRToken::Char(c?)),
|
||||
LexerState::DoubleQuote(s) => Some(LTIRToken::DoubleQuote(s)),
|
||||
LexerState::Ascend(s) => Some(LTIRToken::Ascend(s)),
|
||||
LexerState::Descend(s) => Some(LTIRToken::Descend(s))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -196,6 +207,12 @@ where
|
|||
}
|
||||
}
|
||||
}
|
||||
'\"' => {
|
||||
self.chars.next();
|
||||
self.position += 1;
|
||||
region.end += 1;
|
||||
state = LexerState::DoubleQuote(String::new());
|
||||
}
|
||||
c => {
|
||||
if c.is_whitespace() {
|
||||
self.chars.next();
|
||||
|
@@ -242,6 +259,40 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
LexerState::DoubleQuote(val) => {
|
||||
match self.chars.next() {
|
||||
Some('\"') => {
|
||||
self.position += 1;
|
||||
region.end +=1;
|
||||
break;
|
||||
}
|
||||
Some('\\') => {
|
||||
self.position += 2;
|
||||
region.end += 2;
|
||||
match self.chars.next() {
|
||||
Some('0') => {
|
||||
val.push('\0');
|
||||
}
|
||||
Some('n') => {
|
||||
val.push('\n');
|
||||
}
|
||||
Some('\\') => {
|
||||
val.push('\\');
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Some(c) => {
|
||||
self.position += 1;
|
||||
region.end += 1;
|
||||
val.push(c);
|
||||
}
|
||||
None => {
|
||||
return Some((region, Err(LexError::UnexpectedEnd)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LexerState::Char(val) => {
|
||||
self.position += 2;
|
||||
region.end += 2;
|
||||
|
@@ -290,6 +341,19 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
LexerState::Ascend(s) |
|
||||
LexerState::Descend(s) => {
|
||||
if *c == ')' || *c == ';' {
|
||||
let token = state.clone().into_token().unwrap();
|
||||
return Some((region, Ok(token)));
|
||||
} else {
|
||||
if let Some(c) = self.chars.next() {
|
||||
self.position += 1;
|
||||
region.end += 1;
|
||||
s.push(c);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if c.is_whitespace()
|
||||
|| *c == '('
|
||||
|
@@ -302,6 +366,28 @@ where
|
|||
|| *c == '↦'
|
||||
{
|
||||
// finish the current token
|
||||
match &mut state {
|
||||
LexerState::Sym(s) => {
|
||||
match s.as_str(){
|
||||
"as"=> {
|
||||
self.chars.next();
|
||||
self.position += 1;
|
||||
region.end += 1;
|
||||
state = LexerState::Ascend(String::new());
|
||||
continue;
|
||||
}
|
||||
"des" => {
|
||||
self.chars.next();
|
||||
self.position += 1;
|
||||
region.end += 1;
|
||||
state = LexerState::Descend(String::new());
|
||||
continue;
|
||||
}
|
||||
_ =>{}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if let Some(token) = state.clone().into_token() {
|
||||
return Some((region, Ok(token)));
|
||||
|
@@ -325,7 +411,6 @@ where
|
|||
return Some((region, Err(LexError::InvalidDigit)));
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
@@ -363,3 +448,4 @@ mod tests {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
lib-ltcore/src/lib.rs (new file, 9 added lines)

@@ -0,0 +1,9 @@

pub mod expr;
pub mod lexer;
pub mod parser;
pub mod typing;
pub mod procedure_compiler;
pub mod runtime;
pub mod symbols;
|
@@ -2,6 +2,12 @@ use {
|
|||
crate::{
|
||||
expr::{LTExpr, Statement, TypeError, TypeTag},
|
||||
lexer::{LTIRLexer, LTIRToken, LexError, InputRegionTag},
|
||||
symbols::{Scope}
|
||||
},
|
||||
laddertypes::{
|
||||
dict::TypeDict,
|
||||
parser::ParseLadderType,
|
||||
unparser::UnparseLadderType
|
||||
},
|
||||
std::{
|
||||
iter::Peekable,
|
||||
|
@ -54,9 +60,9 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
* `: T`
|
||||
*/
|
||||
pub fn parse_type_tag<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
typectx: &mut impl TypeDict,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<Option<laddertypes::TypeTerm>, (InputRegionTag, ParseError)>
|
||||
) -> Result<Option<(InputRegionTag, laddertypes::TypeTerm)>, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
let peek = { tokens.peek().cloned() };
|
||||
|
@ -64,8 +70,8 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
match peektok {
|
||||
Ok(LTIRToken::AssignType(typeterm_str)) => {
|
||||
tokens.next();
|
||||
match typectx.write().unwrap().parse(typeterm_str.as_str()) {
|
||||
Ok(typeterm) => Ok(Some(typeterm)),
|
||||
match typectx.parse(typeterm_str.as_str()) {
|
||||
Ok(typeterm) => Ok(Some((region, typeterm))),
|
||||
Err(parse_error) => Err((region, ParseError::TypeParseError(parse_error))),
|
||||
}
|
||||
}
|
||||
|
@ -81,11 +87,11 @@ pub enum VariableBinding {
|
|||
Atomic {
|
||||
region: InputRegionTag,
|
||||
symbol: String,
|
||||
typtag: Option<laddertypes::TypeTerm>
|
||||
typtag: Option<(InputRegionTag, laddertypes::TypeTerm)>
|
||||
},
|
||||
Struct {
|
||||
members: Vec< VariableBinding >,
|
||||
typtag: Option<laddertypes::TypeTerm>
|
||||
typtag: Option<(InputRegionTag, laddertypes::TypeTerm)>
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -93,7 +99,7 @@ impl VariableBinding {
|
|||
pub fn flatten(self) -> Vec<(InputRegionTag, String, Option<laddertypes::TypeTerm>)> {
|
||||
match self {
|
||||
VariableBinding::Atomic{ region, symbol, typtag } =>
|
||||
vec![ (region, symbol, typtag) ],
|
||||
vec![ (region, symbol, typtag.map(|t|t.1)) ],
|
||||
VariableBinding::Struct{ members, typtag } =>
|
||||
members
|
||||
.into_iter()
|
||||
|
@ -109,7 +115,7 @@ impl VariableBinding {
|
|||
* or `x : T`
|
||||
*/
|
||||
pub fn parse_binding_expr<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
typectx: &mut impl TypeDict,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result< VariableBinding, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
|
@ -142,7 +148,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
* `{ x:T; y:U; ... }`
|
||||
*/
|
||||
pub fn parse_binding_block<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
typectx: &mut impl TypeDict,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result< Vec<VariableBinding>, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
|
@ -175,7 +181,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
}
|
||||
|
||||
pub fn parse_statement<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
super_scope: &Arc<RwLock<Scope>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<crate::expr::Statement, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
|
@ -188,7 +194,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
tokens.next();
|
||||
// todo accept address-expression instead of symbol
|
||||
let (name_region, name) = parse_symbol(tokens)?;
|
||||
let val_expr = parse_expr(typectx, tokens)?;
|
||||
let val_expr = parse_expr(super_scope, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
|
||||
Ok(Statement::Assignment {
|
||||
|
@ -200,48 +206,40 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
"let" => {
|
||||
tokens.next();
|
||||
let (name_region, name) = parse_symbol(tokens)?;
|
||||
let typ = parse_type_tag(typectx, tokens)?;
|
||||
let typ = parse_type_tag(&mut *super_scope.write().unwrap(), tokens)?;
|
||||
|
||||
/* todo
|
||||
let mut variable_bindings = parse_binding_expr(typectx, tokens)?;
|
||||
*/
|
||||
let _ = parse_expect(tokens, LTIRToken::AssignValue);
|
||||
let val_expr = parse_expr(typectx, tokens)?;
|
||||
let val_expr = parse_expr(super_scope, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
|
||||
Ok(Statement::LetAssign {
|
||||
name_region,
|
||||
typ: match typ {
|
||||
Some(t) => Some(Ok(t)),
|
||||
Some((r,t)) => Some(t),
|
||||
None => None
|
||||
},
|
||||
var_id: name,
|
||||
val_expr,
|
||||
})
|
||||
}
|
||||
"while" => {
|
||||
tokens.next();
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
|
||||
let cond = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
|
||||
Ok(Statement::WhileLoop {
|
||||
condition: cond,
|
||||
body: parse_statement_block(typectx, tokens)?,
|
||||
})
|
||||
}
|
||||
"return" => {
|
||||
tokens.next();
|
||||
let expr = parse_expr(typectx, tokens)?;
|
||||
let expr = parse_expr(super_scope, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
Ok(Statement::Return(parse_expr(typectx, tokens)?))
|
||||
Ok(Statement::Return(parse_expr(super_scope, tokens)?))
|
||||
}
|
||||
_ => {
|
||||
let expr = parse_expr(typectx, tokens)?;
|
||||
let expr = parse_expr(super_scope, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
Ok(Statement::Expr(expr))
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(_) => {
|
||||
let expr = parse_expr(typectx, tokens)?;
|
||||
let expr = parse_expr(super_scope, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
Ok(Statement::Expr(expr))
|
||||
}
|
||||
|
@ -253,7 +251,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
}
|
||||
|
||||
pub fn parse_statement_block<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
scope: &Arc<RwLock<Scope>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<Vec<Statement>, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
|
@ -268,7 +266,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
return Ok(statements);
|
||||
}
|
||||
Ok(_) => {
|
||||
statements.push(parse_statement(typectx, tokens)?);
|
||||
statements.push(parse_statement(scope, tokens)?);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err((*region, ParseError::LexError(err.clone())));
|
||||
|
@ -280,14 +278,21 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
}
|
||||
|
||||
pub fn parse_atom<It>(
|
||||
typectx: &mut impl TypeDict,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
match tokens.next() {
|
||||
Some((region, Ok(LTIRToken::Symbol(sym)))) => Ok(LTExpr::Symbol{ region, symbol: sym, typ: None }),
|
||||
Some((region, Ok(LTIRToken::Char(c)))) => Ok(LTExpr::lit_uint(c as u64)),
|
||||
Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::lit_uint(n as u64)),
|
||||
Some((region, Ok(LTIRToken::Char(c)))) => Ok(
|
||||
LTExpr::Ascend {
|
||||
region: region.clone(),
|
||||
typ: typectx.parse("Char ~ Unicode ~ ℤ_2^32").unwrap(),
|
||||
expr: Box::new(LTExpr::WordLiteral{ region, val: c as tisc::VM_Word })
|
||||
}
|
||||
),
|
||||
Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::WordLiteral{ region, val: n as tisc::VM_Word }),
|
||||
Some((region, Ok(_))) => Err((region, ParseError::UnexpectedToken)),
|
||||
Some((region, Err(err))) => Err((region, ParseError::LexError(err))),
|
||||
None => Err((InputRegionTag::default(), ParseError::UnexpectedEnd)),
|
||||
|
@ -295,7 +300,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
}
|
||||
|
||||
pub fn parse_expr<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
super_scope: &Arc<RwLock<Scope>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
|
@ -306,13 +311,18 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
match tok {
|
||||
Ok(LTIRToken::Lambda) => {
|
||||
if children.len() == 0 {
|
||||
let region = region.clone();
|
||||
tokens.next();
|
||||
|
||||
let mut variable_bindings = parse_binding_expr(typectx, tokens)?;
|
||||
let scope = Scope::with_parent(super_scope);
|
||||
|
||||
let mut variable_bindings = parse_binding_expr(&mut *scope.write().unwrap(), tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::MapsTo);
|
||||
let body = parse_expr(typectx, tokens)?;
|
||||
let body = parse_expr(&scope, tokens)?;
|
||||
|
||||
return Ok(LTExpr::Abstraction {
|
||||
region,
|
||||
scope,
|
||||
args: variable_bindings.flatten().into_iter().map(|(r,s,t)| (r,s,t.map(|t|Ok(t))) ).collect(),
|
||||
body: Box::new(body),
|
||||
});
|
||||
|
@ -330,14 +340,23 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
}
|
||||
_ => {}
|
||||
}
|
||||
children.push(parse_expr(typectx, tokens)?);
|
||||
children.push(parse_expr(super_scope, tokens)?);
|
||||
}
|
||||
}
|
||||
Ok(LTIRToken::ExprClose) => {
|
||||
break;
|
||||
}
|
||||
Ok(LTIRToken::BlockOpen) => {
|
||||
children.push(LTExpr::block(parse_statement_block(typectx, tokens)?));
|
||||
let region = region.clone();
|
||||
let scope = Scope::with_parent(super_scope);
|
||||
let statements = parse_statement_block(&scope, tokens)?;
|
||||
|
||||
children.push(
|
||||
LTExpr::Block {
|
||||
region,
|
||||
scope,
|
||||
statements
|
||||
});
|
||||
}
|
||||
Ok(LTIRToken::BlockClose) => {
|
||||
break;
|
||||
|
@ -345,43 +364,116 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
Ok(LTIRToken::StatementSep) => {
|
||||
break;
|
||||
}
|
||||
Ok(LTIRToken::DoubleQuote(s)) => {
|
||||
let region = region.clone();
|
||||
let value = s.clone();
|
||||
tokens.next();
|
||||
children.push(LTExpr::StringLiteral{ region, value });
|
||||
}
|
||||
Ok(LTIRToken::Ascend(type_str)) => {
|
||||
let mut region = region.clone();
|
||||
let typ = super_scope.write().unwrap().parse(type_str);
|
||||
|
||||
if let Some(expr) = children.pop() {
|
||||
region.begin = expr.get_region().begin;
|
||||
|
||||
match typ {
|
||||
Ok(typ) => {
|
||||
children.push(LTExpr::Ascend {
|
||||
region: region.clone(),
|
||||
typ,
|
||||
expr: Box::new(expr)
|
||||
});
|
||||
},
|
||||
Err(e) => {
|
||||
return Err((region, ParseError::TypeParseError(e)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tokens.next();
|
||||
}
|
||||
Ok(LTIRToken::Descend(type_str)) => {
|
||||
let region = region.clone();
|
||||
let typ = super_scope.write().unwrap().parse(type_str);
|
||||
|
||||
if let Some(expr) = children.pop() {
|
||||
match typ {
|
||||
Ok(typ) => {
|
||||
children.push(LTExpr::Descend {
|
||||
region,
|
||||
typ,
|
||||
expr: Box::new(expr)
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
return Err((region, ParseError::TypeParseError(e)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tokens.next();
|
||||
}
|
||||
Ok(LTIRToken::Symbol(name)) => match name.as_str() {
|
||||
"if" => {
|
||||
let region = region.clone();
|
||||
tokens.next();
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
|
||||
let cond = parse_expr(typectx, tokens)?;
|
||||
let cond = parse_expr(super_scope, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
|
||||
let if_expr = LTExpr::block(parse_statement_block(typectx, tokens)?);
|
||||
let mut else_expr = LTExpr::block(vec![]);
|
||||
|
||||
let if_statements = parse_statement_block(super_scope, tokens)?;
|
||||
|
||||
let scope = super_scope.clone();
|
||||
let if_expr = LTExpr::Block{ region: region.clone(), scope, statements: if_statements };
|
||||
let scope = super_scope.clone();
|
||||
let mut else_expr = LTExpr::Block{ region: InputRegionTag::default(), scope, statements: vec![] };
|
||||
|
||||
if let Some((region, peektok)) = tokens.peek() {
|
||||
if let Ok(LTIRToken::Symbol(name)) = peektok {
|
||||
if name == "else" {
|
||||
tokens.next();
|
||||
else_expr = parse_expr(typectx, tokens)?;
|
||||
else_expr = parse_expr(super_scope, tokens)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
children.push(LTExpr::Branch {
|
||||
region,
|
||||
condition: Box::new(cond),
|
||||
if_expr: Box::new(if_expr),
|
||||
else_expr: Box::new(else_expr),
|
||||
});
|
||||
},
|
||||
"export" => {
|
||||
"while" => {
|
||||
let region = region.clone();
|
||||
tokens.next();
|
||||
let block = parse_statement_block(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
|
||||
let cond = parse_expr(super_scope, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
|
||||
children.push(LTExpr::WhileLoop {
|
||||
region,
|
||||
condition: Box::new(cond),
|
||||
body: Box::new(parse_expr(super_scope, tokens)?),
|
||||
});
|
||||
}
|
||||
"export" => {
|
||||
let region = region.clone();
|
||||
tokens.next();
|
||||
let scope = Scope::with_parent(super_scope);
|
||||
let block = parse_statement_block(&scope, tokens)?;
|
||||
children.push(LTExpr::ExportBlock {
|
||||
region,
|
||||
scope,
|
||||
statements: block
|
||||
});
|
||||
},
|
||||
name => {
|
||||
children.push(parse_atom(tokens)?);
|
||||
children.push(parse_atom(&mut *super_scope.write().unwrap(), tokens)?);
|
||||
}
|
||||
},
|
||||
Ok(atom) => {
|
||||
children.push(parse_atom(tokens)?);
|
||||
children.push(parse_atom(&mut *super_scope.write().unwrap(), tokens)?);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err((*region, ParseError::LexError(err.clone())));
|
||||
|
@ -389,13 +481,24 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
}
|
||||
}
|
||||
|
||||
if children.len() > 0 {
|
||||
if children.len() > 1 {
|
||||
let head = children.remove(0);
|
||||
|
||||
let mut region = head.get_region();
|
||||
for c in children.iter() {
|
||||
let cr = c.get_region();
|
||||
region.begin = usize::min( region.begin, cr.begin );
|
||||
region.end = usize::max( region.end, cr.end );
|
||||
}
|
||||
|
||||
Ok(LTExpr::Application {
|
||||
region,
|
||||
typ: None,
|
||||
head: Box::new(head),
|
||||
body: children,
|
||||
})
|
||||
} else if children.len() == 1 {
|
||||
Ok(children.pop().unwrap())
|
||||
} else {
|
||||
Err((InputRegionTag::default(), ParseError::UnexpectedEnd))
|
||||
}
|
||||
|
@ -404,6 +507,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
|||
|
||||
|
||||
mod tests {
|
||||
use crate::parser::{LTExpr, InputRegionTag};
|
||||
use std::sync::{Arc, RwLock};
|
||||
|
||||
#[test]
|
||||
|
@ -415,12 +519,51 @@ mod tests {
|
|||
assert_eq!(
|
||||
bindings,
|
||||
Ok(crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 0, end: 1 },
|
||||
symbol: "x".into(),
|
||||
typtag: None
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_double_quote() {
|
||||
let mut lexer = crate::lexer::LTIRLexer::from("\"test\"".chars()).peekable();
|
||||
let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));
|
||||
let expr = crate::parser::parse_expr( &typectx, &mut lexer );
|
||||
|
||||
assert_eq!(
|
||||
expr,
|
||||
Ok(LTExpr::StringLiteral{
|
||||
region: InputRegionTag{ begin: 0, end: 6 },
|
||||
value: "test".into()
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_ascend() {
|
||||
let mut lexer = crate::lexer::LTIRLexer::from("\"ff\" as <Seq <Digit 16>>".chars()).peekable();
|
||||
let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));
|
||||
let expr = crate::parser::parse_expr( &typectx, &mut lexer );
|
||||
|
||||
assert_eq!(
|
||||
expr,
|
||||
Ok(LTExpr::Ascend {
|
||||
region: InputRegionTag{ begin: 5, end: 24 },
|
||||
typ: match typectx.write().unwrap().parse("<Seq <Digit 16>>") {
|
||||
Ok(t) => Ok(t),
|
||||
Err(e) => Err(crate::parser::TypeError::ParseError(e))
|
||||
},
|
||||
expr: Box::new(LTExpr::StringLiteral {
|
||||
region: InputRegionTag{ begin: 0, end: 4 },
|
||||
value: "ff".into()
|
||||
})
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_parse_typed_atomic_binding() {
|
||||
let mut lexer = crate::lexer::LTIRLexer::from("x:T".chars()).peekable();
|
||||
|
@ -430,8 +573,9 @@ mod tests {
|
|||
assert_eq!(
|
||||
bindings,
|
||||
Ok(crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 0, end: 1 },
|
||||
symbol: "x".into(),
|
||||
typtag: Some(typectx.write().unwrap().parse("T").unwrap())
|
||||
typtag: Some((InputRegionTag{begin: 1, end:3}, typectx.write().unwrap().parse("T").unwrap()))
|
||||
})
|
||||
);
|
||||
}
|
||||
|
@ -446,8 +590,12 @@ mod tests {
|
|||
bindings,
|
||||
Ok(crate::parser::VariableBinding::Struct{
|
||||
members: vec![
|
||||
crate::parser::VariableBinding::Atomic{ symbol: "x".into(), typtag: None },
|
||||
crate::parser::VariableBinding::Atomic{ symbol: "y".into(), typtag: None }
|
||||
crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 1, end: 2 },
|
||||
symbol: "x".into(), typtag: None },
|
||||
crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 3, end: 4 },
|
||||
symbol: "y".into(), typtag: None }
|
||||
],
|
||||
typtag: None
|
||||
})
|
||||
|
@ -464,10 +612,14 @@ mod tests {
|
|||
bindings,
|
||||
Ok(crate::parser::VariableBinding::Struct{
|
||||
members: vec![
|
||||
crate::parser::VariableBinding::Atomic{ symbol: "x".into(), typtag: None },
|
||||
crate::parser::VariableBinding::Atomic{ symbol: "y".into(), typtag: None }
|
||||
crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 1, end: 2 },
|
||||
symbol: "x".into(), typtag: None },
|
||||
crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 3, end: 4 },
|
||||
symbol: "y".into(), typtag: None }
|
||||
],
|
||||
typtag: Some(typectx.write().unwrap().parse("T").unwrap())
|
||||
typtag: Some((InputRegionTag{begin:5, end:7}, typectx.write().unwrap().parse("T").unwrap()))
|
||||
})
|
||||
);
|
||||
}
|
||||
|
@ -486,12 +638,16 @@ mod tests {
|
|||
Ok(crate::parser::VariableBinding::Struct{
|
||||
members: vec![
|
||||
crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 1, end: 2 },
|
||||
symbol: "x".into(),
|
||||
typtag: Some(type_u)
|
||||
typtag: Some((InputRegionTag{begin: 2, end:4}, type_u))
|
||||
},
|
||||
crate::parser::VariableBinding::Atomic{ symbol: "y".into(), typtag: None }
|
||||
crate::parser::VariableBinding::Atomic{
|
||||
region: InputRegionTag{ begin: 6, end: 7 },
|
||||
symbol: "y".into(),
|
||||
typtag: None }
|
||||
],
|
||||
typtag: Some(type_t)
|
||||
typtag: Some((InputRegionTag{begin: 8, end:10}, type_t))
|
||||
})
|
||||
);
|
||||
}
|
lib-ltcore/src/procedure_compiler.rs (new file, 332 added lines)

@@ -0,0 +1,332 @@
|
|||
use {
|
||||
crate::{
|
||||
lexer::InputRegionTag,
|
||||
expr::{LTExpr, Statement, TypeTag, TypeError},
|
||||
symbols::{Scope, SymbolDef},
|
||||
},
|
||||
std::{
|
||||
ops::Deref,
|
||||
sync::{Arc, RwLock},
|
||||
},
|
||||
laddertypes::{
|
||||
parser::ParseLadderType,
|
||||
unparser::UnparseLadderType
|
||||
},
|
||||
tisc::{assembler::AssemblyWord, linker::LinkAddr},
|
||||
tiny_ansi::TinyAnsi
|
||||
};
|
||||
|
||||
pub struct ProcedureCompiler {
|
||||
proc_symbol: String,
|
||||
scope: Arc<RwLock<Scope>>,
|
||||
asm: tisc::Assembler,
|
||||
subroutines: Vec<tisc::assembler::AssemblyWord>,
|
||||
pub linker: tisc::Linker,
|
||||
pub diagnostics: Vec<( InputRegionTag, String )>
|
||||
}
|
||||
|
||||
impl ProcedureCompiler {
|
||||
pub fn new(proc_symbol: String, scope: Arc<RwLock<Scope>>) -> Self {
|
||||
ProcedureCompiler {
|
||||
proc_symbol,
|
||||
scope,
|
||||
subroutines: Vec::new(),
|
||||
asm: tisc::Assembler::new(),
|
||||
linker: tisc::Linker::new(),
|
||||
diagnostics: Vec::new()
|
||||
}
|
||||
}
|
||||
/*
|
||||
pub fn export_symbols(&self) -> Vec<(String, SymbolDef)> {
|
||||
let mut scope = self.scope.write().unwrap();
|
||||
scope.update_link_addresses(&self.proc_symbol, &self.linker);
|
||||
scope.export()
|
||||
}
|
||||
*/
|
||||
pub fn get_bytecode(mut self, ret: bool) -> (
|
||||
Vec<(String, SymbolDef)>,
|
||||
Vec<tisc::assembler::AssemblyWord>
|
||||
) {
|
||||
let frame_size = self.scope.read().unwrap().get_frame_size();
|
||||
if frame_size > 0 {
|
||||
let alloc_asm = tisc::Assembler::new()
|
||||
.lit(frame_size as tisc::VM_Word).call("data-frame-alloc");
|
||||
let drop_asm = tisc::Assembler::new()
|
||||
.lit(frame_size as tisc::VM_Word).call("data-frame-drop");
|
||||
|
||||
self.asm = alloc_asm.join( self.asm ).join( drop_asm );
|
||||
}
|
||||
|
||||
let main_section = self.asm.build();
|
||||
|
||||
//self.linker.add_procedure( &self.proc_symbol, main_section );
|
||||
// ^--- this would insert the asm section at the end,
|
||||
// we however need it an the beginning of the bytecode
|
||||
|
||||
// insert section at front
|
||||
self.linker.next_addr += main_section.len() as i64;
|
||||
for (name,section) in self.linker.symbols.iter_mut() {
|
||||
section.addr += main_section.len() as i64;
|
||||
}
|
||||
self.linker.symbols.insert(
|
||||
self.proc_symbol.clone(),
|
||||
tisc::linker::Section { addr: 0, data: main_section }
|
||||
);
|
||||
|
||||
// replace all symbol definitions from subroutines
|
||||
// with relative LinkAddr`s
|
||||
self.scope.write().unwrap().update_link_addresses(
|
||||
&self.proc_symbol,
|
||||
&self.linker
|
||||
);
|
||||
|
||||
(
|
||||
self.scope.read().unwrap().export(),
|
||||
self.linker.link_relative( &self.proc_symbol ).expect("link error")
|
||||
)
|
||||
}
|
||||
|
||||
pub fn compile_statement(mut self, statement: &Statement, enable_export: bool) -> Self {
|
||||
match statement {
|
||||
Statement::Assignment { name_region, var_id, val_expr } => {
|
||||
self = self.compile_expr(val_expr);
|
||||
|
||||
match self.scope.read().unwrap().get(var_id) {
|
||||
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
|
||||
self.asm = self.asm.lit(stack_ref).call("data-frame-set");
|
||||
}
|
||||
Some(SymbolDef::StaticRef { typ, link_addr }) => {
|
||||
self.asm = self
|
||||
.asm
|
||||
.static_ref(var_id.as_str())
|
||||
.inst(tisc::VM_Instruction::Store);
|
||||
}
|
||||
Some(SymbolDef::Procedure {
|
||||
in_types,
|
||||
out_types,
|
||||
link_addr,
|
||||
export
|
||||
}) => {
|
||||
self.asm = self.asm
|
||||
.call(var_id.as_str())
|
||||
.inst(tisc::VM_Instruction::Store);
|
||||
}
|
||||
None => {
|
||||
self.diagnostics.push(
|
||||
(name_region.clone(),
|
||||
format!("cannot assign undefined symbol '{}'!", var_id))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Statement::LetAssign {
|
||||
name_region,
|
||||
typ,
|
||||
var_id,
|
||||
val_expr,
|
||||
} => {
|
||||
let val_type = self.scope.read().unwrap()
|
||||
.get(var_id).unwrap()
|
||||
.get_type(&mut self.scope.clone());
|
||||
let val_type = val_type.sugar(&mut self.scope.clone());
|
||||
match val_type {
|
||||
laddertypes::SugaredTypeTerm::Func(mut f_types) => {
|
||||
let mut c = ProcedureCompiler::new(
|
||||
var_id.clone(),
|
||||
self.scope.clone()
|
||||
);
|
||||
c = c.compile_expr( val_expr );
|
||||
self.diagnostics.append(&mut c.diagnostics);
|
||||
|
||||
let (symbols,code) = c.get_bytecode( true );
|
||||
eprintln!("LET assign compiled {}", var_id);
|
||||
for (i,l) in tisc::assembler::disassemble( &code ).iter().enumerate() {
|
||||
eprintln!("{}+{} ... {}", var_id, i, l);
|
||||
}
|
||||
self.linker.add_procedure(var_id, code);
|
||||
|
||||
/*
|
||||
let out_types = vec![ f_types.pop().unwrap().desugar(&mut self.scope.clone()) ];
|
||||
let in_types = f_types.into_iter().map(|t| t.desugar(&mut self.scope.clone())).collect();
|
||||
|
||||
self.scope.write().unwrap().declare_proc(
|
||||
var_id.clone(),
|
||||
in_types,
|
||||
out_types,
|
||||
true
|
||||
);
|
||||
*/
|
||||
}
|
||||
|
||||
_ => {
|
||||
self = self.compile_statement(&Statement::Assignment {
|
||||
name_region: *name_region,
|
||||
var_id: var_id.clone(),
|
||||
val_expr: val_expr.clone(),
|
||||
}, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
Statement::Expr(expr) => {
|
||||
self = self.compile_expr(expr);
|
||||
}
|
||||
Statement::Return(expr) => {
|
||||
self = self.compile_expr(expr);
|
||||
}
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
pub fn compile_expr(mut self, expr: &LTExpr) -> Self {
|
||||
match expr {
|
||||
LTExpr::Symbol { region, typ, symbol } => match self.scope.read().unwrap().get(symbol) {
|
||||
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
|
||||
self.asm = self.asm.lit(stack_ref).call("data-frame-get");
|
||||
}
|
||||
Some(SymbolDef::StaticRef { typ, link_addr }) => {
|
||||
self.asm = self.asm.static_ref(symbol.as_str());
|
||||
}
|
||||
Some(SymbolDef::Procedure {
|
||||
in_types,
|
||||
out_types,
|
||||
link_addr,
|
||||
export
|
||||
}) => {
|
||||
self.asm = self.asm.call_symbol(link_addr);
|
||||
}
|
||||
None => {
|
||||
self.diagnostics.push(
|
||||
(region.clone(), format!("undefined symbol '{}'!", symbol))
|
||||
);
|
||||
}
|
||||
},
|
||||
LTExpr::StringLiteral { region, value } => {
|
||||
self.asm = self.asm.lit(0);
|
||||
for c in value.chars().rev() {
|
||||
self.asm = self.asm.lit(c as i64);
|
||||
}
|
||||
}
|
||||
LTExpr::WordLiteral { region, val } => {
|
||||
self.asm = self.asm.lit(*val);
|
||||
}
|
||||
LTExpr::Ascend { region, typ, expr } => {
|
||||
self = self.compile_expr(expr);
|
||||
}
|
||||
LTExpr::Descend { region, typ, expr } => {
|
||||
self = self.compile_expr(expr);
|
||||
}
|
||||
LTExpr::Branch {
|
||||
region,
|
||||
condition,
|
||||
if_expr,
|
||||
else_expr,
|
||||
} => {
|
||||
self = self.compile_expr(condition);
|
||||
|
||||
let asm = self.asm;
|
||||
self.asm = tisc::Assembler::new();
|
||||
self = self.compile_expr(if_expr);
|
||||
let if_asm = self.asm;
|
||||
self.asm = tisc::Assembler::new();
|
||||
self = self.compile_expr(else_expr);
|
||||
let else_asm = self.asm;
|
||||
self.asm = asm;
|
||||
self.asm = self.asm.branch(if_asm, else_asm);
|
||||
}
|
||||
LTExpr::WhileLoop { region, condition, body } => {
|
||||
let asm = self.asm;
|
||||
|
||||
self.asm = tisc::Assembler::new();
|
||||
self = self.compile_expr(condition);
|
||||
let cond_asm = self.asm;
|
||||
|
||||
self.asm = tisc::Assembler::new();
|
||||
self = self.compile_expr(body);
|
||||
let body_asm = self.asm;
|
||||
|
||||
self.asm = asm;
|
||||
self.asm = self.asm.while_loop(cond_asm, body_asm);
|
||||
}
|
||||
LTExpr::Application { region, typ, head, body } => {
|
||||
for arg in body.iter().rev() {
|
||||
self = self.compile_expr(arg);
|
||||
}
|
||||
self = self.compile_expr(head);
|
||||
}
|
||||
LTExpr::Abstraction { region, scope, args, body } => {
|
||||
let mut abs_compiler = ProcedureCompiler::new("__abs__".into(), scope.clone());
|
||||
|
||||
for (region, arg_name, arg_type) in args.iter() {
|
||||
match scope.read().unwrap().get(arg_name) {
|
||||
Some(SymbolDef::FrameRef{ typ, stack_ref }) => {
|
||||
eprintln!("Arg {} stack ref = {}", arg_name, stack_ref);
|
||||
|
||||
// TODO: aknowledge actual size of arguments
|
||||
// let arg_size = typ.get_size()
|
||||
let arg_size = 1;
|
||||
|
||||
for i in 0..arg_size {
|
||||
abs_compiler.asm = abs_compiler.asm
|
||||
.lit(stack_ref + i)
|
||||
.call("data-frame-set");
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.diagnostics.push(
|
||||
(region.clone(),
|
||||
format!("argument variable is not a frame-ref"))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
abs_compiler = abs_compiler.compile_expr( body );
|
||||
let (abs_symbols, mut abs_code) = abs_compiler.get_bytecode( false );
|
||||
|
||||
for (s,def) in abs_symbols.iter() {
|
||||
eprintln!("{} = {:?}", s, def);
|
||||
}
|
||||
for (i, l) in tisc::assembler::disassemble(&abs_code).into_iter().enumerate() {
|
||||
eprintln!("__abs__+{} .. {}", i, l);
|
||||
}
|
||||
|
||||
self.asm.words.append( &mut abs_code );
|
||||
/*
|
||||
self.linker.add_procedure(
|
||||
"__abs__".into(),
|
||||
abs_code
|
||||
);*/
|
||||
}
|
||||
LTExpr::Block { region, scope, statements } => {
|
||||
let mut block_compiler = ProcedureCompiler::new(
|
||||
"__block__".into(),
|
||||
scope.clone()
|
||||
);
|
||||
|
||||
for stmnt in statements.iter() {
|
||||
block_compiler = block_compiler.compile_statement( stmnt, true );
|
||||
}
|
||||
|
||||
let (block_symbols, mut block_code) = block_compiler.get_bytecode( true );
|
||||
|
||||
eprintln!("BLOCK compiler:");
|
||||
for (s,def) in block_symbols.iter() {
|
||||
eprintln!("{} = {:?}", s, def);
|
||||
}
|
||||
for (i,l) in tisc::assembler::disassemble( &block_code ).into_iter().enumerate() {
|
||||
eprintln!("block+{} .. {}", i, l);
|
||||
}
|
||||
|
||||
self.linker.
|
||||
self.scope.write().unwrap().import(
|
||||
block_symbols
|
||||
);
|
||||
self.asm.words.append(&mut block_code);
|
||||
}
|
||||
LTExpr::ExportBlock{ region, scope, statements } => {
|
||||
}
|
||||
}
|
||||
self
|
||||
}
|
||||
}
|
||||
|
|
@@ -6,7 +6,6 @@ use {
|
|||
|
||||
pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
||||
let symbols = Scope::new();
|
||||
let typectx = symbols.read().unwrap().typectx.clone();
|
||||
|
||||
/* Duplicate the top item on the stack,
|
||||
* and whatever type this word has is preserved
|
||||
|
@ -14,8 +13,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
symbols.write().unwrap().declare_proc_parse(
|
||||
"dup",
|
||||
vec!["T"],
|
||||
vec!["T~machine::Word"],
|
||||
vec!["T~machine::Word", "T~machine::Word"],
|
||||
vec!["T~machine.Word"],
|
||||
vec!["T~machine.Word", "T~machine.Word"],
|
||||
);
|
||||
|
||||
/* drop topmost element
|
||||
|
@ -23,7 +22,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
symbols.write().unwrap().declare_proc_parse(
|
||||
"drop",
|
||||
vec!["T"],
|
||||
vec!["T~machine::Word"],
|
||||
vec!["T~machine.Word"],
|
||||
vec![],
|
||||
);
|
||||
/* Put a single Ascii character on stdout
|
||||
|
@ -31,14 +30,14 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
symbols.write().unwrap().declare_proc_parse(
|
||||
"emit",
|
||||
vec![],
|
||||
vec!["Char~Ascii~machine::Word"],
|
||||
vec!["Char~Unicode~ℤ_2^32~ℤ_2^64~machine.UInt64~machine.Word"],
|
||||
vec![],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"accept",
|
||||
vec![],
|
||||
vec![],
|
||||
vec!["Char~Ascii~machine::Word"],
|
||||
vec!["Char~Unicode~ℤ_2^32~ℤ_2^64~machine.UInt64~machine.Word"],
|
||||
);
|
||||
|
||||
linker.add_procedure("dup", tisc::Assembler::new().inst(tisc::VM_Instruction::Dup).build());
|
||||
|
@ -55,96 +54,96 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
"i+",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
vec!["ℤ_2^64~machine.UInt64~machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i-",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i*",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
vec!["ℤ_2^64~machine.UInt64~machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i/",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
vec!["ℤ_2^64~machine.UInt64~machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i%",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
"ℤ_2^64~machine.UInt64~machine.Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
vec!["ℤ_2^64~machine.UInt64~machine.Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f+",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
vec!["ℝ~machine.f64~machine.Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f-",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
vec!["ℝ~machine.f64~machine.Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f*",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
vec!["ℝ~machine.f64~machine.Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f/",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
vec!["ℝ~machine.f64~machine.Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f%",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
"ℝ~machine.f64~machine.Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
vec!["ℝ~machine.f64~machine.Word"],
|
||||
);
|
||||
|
||||
linker.add_procedure("i+", tisc::Assembler::new().inst(tisc::VM_Instruction::IntAdd).build());
|
||||
|
@ -163,27 +162,27 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-neg",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-and",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-or",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-xor",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-shl",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-shr",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"],
|
||||
);
|
||||
|
||||
linker.add_procedure("bit-neg", tisc::Assembler::new().inst(tisc::VM_Instruction::BitNeg).build());
|
||||
|
@ -199,8 +198,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
symbols.write().unwrap().declare_proc_parse(
|
||||
"@",
|
||||
vec![],
|
||||
vec!["<MutRef T~machine::Word>~machine::Address~machine::Word"],
|
||||
vec!["T~machine::Word"],
|
||||
vec!["<MutRef T~machine.Word>~machine.Address~machine.Word"],
|
||||
vec!["T~machine.Word"],
|
||||
);
|
||||
/* Store to memory
|
||||
*/
|
||||
|
@ -208,8 +207,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
"!",
|
||||
vec![],
|
||||
vec![
|
||||
"<MutRef T~machine::Word>~machine::Address~machine::Word",
|
||||
"T~machine::Word",
|
||||
"<MutRef T~machine.Word>~machine.Address~machine.Word",
|
||||
"T~machine.Word",
|
||||
],
|
||||
vec![],
|
||||
);
|
||||
|
@ -221,7 +220,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
|
||||
symbols.write().unwrap().declare_static_parse(
|
||||
"data-frame-ptr",
|
||||
"<MutRef <Seq machine::Word>>~machine::Address~machine::Word",
|
||||
"<MutRef <Seq machine.Word>>~machine.Address~machine.Word",
|
||||
);
|
||||
linker.add_static("data-frame-ptr", vec![0x1000]);
|
||||
|
||||
|
@ -230,7 +229,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
vec!["T"],
|
||||
vec![
|
||||
"T~machine::Word",
|
||||
"<RefMut T~machine::Word>~LocalVariableId~machine::UInt64~machine::Word",
|
||||
"<RefMut T~machine.Word>~LocalVariableId~machine.UInt64~machine.Word",
|
||||
],
|
||||
vec![],
|
||||
);
|
||||
|
@ -248,8 +247,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
|||
symbols.write().unwrap().declare_proc_parse(
|
||||
"data-frame-get",
|
||||
vec!["T"],
|
||||
vec!["<Ref T~machine::Word>~DataFrameRef~machine::UInt64~machine::Word"],
|
||||
vec!["T~machine::Word"],
|
||||
vec!["<Ref T~machine.Word>~DataFrameRef~machine.UInt64~machine.Word"],
|
||||
vec!["T~machine.Word"],
|
||||
);
|
||||
linker.add_procedure(
|
||||
"data-frame-get",
|
|
@@ -1,5 +1,9 @@
|
|||
use {
|
||||
crate::expr::LTExpr,
|
||||
laddertypes::{
|
||||
TypeDict, TypeID,
|
||||
parser::ParseLadderType
|
||||
},
|
||||
std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, RwLock},
|
||||
|
@ -28,7 +32,7 @@ pub enum SymbolDef {
|
|||
impl SymbolDef {
|
||||
pub fn get_type(
|
||||
&self,
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
typedict: &mut impl laddertypes::dict::TypeDict,
|
||||
) -> laddertypes::TypeTerm {
|
||||
match self {
|
||||
SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(),
|
||||
|
@ -38,21 +42,38 @@ impl SymbolDef {
|
|||
out_types,
|
||||
link_addr: _,
|
||||
export: _,
|
||||
} => laddertypes::TypeTerm::App(vec![
|
||||
typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse("Fn")
|
||||
.expect("parse typeterm"),
|
||||
laddertypes::TypeTerm::App(in_types.clone()),
|
||||
laddertypes::TypeTerm::App(out_types.clone()),
|
||||
]),
|
||||
} => {
|
||||
let mut out_types = out_types.clone();
|
||||
let out_type =
|
||||
if out_types.len() == 1 {
|
||||
out_types.pop().unwrap()
|
||||
} else {
|
||||
laddertypes::TypeTerm::App(
|
||||
std::iter::once(
|
||||
typedict.parse("Struct").unwrap()
|
||||
).chain(
|
||||
out_types.into_iter()
|
||||
).collect()
|
||||
)
|
||||
};
|
||||
|
||||
laddertypes::TypeTerm::App(
|
||||
std::iter::once(
|
||||
typedict.parse("Func").expect("parse typeterm")
|
||||
).chain(
|
||||
in_types.clone().into_iter()
|
||||
).chain(
|
||||
std::iter::once(out_type)
|
||||
).collect()
|
||||
)
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Describes a lexical scope of symbols
|
||||
*/
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Scope {
|
||||
/* definition of runtime symbols
|
||||
*/
|
||||
|
@ -60,7 +81,7 @@ pub struct Scope {
|
|||
|
||||
/* type symbols
|
||||
*/
|
||||
pub typectx: Arc<RwLock<laddertypes::TypeDict>>,
|
||||
typedict: Arc<RwLock<laddertypes::BimapTypeDict>>,
|
||||
|
||||
/* number of words required for
|
||||
* the stack frame of this scope
|
||||
|
@ -73,39 +94,70 @@ pub struct Scope {
|
|||
parent: Option<Arc<RwLock<Scope>>>,
|
||||
}
|
||||
|
||||
impl TypeDict for Scope {
|
||||
fn insert(&mut self, name: String, id: TypeID) {
|
||||
self.typedict.write().unwrap().insert(name,id)
|
||||
}
|
||||
fn add_varname(&mut self, vn: String) -> TypeID {
|
||||
self.typedict.add_varname(vn)
|
||||
}
|
||||
fn add_typename(&mut self, tn: String) -> TypeID {
|
||||
if let Some(parent) = self.parent.as_mut() {
|
||||
parent.add_typename(tn)
|
||||
} else {
|
||||
self.typedict.add_typename(tn)
|
||||
}
|
||||
}
|
||||
fn get_typeid(&self, tn: &String) -> Option<TypeID> {
|
||||
if let Some(id) = self.typedict.get_typeid(tn) {
|
||||
Some(id)
|
||||
} else {
|
||||
if let Some(parent) = self.parent.as_ref() {
|
||||
parent.get_typeid(tn)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
fn get_typename(&self, tid: &TypeID) -> Option<String> {
|
||||
if let Some(name) = self.typedict.get_typename(tid) {
|
||||
Some(name)
|
||||
} else {
|
||||
if let Some(parent) = self.parent.as_ref() {
|
||||
parent.get_typename(tid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Scope {
|
||||
pub fn new() -> Arc<RwLock<Self>> {
|
||||
Arc::new(RwLock::new(Scope {
|
||||
symbols: HashMap::new(),
|
||||
typectx: Arc::new(RwLock::new(laddertypes::dict::TypeDict::new())),
|
||||
typedict: Arc::new(RwLock::new(laddertypes::dict::BimapTypeDict::new())),
|
||||
frame_size: 0,
|
||||
parent: None,
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn with_parent(parent: &Arc<RwLock<Scope>>) -> Arc<RwLock<Self>> {
|
||||
let s = Scope {
|
||||
symbols: HashMap::new(),
|
||||
|
||||
// todo: create proper child scope
|
||||
typectx: parent.read().unwrap().typectx.clone(),
|
||||
|
||||
frame_size: 0,
|
||||
parent: Some(parent.clone()),
|
||||
};
|
||||
|
||||
Arc::new(RwLock::new(s))
|
||||
let mut s = Scope::new();
|
||||
s.write().unwrap().parent = Some(parent.clone());
|
||||
s
|
||||
}
|
||||
|
||||
pub fn export(self) -> Vec<(String, SymbolDef)> {
|
||||
pub fn export(&self) -> Vec<(String, SymbolDef)> {
|
||||
self.symbols
|
||||
.into_iter()
|
||||
.iter()
|
||||
.filter(|(name, def)|
|
||||
match def {
|
||||
SymbolDef::Procedure { in_types:_, out_types:_, link_addr:_, export } => *export,
|
||||
_ => false
|
||||
}
|
||||
)
|
||||
.map(|(n,d)| (n.clone(), d.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
@ -138,6 +190,14 @@ impl Scope {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn get_type(&mut self, name: &str) -> Option<laddertypes::TypeTerm> {
|
||||
if let Some(sdef) = self.get(name) {
|
||||
Some(sdef.get_type( &mut self.typedict ))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// takes the link-addresses from a Linker
|
||||
/// and updates the symbol table to relative addresses
|
||||
/// based on the next super-label
|
||||
|
@ -162,7 +222,7 @@ impl Scope {
|
|||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -176,29 +236,26 @@ impl Scope {
|
|||
out_types: Vec<&str>,
|
||||
) {
|
||||
for v in type_vars {
|
||||
self.typectx.write().unwrap().add_varname(v.into());
|
||||
self.add_varname(v.into());
|
||||
}
|
||||
|
||||
let mut td = self.typedict.clone();
|
||||
|
||||
self.declare_proc(
|
||||
String::from(name),
|
||||
in_types
|
||||
.into_iter()
|
||||
.map(|t| {
|
||||
self.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(t)
|
||||
.expect("parse typeterm")
|
||||
.map(move |t| {
|
||||
td.parse(t).expect("parse typeterm")
|
||||
})
|
||||
.collect(),
|
||||
out_types
|
||||
.into_iter()
|
||||
.map(|t| {
|
||||
self.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(t)
|
||||
.expect("parse typeterm")
|
||||
.map({
|
||||
let mut td = self.typedict.clone();
|
||||
move |t| {
|
||||
td.parse(t).expect("parse typeterm")
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
false
|
||||
|
@ -227,9 +284,6 @@ impl Scope {
|
|||
|
||||
pub fn declare_var_parse(&mut self, name: &str, typ: &str) {
|
||||
let typ = self
|
||||
.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(typ)
|
||||
.expect("parse typeterm");
|
||||
self.declare_var(String::from(name), typ);
|
||||
|
@ -249,9 +303,6 @@ impl Scope {
|
|||
|
||||
pub fn declare_static_parse(&mut self, name: &str, typ: &str) {
|
||||
let typ = self
|
||||
.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(typ)
|
||||
.expect("parse typeterm");
|
||||
self.declare_static(String::from(name), typ);
|
lib-ltcore/src/typing.rs (new file, 319 added lines)

@@ -0,0 +1,319 @@
|
|||
use {
|
||||
crate::{
|
||||
lexer::InputRegionTag,
|
||||
expr::{LTExpr, Statement, TypeTag, TypeError, TypeErrorKind},
|
||||
symbols::{Scope, SymbolDef},
|
||||
},
|
||||
std::{
|
||||
ops::Deref,
|
||||
sync::{Arc, RwLock},
|
||||
},
|
||||
laddertypes::{
|
||||
parser::ParseLadderType,
|
||||
unparser::UnparseLadderType,
|
||||
dict::TypeDict
|
||||
},
|
||||
tisc::{assembler::AssemblyWord, linker::LinkAddr},
|
||||
tiny_ansi::TinyAnsi
|
||||
};
|
||||
|
||||
impl LTExpr {
|
||||
/*
|
||||
pub fn get_type(&self) -> TypeTag {
|
||||
Err(TypeError::Todo)
|
||||
}*/
|
||||
|
||||
pub fn infer_type(&self, scope: &Arc<RwLock<Scope>>) -> TypeTag
|
||||
{
|
||||
match self {
|
||||
LTExpr::WordLiteral{ region, val } => {
|
||||
Ok(scope.write().unwrap().parse(
|
||||
"ℤ_2^64 ~ machine.UInt64 ~ machine.Word"
|
||||
).unwrap())
|
||||
}
|
||||
|
||||
LTExpr::StringLiteral{ region, value } => {
|
||||
Ok(scope.write().unwrap().parse(
|
||||
"<Seq Char ~ Unicode ~ ℤ_2^32 ~ ℤ_2^64 ~ machine.UInt64>
|
||||
~ <TermArray 0 machine.UInt64 ~ machine.Word>"
|
||||
).unwrap())
|
||||
}
|
||||
|
||||
LTExpr::Symbol { region, typ, symbol } => {
|
||||
let mut s = scope.write().unwrap();
|
||||
if let Some(sdef) = s.get(symbol) {
|
||||
Ok(sdef.get_type(&mut *s))
|
||||
} else {
|
||||
let region = region.clone();
|
||||
Err(vec![ TypeError{ region, kind: TypeErrorKind::NoSymbol } ])
|
||||
}
|
||||
}
|
||||
|
||||
LTExpr::Ascend { region, typ, expr } => {
|
||||
let expr_type = expr.infer_type( scope )?;
|
||||
let sub_type = typ.clone();
|
||||
|
||||
/*
|
||||
* todo: check potential overlap of typ with expr_type
|
||||
*/
|
||||
if let Ok(i) = sub_type.is_syntactic_subtype_of(&expr_type) {
|
||||
let mut lnf = expr_type.get_lnf_vec();
|
||||
let mut sub_lnf = sub_type.get_lnf_vec();
|
||||
|
||||
for x in 0..i {
|
||||
lnf.insert(x, sub_lnf.remove(0));
|
||||
}
|
||||
let result_type = laddertypes::TypeTerm::Ladder(lnf);
|
||||
Ok(result_type)
|
||||
} else {
|
||||
Ok(laddertypes::TypeTerm::Ladder(vec![
|
||||
sub_type,
|
||||
expr_type
|
||||
]))
|
||||
}
|
||||
}
|
||||
|
||||
LTExpr::Descend { region, typ, expr } => {
|
||||
let expr_type = expr.infer_type(scope)?;
|
||||
let super_type = typ.clone();
|
||||
|
||||
if let Ok(i) = expr_type.is_syntactic_subtype_of(&super_type) {
|
||||
let lnf = expr_type.get_lnf_vec();
|
||||
let result_type = laddertypes::TypeTerm::Ladder(lnf[i..].into_iter().cloned().collect());
|
||||
Ok(result_type)
|
||||
} else {
|
||||
return Err(vec![ TypeError{
|
||||
region: region.clone(),
|
||||
kind: TypeErrorKind::ArgTypeMismatch {
|
||||
expected: expr_type,
|
||||
received: super_type
|
||||
}
|
||||
} ]);
|
||||
}
|
||||
}
|
||||
|
||||
LTExpr::Abstraction { region, scope, args, body } => {
|
||||
let mut f = Vec::new();
|
||||
|
||||
for (region, name, typ) in args {
|
||||
if let Some(typ) = typ {
|
||||
let typ = typ.clone()?;
|
||||
let sugar_typ = typ.clone().sugar(&mut *scope.write().unwrap());
|
||||
f.push( sugar_typ );
|
||||
|
||||
scope.write().unwrap().declare_var(name.clone(), typ.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let body_type = body.infer_type( scope )?;
|
||||
f.push( body_type.sugar(&mut *scope.write().unwrap()) );
|
||||
|
||||
Ok(laddertypes::SugaredTypeTerm::Func(f).desugar( &mut *scope.write().unwrap() ))
|
||||
}
|
||||
|
||||
LTExpr::Application{ region, typ, head, body } => {
|
||||
let mut head_type = head.infer_type(scope)?;
|
||||
let mut args = body.into_iter();
|
||||
|
||||
let mut result_type = head_type;
|
||||
let mut sugared_result_type = result_type.sugar(&mut *scope.write().unwrap());
|
||||
|
||||
let mut errors = Vec::new();
|
||||
|
||||
while let laddertypes::SugaredTypeTerm::Func(mut f_types) = sugared_result_type {
|
||||
sugared_result_type = f_types.pop().unwrap();
|
||||
|
||||
for (argi, expected_arg_type) in f_types.iter().enumerate() {
|
||||
if let Some(arg) = args.next() {
|
||||
|
||||
let expected_arg_type = expected_arg_type.clone().desugar(&mut *scope.write().unwrap());
|
||||
|
||||
// check subtype
|
||||
let received_arg_type = arg.infer_type(scope)?;
|
||||
if ! received_arg_type.is_syntactic_subtype_of(&expected_arg_type).is_ok() {
|
||||
errors.push(TypeError{
|
||||
region: arg.get_region(),
|
||||
kind: TypeErrorKind::ArgTypeMismatch {
|
||||
expected: expected_arg_type,
|
||||
received: received_arg_type
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
} else {
|
||||
// partial application.
|
||||
f_types.push(sugared_result_type);
|
||||
sugared_result_type = laddertypes::SugaredTypeTerm::Func(
|
||||
f_types[argi .. ].into_iter().cloned().collect()
|
||||
);
|
||||
|
||||
// todo examine stack ..
|
||||
|
||||
return
|
||||
if errors.len() == 0 {
|
||||
result_type = sugared_result_type.desugar(&mut *scope.write().unwrap());
|
||||
Ok(result_type)
|
||||
} else {
|
||||
Err(errors)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(arg) = args.next() {
|
||||
errors.push(TypeError{
|
||||
region: arg.get_region(),
|
||||
kind: TypeErrorKind::SuperfluousArgument
|
||||
});
|
||||
}
|
||||
|
||||
if errors.len() == 0 {
|
||||
result_type = sugared_result_type.desugar(&mut *scope.write().unwrap());
|
||||
Ok(result_type)
|
||||
} else {
|
||||
Err(errors)
|
||||
}
|
||||
}
|
||||
|
||||
LTExpr::Branch { region, condition, if_expr, else_expr } => {
|
||||
let received_cond_type = condition.infer_type(scope)?;
|
||||
let expected_cond_type = scope.write().unwrap().parse("Bool ~ machine.Word").unwrap();
|
||||
|
||||
if received_cond_type.is_syntactic_subtype_of(&expected_cond_type).is_ok() {
|
||||
|
||||
let if_expr_type = if_expr.infer_type(scope)?;
|
||||
let else_expr_type = else_expr.infer_type(scope)?;
|
||||
|
||||
if if_expr_type.is_syntactic_subtype_of(&else_expr_type).is_ok() {
|
||||
Ok(else_expr_type)
|
||||
} else if else_expr_type.is_syntactic_subtype_of(&if_expr_type).is_ok() {
|
||||
Ok(if_expr_type)
|
||||
} else {
|
||||
Err(vec![TypeError{
|
||||
region: region.clone(),
|
||||
kind: TypeErrorKind::BranchMismatch {
|
||||
if_branch: if_expr_type,
|
||||
else_branch: else_expr_type
|
||||
}
|
||||
}])
|
||||
}
|
||||
} else {
|
||||
Err(vec![ TypeError{
|
||||
region: condition.get_region(),
|
||||
kind: TypeErrorKind::ArgTypeMismatch {
|
||||
expected: expected_cond_type,
|
||||
received: received_cond_type
|
||||
}
|
||||
}])
|
||||
}
|
||||
}
|
||||
LTExpr::WhileLoop { region, condition, body } => {
|
||||
let received_cond_type = condition.infer_type(scope)?;
|
||||
let expected_cond_type = scope.write().unwrap().parse("Bool ~ machine.Word").unwrap();
|
||||
|
||||
if received_cond_type.is_syntactic_subtype_of(&expected_cond_type).is_ok() {
|
||||
let body_type = body.infer_type(scope)?;
|
||||
let body_type = body_type.sugar(&mut scope.clone());
|
||||
let loop_type = laddertypes::SugaredTypeTerm::Seq(vec![ body_type ]);
|
||||
Ok(loop_type.desugar(&mut scope.clone()))
|
||||
} else {
|
||||
return Err(vec![ TypeError{
|
||||
region: condition.get_region(),
|
||||
kind: TypeErrorKind::ArgTypeMismatch {
|
||||
expected: expected_cond_type,
|
||||
received: received_cond_type
|
||||
}
|
||||
}]);
|
||||
}
|
||||
}
|
||||
LTExpr::ExportBlock{ region, scope, statements } |
|
||||
LTExpr::Block{ region, scope, statements } => {
|
||||
let mut types = Vec::new();
|
||||
|
||||
for s in statements {
|
||||
match s.infer_type(scope) {
|
||||
Ok(Some(t)) => {
|
||||
if !t.is_empty() {
|
||||
types.insert(0, t);
|
||||
}
|
||||
}
|
||||
Ok(None) => {}
|
||||
Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(
|
||||
if types.len() == 1 { types.pop().unwrap() }
|
||||
else { laddertypes::SugaredTypeTerm::Struct(types) }
|
||||
.desugar(&mut scope.clone())
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Statement {
|
||||
pub fn infer_type(&self, scope: &Arc<RwLock<Scope>>) -> Result< Option<laddertypes::SugaredTypeTerm> , Vec<TypeError> > {
|
||||
match self {
|
||||
Statement::LetAssign{ name_region, typ, var_id, val_expr } => {
|
||||
let typ = val_expr.infer_type( scope )?;
|
||||
|
||||
match typ.clone().sugar( &mut scope.clone() ) {
|
||||
laddertypes::SugaredTypeTerm::Func(mut args) => {
|
||||
let out_type = args.pop().unwrap();
|
||||
let out_types =
|
||||
match out_type.clone() {
|
||||
laddertypes::SugaredTypeTerm::Struct(oa) => oa.into_iter().map(|t|t.desugar(&mut scope.clone())).collect(),
|
||||
_ => vec![ out_type.desugar(&mut scope.clone()) ]
|
||||
};
|
||||
let in_types = args.into_iter().map(|t| t.desugar(&mut scope.clone())).collect();
|
||||
|
||||
scope.write().unwrap()
|
||||
.declare_proc(
|
||||
var_id.clone(),
|
||||
in_types,
|
||||
out_types,
|
||||
true
|
||||
);
|
||||
|
||||
return Ok(None);
|
||||
}
|
||||
_ => {
|
||||
let id = scope.write().unwrap().declare_var(var_id.clone(), typ);
|
||||
eprintln!("TYPING declare var = {}", id);
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
},
|
||||
Statement::Return(expr) |
|
||||
Statement::Expr(expr) => {
|
||||
let t = expr.infer_type(scope)?;
|
||||
|
||||
if t != laddertypes::TypeTerm::App(vec![]) {
|
||||
let st = t.sugar(&mut scope.clone());
|
||||
Ok(Some(st))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
Statement::Assignment { name_region, var_id, val_expr } => {
|
||||
let received_type = val_expr.infer_type(scope)?;
|
||||
let expected_type = scope.write().unwrap().get_type(var_id).unwrap();
|
||||
if ! received_type.is_syntactic_subtype_of(&expected_type).is_ok() {
|
||||
return Err(vec![ TypeError{
|
||||
region: val_expr.get_region(),
|
||||
kind: TypeErrorKind::AssignMismatch {
|
||||
expected: expected_type,
|
||||
received: received_type
|
||||
}
|
||||
}]);
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
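As a usage note for the type checker above: `infer_type` returns either an inferred `TypeTerm` or a `Vec<TypeError>` carrying an input region per error. A minimal driver sketch is shown below; the `print_diagnostic` call is the helper defined later in this diff under `ltcc/src/diagnostic.rs`, while the function itself and its name are assumptions.

    // Hypothetical driver around LTExpr::infer_type, based on the code above.
    fn check(path: &str, ast: &LTExpr, scope: &Arc<RwLock<Scope>>) -> bool {
        match ast.infer_type(scope) {
            Ok(t) => {
                // success: the inferred TypeTerm can be normalized / pretty-printed
                let _ = t;
                true
            }
            Err(errors) => {
                // each TypeError carries an InputRegionTag plus a TypeErrorKind
                for e in errors {
                    print_diagnostic(path, e.region, e.kind.fmt(&mut scope.clone()));
                }
                false
            }
        }
    }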
@ -16,19 +16,3 @@ export {
let int-max = λ{ a:ℤ~machine.Int64; b:ℤ~machine.Int64; } ↦ if( int-gt a b ) { a; } else { b; };
};


/* syntax ambiguity */

let f'0 = λx:A -> B ↦ { ... };

/* could be interpreted as .. */
let f'1 = λ{x: A -> B} ↦ {};
/* ..or.. */
let f'2 = λx:A ↦ B:{};


do {
!a 10;
!b 20;
}
@ -25,11 +25,11 @@ export {
{bp:ℕ; bq:ℕ;}: ℚ ~ <Ratio ℕ ~ ℤ_2^64 ~ machine.UInt64> ;
} ↦ {
let l = lcm aq bq;
let as = i/ l aq;
let bs = i/ l bq;
let a = i/ l aq;
let b = i/ l bq;

i* aq as;
i+ (i* ap as) (i* bp bs);
i* aq a;
i+ (i* ap a) (i* bp b);
};

let ratio-mul = λ{
14
ltcc/Cargo.toml
Normal file

@ -0,0 +1,14 @@
[package]
name = "ltcc"
version = "0.1.0"
edition = "2021"

[dependencies]
laddertypes = { path = "../../lib-laddertypes" }
ltcore = { path = "../lib-ltcore" }
tisc = { path = "../../lib-tisc" }
clap = { version = "4.5.15", features = ["derive"] }
tiny-ansi = "0.1.0"
iterate-text = "0.0.1"
bincode = "1.3.3"

8
ltcc/hello.lt
Normal file

@ -0,0 +1,8 @@
export {
let star = λ{}
↦ emit 42;

let main = λ{} ↦ {
print-nullterm 'H''e''l''l''o'' ''W''o''r''l''d''!''\n''\0';
};
}
84
ltcc/src/diagnostic.rs
Normal file

@ -0,0 +1,84 @@
use {
std::collections::HashMap,
std::sync::{Arc, RwLock},
std::{boxed::Box, ops::Deref},
tiny_ansi::TinyAnsi,
ltcore::{
lexer::InputRegionTag,
expr::{LTExpr, Statement},
procedure_compiler::ProcedureCompiler,
symbols::Scope,
}
};

pub fn print_diagnostic(
path: &str,
region: InputRegionTag,
message: String
) {
let lines = iterate_text::file::lines::IterateFileLines::new(path);

let mut line_region = InputRegionTag::default();

let n_before = 5;
let n_after = 5;

let mut last_lines = Vec::new();
let mut next_lines = 0;

println!("\n{}:", path.green());
for (i, l) in lines.enumerate() {
line_region.end += l.chars().count();

last_lines.push((i+1, l.clone()));
if last_lines.len() > n_before {
last_lines.remove(0);
}

if region.begin >= line_region.begin &&
region.begin < line_region.end {

next_lines = n_after;

let column_begin = region.begin - line_region.begin;
let column_end = region.end - line_region.begin;

let tab_width = 4;

let column_begin_c = column_begin + (tab_width-1)*l.chars().take(column_begin).filter(|&c|c=='\t').count();
let column_end_c = column_end + (tab_width-1)*l.chars().filter(|&c|c=='\t').count();

// display the source line
for (j,ll) in last_lines.iter() {
print!("{}\t{}{}",
format!("{}",j).to_string().bright_black(),
"|".bright_black().bold(),
ll.chars().map(|c| {
if c == '\t' {
std::iter::repeat(' ').take(tab_width)
} else {
std::iter::repeat(c).take(1)
}
})
.flatten()
.collect::<String>()
.bright_white());
}

print!("\t{}", "|".bright_magenta());
for _ in 0..column_begin_c { print!("{}", ".".magenta().bold()); }
for _ in column_begin_c..column_end_c { print!("{}", "^".magenta().bold()); }
print!("\n");

print!("{} [{}-{}]: {}\n", "error".bright_red(), column_begin, column_end, message.yellow());
}
else if next_lines > 0 {
next_lines -= 1;
print!("{}\t{}{}", format!("{}", i+1).to_string().bright_black(), "|".bright_black().bold(), l.bright_white());
}

line_region.begin = line_region.end;
}
}
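A short, hedged example of calling this helper directly. It assumes `InputRegionTag` is a plain `{ begin, end }` character range with public fields (consistent with how `region.begin` / `region.end` are read above); the concrete values and message are made up for illustration.

    // Hypothetical call site for print_diagnostic.
    fn report_example() {
        let region = InputRegionTag { begin: 42, end: 47 };
        print_diagnostic(
            "main.lt",
            region,
            String::from("undefined symbol 'foo'!"),
        );
    }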
161
ltcc/src/main.rs
Normal file

@ -0,0 +1,161 @@
use clap::Parser;

use {
std::collections::HashMap,
std::sync::{Arc, RwLock},
std::{boxed::Box, ops::Deref},
std::io::Write,
tiny_ansi::TinyAnsi,
laddertypes::dict::TypeDict,
ltcore::{
lexer::InputRegionTag,
expr::{LTExpr, Statement},
procedure_compiler::ProcedureCompiler,
symbols::Scope,
}
};

mod diagnostic;

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
/// source files
sources: Vec< String >,

/// path to the target bytecode file
#[arg(short, long)]
output: String
}

fn main() {
let args = Args::parse();

let mut linker = tisc::Linker::new();
let root_scope = ltcore::runtime::init_runtime(&mut linker);
let mut main_scope = Scope::with_parent(&root_scope);

for path in args.sources {
let iter_chars = iterate_text::file::characters::IterateFileCharacters::new(path.clone());

/* compile source file
*/
let mut lexer = ltcore::lexer::LTIRLexer::from( iter_chars.peekable() );
let mut program_tokens =
lexer
.filter(|tok| match tok {
(_, Ok(ltcore::lexer::LTIRToken::Comment(_))) => false,
_ => true
})
.peekable();

match ltcore::parser::parse_expr( &mut main_scope, &mut program_tokens ) {
Ok( mut ast ) => {
let mut compiler = ProcedureCompiler::new(path.clone(), main_scope.clone());

match ast.infer_type(&main_scope) {
Ok(mut t) => {
eprintln!("Typecheck {}", "OK".green().bold());
t = t.normalize();
t = t.param_normalize();
let mut tc = main_scope.clone();
eprintln!( "{}", t.sugar(&mut tc).pretty(&tc,0) );

}
Err(type_errs) => {
for e in type_errs.iter() {
crate::diagnostic::print_diagnostic(
path.as_str(),
e.region,
e.kind.fmt(&mut main_scope.clone())
);
}

eprintln!("----------------------------------");
eprintln!("{} ({} errors)", "Typecheck failed".bright_red().bold(), type_errs.len());
return;
}
}

compiler = compiler.compile_expr(&ast);
let diagnostics = compiler.diagnostics.clone();
let (exports, proc_code) = compiler.get_bytecode(false);

for (region, message) in diagnostics {
crate::diagnostic::print_diagnostic(
path.as_str(),
region,
format!("{}", message)
);
}

eprintln!("{} {}", "Compiled".green(), path.bold());
for (name, def) in exports.iter() {
eprintln!("export {}:", name.yellow().bold());
let mut t = def.get_type(&mut main_scope);
t = t.normalize();
t = t.param_normalize();
let mut tc = main_scope.clone();
eprintln!( "{}", t.sugar(&mut tc).pretty(&tc,0) );
}

main_scope.write().unwrap().import(
exports
);

/* link assembly-program to symbols
*/
eprintln!("generated bytecode ({})", proc_code.len() );
for (i,l) in tisc::assembler::disassemble(&proc_code).iter().enumerate() {
eprintln!("{} .... {}", i,l);
}
linker.add_procedure(path.as_str(), proc_code);
}
Err( (region, parse_error) ) => {
crate::diagnostic::print_diagnostic(
path.as_str(),
region,
format!("{:?}", parse_error)
);

eprintln!("=======\nParse Error: Abort\n");
}
}
}

eprintln!("write output file {}", args.output);
let obj_file = tisc::linker::ObjectFile {
symbols: Arc::into_inner(main_scope).unwrap().into_inner().unwrap()
.export()
.into_iter()
.filter_map(|(symbol, def)| match def {
ltcore::symbols::SymbolDef::Procedure { in_types:_, out_types:_, link_addr, export } => {
if export {
match link_addr {
tisc::LinkAddr::Absolute(w) => {
eprintln!("add symbol {} -> {}", symbol, w);
Some(( symbol, w ))
}
tisc::LinkAddr::Relative{ symbol: b, offset } => {
let addr = linker.get_link_addr(&b).unwrap_or(-1);
eprintln!("relative symbol {} -> {}({})+{}", symbol, b, addr, offset);
Some((symbol, addr + offset ))
}
}
} else {
None
}
}
_ => None
})
.collect(),

code: linker.link_partial().expect("Link error:")
};

let mut output = std::io::BufWriter::new(
std::fs::File::create(args.output).expect("Failed to open file")
);
bincode::serialize_into( output, &obj_file );
}
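One detail worth noting about the output step above: `bincode::serialize_into` writes the `ObjectFile` that `ltvm` later reads back with `bincode::deserialize_from`, so both binaries must agree on the `ObjectFile` layout. Also, the `serialize_into(output, &obj_file)` call discards its `Result`; checking it (or calling `.expect(...)`) would surface write errors. A minimal round-trip sketch, where the in-memory round trip itself is only an illustration and not part of this diff:

    // Hypothetical: serialize an object file to a byte buffer and read it back,
    // mirroring the BufWriter/BufReader usage in ltcc and ltvm.
    fn roundtrip(obj: &tisc::linker::ObjectFile) -> tisc::linker::ObjectFile {
        let bytes = bincode::serialize(obj).expect("serialize object file");
        bincode::deserialize(&bytes).expect("deserialize object file")
    }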
30
ltcc/src/oldmain.rs
Normal file

@ -0,0 +1,30 @@
use {
std::collections::HashMap,
std::sync::{Arc, RwLock},
std::{boxed::Box, ops::Deref},
tiny_ansi::TinyAnsi
};

use crate::{
lexer::InputRegionTag,
expr::{LTExpr, Statement},
procedure_compiler::ProcedureCompiler,
symbols::Scope,
};

/* TODO:
* - import function symbols
* - Compiler error reporting
* - parse float literals
* - return type annotation
* - write to address resulting from expression
* - sized objects
* - Typecheck for LTExpr::Application
* - typecheck & inference for rest
*/

fn main() {
// create virtual machine with 4096 words of memory
let mut vm = tisc::VM::new(0x1000);

}
1
ltcc/test.lt
Normal file

@ -0,0 +1 @@

BIN
ltcc/test.lt.o
Normal file
Binary file not shown.
12
ltvm/Cargo.toml
Normal file

@ -0,0 +1,12 @@
[package]
name = "ltvm"
version = "0.1.0"
edition = "2021"

[dependencies]
ltcore = { path = "../lib-ltcore" }
tisc = { path = "../../lib-tisc" }
clap = { version = "4.5.15", features = ["derive"] }
tiny-ansi = "0.1.0"
bincode = "1.3.3"
55
ltvm/src/main.rs
Normal file

@ -0,0 +1,55 @@
use {
std::io::Read,
clap::Parser,
tiny_ansi::TinyAnsi,
};

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
/// source files
sources: Vec< String >,

/// entry symbol
#[arg(short, long, default_value_t = String::from("main"))]
entry: String,

/// memory size
#[arg(short, long, default_value_t = 0x1000)]
memsize: usize
}

fn main() {
let args = Args::parse();

let mut vm = tisc::VM::new( args.memsize );
let mut linker = tisc::Linker::new();

let mut symbols = std::collections::HashMap::<String, tisc::LinkAddr>::new();

for source_path in args.sources.iter() {
let mut input = std::io::BufReader::new(
std::fs::File::open(source_path).expect("Failed to open file")
);

linker.import( source_path, bincode::deserialize_from( input ).expect("") );
}

let entry_addr = linker.get_link_addr(&args.entry).unwrap_or(0);
/*
.expect(&format!("cant find entry symbol '{}'", args.entry));
*/
let bytecode = linker.link_total().expect("Link error:");

eprintln!("{} ({} bytes)", "Loaded bytecode.".green(), bytecode.len());
eprintln!("================\n");

vm.load(bytecode);
vm.execute(entry_addr);

eprintln!(
"\n================\nVM execution finished\ndatastack = {:?}\n====",
vm.data_stack
);

}
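One behavioral detail in the loader above: `get_link_addr(&args.entry).unwrap_or(0)` silently falls back to address 0 when the entry symbol is missing, while the stricter `.expect(...)` variant is left commented out. A hedged sketch of the stricter behavior, reusing the same `linker` and `args` bindings from `main()`:

    // Hypothetical: fail fast when the requested entry symbol is not present,
    // instead of falling back to address 0.
    let entry_addr = linker
        .get_link_addr(&args.entry)
        .unwrap_or_else(|| panic!("cant find entry symbol '{}'", args.entry));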
6
main.lt

@ -1,5 +1,6 @@
{
print-nullterm 'H''e''l''l''o'' ''W''o''r''l''d''!''\n''\0';
export {
let main = λ{} ↦ {
print-nullterm "Hello World!\n";

/* test ratio
*/

@ -32,5 +33,6 @@
'\n''\0';

uint-machine-to-posint 16 256;
};
}
166
src/expr.rs

@ -1,166 +0,0 @@
use {
std::{
boxed::Box,
sync::{Arc, RwLock}
},
crate::{
lexer::InputRegionTag
}
};

#[derive(Clone, Debug)]
pub enum Statement {
Assignment {
name_region: InputRegionTag,
var_id: String,
val_expr: LTExpr,
},
LetAssign {
typ: Option<TypeTag>,
var_id: String,
val_expr: LTExpr,
},
WhileLoop {
condition: LTExpr,
body: Vec<Statement>,
},
Return(LTExpr),
Expr(LTExpr),
}

#[derive(Clone, Debug)]
pub enum TypeError {
ParseError(laddertypes::parser::ParseError),
Mismatch {
expected: laddertypes::TypeTerm,
received: laddertypes::TypeTerm,
},
}

pub type TypeTag = Result<laddertypes::TypeTerm, TypeError>;

#[derive(Clone, Debug)]
pub enum LTExpr {
Literal {
typ: Option<TypeTag>,
val: tisc::VM_Word,
},
Symbol {
region: InputRegionTag,
typ: Option<TypeTag>,
symbol: String,
},
Application {
typ: Option<TypeTag>,
head: Box<LTExpr>,
body: Vec<LTExpr>,
},
Abstraction {
args: Vec<(InputRegionTag, String, Option<TypeTag>)>,
body: Box<LTExpr>,
},
Branch {
condition: Box<LTExpr>,
if_expr: Box<LTExpr>,
else_expr: Box<LTExpr>,
},
Block {
statements: Vec<Statement>,
},
ExportBlock {
statements: Vec<Statement>,
}
}

impl LTExpr {
/*
pub fn symbol(str: &str) -> Self {
LTExpr::Symbol {
typ: None, //typectx.write().unwrap().parse("<Ref memory::Word>~Symbol~<Seq Char>").expect("parse typeterm"),
symbol: String::from(str),
}
}
*/
pub fn lit_uint(val: u64) -> Self {
LTExpr::Literal {
typ: None, //typectx.write().unwrap().parse("ℤ_2^64~machine::UInt64~machine::Word").expect("parse typeterm"),
val: val as tisc::VM_Word,
}
}
/*
pub fn abstraction(args: Vec<(&str, &str)>, body: LTExpr) -> LTExpr {
LTExpr::Abstraction {
args: args
.into_iter()
.map(
|(arg_name, arg_type)| (arg_name.into(), None), //typectx.write().unwrap().parse(t).expect("parse typeterm")
)
.collect(),
body: Box::new(body),
}
}
*/
pub fn application(head: LTExpr, body: Vec<LTExpr>) -> Self {
LTExpr::Application {
typ: None,
head: Box::new(head),
body: body,
}
}

pub fn block(body: Vec<Statement>) -> Self {
LTExpr::Block { statements: body }
}
}

impl Statement {
pub fn while_loop(cond: LTExpr, body: Vec<Statement>) -> Self {
Statement::WhileLoop {
condition: cond,
body,
}
}
}

/*
impl LTExpr {
fn get_type(&self, dict: &laddertypes::dict::TypeDict) -> laddertypes::TypeTerm {
match self {
LTExpr::StringLiteral{ val:_, typ } => { typ.clone() }
LTExpr::MemoryLiteral{ val:_, typ } => { typ.clone() }
LTExpr::Abstraction{ arg_type, val_expr } => {
laddertypes::TypeTerm::App(vec![
laddertypes::TypeTerm::TypeID(dict.get_typeid(&"Fn".into()).expect("expected function type")),
arg_type.clone(),
val_expr.get_type(dict)
])
}
LTExpr::Application{ head, body } => {
match head.deref() {
LTExpr::Abstraction{ arg_type, val_expr } => {
val_expr.get_type(dict)
}
_ => {
panic!("invalid application");
}
}
}
LTExpr::Block{ statements } => {
if let Some(last_statement) = statements.last() {
match last_statement {
Statement::Return(ret_expr) |
Statement::Expr(ret_expr) => {
ret_expr.get_type(dict)
}
_ => {
laddertypes::TypeTerm::unit()
}
}
} else {
laddertypes::TypeTerm::unit()
}
}
}
}
}
*/
180
src/main.rs

@ -1,180 +0,0 @@
use {
std::collections::HashMap,
std::sync::{Arc, RwLock},
std::{boxed::Box, ops::Deref},
tiny_ansi::TinyAnsi
};

mod expr;
mod lexer;
mod parser;
mod procedure_compiler;
mod runtime;
mod symbols;

use crate::{
lexer::InputRegionTag,
expr::{LTExpr, Statement},
procedure_compiler::ProcedureCompiler,
symbols::Scope,
};

fn print_diagnostic(
path: &str,
region: InputRegionTag,
message: String
) {
let lines = iterate_text::file::lines::IterateFileLines::new(path);

let mut line_region = InputRegionTag::default();

let n_before = 3;
let n_after = 3;

let mut last_lines = Vec::new();
let mut next_lines = 0;

println!("\n{}:", path.green());
for (i, l) in lines.enumerate() {
line_region.end += l.chars().count();

last_lines.push((i+1, l.clone()));
if last_lines.len() > n_before {
last_lines.remove(0);
}

if region.begin >= line_region.begin &&
region.begin < line_region.end {

next_lines = n_after;

let column_begin = region.begin - line_region.begin;
let column_end = region.end - line_region.begin;

// display the source line
for (j,ll) in last_lines.iter() {
print!("{}\t{}{}",
format!("{}",j).to_string().bright_black(),
"|".bright_black().bold(),
ll.bright_white());
}

print!("\t{}", "|".bright_magenta());
for _ in 0..column_begin { print!("{}", ".".magenta().bold()); }
for _ in column_begin..column_end { print!("{}", "^".magenta().bold()); }
print!("\n");

print!("{} [{}-{}]: {}\n", "error".bright_red(), column_begin, column_end, message.white());
}
else if next_lines > 0 {
next_lines -= 1;
print!("{}\t{}{}", format!("{}", i+1).to_string().bright_black(), "|".bright_black().bold(), l.bright_white());
}

line_region.begin = line_region.end;
}
}

/* TODO:
* - import function symbols
* - Compiler error reporting
* - parse float literals
* - return type annotation
* - write to address resulting from expression
* - sized objects
* - Typecheck for LTExpr::Application
* - typecheck & inference for rest
*/

fn main() {
// create virtual machine with 4096 words of memory
let mut vm = tisc::VM::new(0x1000);

let mut linker = tisc::Linker::new();
let root_scope = crate::runtime::init_runtime(&mut linker);
let main_scope = Scope::with_parent(&root_scope);
let typectx = main_scope.read().unwrap().typectx.clone();

/* open source file
*/
let args: Vec<String> = std::env::args().collect();

if args.len() < 2 {
eprintln!("{}", "No source files specified.".red());
return;
}

let mut args_iter = args.into_iter();
args_iter.next();

for path in args_iter {
let iter_chars = iterate_text::file::characters::IterateFileCharacters::new(path.clone());

/* compile source file
*/
let mut lexer = lexer::LTIRLexer::from( iter_chars.peekable() );
let mut program_tokens = lexer.filter(|tok| match tok {
(_, Ok(lexer::LTIRToken::Comment(_))) => false,
_ => true
})
.peekable();

match parser::parse_expr( &typectx, &mut program_tokens ) {
Ok( ast ) => {
let (exports, diagnostics, bytecode) = ProcedureCompiler::new(&main_scope)
.compile(&ast)
.into_asm(&path);

for (region, message) in diagnostics {
print_diagnostic(
path.as_str(),
region,
format!("{}", message)
);
}

eprintln!("{} {}", "Compiled".green(), path.bold());
for (name, def) in exports.iter() {
eprintln!("export {}: {:?}", name.yellow().bold(), def);
}

main_scope.write().unwrap().import(
exports
);

/* link assembly-program to symbols
*/
linker.add_procedure(path.as_str(), bytecode);
}
Err( (region, parse_error) ) => {
print_diagnostic(
path.as_str(),
region,
format!("{:?}", parse_error)
);

eprintln!("=======\nParse Error: Abort\n");
}
}
}

/* load & run compiled bytecode
*/
let main_addr = linker
.get_link_addr(&"main.lt".into())
.expect("'main.lt' not found");

let bytecode = linker.link_total().expect("Link error:");

eprintln!("{} ({} bytes)", "Linked bytecode.".green(), bytecode.len());
eprintln!("================\n");

vm.load(bytecode);
vm.execute(main_addr);

eprintln!(
"\n================\nVM execution finished\ndatastack = {:?}\n====",
vm.data_stack
);
}
@ -1,285 +0,0 @@
use {
crate::{
lexer::InputRegionTag,
expr::{LTExpr, Statement},
symbols::{Scope, SymbolDef},
},
std::{
ops::Deref,
sync::{Arc, RwLock},
},
tisc::{assembler::AssemblyWord, linker::LinkAddr},
};

pub struct ProcedureCompiler {
pub symbols: Arc<RwLock<Scope>>,
asm: tisc::Assembler,
linker: tisc::Linker,
result_size: usize,

pub diagnostics: Vec<( InputRegionTag, String )>
}

impl ProcedureCompiler {
pub fn new(parent_scope: &Arc<RwLock<Scope>>) -> Self {
ProcedureCompiler {
symbols: Scope::with_parent(parent_scope),
asm: tisc::Assembler::new(),
linker: tisc::Linker::new(),
result_size: 0,

diagnostics: Vec::new()
}
}

pub fn into_asm(mut self, proc_symbol: &String) -> (Vec<(String, SymbolDef)>, Vec<(InputRegionTag, String)>, Vec<tisc::assembler::AssemblyWord>) {
let mut symbols =
Arc::try_unwrap(self.symbols).ok().unwrap()
.into_inner().unwrap();

symbols.update_link_addresses(
proc_symbol,
&self.linker
);

let data_frame_size = symbols.get_frame_size() as i64;

let body = self.asm.build();
self.linker.add_procedure("__procedure_body__", body);
let body_addr = self
.linker
.get_link_addr(&"__procedure_body__".into())
.unwrap();

let subroutines = self
.linker
.link_relative(&"__subroutines__".into())
.expect("link error");

let mut entry = tisc::Assembler::new();
if data_frame_size > 0 {
entry = entry.lit(data_frame_size).call("data-frame-alloc");
}
entry = entry.call_symbol(LinkAddr::Relative {
symbol: "__subroutines__".into(),
offset: body_addr,
});

if data_frame_size > 0 {
entry = entry.lit(data_frame_size).call("data-frame-drop");
}

let mut superlink = tisc::Linker::new();
superlink.add_procedure("", entry.build());
superlink.add_procedure("__subroutines__", subroutines);

symbols.update_link_addresses(
&proc_symbol,
&superlink
);

let mut symbol_exports = symbols.export();
let subroutines_addr = superlink.get_link_addr(&"__subroutines__".into()).unwrap();
for (name, def) in symbol_exports.iter_mut() {
match def {
SymbolDef::Procedure{ in_types:_, out_types:_, link_addr, export:_ } => {
match link_addr {
LinkAddr::Relative{ symbol, offset } => {
*offset += subroutines_addr;
}
LinkAddr::Absolute(addr) => {
*addr += subroutines_addr;
}
}
}
_ => {}
}
}
let bytecode = superlink.link_relative(proc_symbol).expect("link error");
(symbol_exports, self.diagnostics, bytecode)
}

pub fn verify(&self) {
// todo
}

pub fn compile_statement(mut self, statement: &Statement, enable_export: bool) -> Self {
match statement {
Statement::Assignment { name_region, var_id, val_expr } => {
self = self.compile(val_expr);

match self.symbols.read().unwrap().get(var_id) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm.lit(stack_ref).call("data-frame-set");
}
Some(SymbolDef::StaticRef { typ, link_addr }) => {
self.asm = self
.asm
.static_ref(var_id.as_str())
.inst(tisc::VM_Instruction::Store);
}
Some(SymbolDef::Procedure {
in_types,
out_types,
link_addr,
export
}) => {
self.asm = self
.asm
.call(var_id.as_str())
.inst(tisc::VM_Instruction::Store);
}
None => {
self.diagnostics.push(
(name_region.clone(),
format!("cannot assign undefined symbol '{}'!", var_id))
);
}
}
}
Statement::LetAssign {
typ,
var_id,
val_expr,
} => match val_expr {
LTExpr::Abstraction { args: _, body: _ } => {
self.symbols
.write()
.unwrap()
.declare_proc(var_id.clone(), vec![], vec![], enable_export);

let (exports, mut diagnostics, lambda_procedure) = ProcedureCompiler::new(&self.symbols)
.compile(val_expr)
.into_asm(var_id);

self.diagnostics.append(&mut diagnostics);

self.linker.add_procedure(var_id, lambda_procedure);

let offset = self.linker.get_link_addr(var_id).unwrap();

// forward already exported symbols
if enable_export {
self.symbols.write().unwrap().import( exports );
}
}
_ => {
self.symbols
.write()
.unwrap()
.declare_var(var_id.clone(), laddertypes::TypeTerm::unit());

self = self.compile_statement(&Statement::Assignment {
name_region: InputRegionTag::default(),
var_id: var_id.clone(),
val_expr: val_expr.clone(),
}, false);
}
},
Statement::WhileLoop { condition, body } => {
let asm = self.asm;

self.asm = tisc::Assembler::new();
self = self.compile(condition);
let cond_asm = self.asm;

self.asm = tisc::Assembler::new();
for statement in body.into_iter() {
self = self.compile_statement(statement, false);
}
let body_asm = self.asm;

self.asm = asm;
self.asm = self.asm.while_loop(cond_asm, body_asm);
}
Statement::Expr(expr) => {
self = self.compile(expr);
}
Statement::Return(expr) => {
self = self.compile(expr);
}
}
self
}

pub fn compile(mut self, expr: &LTExpr) -> Self {
match expr {
LTExpr::Symbol { region, typ, symbol } => match self.symbols.read().unwrap().get(symbol) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm.lit(stack_ref).call("data-frame-get");
}
Some(SymbolDef::StaticRef { typ, link_addr }) => {
self.asm = self.asm.static_ref(symbol.as_str());
}
Some(SymbolDef::Procedure {
in_types,
out_types,
link_addr,
export
}) => {
self.asm = self.asm.call_symbol(link_addr);
}
None => {
self.diagnostics.push(
(region.clone(), format!("undefined symbol '{}'!", symbol))
);
}
},
LTExpr::Literal { typ, val } => {
self.asm = self.asm.lit(*val);
}
LTExpr::Application { typ, head, body } => {
for arg in body.iter().rev() {
self = self.compile(arg);
}
self = self.compile(head);
}
LTExpr::Abstraction { args, body } => {
for (region, arg_name, arg_type) in args.iter() {
if let Some(Ok(typeterm)) = arg_type {
let id = self
.symbols
.write()
.unwrap()
.declare_var(arg_name.clone(), typeterm.clone());
self.asm = self.asm.lit(id).call("data-frame-set");
} else {
self.diagnostics.push((
region.clone(),
format!("invalid type {:?} for argument {}", arg_type, arg_name)
));
}
}
self = self.compile(body);
}
LTExpr::Branch {
condition,
if_expr,
else_expr,
} => {
self = self.compile(condition);

let asm = self.asm;
self.asm = tisc::Assembler::new();
self = self.compile(if_expr);
let if_asm = self.asm;
self.asm = tisc::Assembler::new();
self = self.compile(else_expr);
let else_asm = self.asm;
self.asm = asm;
self.asm = self.asm.branch(if_asm, else_asm);
}
LTExpr::Block { statements } => {
for s in statements.iter() {
self = self.compile_statement(s, false);
}
}
LTExpr::ExportBlock{ statements } => {
for s in statements.iter() {
self = self.compile_statement(s, true);
}
}
}
self
}
}