Compare commits


No commits in common. "dev" and "topic-lib-structure" have entirely different histories.

14 changed files with 361 additions and 922 deletions

View file

@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2021"

 [dependencies]
-laddertypes = { path = "../../lib-laddertypes", features = ["pretty"] }
+laddertypes = { path = "../../lib-laddertypes" }
 tisc = { path = "../../lib-tisc" }
 serde = { version = "1.0", features = ["derive"] }
 tiny-ansi = "0.1.0"

View file

@@ -4,13 +4,11 @@ use {
         sync::{Arc, RwLock}
     },
     crate::{
-        lexer::InputRegionTag,
-        symbols::Scope
-    },
-    tiny_ansi::TinyAnsi
+        lexer::InputRegionTag
+    }
 };

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq)]
 pub enum Statement {
     Assignment {
         name_region: InputRegionTag,
@@ -18,75 +16,33 @@ pub enum Statement {
         val_expr: LTExpr,
     },
     LetAssign {
-        name_region: InputRegionTag,
-        typ: Option<laddertypes::TypeTerm>,
+        typ: Option<TypeTag>,
         var_id: String,
         val_expr: LTExpr,
     },
+    WhileLoop {
+        condition: LTExpr,
+        body: Vec<Statement>,
+    },
     Return(LTExpr),
     Expr(LTExpr),
 }

 #[derive(Clone, Debug, PartialEq)]
-pub enum TypeErrorKind {
-    // ParseError(laddertypes::parser::ParseError),
-    AssignMismatch {
+pub enum TypeError {
+    ParseError(laddertypes::parser::ParseError),
+    Mismatch {
         expected: laddertypes::TypeTerm,
         received: laddertypes::TypeTerm,
     },
-    ArgTypeMismatch {
-        expected: laddertypes::TypeTerm,
-        received: laddertypes::TypeTerm,
-    },
-    BranchMismatch {
-        if_branch: laddertypes::TypeTerm,
-        else_branch: laddertypes::TypeTerm
-    },
-    SuperfluousArgument,
     NoSymbol,
+    SuperflousArgument,
     Todo
 }

-#[derive(Clone, Debug)]
-pub struct TypeError {
-    pub region: InputRegionTag,
-    pub kind: TypeErrorKind
-}
-
-impl TypeErrorKind {
-    pub fn fmt(&self, dict: &mut impl laddertypes::TypeDict) -> String {
-        match self {
-            TypeErrorKind::BranchMismatch { if_branch, else_branch } => {
-                format!("Type Mismatch\nif branch\n:::{}\nelse branch\n:::{}",
-                    if_branch.clone().sugar(dict).pretty(dict, 1),
-                    else_branch.clone().sugar(dict).pretty(dict, 1)
-                )
-            },
-            TypeErrorKind::AssignMismatch { expected, received } |
-            TypeErrorKind::ArgTypeMismatch { expected, received } => {
-                format!("Type Mismatch\n{}{}\n{}{}",
-                    "expected\n ::: ".green(),
-                    expected.clone().sugar(dict).pretty(dict, 1),
-                    "received\n ::: ".green(),
-                    received.clone().sugar(dict).pretty(dict, 1)
-                )
-            }
-            TypeErrorKind::SuperfluousArgument => {
-                format!("Superfluous Argument")
-            }
-            TypeErrorKind::NoSymbol => {
-                format!("Unknown Symbol")
-            }
-            TypeErrorKind::Todo => {
-                format!("TODO")
-            }
-        }
-    }
-}
-
-pub type TypeTag = Result< laddertypes::TypeTerm, Vec<TypeError> >;
+pub type TypeTag = Result<laddertypes::TypeTerm, TypeError>;

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, PartialEq)]
 pub enum LTExpr {
     WordLiteral {
         region: InputRegionTag,
@@ -103,12 +59,12 @@ pub enum LTExpr {
     },
     Ascend {
         region: InputRegionTag,
-        typ: laddertypes::TypeTerm,
+        typ: TypeTag,
         expr: Box<LTExpr>
     },
     Descend {
         region: InputRegionTag,
-        typ: laddertypes::TypeTerm,
+        typ: TypeTag,
         expr: Box<LTExpr>
     },
     Application {
@@ -119,7 +75,6 @@ pub enum LTExpr {
     },
     Abstraction {
         region: InputRegionTag,
-        scope: Arc<RwLock<Scope>>,
         args: Vec<(InputRegionTag, String, Option<TypeTag>)>,
         body: Box<LTExpr>,
     },
@@ -129,19 +84,12 @@ pub enum LTExpr {
         if_expr: Box<LTExpr>,
         else_expr: Box<LTExpr>,
     },
-    WhileLoop {
-        region: InputRegionTag,
-        condition: Box<LTExpr>,
-        body: Box<LTExpr>,
-    },
     Block {
         region: InputRegionTag,
-        scope: Arc<RwLock<Scope>>,
         statements: Vec<Statement>,
     },
     ExportBlock {
         region: InputRegionTag,
-        scope: Arc<RwLock<Scope>>,
         statements: Vec<Statement>,
     }
 }
@@ -149,66 +97,45 @@ pub enum LTExpr {
 impl LTExpr {
     pub fn get_region(&self) -> InputRegionTag {
         match self {
-            LTExpr::WordLiteral{ region, val } => region,
-            LTExpr::StringLiteral{ region, value } => region,
-            LTExpr::Symbol{ region, typ, symbol } => region,
-            LTExpr::Ascend{ region, typ, expr } => region,
+            LTExpr::WordLiteral { region, val } => region,
+            LTExpr::StringLiteral { region, value } => region,
+            LTExpr::Symbol { region, typ, symbol } => region,
+            LTExpr::Ascend { region, typ, expr } => region,
             LTExpr::Descend{ region, typ, expr } => region,
             LTExpr::Application{ region, typ, head, body } => region,
-            LTExpr::Abstraction{ region, scope, args, body } => region,
+            LTExpr::Abstraction{ region, args, body } => region,
             LTExpr::Branch{ region, condition, if_expr, else_expr } => region,
-            LTExpr::WhileLoop{ region, condition, body } => region,
-            LTExpr::Block{ region, scope, statements } => region,
-            LTExpr::ExportBlock{ region, scope, statements } => region
+            LTExpr::Block{ region, statements } => region,
+            LTExpr::ExportBlock{ region, statements } => region
         }.clone()
     }

-    pub fn get(&self, addr: Vec<usize>) -> Option<LTExpr> {
-        if addr.len() == 0 {
-            Some(self.clone())
-        } else {
-            let mut sub_addr = addr.clone();
-            let top_idx = sub_addr.remove(0);
-            match self {
-                LTExpr::Ascend{ region, typ, expr } => expr.get(addr),
-                LTExpr::Descend{ region, typ, expr } => expr.get(addr),
-                LTExpr::Application{ region, typ, head, body } => {
-                    match top_idx {
-                        0 => head.get(sub_addr),
-                        i => {
-                            if let Some(b) = body.get(i - 1) {
-                                b.get(sub_addr)
-                            } else {
-                                None
-                            }
-                        }
-                    }
-                }
-                LTExpr::Abstraction{ region, scope, args, body } => {
-                    body.get(addr)
-                }
-                LTExpr::Branch{ region, condition, if_expr, else_expr } => {
-                    match top_idx {
-                        0 => condition.get(sub_addr),
-                        1 => if_expr.get(sub_addr),
-                        2 => else_expr.get(sub_addr),
-                        _ => None
-                    }
-                }
-                LTExpr::WhileLoop{ region, condition, body } => {
-                    match top_idx {
-                        0 => condition.get(sub_addr),
-                        1 => body.get(sub_addr),
-                        _ => None
-                    }
-                }
-                LTExpr::Block{ region, scope, statements } |
-                LTExpr::ExportBlock{ region, scope, statements } => {
-                    // statements.get(top_idx)?.get(sub_addr)
-                    None
-                }
-                _ => None
-            }
+    pub fn lit_uint(val: u64) -> Self {
+        LTExpr::WordLiteral {
+            region: InputRegionTag::default(),
+            val: val as tisc::VM_Word,
+        }
+    }
+
+    pub fn application(head: LTExpr, body: Vec<LTExpr>) -> Self {
+        LTExpr::Application {
+            region: InputRegionTag::default(),
+            typ: None,
+            head: Box::new(head),
+            body: body,
+        }
+    }
+
+    pub fn block(body: Vec<Statement>) -> Self {
+        LTExpr::Block { region: InputRegionTag::default(), statements: body }
+    }
+}
+
+impl Statement {
+    pub fn while_loop(cond: LTExpr, body: Vec<Statement>) -> Self {
+        Statement::WhileLoop {
+            condition: cond,
+            body,
         }
     }
 }
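The helpers introduced above replace the old address-based `get` accessor. A minimal sketch (not part of the diff) of how they might be used inside `lib-ltcore`, e.g. in a test module; `emit` refers to the runtime word declared in `runtime.rs`:

```rust
use crate::expr::{LTExpr, Statement};
use crate::lexer::InputRegionTag;

// Builds roughly: while( 1 ) { emit 65; }
// All regions default to InputRegionTag::default(), as the helpers do internally.
fn example_ast() -> LTExpr {
    let body = vec![Statement::Expr(LTExpr::application(
        LTExpr::Symbol {
            region: InputRegionTag::default(),
            typ: None,
            symbol: "emit".into(),
        },
        vec![LTExpr::lit_uint(65)],
    ))];

    LTExpr::block(vec![Statement::while_loop(LTExpr::lit_uint(1), body)])
}
```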

View file

@@ -2,7 +2,6 @@
 pub mod expr;
 pub mod lexer;
 pub mod parser;
-pub mod typing;
 pub mod procedure_compiler;
 pub mod runtime;
 pub mod symbols;

View file

@ -2,12 +2,6 @@ use {
crate::{ crate::{
expr::{LTExpr, Statement, TypeError, TypeTag}, expr::{LTExpr, Statement, TypeError, TypeTag},
lexer::{LTIRLexer, LTIRToken, LexError, InputRegionTag}, lexer::{LTIRLexer, LTIRToken, LexError, InputRegionTag},
symbols::{Scope}
},
laddertypes::{
dict::TypeDict,
parser::ParseLadderType,
unparser::UnparseLadderType
}, },
std::{ std::{
iter::Peekable, iter::Peekable,
@ -60,7 +54,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
* `: T` * `: T`
*/ */
pub fn parse_type_tag<It>( pub fn parse_type_tag<It>(
typectx: &mut impl TypeDict, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<It>, tokens: &mut Peekable<It>,
) -> Result<Option<(InputRegionTag, laddertypes::TypeTerm)>, (InputRegionTag, ParseError)> ) -> Result<Option<(InputRegionTag, laddertypes::TypeTerm)>, (InputRegionTag, ParseError)>
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)> where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@ -70,7 +64,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
match peektok { match peektok {
Ok(LTIRToken::AssignType(typeterm_str)) => { Ok(LTIRToken::AssignType(typeterm_str)) => {
tokens.next(); tokens.next();
match typectx.parse(typeterm_str.as_str()) { match typectx.write().unwrap().parse(typeterm_str.as_str()) {
Ok(typeterm) => Ok(Some((region, typeterm))), Ok(typeterm) => Ok(Some((region, typeterm))),
Err(parse_error) => Err((region, ParseError::TypeParseError(parse_error))), Err(parse_error) => Err((region, ParseError::TypeParseError(parse_error))),
} }
@ -115,7 +109,7 @@ impl VariableBinding {
* or `x : T` * or `x : T`
*/ */
pub fn parse_binding_expr<It>( pub fn parse_binding_expr<It>(
typectx: &mut impl TypeDict, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<It>, tokens: &mut Peekable<It>,
) -> Result< VariableBinding, (InputRegionTag, ParseError)> ) -> Result< VariableBinding, (InputRegionTag, ParseError)>
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)> where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@ -148,7 +142,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
* `{ x:T; y:U; ... }` * `{ x:T; y:U; ... }`
*/ */
pub fn parse_binding_block<It>( pub fn parse_binding_block<It>(
typectx: &mut impl TypeDict, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<It>, tokens: &mut Peekable<It>,
) -> Result< Vec<VariableBinding>, (InputRegionTag, ParseError)> ) -> Result< Vec<VariableBinding>, (InputRegionTag, ParseError)>
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)> where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@ -181,7 +175,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
} }
pub fn parse_statement<It>( pub fn parse_statement<It>(
super_scope: &Arc<RwLock<Scope>>, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<It>, tokens: &mut Peekable<It>,
) -> Result<crate::expr::Statement, (InputRegionTag, ParseError)> ) -> Result<crate::expr::Statement, (InputRegionTag, ParseError)>
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)> where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@ -194,7 +188,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
tokens.next(); tokens.next();
// todo accept address-expression instead of symbol // todo accept address-expression instead of symbol
let (name_region, name) = parse_symbol(tokens)?; let (name_region, name) = parse_symbol(tokens)?;
let val_expr = parse_expr(super_scope, tokens)?; let val_expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Assignment { Ok(Statement::Assignment {
@ -206,40 +200,48 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
"let" => { "let" => {
tokens.next(); tokens.next();
let (name_region, name) = parse_symbol(tokens)?; let (name_region, name) = parse_symbol(tokens)?;
let typ = parse_type_tag(&mut *super_scope.write().unwrap(), tokens)?; let typ = parse_type_tag(typectx, tokens)?;
/* todo /* todo
let mut variable_bindings = parse_binding_expr(typectx, tokens)?; let mut variable_bindings = parse_binding_expr(typectx, tokens)?;
*/ */
let _ = parse_expect(tokens, LTIRToken::AssignValue); let _ = parse_expect(tokens, LTIRToken::AssignValue);
let val_expr = parse_expr(super_scope, tokens)?; let val_expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::LetAssign { Ok(Statement::LetAssign {
name_region,
typ: match typ { typ: match typ {
Some((r,t)) => Some(t), Some((r,t)) => Some(Ok(t)),
None => None None => None
}, },
var_id: name, var_id: name,
val_expr, val_expr,
}) })
} }
"while" => {
tokens.next();
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
let cond = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
Ok(Statement::WhileLoop {
condition: cond,
body: parse_statement_block(typectx, tokens)?,
})
}
"return" => { "return" => {
tokens.next(); tokens.next();
let expr = parse_expr(super_scope, tokens)?; let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Return(parse_expr(super_scope, tokens)?)) Ok(Statement::Return(parse_expr(typectx, tokens)?))
} }
_ => { _ => {
let expr = parse_expr(super_scope, tokens)?; let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Expr(expr)) Ok(Statement::Expr(expr))
} }
} }
} }
Ok(_) => { Ok(_) => {
let expr = parse_expr(super_scope, tokens)?; let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Expr(expr)) Ok(Statement::Expr(expr))
} }
@ -251,7 +253,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
} }
pub fn parse_statement_block<It>( pub fn parse_statement_block<It>(
scope: &Arc<RwLock<Scope>>, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<It>, tokens: &mut Peekable<It>,
) -> Result<Vec<Statement>, (InputRegionTag, ParseError)> ) -> Result<Vec<Statement>, (InputRegionTag, ParseError)>
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)> where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@ -266,7 +268,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
return Ok(statements); return Ok(statements);
} }
Ok(_) => { Ok(_) => {
statements.push(parse_statement(scope, tokens)?); statements.push(parse_statement(typectx, tokens)?);
} }
Err(err) => { Err(err) => {
return Err((*region, ParseError::LexError(err.clone()))); return Err((*region, ParseError::LexError(err.clone())));
@ -278,21 +280,14 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
} }
pub fn parse_atom<It>( pub fn parse_atom<It>(
typectx: &mut impl TypeDict,
tokens: &mut Peekable<It>, tokens: &mut Peekable<It>,
) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)> ) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)> where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
{ {
match tokens.next() { match tokens.next() {
Some((region, Ok(LTIRToken::Symbol(sym)))) => Ok(LTExpr::Symbol{ region, symbol: sym, typ: None }), Some((region, Ok(LTIRToken::Symbol(sym)))) => Ok(LTExpr::Symbol{ region, symbol: sym, typ: None }),
Some((region, Ok(LTIRToken::Char(c)))) => Ok( Some((region, Ok(LTIRToken::Char(c)))) => Ok(LTExpr::lit_uint(c as u64)),
LTExpr::Ascend { Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::lit_uint(n as u64)),
region: region.clone(),
typ: typectx.parse("Char ~ Unicode ~ _2^32").unwrap(),
expr: Box::new(LTExpr::WordLiteral{ region, val: c as tisc::VM_Word })
}
),
Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::WordLiteral{ region, val: n as tisc::VM_Word }),
Some((region, Ok(_))) => Err((region, ParseError::UnexpectedToken)), Some((region, Ok(_))) => Err((region, ParseError::UnexpectedToken)),
Some((region, Err(err))) => Err((region, ParseError::LexError(err))), Some((region, Err(err))) => Err((region, ParseError::LexError(err))),
None => Err((InputRegionTag::default(), ParseError::UnexpectedEnd)), None => Err((InputRegionTag::default(), ParseError::UnexpectedEnd)),
@ -300,7 +295,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
} }
pub fn parse_expr<It>( pub fn parse_expr<It>(
super_scope: &Arc<RwLock<Scope>>, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<It>, tokens: &mut Peekable<It>,
) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)> ) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)> where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@ -314,15 +309,12 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
let region = region.clone(); let region = region.clone();
tokens.next(); tokens.next();
let scope = Scope::with_parent(super_scope); let mut variable_bindings = parse_binding_expr(typectx, tokens)?;
let mut variable_bindings = parse_binding_expr(&mut *scope.write().unwrap(), tokens)?;
let _ = parse_expect(tokens, LTIRToken::MapsTo); let _ = parse_expect(tokens, LTIRToken::MapsTo);
let body = parse_expr(&scope, tokens)?; let body = parse_expr(typectx, tokens)?;
return Ok(LTExpr::Abstraction { return Ok(LTExpr::Abstraction {
region, region,
scope,
args: variable_bindings.flatten().into_iter().map(|(r,s,t)| (r,s,t.map(|t|Ok(t))) ).collect(), args: variable_bindings.flatten().into_iter().map(|(r,s,t)| (r,s,t.map(|t|Ok(t))) ).collect(),
body: Box::new(body), body: Box::new(body),
}); });
@ -340,23 +332,14 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
} }
_ => {} _ => {}
} }
children.push(parse_expr(super_scope, tokens)?); children.push(parse_expr(typectx, tokens)?);
} }
} }
Ok(LTIRToken::ExprClose) => { Ok(LTIRToken::ExprClose) => {
break; break;
} }
Ok(LTIRToken::BlockOpen) => { Ok(LTIRToken::BlockOpen) => {
let region = region.clone(); children.push(LTExpr::block(parse_statement_block(typectx, tokens)?));
let scope = Scope::with_parent(super_scope);
let statements = parse_statement_block(&scope, tokens)?;
children.push(
LTExpr::Block {
region,
scope,
statements
});
} }
Ok(LTIRToken::BlockClose) => { Ok(LTIRToken::BlockClose) => {
break; break;
@ -371,45 +354,37 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
children.push(LTExpr::StringLiteral{ region, value }); children.push(LTExpr::StringLiteral{ region, value });
} }
Ok(LTIRToken::Ascend(type_str)) => { Ok(LTIRToken::Ascend(type_str)) => {
let mut region = region.clone(); let region = region.clone();
let typ = super_scope.write().unwrap().parse(type_str); let typ =
match typectx.write().unwrap().parse(type_str) {
Ok(t) => Ok(t),
Err(e) => Err(TypeError::ParseError(e))
};
if let Some(expr) = children.pop() { if let Some(expr) = children.pop() {
region.begin = expr.get_region().begin; children.push(LTExpr::Ascend {
region: region.clone(),
match typ { typ,
Ok(typ) => { expr: Box::new(expr)
children.push(LTExpr::Ascend { });
region: region.clone(),
typ,
expr: Box::new(expr)
});
},
Err(e) => {
return Err((region, ParseError::TypeParseError(e)));
}
}
} }
tokens.next(); tokens.next();
} }
Ok(LTIRToken::Descend(type_str)) => { Ok(LTIRToken::Descend(type_str)) => {
let region = region.clone(); let region = region.clone();
let typ = super_scope.write().unwrap().parse(type_str); let typ =
match typectx.write().unwrap().parse(type_str) {
Ok(t) => Ok(t),
Err(e) => Err(TypeError::ParseError(e))
};
if let Some(expr) = children.pop() { if let Some(expr) = children.pop() {
match typ { children.push(LTExpr::Descend {
Ok(typ) => { region,
children.push(LTExpr::Descend { typ,
region, expr: Box::new(expr)
typ, });
expr: Box::new(expr)
});
}
Err(e) => {
return Err((region, ParseError::TypeParseError(e)));
}
}
} }
tokens.next(); tokens.next();
@ -419,21 +394,16 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
let region = region.clone(); let region = region.clone();
tokens.next(); tokens.next();
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?; let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
let cond = parse_expr(super_scope, tokens)?; let cond = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::ExprClose)?; let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
let if_expr = LTExpr::block(parse_statement_block(typectx, tokens)?);
let if_statements = parse_statement_block(super_scope, tokens)?; let mut else_expr = LTExpr::block(vec![]);
let scope = super_scope.clone();
let if_expr = LTExpr::Block{ region: region.clone(), scope, statements: if_statements };
let scope = super_scope.clone();
let mut else_expr = LTExpr::Block{ region: InputRegionTag::default(), scope, statements: vec![] };
if let Some((region, peektok)) = tokens.peek() { if let Some((region, peektok)) = tokens.peek() {
if let Ok(LTIRToken::Symbol(name)) = peektok { if let Ok(LTIRToken::Symbol(name)) = peektok {
if name == "else" { if name == "else" {
tokens.next(); tokens.next();
else_expr = parse_expr(super_scope, tokens)?; else_expr = parse_expr(typectx, tokens)?;
} }
} }
} }
@ -445,35 +415,21 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
else_expr: Box::new(else_expr), else_expr: Box::new(else_expr),
}); });
}, },
"while" => {
let region = region.clone();
tokens.next();
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
let cond = parse_expr(super_scope, tokens)?;
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
children.push(LTExpr::WhileLoop {
region,
condition: Box::new(cond),
body: Box::new(parse_expr(super_scope, tokens)?),
});
}
"export" => { "export" => {
let region = region.clone(); let region = region.clone();
tokens.next(); tokens.next();
let scope = Scope::with_parent(super_scope); let block = parse_statement_block(typectx, tokens)?;
let block = parse_statement_block(&scope, tokens)?;
children.push(LTExpr::ExportBlock { children.push(LTExpr::ExportBlock {
region, region,
scope,
statements: block statements: block
}); });
}, },
name => { name => {
children.push(parse_atom(&mut *super_scope.write().unwrap(), tokens)?); children.push(parse_atom(tokens)?);
} }
}, },
Ok(atom) => { Ok(atom) => {
children.push(parse_atom(&mut *super_scope.write().unwrap(), tokens)?); children.push(parse_atom(tokens)?);
} }
Err(err) => { Err(err) => {
return Err((*region, ParseError::LexError(err.clone()))); return Err((*region, ParseError::LexError(err.clone())));
@ -483,16 +439,8 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
if children.len() > 1 { if children.len() > 1 {
let head = children.remove(0); let head = children.remove(0);
let mut region = head.get_region();
for c in children.iter() {
let cr = c.get_region();
region.begin = usize::min( region.begin, cr.begin );
region.end = usize::max( region.end, cr.end );
}
Ok(LTExpr::Application { Ok(LTExpr::Application {
region, region: InputRegionTag::default(),
typ: None, typ: None,
head: Box::new(head), head: Box::new(head),
body: children, body: children,
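For orientation, a usage sketch (not part of the diff) of the refactored parser entry point: it now receives the shared `TypeDict` handle instead of a `Scope`. The token iterator is assumed to be built as in the `ltir` frontend, yielding `(InputRegionTag, Result<LTIRToken, LexError>)` pairs:

```rust
use std::{iter::Peekable, sync::{Arc, RwLock}};

// Sketch under the assumption that ltcore is available as a dependency.
fn parse_source<It>(
    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
    program_tokens: &mut Peekable<It>,
) where
    It: Iterator<Item = (ltcore::lexer::InputRegionTag,
                         Result<ltcore::lexer::LTIRToken, ltcore::lexer::LexError>)>,
{
    match ltcore::parser::parse_expr(typectx, program_tokens) {
        Ok(ast) => {
            // hand the AST to the ProcedureCompiler (see the next file)
            let _ = ast;
        }
        Err((_region, _parse_error)) => {
            // report the error, e.g. via the diagnostic printer in the ltir binary
        }
    }
}
```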

View file

@ -8,90 +8,107 @@ use {
ops::Deref, ops::Deref,
sync::{Arc, RwLock}, sync::{Arc, RwLock},
}, },
laddertypes::{
parser::ParseLadderType,
unparser::UnparseLadderType
},
tisc::{assembler::AssemblyWord, linker::LinkAddr}, tisc::{assembler::AssemblyWord, linker::LinkAddr},
tiny_ansi::TinyAnsi
}; };
pub struct ProcedureCompiler { pub struct ProcedureCompiler {
proc_symbol: String, pub symbols: Arc<RwLock<Scope>>,
scope: Arc<RwLock<Scope>>,
asm: tisc::Assembler, asm: tisc::Assembler,
subroutines: Vec<tisc::assembler::AssemblyWord>, linker: tisc::Linker,
pub linker: tisc::Linker, result_size: usize,
pub diagnostics: Vec<( InputRegionTag, String )> pub diagnostics: Vec<( InputRegionTag, String )>
} }
impl ProcedureCompiler { impl ProcedureCompiler {
pub fn new(proc_symbol: String, scope: Arc<RwLock<Scope>>) -> Self { pub fn new(parent_scope: &Arc<RwLock<Scope>>) -> Self {
ProcedureCompiler { ProcedureCompiler {
proc_symbol, symbols: Scope::with_parent(parent_scope),
scope,
subroutines: Vec::new(),
asm: tisc::Assembler::new(), asm: tisc::Assembler::new(),
linker: tisc::Linker::new(), linker: tisc::Linker::new(),
result_size: 0,
diagnostics: Vec::new() diagnostics: Vec::new()
} }
} }
/*
pub fn export_symbols(&self) -> Vec<(String, SymbolDef)> {
let mut scope = self.scope.write().unwrap();
scope.update_link_addresses(&self.proc_symbol, &self.linker);
scope.export()
}
*/
pub fn get_bytecode(mut self, ret: bool) -> (
Vec<(String, SymbolDef)>,
Vec<tisc::assembler::AssemblyWord>
) {
let frame_size = self.scope.read().unwrap().get_frame_size();
if frame_size > 0 {
let alloc_asm = tisc::Assembler::new()
.lit(frame_size as tisc::VM_Word).call("data-frame-alloc");
let drop_asm = tisc::Assembler::new()
.lit(frame_size as tisc::VM_Word).call("data-frame-drop");
self.asm = alloc_asm.join( self.asm ).join( drop_asm ); pub fn into_asm(mut self, proc_symbol: &String) -> (Vec<(String, SymbolDef)>, Vec<(InputRegionTag, String)>, Vec<tisc::assembler::AssemblyWord>) {
} let mut symbols =
Arc::try_unwrap(self.symbols).ok().unwrap()
.into_inner().unwrap();
let main_section = self.asm.build(); symbols.update_link_addresses(
proc_symbol,
//self.linker.add_procedure( &self.proc_symbol, main_section );
// ^--- this would insert the asm section at the end,
// we however need it an the beginning of the bytecode
// insert section at front
self.linker.next_addr += main_section.len() as i64;
for (name,section) in self.linker.symbols.iter_mut() {
section.addr += main_section.len() as i64;
}
self.linker.symbols.insert(
self.proc_symbol.clone(),
tisc::linker::Section { addr: 0, data: main_section }
);
// replace all symbol definitions from subroutines
// with relative LinkAddr`s
self.scope.write().unwrap().update_link_addresses(
&self.proc_symbol,
&self.linker &self.linker
); );
( let data_frame_size = symbols.get_frame_size() as i64;
self.scope.read().unwrap().export(),
self.linker.link_relative( &self.proc_symbol ).expect("link error") let body = self.asm.build();
) self.linker.add_procedure("__procedure_body__", body);
let body_addr = self
.linker
.get_link_addr(&"__procedure_body__".into())
.unwrap();
let subroutines = self
.linker
.link_relative(&"__subroutines__".into())
.expect("link error");
let mut entry = tisc::Assembler::new();
if data_frame_size > 0 {
entry = entry.lit(data_frame_size).call("data-frame-alloc");
}
entry = entry.call_symbol(LinkAddr::Relative {
symbol: "__subroutines__".into(),
offset: body_addr,
});
if data_frame_size > 0 {
entry = entry.lit(data_frame_size).call("data-frame-drop");
}
let mut superlink = tisc::Linker::new();
superlink.add_procedure("", entry.build());
superlink.add_procedure("__subroutines__", subroutines);
symbols.update_link_addresses(
&proc_symbol,
&superlink
);
let mut symbol_exports = symbols.export();
let subroutines_addr = superlink.get_link_addr(&"__subroutines__".into()).unwrap();
for (name, def) in symbol_exports.iter_mut() {
match def {
SymbolDef::Procedure{ in_types:_, out_types:_, link_addr, export:_ } => {
match link_addr {
LinkAddr::Relative{ symbol, offset } => {
*offset += subroutines_addr;
}
LinkAddr::Absolute(addr) => {
*addr += subroutines_addr;
}
}
}
_ => {}
}
}
let bytecode = superlink.link_relative(proc_symbol).expect("link error");
(symbol_exports, self.diagnostics, bytecode)
}
pub fn verify(&self) {
// todo
} }
pub fn compile_statement(mut self, statement: &Statement, enable_export: bool) -> Self { pub fn compile_statement(mut self, statement: &Statement, enable_export: bool) -> Self {
match statement { match statement {
Statement::Assignment { name_region, var_id, val_expr } => { Statement::Assignment { name_region, var_id, val_expr } => {
self = self.compile_expr(val_expr); self = self.compile(val_expr);
match self.scope.read().unwrap().get(var_id) { match self.symbols.read().unwrap().get(var_id) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => { Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm.lit(stack_ref).call("data-frame-set"); self.asm = self.asm.lit(stack_ref).call("data-frame-set");
} }
@ -106,8 +123,9 @@ impl ProcedureCompiler {
out_types, out_types,
link_addr, link_addr,
export export
}) => { }) => {
self.asm = self.asm self.asm = self
.asm
.call(var_id.as_str()) .call(var_id.as_str())
.inst(tisc::VM_Instruction::Store); .inst(tisc::VM_Instruction::Store);
} }
@ -120,66 +138,73 @@ impl ProcedureCompiler {
} }
} }
Statement::LetAssign { Statement::LetAssign {
name_region,
typ, typ,
var_id, var_id,
val_expr, val_expr,
} => { } => match val_expr {
let val_type = self.scope.read().unwrap() LTExpr::Abstraction { region:_, args: _, body: _ } => {
.get(var_id).unwrap() self.symbols
.get_type(&mut self.scope.clone()); .write()
let val_type = val_type.sugar(&mut self.scope.clone()); .unwrap()
match val_type { .declare_proc(var_id.clone(), vec![], vec![], enable_export);
laddertypes::SugaredTypeTerm::Func(mut f_types) => {
let mut c = ProcedureCompiler::new(
var_id.clone(),
self.scope.clone()
);
c = c.compile_expr( val_expr );
self.diagnostics.append(&mut c.diagnostics);
let (symbols,code) = c.get_bytecode( true ); let (exports, mut diagnostics, lambda_procedure) = ProcedureCompiler::new(&self.symbols)
eprintln!("LET assign compiled {}", var_id); .compile(val_expr)
for (i,l) in tisc::assembler::disassemble( &code ).iter().enumerate() { .into_asm(var_id);
eprintln!("{}+{} ... {}", var_id, i, l);
}
self.linker.add_procedure(var_id, code);
/* self.diagnostics.append(&mut diagnostics);
let out_types = vec![ f_types.pop().unwrap().desugar(&mut self.scope.clone()) ];
let in_types = f_types.into_iter().map(|t| t.desugar(&mut self.scope.clone())).collect();
self.scope.write().unwrap().declare_proc( self.linker.add_procedure(var_id, lambda_procedure);
var_id.clone(),
in_types,
out_types,
true
);
*/
}
_ => { let offset = self.linker.get_link_addr(var_id).unwrap();
self = self.compile_statement(&Statement::Assignment {
name_region: *name_region, // forward already exported symbols
var_id: var_id.clone(), if enable_export {
val_expr: val_expr.clone(), self.symbols.write().unwrap().import( exports );
}, false);
} }
} }
_ => {
self.symbols
.write()
.unwrap()
.declare_var(var_id.clone(), laddertypes::TypeTerm::unit());
self = self.compile_statement(&Statement::Assignment {
name_region: InputRegionTag::default(),
var_id: var_id.clone(),
val_expr: val_expr.clone(),
}, false);
}
},
Statement::WhileLoop { condition, body } => {
let asm = self.asm;
self.asm = tisc::Assembler::new();
self = self.compile(condition);
let cond_asm = self.asm;
self.asm = tisc::Assembler::new();
for statement in body.into_iter() {
self = self.compile_statement(statement, false);
}
let body_asm = self.asm;
self.asm = asm;
self.asm = self.asm.while_loop(cond_asm, body_asm);
} }
Statement::Expr(expr) => { Statement::Expr(expr) => {
self = self.compile_expr(expr); self = self.compile(expr);
} }
Statement::Return(expr) => { Statement::Return(expr) => {
self = self.compile_expr(expr); self = self.compile(expr);
} }
} }
self self
} }
pub fn compile_expr(mut self, expr: &LTExpr) -> Self { pub fn compile(mut self, expr: &LTExpr) -> Self {
match expr { match expr {
LTExpr::Symbol { region, typ, symbol } => match self.scope.read().unwrap().get(symbol) { LTExpr::Symbol { region, typ, symbol } => match self.symbols.read().unwrap().get(symbol) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => { Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm.lit(stack_ref).call("data-frame-get"); self.asm = self.asm.lit(stack_ref).call("data-frame-get");
} }
@ -210,10 +235,34 @@ impl ProcedureCompiler {
self.asm = self.asm.lit(*val); self.asm = self.asm.lit(*val);
} }
LTExpr::Ascend { region, typ, expr } => { LTExpr::Ascend { region, typ, expr } => {
self = self.compile_expr(expr); self = self.compile(expr);
} }
LTExpr::Descend { region, typ, expr } => { LTExpr::Descend { region, typ, expr } => {
self = self.compile_expr(expr); self = self.compile(expr);
}
LTExpr::Application { region, typ, head, body } => {
for arg in body.iter().rev() {
self = self.compile(arg);
}
self = self.compile(head);
}
LTExpr::Abstraction { region, args, body } => {
for (region, arg_name, arg_type) in args.iter() {
if let Some(Ok(typeterm)) = arg_type {
let id = self
.symbols
.write()
.unwrap()
.declare_var(arg_name.clone(), typeterm.clone());
self.asm = self.asm.lit(id).call("data-frame-set");
} else {
self.diagnostics.push((
region.clone(),
format!("invalid type {:?} for argument {}", arg_type, arg_name)
));
}
}
self = self.compile(body);
} }
LTExpr::Branch { LTExpr::Branch {
region, region,
@ -221,112 +270,29 @@ impl ProcedureCompiler {
if_expr, if_expr,
else_expr, else_expr,
} => { } => {
self = self.compile_expr(condition); self = self.compile(condition);
let asm = self.asm; let asm = self.asm;
self.asm = tisc::Assembler::new(); self.asm = tisc::Assembler::new();
self = self.compile_expr(if_expr); self = self.compile(if_expr);
let if_asm = self.asm; let if_asm = self.asm;
self.asm = tisc::Assembler::new(); self.asm = tisc::Assembler::new();
self = self.compile_expr(else_expr); self = self.compile(else_expr);
let else_asm = self.asm; let else_asm = self.asm;
self.asm = asm; self.asm = asm;
self.asm = self.asm.branch(if_asm, else_asm); self.asm = self.asm.branch(if_asm, else_asm);
} }
LTExpr::WhileLoop { region, condition, body } => { LTExpr::Block { region, statements } => {
let asm = self.asm; for s in statements.iter() {
self = self.compile_statement(s, false);
self.asm = tisc::Assembler::new(); }
self = self.compile_expr(condition);
let cond_asm = self.asm;
self.asm = tisc::Assembler::new();
self = self.compile_expr(body);
let body_asm = self.asm;
self.asm = asm;
self.asm = self.asm.while_loop(cond_asm, body_asm);
} }
LTExpr::Application { region, typ, head, body } => { LTExpr::ExportBlock{ region, statements } => {
for arg in body.iter().rev() { for s in statements.iter() {
self = self.compile_expr(arg); self = self.compile_statement(s, true);
} }
self = self.compile_expr(head);
}
LTExpr::Abstraction { region, scope, args, body } => {
let mut abs_compiler = ProcedureCompiler::new("__abs__".into(), scope.clone());
for (region, arg_name, arg_type) in args.iter() {
match scope.read().unwrap().get(arg_name) {
Some(SymbolDef::FrameRef{ typ, stack_ref }) => {
eprintln!("Arg {} stack ref = {}", arg_name, stack_ref);
// TODO: aknowledge actual size of arguments
// let arg_size = typ.get_size()
let arg_size = 1;
for i in 0..arg_size {
abs_compiler.asm = abs_compiler.asm
.lit(stack_ref + i)
.call("data-frame-set");
}
}
_ => {
self.diagnostics.push(
(region.clone(),
format!("argument variable is not a frame-ref"))
);
}
}
}
abs_compiler = abs_compiler.compile_expr( body );
let (abs_symbols, mut abs_code) = abs_compiler.get_bytecode( false );
for (s,def) in abs_symbols.iter() {
eprintln!("{} = {:?}", s, def);
}
for (i, l) in tisc::assembler::disassemble(&abs_code).into_iter().enumerate() {
eprintln!("__abs__+{} .. {}", i, l);
}
self.asm.words.append( &mut abs_code );
/*
self.linker.add_procedure(
"__abs__".into(),
abs_code
);*/
}
LTExpr::Block { region, scope, statements } => {
let mut block_compiler = ProcedureCompiler::new(
"__block__".into(),
scope.clone()
);
for stmnt in statements.iter() {
block_compiler = block_compiler.compile_statement( stmnt, true );
}
let (block_symbols, mut block_code) = block_compiler.get_bytecode( true );
eprintln!("BLOCK compiler:");
for (s,def) in block_symbols.iter() {
eprintln!("{} = {:?}", s, def);
}
for (i,l) in tisc::assembler::disassemble( &block_code ).into_iter().enumerate() {
eprintln!("block+{} .. {}", i, l);
}
self.linker.
self.scope.write().unwrap().import(
block_symbols
);
self.asm.words.append(&mut block_code);
}
LTExpr::ExportBlock{ region, scope, statements } => {
} }
} }
self self
} }
} }
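A sketch (not part of the diff) of the refactored compile pipeline as a whole, mirroring the call site in the `ltir` main further down; the module name `my-module` is hypothetical:

```rust
use std::sync::{Arc, RwLock};

fn compile_module(
    main_scope: &Arc<RwLock<ltcore::symbols::Scope>>,
    ast: &ltcore::expr::LTExpr,
    linker: &mut tisc::Linker,
) {
    let name = String::from("my-module"); // hypothetical module name

    // The compiler owns its child scope; into_asm() links the body,
    // prepends the data-frame setup and returns exports + diagnostics + bytecode.
    let (exports, diagnostics, proc_code) =
        ltcore::procedure_compiler::ProcedureCompiler::new(main_scope)
            .compile(ast)
            .into_asm(&name);

    for (region, message) in diagnostics {
        eprintln!("{:?}: {}", region, message);
    }

    // make exported procedures visible to later modules, then link the code
    main_scope.write().unwrap().import(exports);
    linker.add_procedure(name.as_str(), proc_code);
}
```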

View file

@ -6,6 +6,7 @@ use {
pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> { pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
let symbols = Scope::new(); let symbols = Scope::new();
let typectx = symbols.read().unwrap().typectx.clone();
/* Duplicate the top item on the stack, /* Duplicate the top item on the stack,
* and whatever type this word has is preserved * and whatever type this word has is preserved
@ -13,8 +14,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"dup", "dup",
vec!["T"], vec!["T"],
vec!["T~machine.Word"], vec!["T~machine::Word"],
vec!["T~machine.Word", "T~machine.Word"], vec!["T~machine::Word", "T~machine::Word"],
); );
/* drop topmost element /* drop topmost element
@ -22,7 +23,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"drop", "drop",
vec!["T"], vec!["T"],
vec!["T~machine.Word"], vec!["T~machine::Word"],
vec![], vec![],
); );
/* Put a single Ascii character on stdout /* Put a single Ascii character on stdout
@ -30,14 +31,14 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"emit", "emit",
vec![], vec![],
vec!["Char~Unicode~_2^32~_2^64~machine.UInt64~machine.Word"], vec!["Char~Ascii~machine::Word"],
vec![], vec![],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"accept", "accept",
vec![], vec![],
vec![], vec![],
vec!["Char~Unicode~_2^32~_2^64~machine.UInt64~machine.Word"], vec!["Char~Ascii~machine::Word"],
); );
linker.add_procedure("dup", tisc::Assembler::new().inst(tisc::VM_Instruction::Dup).build()); linker.add_procedure("dup", tisc::Assembler::new().inst(tisc::VM_Instruction::Dup).build());
@ -57,7 +58,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
"_2^64~machine.UInt64~machine.Word", "_2^64~machine.UInt64~machine.Word",
"_2^64~machine.UInt64~machine.Word", "_2^64~machine.UInt64~machine.Word",
], ],
vec!["_2^64~machine.UInt64~machine.Word"], vec!["_2^64~machine::UInt64~machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"i-", "i-",
@ -66,7 +67,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
"_2^64~machine.UInt64~machine.Word", "_2^64~machine.UInt64~machine.Word",
"_2^64~machine.UInt64~machine.Word", "_2^64~machine.UInt64~machine.Word",
], ],
vec!["_2^64~machine::UInt64~machine.Word"], vec!["_2^64~machine::UInt64~machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"i*", "i*",
@ -84,7 +85,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
"_2^64~machine.UInt64~machine.Word", "_2^64~machine.UInt64~machine.Word",
"_2^64~machine.UInt64~machine.Word", "_2^64~machine.UInt64~machine.Word",
], ],
vec!["_2^64~machine.UInt64~machine.Word"], vec!["_2^64~machine::UInt64~machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"i%", "i%",
@ -162,27 +163,27 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"bit-neg", "bit-neg",
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"], vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"bit-and", "bit-and",
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"], vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"bit-or", "bit-or",
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"], vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"bit-xor", "bit-xor",
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"], vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"bit-shl", "bit-shl",
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"], vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"bit-shr", "bit-shr",
vec![], vec!["machine.Word", "machine.Word"], vec!["machine.Word"], vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
); );
linker.add_procedure("bit-neg", tisc::Assembler::new().inst(tisc::VM_Instruction::BitNeg).build()); linker.add_procedure("bit-neg", tisc::Assembler::new().inst(tisc::VM_Instruction::BitNeg).build());
@ -220,7 +221,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_static_parse( symbols.write().unwrap().declare_static_parse(
"data-frame-ptr", "data-frame-ptr",
"<MutRef <Seq machine.Word>>~machine.Address~machine.Word", "<MutRef <Seq machine::Word>>~machine::Address~machine::Word",
); );
linker.add_static("data-frame-ptr", vec![0x1000]); linker.add_static("data-frame-ptr", vec![0x1000]);
@ -229,7 +230,7 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
vec!["T"], vec!["T"],
vec![ vec![
"T~machine::Word", "T~machine::Word",
"<RefMut T~machine.Word>~LocalVariableId~machine.UInt64~machine.Word", "<RefMut T~machine::Word>~LocalVariableId~machine::UInt64~machine::Word",
], ],
vec![], vec![],
); );
@ -247,8 +248,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"data-frame-get", "data-frame-get",
vec!["T"], vec!["T"],
vec!["<Ref T~machine.Word>~DataFrameRef~machine.UInt64~machine.Word"], vec!["<Ref T~machine::Word>~DataFrameRef~machine::UInt64~machine::Word"],
vec!["T~machine.Word"], vec!["T~machine::Word"],
); );
linker.add_procedure( linker.add_procedure(
"data-frame-get", "data-frame-get",

View file

@ -1,9 +1,5 @@
use { use {
crate::expr::LTExpr, crate::expr::LTExpr,
laddertypes::{
TypeDict, TypeID,
parser::ParseLadderType
},
std::{ std::{
collections::HashMap, collections::HashMap,
sync::{Arc, RwLock}, sync::{Arc, RwLock},
@ -32,7 +28,7 @@ pub enum SymbolDef {
impl SymbolDef { impl SymbolDef {
pub fn get_type( pub fn get_type(
&self, &self,
typedict: &mut impl laddertypes::dict::TypeDict, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
) -> laddertypes::TypeTerm { ) -> laddertypes::TypeTerm {
match self { match self {
SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(), SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(),
@ -42,38 +38,29 @@ impl SymbolDef {
out_types, out_types,
link_addr: _, link_addr: _,
export: _, export: _,
} => { } => laddertypes::TypeTerm::App(
let mut out_types = out_types.clone(); std::iter::once(
let out_type = typectx
if out_types.len() == 1 { .write()
out_types.pop().unwrap() .unwrap()
} else { .parse("Func")
laddertypes::TypeTerm::App( .expect("parse typeterm")
std::iter::once( ).chain(
typedict.parse("Struct").unwrap() in_types.clone().into_iter()
).chain( ).chain(
out_types.into_iter()
).collect()
)
};
laddertypes::TypeTerm::App(
std::iter::once( std::iter::once(
typedict.parse("Func").expect("parse typeterm") typectx.write().unwrap().parse("Struct").expect("parse typeterm")
).chain( ).chain(
in_types.clone().into_iter() out_types.clone().into_iter()
).chain( )
std::iter::once(out_type) ).collect()
).collect() ),
)
},
} }
} }
} }
/* Describes a lexical scope of symbols /* Describes a lexical scope of symbols
*/ */
#[derive(Clone, Debug)]
pub struct Scope { pub struct Scope {
/* definition of runtime symbols /* definition of runtime symbols
*/ */
@ -81,7 +68,7 @@ pub struct Scope {
/* type symbols /* type symbols
*/ */
typedict: Arc<RwLock<laddertypes::BimapTypeDict>>, pub typectx: Arc<RwLock<laddertypes::TypeDict>>,
/* number of words required for /* number of words required for
* the stack frame of this scope * the stack frame of this scope
@ -94,70 +81,39 @@ pub struct Scope {
parent: Option<Arc<RwLock<Scope>>>, parent: Option<Arc<RwLock<Scope>>>,
} }
impl TypeDict for Scope {
fn insert(&mut self, name: String, id: TypeID) {
self.typedict.write().unwrap().insert(name,id)
}
fn add_varname(&mut self, vn: String) -> TypeID {
self.typedict.add_varname(vn)
}
fn add_typename(&mut self, tn: String) -> TypeID {
if let Some(parent) = self.parent.as_mut() {
parent.add_typename(tn)
} else {
self.typedict.add_typename(tn)
}
}
fn get_typeid(&self, tn: &String) -> Option<TypeID> {
if let Some(id) = self.typedict.get_typeid(tn) {
Some(id)
} else {
if let Some(parent) = self.parent.as_ref() {
parent.get_typeid(tn)
} else {
None
}
}
}
fn get_typename(&self, tid: &TypeID) -> Option<String> {
if let Some(name) = self.typedict.get_typename(tid) {
Some(name)
} else {
if let Some(parent) = self.parent.as_ref() {
parent.get_typename(tid)
} else {
None
}
}
}
}
impl Scope { impl Scope {
pub fn new() -> Arc<RwLock<Self>> { pub fn new() -> Arc<RwLock<Self>> {
Arc::new(RwLock::new(Scope { Arc::new(RwLock::new(Scope {
symbols: HashMap::new(), symbols: HashMap::new(),
typedict: Arc::new(RwLock::new(laddertypes::dict::BimapTypeDict::new())), typectx: Arc::new(RwLock::new(laddertypes::dict::TypeDict::new())),
frame_size: 0, frame_size: 0,
parent: None, parent: None,
})) }))
} }
pub fn with_parent(parent: &Arc<RwLock<Scope>>) -> Arc<RwLock<Self>> { pub fn with_parent(parent: &Arc<RwLock<Scope>>) -> Arc<RwLock<Self>> {
let mut s = Scope::new(); let s = Scope {
s.write().unwrap().parent = Some(parent.clone()); symbols: HashMap::new(),
s
// todo: create proper child scope
typectx: parent.read().unwrap().typectx.clone(),
frame_size: 0,
parent: Some(parent.clone()),
};
Arc::new(RwLock::new(s))
} }
pub fn export(&self) -> Vec<(String, SymbolDef)> { pub fn export(self) -> Vec<(String, SymbolDef)> {
self.symbols self.symbols
.iter() .into_iter()
.filter(|(name, def)| .filter(|(name, def)|
match def { match def {
SymbolDef::Procedure { in_types:_, out_types:_, link_addr:_, export } => *export, SymbolDef::Procedure { in_types:_, out_types:_, link_addr:_, export } => *export,
_ => false _ => false
} }
) )
.map(|(n,d)| (n.clone(), d.clone()))
.collect() .collect()
} }
@ -190,14 +146,6 @@ impl Scope {
} }
} }
pub fn get_type(&mut self, name: &str) -> Option<laddertypes::TypeTerm> {
if let Some(sdef) = self.get(name) {
Some(sdef.get_type( &mut self.typedict ))
} else {
None
}
}
/// takes the link-addresses from a Linker /// takes the link-addresses from a Linker
/// and updates the symbol table to relative addresses /// and updates the symbol table to relative addresses
/// based on the next super-label /// based on the next super-label
@ -209,7 +157,7 @@ impl Scope {
for (name, def) in self.symbols.iter_mut() { for (name, def) in self.symbols.iter_mut() {
if let Some(offset) = linker.get_link_addr( name ) { if let Some(offset) = linker.get_link_addr( name ) {
match def { match def {
SymbolDef::Procedure { SymbolDef::Procedure {
in_types:_,out_types:_, in_types:_,out_types:_,
link_addr, link_addr,
export:_ export:_
@ -236,26 +184,29 @@ impl Scope {
out_types: Vec<&str>, out_types: Vec<&str>,
) { ) {
for v in type_vars { for v in type_vars {
self.add_varname(v.into()); self.typectx.write().unwrap().add_varname(v.into());
} }
let mut td = self.typedict.clone();
self.declare_proc( self.declare_proc(
String::from(name), String::from(name),
in_types in_types
.into_iter() .into_iter()
.map(move |t| { .map(|t| {
td.parse(t).expect("parse typeterm") self.typectx
.write()
.unwrap()
.parse(t)
.expect("parse typeterm")
}) })
.collect(), .collect(),
out_types out_types
.into_iter() .into_iter()
.map({ .map(|t| {
let mut td = self.typedict.clone(); self.typectx
move |t| { .write()
td.parse(t).expect("parse typeterm") .unwrap()
} .parse(t)
.expect("parse typeterm")
}) })
.collect(), .collect(),
false false
@ -284,6 +235,9 @@ impl Scope {
pub fn declare_var_parse(&mut self, name: &str, typ: &str) { pub fn declare_var_parse(&mut self, name: &str, typ: &str) {
let typ = self let typ = self
.typectx
.write()
.unwrap()
.parse(typ) .parse(typ)
.expect("parse typeterm"); .expect("parse typeterm");
self.declare_var(String::from(name), typ); self.declare_var(String::from(name), typ);
@ -303,6 +257,9 @@ impl Scope {
pub fn declare_static_parse(&mut self, name: &str, typ: &str) { pub fn declare_static_parse(&mut self, name: &str, typ: &str) {
let typ = self let typ = self
.typectx
.write()
.unwrap()
.parse(typ) .parse(typ)
.expect("parse typeterm"); .expect("parse typeterm");
self.declare_static(String::from(name), typ); self.declare_static(String::from(name), typ);
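A small sketch (not part of the diff) of the new scoping behaviour: child scopes now share their parent's `typectx` handle instead of implementing `TypeDict` themselves, so type names parsed in any scope land in the same dictionary. Assumes it runs inside `lib-ltcore`, e.g. as a test:

```rust
use std::sync::Arc;
use crate::symbols::Scope;

fn scope_sharing_demo() {
    let root = Scope::new();
    let child = Scope::with_parent(&root);

    // parent and child point at the same TypeDict
    assert!(Arc::ptr_eq(
        &root.read().unwrap().typectx,
        &child.read().unwrap().typectx,
    ));

    // declarations still happen per scope; the type string is parsed via typectx
    child
        .write()
        .unwrap()
        .declare_var_parse("x", "_2^64~machine::UInt64~machine::Word");
}
```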

View file

@ -1,319 +0,0 @@
use {
crate::{
lexer::InputRegionTag,
expr::{LTExpr, Statement, TypeTag, TypeError, TypeErrorKind},
symbols::{Scope, SymbolDef},
},
std::{
ops::Deref,
sync::{Arc, RwLock},
},
laddertypes::{
parser::ParseLadderType,
unparser::UnparseLadderType,
dict::TypeDict
},
tisc::{assembler::AssemblyWord, linker::LinkAddr},
tiny_ansi::TinyAnsi
};
impl LTExpr {
/*
pub fn get_type(&self) -> TypeTag {
Err(TypeError::Todo)
}*/
pub fn infer_type(&self, scope: &Arc<RwLock<Scope>>) -> TypeTag
{
match self {
LTExpr::WordLiteral{ region, val } => {
Ok(scope.write().unwrap().parse(
"_2^64 ~ machine.UInt64 ~ machine.Word"
).unwrap())
}
LTExpr::StringLiteral{ region, value } => {
Ok(scope.write().unwrap().parse(
"<Seq Char ~ Unicode ~ _2^32 ~ _2^64 ~ machine.UInt64>
~ <TermArray 0 machine.UInt64 ~ machine.Word>"
).unwrap())
}
LTExpr::Symbol { region, typ, symbol } => {
let mut s = scope.write().unwrap();
if let Some(sdef) = s.get(symbol) {
Ok(sdef.get_type(&mut *s))
} else {
let region = region.clone();
Err(vec![ TypeError{ region, kind: TypeErrorKind::NoSymbol } ])
}
}
LTExpr::Ascend { region, typ, expr } => {
let expr_type = expr.infer_type( scope )?;
let sub_type = typ.clone();
/*
* todo: check potential overlap of typ with expr_type
*/
if let Ok(i) = sub_type.is_syntactic_subtype_of(&expr_type) {
let mut lnf = expr_type.get_lnf_vec();
let mut sub_lnf = sub_type.get_lnf_vec();
for x in 0..i {
lnf.insert(x, sub_lnf.remove(0));
}
let result_type = laddertypes::TypeTerm::Ladder(lnf);
Ok(result_type)
} else {
Ok(laddertypes::TypeTerm::Ladder(vec![
sub_type,
expr_type
]))
}
}
LTExpr::Descend { region, typ, expr } => {
let expr_type = expr.infer_type(scope)?;
let super_type = typ.clone();
if let Ok(i) = expr_type.is_syntactic_subtype_of(&super_type) {
let lnf = expr_type.get_lnf_vec();
let result_type = laddertypes::TypeTerm::Ladder(lnf[i..].into_iter().cloned().collect());
Ok(result_type)
} else {
return Err(vec![ TypeError{
region: region.clone(),
kind: TypeErrorKind::ArgTypeMismatch {
expected: expr_type,
received: super_type
}
} ]);
}
}
LTExpr::Abstraction { region, scope, args, body } => {
let mut f = Vec::new();
for (region, name, typ) in args {
if let Some(typ) = typ {
let typ = typ.clone()?;
let sugar_typ = typ.clone().sugar(&mut *scope.write().unwrap());
f.push( sugar_typ );
scope.write().unwrap().declare_var(name.clone(), typ.clone());
}
}
let body_type = body.infer_type( scope )?;
f.push( body_type.sugar(&mut *scope.write().unwrap()) );
Ok(laddertypes::SugaredTypeTerm::Func(f).desugar( &mut *scope.write().unwrap() ))
}
LTExpr::Application{ region, typ, head, body } => {
let mut head_type = head.infer_type(scope)?;
let mut args = body.into_iter();
let mut result_type = head_type;
let mut sugared_result_type = result_type.sugar(&mut *scope.write().unwrap());
let mut errors = Vec::new();
while let laddertypes::SugaredTypeTerm::Func(mut f_types) = sugared_result_type {
sugared_result_type = f_types.pop().unwrap();
for (argi, expected_arg_type) in f_types.iter().enumerate() {
if let Some(arg) = args.next() {
let expected_arg_type = expected_arg_type.clone().desugar(&mut *scope.write().unwrap());
// check subtype
let received_arg_type = arg.infer_type(scope)?;
if ! received_arg_type.is_syntactic_subtype_of(&expected_arg_type).is_ok() {
errors.push(TypeError{
region: arg.get_region(),
kind: TypeErrorKind::ArgTypeMismatch {
expected: expected_arg_type,
received: received_arg_type
}
});
}
} else {
// partial application.
f_types.push(sugared_result_type);
sugared_result_type = laddertypes::SugaredTypeTerm::Func(
f_types[argi .. ].into_iter().cloned().collect()
);
// todo examine stack ..
return
if errors.len() == 0 {
result_type = sugared_result_type.desugar(&mut *scope.write().unwrap());
Ok(result_type)
} else {
Err(errors)
};
}
}
}
while let Some(arg) = args.next() {
errors.push(TypeError{
region: arg.get_region(),
kind: TypeErrorKind::SuperfluousArgument
});
}
if errors.len() == 0 {
result_type = sugared_result_type.desugar(&mut *scope.write().unwrap());
Ok(result_type)
} else {
Err(errors)
}
}
LTExpr::Branch { region, condition, if_expr, else_expr } => {
let received_cond_type = condition.infer_type(scope)?;
let expected_cond_type = scope.write().unwrap().parse("Bool ~ machine.Word").unwrap();
if received_cond_type.is_syntactic_subtype_of(&expected_cond_type).is_ok() {
let if_expr_type = if_expr.infer_type(scope)?;
let else_expr_type = else_expr.infer_type(scope)?;
if if_expr_type.is_syntactic_subtype_of(&else_expr_type).is_ok() {
Ok(else_expr_type)
} else if else_expr_type.is_syntactic_subtype_of(&if_expr_type).is_ok() {
Ok(if_expr_type)
} else {
Err(vec![TypeError{
region: region.clone(),
kind: TypeErrorKind::BranchMismatch {
if_branch: if_expr_type,
else_branch: else_expr_type
}
}])
}
} else {
Err(vec![ TypeError{
region: condition.get_region(),
kind: TypeErrorKind::ArgTypeMismatch {
expected: expected_cond_type,
received: received_cond_type
}
}])
}
}
LTExpr::WhileLoop { region, condition, body } => {
let received_cond_type = condition.infer_type(scope)?;
let expected_cond_type = scope.write().unwrap().parse("Bool ~ machine.Word").unwrap();
if received_cond_type.is_syntactic_subtype_of(&expected_cond_type).is_ok() {
let body_type = body.infer_type(scope)?;
let body_type = body_type.sugar(&mut scope.clone());
let loop_type = laddertypes::SugaredTypeTerm::Seq(vec![ body_type ]);
Ok(loop_type.desugar(&mut scope.clone()))
} else {
return Err(vec![ TypeError{
region: condition.get_region(),
kind: TypeErrorKind::ArgTypeMismatch {
expected: expected_cond_type,
received: received_cond_type
}
}]);
}
}
LTExpr::ExportBlock{ region, scope, statements } |
LTExpr::Block{ region, scope, statements } => {
let mut types = Vec::new();
for s in statements {
match s.infer_type(scope) {
Ok(Some(t)) => {
if !t.is_empty() {
types.insert(0, t);
}
}
Ok(None) => {}
Err(e) => {
return Err(e);
}
}
}
Ok(
if types.len() == 1 { types.pop().unwrap() }
else { laddertypes::SugaredTypeTerm::Struct(types) }
.desugar(&mut scope.clone())
)
}
}
}
}
impl Statement {
pub fn infer_type(&self, scope: &Arc<RwLock<Scope>>) -> Result< Option<laddertypes::SugaredTypeTerm> , Vec<TypeError> > {
match self {
Statement::LetAssign{ name_region, typ, var_id, val_expr } => {
let typ = val_expr.infer_type( scope )?;
match typ.clone().sugar( &mut scope.clone() ) {
laddertypes::SugaredTypeTerm::Func(mut args) => {
let out_type = args.pop().unwrap();
let out_types =
match out_type.clone() {
laddertypes::SugaredTypeTerm::Struct(oa) => oa.into_iter().map(|t|t.desugar(&mut scope.clone())).collect(),
_ => vec![ out_type.desugar(&mut scope.clone()) ]
};
let in_types = args.into_iter().map(|t| t.desugar(&mut scope.clone())).collect();
scope.write().unwrap()
.declare_proc(
var_id.clone(),
in_types,
out_types,
true
);
return Ok(None);
}
_ => {
let id = scope.write().unwrap().declare_var(var_id.clone(), typ);
eprintln!("TYPING declare var = {}", id);
Ok(None)
}
}
},
Statement::Return(expr) |
Statement::Expr(expr) => {
let t = expr.infer_type(scope)?;
if t != laddertypes::TypeTerm::App(vec![]) {
let st = t.sugar(&mut scope.clone());
Ok(Some(st))
} else {
Ok(None)
}
}
Statement::Assignment { name_region, var_id, val_expr } => {
let received_type = val_expr.infer_type(scope)?;
let expected_type = scope.write().unwrap().get_type(var_id).unwrap();
if ! received_type.is_syntactic_subtype_of(&expected_type).is_ok() {
return Err(vec![ TypeError{
region: val_expr.get_region(),
kind: TypeErrorKind::AssignMismatch {
expected: expected_type,
received: received_type
}
}]);
} else {
Ok(None)
}
}
}
}
}

View file

@@ -4,7 +4,6 @@ version = "0.1.0"
 edition = "2021"

 [dependencies]
-laddertypes = { path = "../../lib-laddertypes" }
 ltcore = { path = "../lib-ltcore" }
 tisc = { path = "../../lib-tisc" }
 clap = { version = "4.5.15", features = ["derive"] }

View file

@@ -20,8 +20,8 @@ pub fn print_diagnostic(
     let mut line_region = InputRegionTag::default();

-    let n_before = 5;
-    let n_after = 5;
+    let n_before = 3;
+    let n_after = 3;

     let mut last_lines = Vec::new();
     let mut next_lines = 0;
@@ -67,11 +67,11 @@ pub fn print_diagnostic(
             }

             print!("\t{}", "|".bright_magenta());
-            for _ in 0..column_begin_c { print!("{}", ".".magenta().bold()); }
-            for _ in column_begin_c..column_end_c { print!("{}", "^".magenta().bold()); }
+            for _ in 0..column_begin { print!("{}", ".".magenta().bold()); }
+            for _ in column_begin..column_end { print!("{}", "^".magenta().bold()); }
             print!("\n");

-            print!("{} [{}-{}]: {}\n", "error".bright_red(), column_begin, column_end, message.yellow());
+            print!("{} [{}-{}]: {}\n", "error".bright_red(), column_begin, column_end, message.white());
         }
         else if next_lines > 0 {
             next_lines -= 1;

View file

@@ -6,7 +6,6 @@ use {
     std::{boxed::Box, ops::Deref},
     std::io::Write,
     tiny_ansi::TinyAnsi,
-    laddertypes::dict::TypeDict,
     ltcore::{
         lexer::InputRegionTag,
         expr::{LTExpr, Statement},
@@ -33,7 +32,8 @@ fn main() {
     let mut linker = tisc::Linker::new();
     let root_scope = ltcore::runtime::init_runtime(&mut linker);

-    let mut main_scope = Scope::with_parent(&root_scope);
+    let main_scope = Scope::with_parent(&root_scope);
+    let typectx = main_scope.read().unwrap().typectx.clone();

     for path in args.sources {
         let iter_chars = iterate_text::file::characters::IterateFileCharacters::new(path.clone());
@@ -49,37 +49,11 @@ fn main() {
             })
             .peekable();

-        match ltcore::parser::parse_expr( &mut main_scope, &mut program_tokens ) {
-            Ok( mut ast ) => {
-                let mut compiler = ProcedureCompiler::new(path.clone(), main_scope.clone());
-
-                match ast.infer_type(&main_scope) {
-                    Ok(mut t) => {
-                        eprintln!("Typecheck {}", "OK".green().bold());
-                        t = t.normalize();
-                        t = t.param_normalize();
-                        let mut tc = main_scope.clone();
-                        eprintln!( "{}", t.sugar(&mut tc).pretty(&tc,0) );
-                    }
-                    Err(type_errs) => {
-                        for e in type_errs.iter() {
-                            crate::diagnostic::print_diagnostic(
-                                path.as_str(),
-                                e.region,
-                                e.kind.fmt(&mut main_scope.clone())
-                            );
-                        }
-                        eprintln!("----------------------------------");
-                        eprintln!("{} ({} errors)", "Typecheck failed".bright_red().bold(), type_errs.len());
-                        return;
-                    }
-                }
-
-                compiler = compiler.compile_expr(&ast);
-                let diagnostics = compiler.diagnostics.clone();
-                let (exports, proc_code) = compiler.get_bytecode(false);
+        match ltcore::parser::parse_expr( &typectx, &mut program_tokens ) {
+            Ok( ast ) => {
+                let (exports, diagnostics, proc_code) = ProcedureCompiler::new(&main_scope)
+                    .compile(&ast)
+                    .into_asm(&path);

                 for (region, message) in diagnostics {
                     crate::diagnostic::print_diagnostic(
@@ -89,15 +63,9 @@ fn main() {
                     );
                 }
                 eprintln!("{} {}", "Compiled".green(), path.bold());

                 for (name, def) in exports.iter() {
-                    eprintln!("export {}:", name.yellow().bold());
-                    let mut t = def.get_type(&mut main_scope);
-                    t = t.normalize();
-                    t = t.param_normalize();
-                    let mut tc = main_scope.clone();
-                    eprintln!( "{}", t.sugar(&mut tc).pretty(&tc,0) );
+                    eprintln!("export {}: {:?}", name.yellow().bold(), def);
                 }

                 main_scope.write().unwrap().import(
@@ -106,10 +74,6 @@ fn main() {
                 /* link assembly-program to symbols
                  */
-                eprintln!("generated bytecode ({})", proc_code.len() );
-                for (i,l) in tisc::assembler::disassemble(&proc_code).iter().enumerate() {
-                    eprintln!("{} .... {}", i,l);
-                }

                 linker.add_procedure(path.as_str(), proc_code);
             }
             Err( (region, parse_error) ) => {

View file

@ -1 +0,0 @@

Binary file not shown.

View file

@@ -35,10 +35,8 @@ fn main() {
         linker.import( source_path, bincode::deserialize_from( input ).expect("") );
     }

-    let entry_addr = linker.get_link_addr(&args.entry).unwrap_or(0);
-    /*
+    let entry_addr = linker.get_link_addr(&args.entry)
         .expect(&format!("cant find entry symbol '{}'", args.entry));
-    */

     let bytecode = linker.link_total().expect("Link error:");
     eprintln!("{} ({} bytes)", "Loaded bytecode.".green(), bytecode.len());