separate crates for compiler-lib, compiler-cli and vm
This commit is contained in:
parent
72122bf4fc
commit
4e9a4d1204
17 changed files with 337 additions and 191 deletions
10
lib-ltcore/Cargo.toml
Normal file
10
lib-ltcore/Cargo.toml
Normal file
|
@ -0,0 +1,10 @@
|
|||
# Manifest for the `ltcore` crate — the compiler core split out of the main
# crate so the CLI and VM crates can depend on it separately.
[package]
name = "ltcore"
version = "0.1.0"
edition = "2021"

[dependencies]
# Ladder-type terms and type parsing (workspace-local path dependency).
laddertypes = { path = "../../lib-laddertypes" }
# Assembler / linker / VM word definitions for the tisc stack machine.
tisc = { path = "../../lib-tisc" }
serde = { version = "1.0", features = ["derive"] }
|
||||
|
166
lib-ltcore/src/expr.rs
Normal file
166
lib-ltcore/src/expr.rs
Normal file
|
@ -0,0 +1,166 @@
|
|||
use {
|
||||
std::{
|
||||
boxed::Box,
|
||||
sync::{Arc, RwLock}
|
||||
},
|
||||
crate::{
|
||||
lexer::InputRegionTag
|
||||
}
|
||||
};
|
||||
|
||||
/// A single statement inside a block.
#[derive(Clone, Debug)]
pub enum Statement {
    /// Imperative re-assignment (`! x expr;`) of an existing variable.
    Assignment {
        name_region: InputRegionTag,
        var_id: String,
        val_expr: LTExpr,
    },
    /// `let x : T = expr;` — introduces a new binding.
    LetAssign {
        typ: Option<TypeTag>,
        var_id: String,
        val_expr: LTExpr,
    },
    /// `while (cond) { body }`
    WhileLoop {
        condition: LTExpr,
        body: Vec<Statement>,
    },
    /// `return expr;`
    Return(LTExpr),
    /// A bare expression statement, `expr;`
    Expr(LTExpr),
}

/// Errors attached to a type annotation or produced by type checking.
#[derive(Clone, Debug)]
pub enum TypeError {
    /// The annotation string failed to parse as a ladder-type term.
    ParseError(laddertypes::parser::ParseError),
    /// Two type terms that were required to agree do not.
    Mismatch {
        expected: laddertypes::TypeTerm,
        received: laddertypes::TypeTerm,
    },
}

/// A type annotation: either a parsed type term or the error explaining
/// why it could not be obtained.
pub type TypeTag = Result<laddertypes::TypeTerm, TypeError>;

/// Expression AST of the LT intermediate representation.
///
/// `typ` fields start out as `None` and are presumably filled in by a later
/// type-inference pass — TODO confirm against the type checker.
#[derive(Clone, Debug)]
pub enum LTExpr {
    /// A literal machine word.
    Literal {
        typ: Option<TypeTag>,
        val: tisc::VM_Word,
    },
    /// A reference to a named symbol.
    Symbol {
        region: InputRegionTag,
        typ: Option<TypeTag>,
        symbol: String,
    },
    /// Application of `head` to the argument list `body`.
    Application {
        typ: Option<TypeTag>,
        head: Box<LTExpr>,
        body: Vec<LTExpr>,
    },
    /// Lambda abstraction; each argument carries its source region and
    /// optional type annotation.
    Abstraction {
        args: Vec<(InputRegionTag, String, Option<TypeTag>)>,
        body: Box<LTExpr>,
    },
    /// `if (cond) {..} else ..`
    Branch {
        condition: Box<LTExpr>,
        if_expr: Box<LTExpr>,
        else_expr: Box<LTExpr>,
    },
    /// `{ stmt; stmt; ... }`
    Block {
        statements: Vec<Statement>,
    },
    /// `export { stmt; ... }` — like Block, but its symbols are exported.
    ExportBlock {
        statements: Vec<Statement>,
    }
}
|
||||
|
||||
impl LTExpr {
|
||||
/*
|
||||
pub fn symbol(str: &str) -> Self {
|
||||
LTExpr::Symbol {
|
||||
typ: None, //typectx.write().unwrap().parse("<Ref memory::Word>~Symbol~<Seq Char>").expect("parse typeterm"),
|
||||
symbol: String::from(str),
|
||||
}
|
||||
}
|
||||
*/
|
||||
pub fn lit_uint(val: u64) -> Self {
|
||||
LTExpr::Literal {
|
||||
typ: None, //typectx.write().unwrap().parse("ℤ_2^64~machine::UInt64~machine::Word").expect("parse typeterm"),
|
||||
val: val as tisc::VM_Word,
|
||||
}
|
||||
}
|
||||
/*
|
||||
pub fn abstraction(args: Vec<(&str, &str)>, body: LTExpr) -> LTExpr {
|
||||
LTExpr::Abstraction {
|
||||
args: args
|
||||
.into_iter()
|
||||
.map(
|
||||
|(arg_name, arg_type)| (arg_name.into(), None), //typectx.write().unwrap().parse(t).expect("parse typeterm")
|
||||
)
|
||||
.collect(),
|
||||
body: Box::new(body),
|
||||
}
|
||||
}
|
||||
*/
|
||||
pub fn application(head: LTExpr, body: Vec<LTExpr>) -> Self {
|
||||
LTExpr::Application {
|
||||
typ: None,
|
||||
head: Box::new(head),
|
||||
body: body,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn block(body: Vec<Statement>) -> Self {
|
||||
LTExpr::Block { statements: body }
|
||||
}
|
||||
}
|
||||
|
||||
impl Statement {
|
||||
pub fn while_loop(cond: LTExpr, body: Vec<Statement>) -> Self {
|
||||
Statement::WhileLoop {
|
||||
condition: cond,
|
||||
body,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
impl LTExpr {
|
||||
fn get_type(&self, dict: &laddertypes::dict::TypeDict) -> laddertypes::TypeTerm {
|
||||
match self {
|
||||
LTExpr::StringLiteral{ val:_, typ } => { typ.clone() }
|
||||
LTExpr::MemoryLiteral{ val:_, typ } => { typ.clone() }
|
||||
LTExpr::Abstraction{ arg_type, val_expr } => {
|
||||
laddertypes::TypeTerm::App(vec![
|
||||
laddertypes::TypeTerm::TypeID(dict.get_typeid(&"Fn".into()).expect("expected function type")),
|
||||
arg_type.clone(),
|
||||
val_expr.get_type(dict)
|
||||
])
|
||||
}
|
||||
LTExpr::Application{ head, body } => {
|
||||
match head.deref() {
|
||||
LTExpr::Abstraction{ arg_type, val_expr } => {
|
||||
val_expr.get_type(dict)
|
||||
}
|
||||
_ => {
|
||||
panic!("invalid application");
|
||||
}
|
||||
}
|
||||
}
|
||||
LTExpr::Block{ statements } => {
|
||||
if let Some(last_statement) = statements.last() {
|
||||
match last_statement {
|
||||
Statement::Return(ret_expr) |
|
||||
Statement::Expr(ret_expr) => {
|
||||
ret_expr.get_type(dict)
|
||||
}
|
||||
_ => {
|
||||
laddertypes::TypeTerm::unit()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
laddertypes::TypeTerm::unit()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
365
lib-ltcore/src/lexer.rs
Normal file
365
lib-ltcore/src/lexer.rs
Normal file
|
@ -0,0 +1,365 @@
|
|||
/// Tokens of the LTIR surface syntax.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum LTIRToken {
    Comment(String),
    Symbol(String),
    Char(char),
    Num(i64),

    // SingleQuote(String),
    // DoubleQuote(String),
    // TripleQuote(String),
    Lambda,
    MapsTo,
    /// `: <typeterm>` — the raw annotation text, parsed later.
    AssignType(String),
    AssignValue,

    ExprOpen,
    ExprClose,

    BlockOpen,
    BlockClose,
    StatementSep,
}

/// Errors the lexer can report.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum LexError {
    InvalidDigit,
    InvalidChar,
}

/// Internal state of the lexer while a token is being accumulated.
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum LexerState {
    Any,
    Comment(String),
    TypeTerm(String),
    Sym(String),
    Num(i64),
    Char(Option<char>),
}

impl LexerState {
    /// Convert a finished lexer state into the token it accumulated.
    ///
    /// `Any` (nothing accumulated) and `Char(None)` (quote opened but no
    /// character read) yield `None`.
    fn into_token(self) -> Option<LTIRToken> {
        let token = match self {
            LexerState::Any => return None,
            LexerState::Comment(text) => LTIRToken::Comment(text),
            LexerState::TypeTerm(text) => LTIRToken::AssignType(text),
            LexerState::Sym(name) => LTIRToken::Symbol(name),
            LexerState::Num(value) => LTIRToken::Num(value),
            LexerState::Char(maybe_char) => LTIRToken::Char(maybe_char?),
        };
        Some(token)
    }
}
|
||||
|
||||
/// Tokenizer over an arbitrary character stream.
///
/// Wraps the stream in a `Peekable` and tracks the absolute character
/// position so tokens can be tagged with input regions.
pub struct LTIRLexer<It>
where
    It: std::iter::Iterator<Item = char>,
{
    chars: std::iter::Peekable<It>,
    position: usize
}

impl<It> LTIRLexer<It>
where
    It: Iterator<Item = char>,
{
    /// Consume the lexer and hand back the remaining character stream.
    pub fn into_inner(self) -> std::iter::Peekable<It> {
        self.chars
    }
}

impl<It> From<It> for LTIRLexer<It>
where
    It: Iterator<Item = char>,
{
    /// Start lexing at position 0 of the given character stream.
    fn from(chars: It) -> Self {
        let chars = chars.peekable();
        LTIRLexer { chars, position: 0 }
    }
}
|
||||
|
||||
/// A span of the input character stream, as absolute character indices
/// (`begin..end`). Used to attach source locations to tokens, AST nodes
/// and diagnostics.
///
/// The manual `Default` impl (all zeros) was redundant with the derivable
/// one and has been replaced by `#[derive(Default)]` — behavior unchanged.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
pub struct InputRegionTag {
    pub begin: usize,
    pub end: usize
}

impl InputRegionTag {
    /// Smallest region covering both `a` and `b` (their convex hull):
    /// minimum of the begins, maximum of the ends.
    ///
    /// NOTE(review): despite the name `max`, this is a union/hull, not an
    /// element-wise maximum — consider renaming to `hull` or `merge`.
    pub fn max( a: InputRegionTag, b: InputRegionTag ) -> InputRegionTag {
        InputRegionTag {
            begin: usize::min( a.begin, b.begin ),
            end: usize::max( a.end, b.end )
        }
    }
}
|
||||
|
||||
impl<It> Iterator for LTIRLexer<It>
where
    It: Iterator<Item = char>,
{
    /// Every token is tagged with the input region it was read from.
    type Item = (InputRegionTag, Result<LTIRToken, LexError>);

    /// Produce the next token, or `None` at end of input.
    ///
    /// Implemented as a small state machine: starts in `LexerState::Any`,
    /// decides the token kind from the first significant character, then
    /// accumulates characters until a delimiter ends the token.
    fn next(&mut self) -> Option<Self::Item> {
        let mut state = LexerState::Any;
        // Region starts empty at the current position and is widened as
        // characters are consumed.
        let mut region = InputRegionTag{
            begin: self.position,
            end: self.position
        };

        while let Some(c) = self.chars.peek() {
            match &mut state {
                // determine token type
                LexerState::Any => match c {
                    // single-character tokens: consume and return immediately
                    'λ' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::Lambda)));
                    }
                    '↦' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::MapsTo)));
                    }
                    '(' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::ExprOpen)));
                    }
                    ')' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::ExprClose)));
                    }
                    '{' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::BlockOpen)));
                    }
                    '}' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::BlockClose)));
                    }
                    // ':' opens a type annotation; the raw text is collected
                    // until '=', '↦' or ';' (see LexerState::TypeTerm below).
                    ':' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        state = LexerState::TypeTerm(String::new());
                    }
                    '=' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::AssignValue)));
                    }
                    ';' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        return Some((region, Ok(LTIRToken::StatementSep)));
                    }
                    // opening single quote starts a character literal
                    '\'' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;
                        state = LexerState::Char(None);
                    }
                    // '/' is only valid as the start of a '/*' comment
                    '/' => {
                        self.chars.next();
                        self.position += 1;
                        region.end += 1;

                        match self.chars.next() {
                            Some('*') => {
                                self.position += 1;
                                region.end += 1;
                                state = LexerState::Comment(String::new());
                            }
                            _ => {
                                // NOTE(review): the char consumed by the inner
                                // next() is not counted in self.position here.
                                return Some((region, Err(LexError::InvalidChar)));
                            }
                        }
                    }
                    c => {
                        if c.is_whitespace() {
                            // skip leading whitespace; region slides forward
                            self.chars.next();
                            self.position += 1;
                            region.begin += 1;
                            region.end += 1;
                        } else if c.is_digit(10) {
                            state = LexerState::Num(0);
                        } else {
                            state = LexerState::Sym(String::new());
                        }
                    }
                },

                // inside a '/* ... */' comment; '*' is only special when
                // followed by '/'
                LexerState::Comment(s) => {

                    match self.chars.next() {
                        Some('*') => {
                            match self.chars.peek() {
                                Some('/') => {
                                    self.chars.next();
                                    self.position += 2;
                                    region.end += 2;

                                    if let Some(token) = state.clone().into_token() {
                                        return Some((region, Ok(token)));
                                    }
                                }
                                _ => {
                                    // lone '*': part of the comment text
                                    s.push('*');
                                    self.position += 1;
                                    region.end += 1;
                                }
                            }
                        }
                        Some(c) => {
                            s.push(c);
                            self.position += 1;
                            region.end += 1;
                        }
                        None => {
                            // unterminated comment
                            return Some((region, Err(LexError::InvalidChar)));
                        }
                    }
                }

                // inside a character literal: one (possibly escaped) char,
                // then the closing quote
                LexerState::Char(val) => {
                    // account for the literal char and the closing quote
                    self.position += 2;
                    region.end += 2;
                    *val = Some(match self.chars.next() {
                        Some('\\') => {
                            // escape sequence: one extra input character
                            self.position += 1;
                            region.end += 1;
                            match self.chars.next() {
                                Some('0') => '\0',
                                Some('n') => '\n',
                                Some('t') => '\t',
                                // unknown escapes pass the char through
                                Some(c) => c,
                                None => {
                                    return Some((region, Err(LexError::InvalidChar)));
                                }
                            }
                        },
                        Some(c) => c,
                        None => {
                            return Some((region, Err(LexError::InvalidChar)));
                        }
                    });

                    // the literal must be closed by a single quote
                    match self.chars.next() {
                        Some('\'') => {
                            if let Some(token) = state.clone().into_token() {
                                return Some((region, Ok(token)));
                            }
                        }
                        _ => {
                            return Some((region, Err(LexError::InvalidChar)));
                        }
                    }
                }

                // raw type-annotation text: runs until '=', '↦' or ';'
                // (delimiter is left in the stream for the caller)
                LexerState::TypeTerm(s) => {
                    if *c == '=' || *c == '↦' || *c == ';' {
                        let token = state.clone().into_token().unwrap();
                        return Some((region, Ok(token)));
                    } else {
                        if let Some(c) = self.chars.next() {
                            self.position += 1;
                            region.end += 1;
                            s.push(c);
                        }
                    }
                }

                // Sym / Num accumulation
                _ => {
                    if c.is_whitespace()
                        || *c == '('
                        || *c == ')'
                        || *c == '{'
                        || *c == '}'
                        || *c == ';'
                        || *c == '='
                        || *c == ':'
                        || *c == '↦'
                    {
                        // finish the current token

                        if let Some(token) = state.clone().into_token() {
                            return Some((region, Ok(token)));
                        }
                    } else {
                        // append to the current token

                        let c = self.chars.next().unwrap();
                        self.position += 1;
                        region.end += 1;

                        match &mut state {
                            LexerState::Sym(s) => {
                                s.push(c);
                            }

                            LexerState::Num(n) => {
                                // accumulate decimal digits into the value
                                if let Some(d) = c.to_digit(10) {
                                    *n = (*n) * 10 + d as i64;
                                } else {
                                    // e.g. a letter directly after digits
                                    return Some((region, Err(LexError::InvalidDigit)));
                                }
                            }

                            _ => {}
                        }
                    }
                }
            }
        }

        // end of input: emit whatever token was in progress
        if let Some(token) = state.into_token() {
            Some((region, Ok(token)))
        } else {
            None
        }
    }
}
|
||||
|
||||
// Fixed: the test module lacked #[cfg(test)], so it (and its eprintln output)
// was compiled into every build; `lexer` also does not need `mut` for a
// consuming for-loop.
#[cfg(test)]
mod tests {
    /// Smoke test: lex a small program and print every token.
    /// (No assertions — only checks that lexing terminates without panic.)
    #[test]
    fn test_lexer() {
        let lexer = crate::lexer::LTIRLexer::from(
            "let var1:ℕ=123;
/* comment */
let square =λx.* x x;

let sqrt = λx:ℝ~machine::Float64~machine::Word.(f64-sqrt x);
let magnitude =
    λx:ℝ
    .λy:ℝ
    .sqrt (+ (* x x) (* y y));
"
            .chars(),
        );

        for (range, token) in lexer {
            eprintln!("[{:?}] {:?}", range, token);
        }
    }
}
|
8
lib-ltcore/src/lib.rs
Normal file
8
lib-ltcore/src/lib.rs
Normal file
|
@ -0,0 +1,8 @@
|
|||
|
||||
//! `ltcore` — core library of the LT compiler: AST (`expr`), lexer,
//! parser, per-procedure code generation, runtime glue and symbol scopes.

pub mod expr;
pub mod lexer;
pub mod parser;
pub mod procedure_compiler;
pub mod runtime;
pub mod symbols;
|
||||
|
498
lib-ltcore/src/parser.rs
Normal file
498
lib-ltcore/src/parser.rs
Normal file
|
@ -0,0 +1,498 @@
|
|||
use {
|
||||
crate::{
|
||||
expr::{LTExpr, Statement, TypeError, TypeTag},
|
||||
lexer::{LTIRLexer, LTIRToken, LexError, InputRegionTag},
|
||||
},
|
||||
std::{
|
||||
iter::Peekable,
|
||||
sync::{Arc, RwLock},
|
||||
},
|
||||
};
|
||||
|
||||
/// Errors produced while parsing the token stream into the AST.
#[derive(Clone, Debug, PartialEq)]
pub enum ParseError {
    /// The lexer itself failed; wrapped so parsing carries one error type.
    LexError(LexError),
    /// A closing delimiter with no matching opener.
    UnexpectedClose,
    /// Token stream ended in the middle of a construct.
    UnexpectedEnd,
    /// A token that does not fit the grammar at this point.
    UnexpectedToken,
    /// A `: T` annotation failed to parse as a ladder-type term.
    TypeParseError(laddertypes::parser::ParseError)
}
|
||||
|
||||
pub fn parse_expect<It>(
|
||||
tokens: &mut Peekable<It>,
|
||||
expected_token: LTIRToken,
|
||||
) -> Result<(), (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
match tokens.next() {
|
||||
Some((region, Ok(t))) => {
|
||||
if t == expected_token {
|
||||
Ok(())
|
||||
} else {
|
||||
Err((region, ParseError::UnexpectedToken))
|
||||
}
|
||||
}
|
||||
Some((region, Err(err))) => Err((region, ParseError::LexError(err))),
|
||||
None => Err((InputRegionTag::default(), ParseError::UnexpectedEnd)),
|
||||
}
|
||||
}
|
||||
|
||||
/* parse symbol name
|
||||
*/
|
||||
pub fn parse_symbol<It>(tokens: &mut Peekable<It>) -> Result<(InputRegionTag, String), (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
match tokens.next() {
|
||||
Some((region, Ok(LTIRToken::Symbol(name)))) => Ok((region, name)),
|
||||
Some((region, Ok(_))) => Err((region, ParseError::UnexpectedToken)),
|
||||
Some((region, Err(err))) => Err((region, ParseError::LexError(err))),
|
||||
None => Err((InputRegionTag::default(), ParseError::UnexpectedEnd)),
|
||||
}
|
||||
}
|
||||
|
||||
/* parse an optional type annotation
|
||||
* `: T`
|
||||
*/
|
||||
pub fn parse_type_tag<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<Option<laddertypes::TypeTerm>, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
let peek = { tokens.peek().cloned() };
|
||||
if let Some((region, peektok)) = peek {
|
||||
match peektok {
|
||||
Ok(LTIRToken::AssignType(typeterm_str)) => {
|
||||
tokens.next();
|
||||
match typectx.write().unwrap().parse(typeterm_str.as_str()) {
|
||||
Ok(typeterm) => Ok(Some(typeterm)),
|
||||
Err(parse_error) => Err((region, ParseError::TypeParseError(parse_error))),
|
||||
}
|
||||
}
|
||||
_ => Ok(None),
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum VariableBinding {
|
||||
Atomic {
|
||||
region: InputRegionTag,
|
||||
symbol: String,
|
||||
typtag: Option<laddertypes::TypeTerm>
|
||||
},
|
||||
Struct {
|
||||
members: Vec< VariableBinding >,
|
||||
typtag: Option<laddertypes::TypeTerm>
|
||||
}
|
||||
}
|
||||
|
||||
impl VariableBinding {
|
||||
pub fn flatten(self) -> Vec<(InputRegionTag, String, Option<laddertypes::TypeTerm>)> {
|
||||
match self {
|
||||
VariableBinding::Atomic{ region, symbol, typtag } =>
|
||||
vec![ (region, symbol, typtag) ],
|
||||
VariableBinding::Struct{ members, typtag } =>
|
||||
members
|
||||
.into_iter()
|
||||
.map(|a| a.flatten().into_iter())
|
||||
.flatten()
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* parse a symbol binding of the form
|
||||
* `x`
|
||||
* or `x : T`
|
||||
*/
|
||||
pub fn parse_binding_expr<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result< VariableBinding, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
if let Some((region, peektok)) = tokens.peek().clone() {
|
||||
match peektok {
|
||||
Ok(LTIRToken::BlockOpen) => {
|
||||
Ok(VariableBinding::Struct {
|
||||
members: parse_binding_block(typectx, tokens)?,
|
||||
typtag: parse_type_tag(typectx, tokens)?
|
||||
})
|
||||
}
|
||||
Ok(LTIRToken::Symbol(_)) => {
|
||||
let (name_region, name) = parse_symbol(tokens)?;
|
||||
Ok(VariableBinding::Atomic{
|
||||
region: name_region,
|
||||
symbol: name,
|
||||
typtag: parse_type_tag(typectx, tokens)?
|
||||
})
|
||||
}
|
||||
Err(err) => Err((region.clone(), ParseError::LexError(err.clone()))),
|
||||
_ => Err((region.clone(), ParseError::UnexpectedToken))
|
||||
}
|
||||
} else {
|
||||
Err((InputRegionTag::default(), ParseError::UnexpectedEnd))
|
||||
}
|
||||
}
|
||||
|
||||
/* parse a block of symbol bidnings
|
||||
* `{ x:T; y:U; ... }`
|
||||
*/
|
||||
pub fn parse_binding_block<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result< Vec<VariableBinding>, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
let mut last_region = InputRegionTag::default();
|
||||
|
||||
let _ = parse_expect(tokens, LTIRToken::BlockOpen)?;
|
||||
|
||||
let mut bindings = Vec::new();
|
||||
while let Some((region, peektok)) = tokens.peek() {
|
||||
last_region = *region;
|
||||
match peektok {
|
||||
Ok(LTIRToken::BlockClose) => {
|
||||
tokens.next();
|
||||
return Ok(bindings);
|
||||
}
|
||||
Ok(LTIRToken::StatementSep) => {
|
||||
tokens.next();
|
||||
}
|
||||
Ok(_) => {
|
||||
bindings.push(parse_binding_expr(typectx, tokens)?);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err((last_region, ParseError::LexError(err.clone())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err((last_region, ParseError::UnexpectedEnd))
|
||||
}
|
||||
|
||||
pub fn parse_statement<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<crate::expr::Statement, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
if let Some((region, peektok)) = tokens.peek() {
|
||||
match peektok {
|
||||
Ok(LTIRToken::Symbol(sym)) => {
|
||||
match sym.as_str() {
|
||||
"!" => {
|
||||
tokens.next();
|
||||
// todo accept address-expression instead of symbol
|
||||
let (name_region, name) = parse_symbol(tokens)?;
|
||||
let val_expr = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
|
||||
Ok(Statement::Assignment {
|
||||
name_region,
|
||||
var_id: name,
|
||||
val_expr,
|
||||
})
|
||||
}
|
||||
"let" => {
|
||||
tokens.next();
|
||||
let (name_region, name) = parse_symbol(tokens)?;
|
||||
let typ = parse_type_tag(typectx, tokens)?;
|
||||
/* todo
|
||||
let mut variable_bindings = parse_binding_expr(typectx, tokens)?;
|
||||
*/
|
||||
let _ = parse_expect(tokens, LTIRToken::AssignValue);
|
||||
let val_expr = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
|
||||
Ok(Statement::LetAssign {
|
||||
typ: match typ {
|
||||
Some(t) => Some(Ok(t)),
|
||||
None => None
|
||||
},
|
||||
var_id: name,
|
||||
val_expr,
|
||||
})
|
||||
}
|
||||
"while" => {
|
||||
tokens.next();
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
|
||||
let cond = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
|
||||
Ok(Statement::WhileLoop {
|
||||
condition: cond,
|
||||
body: parse_statement_block(typectx, tokens)?,
|
||||
})
|
||||
}
|
||||
"return" => {
|
||||
tokens.next();
|
||||
let expr = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
Ok(Statement::Return(parse_expr(typectx, tokens)?))
|
||||
}
|
||||
_ => {
|
||||
let expr = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
Ok(Statement::Expr(expr))
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(_) => {
|
||||
let expr = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
|
||||
Ok(Statement::Expr(expr))
|
||||
}
|
||||
Err(err) => Err((*region, ParseError::LexError(err.clone()))),
|
||||
}
|
||||
} else {
|
||||
Err((InputRegionTag::default(), ParseError::UnexpectedEnd))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_statement_block<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<Vec<Statement>, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
let _ = parse_expect(tokens, LTIRToken::BlockOpen)?;
|
||||
|
||||
let mut statements = Vec::new();
|
||||
while let Some((region, peektok)) = tokens.peek() {
|
||||
match peektok {
|
||||
Ok(LTIRToken::BlockClose) => {
|
||||
tokens.next();
|
||||
return Ok(statements);
|
||||
}
|
||||
Ok(_) => {
|
||||
statements.push(parse_statement(typectx, tokens)?);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err((*region, ParseError::LexError(err.clone())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err((InputRegionTag::default(), ParseError::UnexpectedEnd))
|
||||
}
|
||||
|
||||
pub fn parse_atom<It>(
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
match tokens.next() {
|
||||
Some((region, Ok(LTIRToken::Symbol(sym)))) => Ok(LTExpr::Symbol{ region, symbol: sym, typ: None }),
|
||||
Some((region, Ok(LTIRToken::Char(c)))) => Ok(LTExpr::lit_uint(c as u64)),
|
||||
Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::lit_uint(n as u64)),
|
||||
Some((region, Ok(_))) => Err((region, ParseError::UnexpectedToken)),
|
||||
Some((region, Err(err))) => Err((region, ParseError::LexError(err))),
|
||||
None => Err((InputRegionTag::default(), ParseError::UnexpectedEnd)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_expr<It>(
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
tokens: &mut Peekable<It>,
|
||||
) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
|
||||
where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
|
||||
{
|
||||
let mut children = Vec::new();
|
||||
|
||||
while let Some((region, tok)) = tokens.peek() {
|
||||
match tok {
|
||||
Ok(LTIRToken::Lambda) => {
|
||||
if children.len() == 0 {
|
||||
tokens.next();
|
||||
|
||||
let mut variable_bindings = parse_binding_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::MapsTo);
|
||||
let body = parse_expr(typectx, tokens)?;
|
||||
|
||||
return Ok(LTExpr::Abstraction {
|
||||
args: variable_bindings.flatten().into_iter().map(|(r,s,t)| (r,s,t.map(|t|Ok(t))) ).collect(),
|
||||
body: Box::new(body),
|
||||
});
|
||||
} else {
|
||||
return Err((*region, ParseError::UnexpectedToken));
|
||||
}
|
||||
}
|
||||
Ok(LTIRToken::ExprOpen) => {
|
||||
tokens.next();
|
||||
while let Some((region, peektok)) = tokens.peek() {
|
||||
match peektok {
|
||||
Ok(LTIRToken::ExprClose) => {
|
||||
tokens.next();
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
children.push(parse_expr(typectx, tokens)?);
|
||||
}
|
||||
}
|
||||
Ok(LTIRToken::ExprClose) => {
|
||||
break;
|
||||
}
|
||||
Ok(LTIRToken::BlockOpen) => {
|
||||
children.push(LTExpr::block(parse_statement_block(typectx, tokens)?));
|
||||
}
|
||||
Ok(LTIRToken::BlockClose) => {
|
||||
break;
|
||||
}
|
||||
Ok(LTIRToken::StatementSep) => {
|
||||
break;
|
||||
}
|
||||
Ok(LTIRToken::Symbol(name)) => match name.as_str() {
|
||||
"if" => {
|
||||
tokens.next();
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
|
||||
let cond = parse_expr(typectx, tokens)?;
|
||||
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
|
||||
let if_expr = LTExpr::block(parse_statement_block(typectx, tokens)?);
|
||||
let mut else_expr = LTExpr::block(vec![]);
|
||||
|
||||
if let Some((region, peektok)) = tokens.peek() {
|
||||
if let Ok(LTIRToken::Symbol(name)) = peektok {
|
||||
if name == "else" {
|
||||
tokens.next();
|
||||
else_expr = parse_expr(typectx, tokens)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
children.push(LTExpr::Branch {
|
||||
condition: Box::new(cond),
|
||||
if_expr: Box::new(if_expr),
|
||||
else_expr: Box::new(else_expr),
|
||||
});
|
||||
},
|
||||
"export" => {
|
||||
tokens.next();
|
||||
let block = parse_statement_block(typectx, tokens)?;
|
||||
children.push(LTExpr::ExportBlock {
|
||||
statements: block
|
||||
});
|
||||
},
|
||||
name => {
|
||||
children.push(parse_atom(tokens)?);
|
||||
}
|
||||
},
|
||||
Ok(atom) => {
|
||||
children.push(parse_atom(tokens)?);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err((*region, ParseError::LexError(err.clone())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if children.len() > 0 {
|
||||
let head = children.remove(0);
|
||||
Ok(LTExpr::Application {
|
||||
typ: None,
|
||||
head: Box::new(head),
|
||||
body: children,
|
||||
})
|
||||
} else {
|
||||
Err((InputRegionTag::default(), ParseError::UnexpectedEnd))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Fixed: the module lacked #[cfg(test)], and the `VariableBinding::Atomic`
// patterns omitted the mandatory `region` field, so the original tests did
// not compile. The assertions now match on symbol/typtag and ignore the
// region (which depends on exact lexer positions).
#[cfg(test)]
mod tests {
    use std::sync::{Arc, RwLock};

    use crate::parser::VariableBinding;

    /// Assert `binding` is atomic with the given symbol and type tag,
    /// ignoring its source region.
    fn assert_atomic(
        binding: &VariableBinding,
        expected_symbol: &str,
        expected_typtag: &Option<laddertypes::TypeTerm>,
    ) {
        match binding {
            VariableBinding::Atomic { symbol, typtag, .. } => {
                assert_eq!(symbol, expected_symbol);
                assert_eq!(typtag, expected_typtag);
            }
            other => panic!("expected atomic binding, got {:?}", other),
        }
    }

    /// Assert `binding` is a struct binding; return its members and type tag.
    fn unwrap_struct(
        binding: VariableBinding,
    ) -> (Vec<VariableBinding>, Option<laddertypes::TypeTerm>) {
        match binding {
            VariableBinding::Struct { members, typtag } => (members, typtag),
            other => panic!("expected struct binding, got {:?}", other),
        }
    }

    #[test]
    fn test_parse_atomic_binding() {
        let mut lexer = crate::lexer::LTIRLexer::from("x".chars()).peekable();
        let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));
        let binding = crate::parser::parse_binding_expr(&typectx, &mut lexer)
            .expect("parse error");

        assert_atomic(&binding, "x", &None);
    }

    #[test]
    fn test_parse_typed_atomic_binding() {
        let mut lexer = crate::lexer::LTIRLexer::from("x:T".chars()).peekable();
        let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));
        let binding = crate::parser::parse_binding_expr(&typectx, &mut lexer)
            .expect("parse error");

        let type_t = typectx.write().unwrap().parse("T").unwrap();
        assert_atomic(&binding, "x", &Some(type_t));
    }

    #[test]
    fn test_parse_struct_binding() {
        let mut lexer = crate::lexer::LTIRLexer::from("{x y}".chars()).peekable();
        let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));
        let binding = crate::parser::parse_binding_expr(&typectx, &mut lexer)
            .expect("parse error");

        let (members, typtag) = unwrap_struct(binding);
        assert_eq!(typtag, None);
        assert_eq!(members.len(), 2);
        assert_atomic(&members[0], "x", &None);
        assert_atomic(&members[1], "y", &None);
    }

    #[test]
    fn test_parse_typed_struct_binding1() {
        let mut lexer = crate::lexer::LTIRLexer::from("{x y}:T".chars()).peekable();
        let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));
        let binding = crate::parser::parse_binding_expr(&typectx, &mut lexer)
            .expect("parse error");

        let type_t = typectx.write().unwrap().parse("T").unwrap();
        let (members, typtag) = unwrap_struct(binding);
        assert_eq!(typtag, Some(type_t));
        assert_eq!(members.len(), 2);
        assert_atomic(&members[0], "x", &None);
        assert_atomic(&members[1], "y", &None);
    }

    #[test]
    fn test_parse_typed_struct_binding2() {
        let mut lexer = crate::lexer::LTIRLexer::from("{x:U; y}:T".chars()).peekable();
        let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));
        let binding = crate::parser::parse_binding_expr(&typectx, &mut lexer)
            .expect("parse error");

        let type_u = typectx.write().unwrap().parse("U").unwrap();
        let type_t = typectx.write().unwrap().parse("T").unwrap();

        let (members, typtag) = unwrap_struct(binding);
        assert_eq!(typtag, Some(type_t));
        assert_eq!(members.len(), 2);
        assert_atomic(&members[0], "x", &Some(type_u));
        assert_atomic(&members[1], "y", &None);
    }
}
|
285
lib-ltcore/src/procedure_compiler.rs
Normal file
285
lib-ltcore/src/procedure_compiler.rs
Normal file
|
@ -0,0 +1,285 @@
|
|||
use {
|
||||
crate::{
|
||||
lexer::InputRegionTag,
|
||||
expr::{LTExpr, Statement},
|
||||
symbols::{Scope, SymbolDef},
|
||||
},
|
||||
std::{
|
||||
ops::Deref,
|
||||
sync::{Arc, RwLock},
|
||||
},
|
||||
tisc::{assembler::AssemblyWord, linker::LinkAddr},
|
||||
};
|
||||
|
||||
/// Compiles a single procedure body into `tisc` bytecode.
pub struct ProcedureCompiler {
    /// Symbol scope local to this procedure (child of the parent scope).
    pub symbols: Arc<RwLock<Scope>>,
    // accumulates the instructions of the procedure body
    asm: tisc::Assembler,
    // collects subroutines referenced by the body; linked in `into_asm`
    linker: tisc::Linker,
    // NOTE(review): never read in the visible code — possibly dead field.
    result_size: usize,

    /// Diagnostics gathered during compilation, tagged with input regions.
    pub diagnostics: Vec<( InputRegionTag, String )>
}
|
||||
|
||||
impl ProcedureCompiler {
|
||||
    /// Create a compiler for one procedure whose symbol scope is nested
    /// inside `parent_scope`.
    pub fn new(parent_scope: &Arc<RwLock<Scope>>) -> Self {
        ProcedureCompiler {
            symbols: Scope::with_parent(parent_scope),
            asm: tisc::Assembler::new(),
            linker: tisc::Linker::new(),
            result_size: 0,

            diagnostics: Vec::new()
        }
    }
|
||||
|
||||
    /// Finish compilation: link the procedure body against its collected
    /// subroutines and return `(exported symbols, diagnostics, bytecode)`.
    ///
    /// Layout of the final bytecode: an entry stub (data-frame alloc, call
    /// into the body, data-frame drop) followed by a `__subroutines__`
    /// section that contains the body and all referenced subroutines.
    ///
    /// Panics if `self.symbols` is still shared (Arc::try_unwrap), or on
    /// link failure — acceptable here only if callers treat these as bugs.
    pub fn into_asm(mut self, proc_symbol: &String) -> (Vec<(String, SymbolDef)>, Vec<(InputRegionTag, String)>, Vec<tisc::assembler::AssemblyWord>) {
        // take exclusive ownership of the scope; no other Arc may be alive
        let mut symbols =
            Arc::try_unwrap(self.symbols).ok().unwrap()
            .into_inner().unwrap();

        // first pass: resolve addresses relative to the inner linker
        symbols.update_link_addresses(
            proc_symbol,
            &self.linker
        );

        let data_frame_size = symbols.get_frame_size() as i64;

        // register the compiled body as one more "subroutine"
        let body = self.asm.build();
        self.linker.add_procedure("__procedure_body__", body);
        let body_addr = self
            .linker
            .get_link_addr(&"__procedure_body__".into())
            .unwrap();

        // lay out body + subroutines relative to a `__subroutines__` base
        let subroutines = self
            .linker
            .link_relative(&"__subroutines__".into())
            .expect("link error");

        // entry stub: allocate the data frame, call the body, drop the frame
        let mut entry = tisc::Assembler::new();
        if data_frame_size > 0 {
            entry = entry.lit(data_frame_size).call("data-frame-alloc");
        }
        entry = entry.call_symbol(LinkAddr::Relative {
            symbol: "__subroutines__".into(),
            offset: body_addr,
        });

        if data_frame_size > 0 {
            entry = entry.lit(data_frame_size).call("data-frame-drop");
        }

        // second-level link: entry stub first (empty name), then subroutines
        let mut superlink = tisc::Linker::new();
        superlink.add_procedure("", entry.build());
        superlink.add_procedure("__subroutines__", subroutines);

        // second pass: re-resolve addresses against the outer layout
        symbols.update_link_addresses(
            &proc_symbol,
            &superlink
        );

        // shift every exported procedure address by the offset at which the
        // `__subroutines__` section ended up in the final layout
        let mut symbol_exports = symbols.export();
        let subroutines_addr = superlink.get_link_addr(&"__subroutines__".into()).unwrap();
        for (name, def) in symbol_exports.iter_mut() {
            match def {
                SymbolDef::Procedure{ in_types:_, out_types:_, link_addr, export:_ } => {
                    match link_addr {
                        LinkAddr::Relative{ symbol, offset } => {
                            *offset += subroutines_addr;
                        }
                        LinkAddr::Absolute(addr) => {
                            *addr += subroutines_addr;
                        }
                    }
                }
                _ => {}
            }
        }
        let bytecode = superlink.link_relative(proc_symbol).expect("link error");
        (symbol_exports, self.diagnostics, bytecode)
    }
|
||||
|
||||
pub fn verify(&self) {
|
||||
// todo
|
||||
}
|
||||
|
||||
pub fn compile_statement(mut self, statement: &Statement, enable_export: bool) -> Self {
|
||||
match statement {
|
||||
Statement::Assignment { name_region, var_id, val_expr } => {
|
||||
self = self.compile(val_expr);
|
||||
|
||||
match self.symbols.read().unwrap().get(var_id) {
|
||||
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
|
||||
self.asm = self.asm.lit(stack_ref).call("data-frame-set");
|
||||
}
|
||||
Some(SymbolDef::StaticRef { typ, link_addr }) => {
|
||||
self.asm = self
|
||||
.asm
|
||||
.static_ref(var_id.as_str())
|
||||
.inst(tisc::VM_Instruction::Store);
|
||||
}
|
||||
Some(SymbolDef::Procedure {
|
||||
in_types,
|
||||
out_types,
|
||||
link_addr,
|
||||
export
|
||||
}) => {
|
||||
self.asm = self
|
||||
.asm
|
||||
.call(var_id.as_str())
|
||||
.inst(tisc::VM_Instruction::Store);
|
||||
}
|
||||
None => {
|
||||
self.diagnostics.push(
|
||||
(name_region.clone(),
|
||||
format!("cannot assign undefined symbol '{}'!", var_id))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Statement::LetAssign {
|
||||
typ,
|
||||
var_id,
|
||||
val_expr,
|
||||
} => match val_expr {
|
||||
LTExpr::Abstraction { args: _, body: _ } => {
|
||||
self.symbols
|
||||
.write()
|
||||
.unwrap()
|
||||
.declare_proc(var_id.clone(), vec![], vec![], enable_export);
|
||||
|
||||
let (exports, mut diagnostics, lambda_procedure) = ProcedureCompiler::new(&self.symbols)
|
||||
.compile(val_expr)
|
||||
.into_asm(var_id);
|
||||
|
||||
self.diagnostics.append(&mut diagnostics);
|
||||
|
||||
self.linker.add_procedure(var_id, lambda_procedure);
|
||||
|
||||
let offset = self.linker.get_link_addr(var_id).unwrap();
|
||||
|
||||
// forward already exported symbols
|
||||
if enable_export {
|
||||
self.symbols.write().unwrap().import( exports );
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.symbols
|
||||
.write()
|
||||
.unwrap()
|
||||
.declare_var(var_id.clone(), laddertypes::TypeTerm::unit());
|
||||
|
||||
self = self.compile_statement(&Statement::Assignment {
|
||||
name_region: InputRegionTag::default(),
|
||||
var_id: var_id.clone(),
|
||||
val_expr: val_expr.clone(),
|
||||
}, false);
|
||||
}
|
||||
},
|
||||
Statement::WhileLoop { condition, body } => {
|
||||
let asm = self.asm;
|
||||
|
||||
self.asm = tisc::Assembler::new();
|
||||
self = self.compile(condition);
|
||||
let cond_asm = self.asm;
|
||||
|
||||
self.asm = tisc::Assembler::new();
|
||||
for statement in body.into_iter() {
|
||||
self = self.compile_statement(statement, false);
|
||||
}
|
||||
let body_asm = self.asm;
|
||||
|
||||
self.asm = asm;
|
||||
self.asm = self.asm.while_loop(cond_asm, body_asm);
|
||||
}
|
||||
Statement::Expr(expr) => {
|
||||
self = self.compile(expr);
|
||||
}
|
||||
Statement::Return(expr) => {
|
||||
self = self.compile(expr);
|
||||
}
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
pub fn compile(mut self, expr: <Expr) -> Self {
|
||||
match expr {
|
||||
LTExpr::Symbol { region, typ, symbol } => match self.symbols.read().unwrap().get(symbol) {
|
||||
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
|
||||
self.asm = self.asm.lit(stack_ref).call("data-frame-get");
|
||||
}
|
||||
Some(SymbolDef::StaticRef { typ, link_addr }) => {
|
||||
self.asm = self.asm.static_ref(symbol.as_str());
|
||||
}
|
||||
Some(SymbolDef::Procedure {
|
||||
in_types,
|
||||
out_types,
|
||||
link_addr,
|
||||
export
|
||||
}) => {
|
||||
self.asm = self.asm.call_symbol(link_addr);
|
||||
}
|
||||
None => {
|
||||
self.diagnostics.push(
|
||||
(region.clone(), format!("undefined symbol '{}'!", symbol))
|
||||
);
|
||||
}
|
||||
},
|
||||
LTExpr::Literal { typ, val } => {
|
||||
self.asm = self.asm.lit(*val);
|
||||
}
|
||||
LTExpr::Application { typ, head, body } => {
|
||||
for arg in body.iter().rev() {
|
||||
self = self.compile(arg);
|
||||
}
|
||||
self = self.compile(head);
|
||||
}
|
||||
LTExpr::Abstraction { args, body } => {
|
||||
for (region, arg_name, arg_type) in args.iter() {
|
||||
if let Some(Ok(typeterm)) = arg_type {
|
||||
let id = self
|
||||
.symbols
|
||||
.write()
|
||||
.unwrap()
|
||||
.declare_var(arg_name.clone(), typeterm.clone());
|
||||
self.asm = self.asm.lit(id).call("data-frame-set");
|
||||
} else {
|
||||
self.diagnostics.push((
|
||||
region.clone(),
|
||||
format!("invalid type {:?} for argument {}", arg_type, arg_name)
|
||||
));
|
||||
}
|
||||
}
|
||||
self = self.compile(body);
|
||||
}
|
||||
LTExpr::Branch {
|
||||
condition,
|
||||
if_expr,
|
||||
else_expr,
|
||||
} => {
|
||||
self = self.compile(condition);
|
||||
|
||||
let asm = self.asm;
|
||||
self.asm = tisc::Assembler::new();
|
||||
self = self.compile(if_expr);
|
||||
let if_asm = self.asm;
|
||||
self.asm = tisc::Assembler::new();
|
||||
self = self.compile(else_expr);
|
||||
let else_asm = self.asm;
|
||||
self.asm = asm;
|
||||
self.asm = self.asm.branch(if_asm, else_asm);
|
||||
}
|
||||
LTExpr::Block { statements } => {
|
||||
for s in statements.iter() {
|
||||
self = self.compile_statement(s, false);
|
||||
}
|
||||
}
|
||||
LTExpr::ExportBlock{ statements } => {
|
||||
for s in statements.iter() {
|
||||
self = self.compile_statement(s, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
self
|
||||
}
|
||||
}
|
297
lib-ltcore/src/runtime.rs
Normal file
297
lib-ltcore/src/runtime.rs
Normal file
|
@ -0,0 +1,297 @@
|
|||
use {
|
||||
crate::{expr::LTExpr, procedure_compiler::ProcedureCompiler, symbols::Scope},
|
||||
std::sync::{Arc, RwLock},
|
||||
tisc::linker::Linker,
|
||||
};
|
||||
|
||||
pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
|
||||
let symbols = Scope::new();
|
||||
let typectx = symbols.read().unwrap().typectx.clone();
|
||||
|
||||
/* Duplicate the top item on the stack,
|
||||
* and whatever type this word has is preserved
|
||||
*/
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"dup",
|
||||
vec!["T"],
|
||||
vec!["T~machine::Word"],
|
||||
vec!["T~machine::Word", "T~machine::Word"],
|
||||
);
|
||||
|
||||
/* drop topmost element
|
||||
*/
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"drop",
|
||||
vec!["T"],
|
||||
vec!["T~machine::Word"],
|
||||
vec![],
|
||||
);
|
||||
/* Put a single Ascii character on stdout
|
||||
*/
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"emit",
|
||||
vec![],
|
||||
vec!["Char~Ascii~machine::Word"],
|
||||
vec![],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"accept",
|
||||
vec![],
|
||||
vec![],
|
||||
vec!["Char~Ascii~machine::Word"],
|
||||
);
|
||||
|
||||
linker.add_procedure("dup", tisc::Assembler::new().inst(tisc::VM_Instruction::Dup).build());
|
||||
linker.add_procedure("drop", tisc::Assembler::new().inst(tisc::VM_Instruction::Drop).build());
|
||||
linker.add_procedure("emit", tisc::Assembler::new().inst(tisc::VM_Instruction::Emit).build());
|
||||
linker.add_procedure("accept", tisc::Assembler::new().inst(tisc::VM_Instruction::Accept).build());
|
||||
|
||||
/* The top two items must be native u64 integers,
|
||||
* which are replaced by their sum.
|
||||
* We do not know wheter a sum of two integers actually
|
||||
* preserves the semantics of a more abstract type
|
||||
*/
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i+",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i-",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i*",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i/",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"i%",
|
||||
vec![],
|
||||
vec![
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
"ℤ_2^64~machine::UInt64~machine::Word",
|
||||
],
|
||||
vec!["ℤ_2^64~machine::UInt64~machine::Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f+",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f-",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f*",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f/",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"f%",
|
||||
vec![],
|
||||
vec![
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
"ℝ~machine::f64~machine::Word",
|
||||
],
|
||||
vec!["ℝ~machine::f64~machine::Word"],
|
||||
);
|
||||
|
||||
linker.add_procedure("i+", tisc::Assembler::new().inst(tisc::VM_Instruction::IntAdd).build());
|
||||
linker.add_procedure("i-", tisc::Assembler::new().inst(tisc::VM_Instruction::IntSub).build());
|
||||
linker.add_procedure("i*", tisc::Assembler::new().inst(tisc::VM_Instruction::IntMul).build());
|
||||
linker.add_procedure("i/", tisc::Assembler::new().inst(tisc::VM_Instruction::IntDiv).build());
|
||||
linker.add_procedure("i%", tisc::Assembler::new().inst(tisc::VM_Instruction::IntRem).build());
|
||||
|
||||
linker.add_procedure("f+", tisc::Assembler::new().inst(tisc::VM_Instruction::FltAdd).build());
|
||||
linker.add_procedure("f-", tisc::Assembler::new().inst(tisc::VM_Instruction::FltSub).build());
|
||||
linker.add_procedure("f*", tisc::Assembler::new().inst(tisc::VM_Instruction::FltMul).build());
|
||||
linker.add_procedure("f/", tisc::Assembler::new().inst(tisc::VM_Instruction::FltDiv).build());
|
||||
linker.add_procedure("f%", tisc::Assembler::new().inst(tisc::VM_Instruction::FltRem).build());
|
||||
|
||||
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-neg",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-and",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-or",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-xor",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-shl",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
);
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"bit-shr",
|
||||
vec![], vec!["machine::Word", "machine::Word"], vec!["machine::Word"],
|
||||
);
|
||||
|
||||
linker.add_procedure("bit-neg", tisc::Assembler::new().inst(tisc::VM_Instruction::BitNeg).build());
|
||||
linker.add_procedure("bit-and", tisc::Assembler::new().inst(tisc::VM_Instruction::BitAnd).build());
|
||||
linker.add_procedure("bit-or", tisc::Assembler::new().inst(tisc::VM_Instruction::BitOr).build());
|
||||
linker.add_procedure("bit-xor", tisc::Assembler::new().inst(tisc::VM_Instruction::BitXor).build());
|
||||
linker.add_procedure("bit-shl", tisc::Assembler::new().inst(tisc::VM_Instruction::BitShl).build());
|
||||
linker.add_procedure("bit-shr", tisc::Assembler::new().inst(tisc::VM_Instruction::BitShr).build());
|
||||
|
||||
|
||||
/* Fetch memory address
|
||||
*/
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"@",
|
||||
vec![],
|
||||
vec!["<MutRef T~machine::Word>~machine::Address~machine::Word"],
|
||||
vec!["T~machine::Word"],
|
||||
);
|
||||
/* Store to memory
|
||||
*/
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"!",
|
||||
vec![],
|
||||
vec![
|
||||
"<MutRef T~machine::Word>~machine::Address~machine::Word",
|
||||
"T~machine::Word",
|
||||
],
|
||||
vec![],
|
||||
);
|
||||
|
||||
linker.add_procedure("@", tisc::Assembler::new().inst(tisc::VM_Instruction::Fetch).build());
|
||||
linker.add_procedure("!", tisc::Assembler::new().inst(tisc::VM_Instruction::Store).build());
|
||||
|
||||
|
||||
|
||||
symbols.write().unwrap().declare_static_parse(
|
||||
"data-frame-ptr",
|
||||
"<MutRef <Seq machine::Word>>~machine::Address~machine::Word",
|
||||
);
|
||||
linker.add_static("data-frame-ptr", vec![0x1000]);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"data-frame-set",
|
||||
vec!["T"],
|
||||
vec![
|
||||
"T~machine::Word",
|
||||
"<RefMut T~machine::Word>~LocalVariableId~machine::UInt64~machine::Word",
|
||||
],
|
||||
vec![],
|
||||
);
|
||||
|
||||
linker.add_procedure(
|
||||
"data-frame-set",
|
||||
tisc::Assembler::new()
|
||||
.static_ref("data-frame-ptr")
|
||||
.inst(tisc::VM_Instruction::Fetch)
|
||||
.inst(tisc::VM_Instruction::IntAdd)
|
||||
.inst(tisc::VM_Instruction::Store)
|
||||
.build(),
|
||||
);
|
||||
|
||||
symbols.write().unwrap().declare_proc_parse(
|
||||
"data-frame-get",
|
||||
vec!["T"],
|
||||
vec!["<Ref T~machine::Word>~DataFrameRef~machine::UInt64~machine::Word"],
|
||||
vec!["T~machine::Word"],
|
||||
);
|
||||
linker.add_procedure(
|
||||
"data-frame-get",
|
||||
tisc::Assembler::new()
|
||||
.static_ref("data-frame-ptr")
|
||||
.inst(tisc::VM_Instruction::Fetch)
|
||||
.inst(tisc::VM_Instruction::IntAdd)
|
||||
.inst(tisc::VM_Instruction::Fetch)
|
||||
.build(),
|
||||
);
|
||||
|
||||
symbols
|
||||
.write()
|
||||
.unwrap()
|
||||
.declare_proc_parse("data-frame-alloc", vec![], vec![], vec![]);
|
||||
|
||||
linker.add_procedure(
|
||||
"data-frame-alloc",
|
||||
tisc::Assembler::new()
|
||||
.static_ref("data-frame-ptr")
|
||||
.inst(tisc::VM_Instruction::Fetch)
|
||||
.call("i-")
|
||||
.static_ref("data-frame-ptr")
|
||||
.inst(tisc::VM_Instruction::Store)
|
||||
.build(),
|
||||
);
|
||||
|
||||
symbols
|
||||
.write()
|
||||
.unwrap()
|
||||
.declare_proc_parse("data-frame-drop", vec![], vec![], vec![]);
|
||||
|
||||
linker.add_procedure(
|
||||
"data-frame-drop",
|
||||
tisc::Assembler::new()
|
||||
.static_ref("data-frame-ptr")
|
||||
.inst(tisc::VM_Instruction::Fetch)
|
||||
.call("i+")
|
||||
.static_ref("data-frame-ptr")
|
||||
.inst(tisc::VM_Instruction::Store)
|
||||
.build(),
|
||||
);
|
||||
|
||||
symbols
|
||||
}
|
269
lib-ltcore/src/symbols.rs
Normal file
269
lib-ltcore/src/symbols.rs
Normal file
|
@ -0,0 +1,269 @@
|
|||
use {
|
||||
crate::expr::LTExpr,
|
||||
std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, RwLock},
|
||||
},
|
||||
tisc::linker::LinkAddr,
|
||||
};
|
||||
|
||||
/// A named entity visible in a [`Scope`]: a local variable,
/// a static memory cell, or a callable procedure.
#[derive(Clone, Debug)]
pub enum SymbolDef {
    /// local variable stored in the current data frame
    FrameRef {
        typ: laddertypes::TypeTerm,
        // word offset of the variable relative to the data-frame pointer
        stack_ref: tisc::VM_Word,
    },
    /// statically allocated memory cell
    StaticRef {
        typ: laddertypes::TypeTerm,
        // absolute address once linked; None until the linker resolves it
        link_addr: Option<tisc::VM_Word>,
    },
    /// callable procedure with its stack-effect types
    Procedure {
        // types consumed from the stack
        in_types: Vec<laddertypes::TypeTerm>,
        // types pushed onto the stack
        out_types: Vec<laddertypes::TypeTerm>,
        // where the procedure's code lives (relative or absolute)
        link_addr: LinkAddr,
        // whether this symbol is re-exported to the parent scope
        export: bool
    },
}
|
||||
|
||||
impl SymbolDef {
|
||||
pub fn get_type(
|
||||
&self,
|
||||
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
|
||||
) -> laddertypes::TypeTerm {
|
||||
match self {
|
||||
SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(),
|
||||
SymbolDef::StaticRef { typ, link_addr: _ } => typ.clone(),
|
||||
SymbolDef::Procedure {
|
||||
in_types,
|
||||
out_types,
|
||||
link_addr: _,
|
||||
export: _,
|
||||
} => laddertypes::TypeTerm::App(vec![
|
||||
typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse("Fn")
|
||||
.expect("parse typeterm"),
|
||||
laddertypes::TypeTerm::App(in_types.clone()),
|
||||
laddertypes::TypeTerm::App(out_types.clone()),
|
||||
]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Describes a lexical scope of symbols
 */
pub struct Scope {
    /* definition of runtime symbols
     * (variables, statics and procedures by name)
     */
    symbols: HashMap<String, SymbolDef>,

    /* type symbols
     * (shared dictionary used to parse type terms)
     */
    pub typectx: Arc<RwLock<laddertypes::TypeDict>>,

    /* number of words required for
     * the stack frame of this scope
     */
    frame_size: usize,

    /* parent scope whose all
     * symbols are inherited
     */
    parent: Option<Arc<RwLock<Scope>>>,
}
|
||||
|
||||
impl Scope {
|
||||
pub fn new() -> Arc<RwLock<Self>> {
|
||||
Arc::new(RwLock::new(Scope {
|
||||
symbols: HashMap::new(),
|
||||
typectx: Arc::new(RwLock::new(laddertypes::dict::TypeDict::new())),
|
||||
frame_size: 0,
|
||||
parent: None,
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn with_parent(parent: &Arc<RwLock<Scope>>) -> Arc<RwLock<Self>> {
|
||||
let s = Scope {
|
||||
symbols: HashMap::new(),
|
||||
|
||||
// todo: create proper child scope
|
||||
typectx: parent.read().unwrap().typectx.clone(),
|
||||
|
||||
frame_size: 0,
|
||||
parent: Some(parent.clone()),
|
||||
};
|
||||
|
||||
Arc::new(RwLock::new(s))
|
||||
}
|
||||
|
||||
pub fn export(self) -> Vec<(String, SymbolDef)> {
|
||||
self.symbols
|
||||
.into_iter()
|
||||
.filter(|(name, def)|
|
||||
match def {
|
||||
SymbolDef::Procedure { in_types:_, out_types:_, link_addr:_, export } => *export,
|
||||
_ => false
|
||||
}
|
||||
)
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn import(&mut self, symbol_imports: Vec<(String, SymbolDef)>) {
|
||||
for (name, def) in symbol_imports {
|
||||
self.symbols.insert( name, def );
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_frame_size(&self) -> usize {
|
||||
self.frame_size
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Option<SymbolDef> {
|
||||
match self.symbols.get(name) {
|
||||
Some(def) => Some(def.clone()),
|
||||
None => {
|
||||
if let Some(parent) = self.parent.as_ref() {
|
||||
match parent.read().unwrap().get(name) {
|
||||
Some(SymbolDef::FrameRef { typ, stack_ref }) => Some(SymbolDef::FrameRef {
|
||||
typ: typ.clone(),
|
||||
stack_ref: stack_ref + self.get_frame_size() as i64,
|
||||
}),
|
||||
x => x.clone(),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// takes the link-addresses from a Linker
|
||||
/// and updates the symbol table to relative addresses
|
||||
/// based on the next super-label
|
||||
pub fn update_link_addresses(
|
||||
&mut self,
|
||||
base_symbol: &String,
|
||||
linker: &tisc::Linker
|
||||
) {
|
||||
for (name, def) in self.symbols.iter_mut() {
|
||||
if let Some(offset) = linker.get_link_addr( name ) {
|
||||
match def {
|
||||
SymbolDef::Procedure {
|
||||
in_types:_,out_types:_,
|
||||
link_addr,
|
||||
export:_
|
||||
} => {
|
||||
*link_addr = LinkAddr::Relative{
|
||||
symbol: base_symbol.clone(),
|
||||
offset
|
||||
};
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//<><><><><><>
|
||||
|
||||
pub fn declare_proc_parse(
|
||||
&mut self,
|
||||
name: &str,
|
||||
type_vars: Vec<&str>,
|
||||
in_types: Vec<&str>,
|
||||
out_types: Vec<&str>,
|
||||
) {
|
||||
for v in type_vars {
|
||||
self.typectx.write().unwrap().add_varname(v.into());
|
||||
}
|
||||
|
||||
self.declare_proc(
|
||||
String::from(name),
|
||||
in_types
|
||||
.into_iter()
|
||||
.map(|t| {
|
||||
self.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(t)
|
||||
.expect("parse typeterm")
|
||||
})
|
||||
.collect(),
|
||||
out_types
|
||||
.into_iter()
|
||||
.map(|t| {
|
||||
self.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(t)
|
||||
.expect("parse typeterm")
|
||||
})
|
||||
.collect(),
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
pub fn declare_proc(
|
||||
&mut self,
|
||||
name: String,
|
||||
in_types: Vec<laddertypes::TypeTerm>,
|
||||
out_types: Vec<laddertypes::TypeTerm>,
|
||||
export: bool
|
||||
) {
|
||||
self.symbols.insert(
|
||||
name.clone(),
|
||||
SymbolDef::Procedure {
|
||||
link_addr: LinkAddr::Relative{ symbol: name, offset: 0 },
|
||||
in_types,
|
||||
out_types,
|
||||
export
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
//<><><><><>
|
||||
|
||||
pub fn declare_var_parse(&mut self, name: &str, typ: &str) {
|
||||
let typ = self
|
||||
.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(typ)
|
||||
.expect("parse typeterm");
|
||||
self.declare_var(String::from(name), typ);
|
||||
}
|
||||
|
||||
pub fn declare_var(&mut self, name: String, typ: laddertypes::TypeTerm) -> tisc::VM_Word {
|
||||
let stack_ref = self.frame_size as tisc::VM_Word;
|
||||
self.frame_size += 1;
|
||||
|
||||
self.symbols
|
||||
.insert(name, SymbolDef::FrameRef { typ, stack_ref });
|
||||
|
||||
stack_ref
|
||||
}
|
||||
|
||||
//<><><><><><>
|
||||
|
||||
pub fn declare_static_parse(&mut self, name: &str, typ: &str) {
|
||||
let typ = self
|
||||
.typectx
|
||||
.write()
|
||||
.unwrap()
|
||||
.parse(typ)
|
||||
.expect("parse typeterm");
|
||||
self.declare_static(String::from(name), typ);
|
||||
}
|
||||
|
||||
pub fn declare_static(&mut self, name: String, typ: laddertypes::TypeTerm) {
|
||||
self.symbols.insert(
|
||||
name,
|
||||
SymbolDef::StaticRef {
|
||||
typ,
|
||||
link_addr: None,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue