Compare commits


5 commits

SHA1        Message                                                            Date
34a129d101  lexer: add input region for each token                             2024-05-13 22:55:24 +02:00
c910265531  runtime: adapt to new instruction set                              2024-05-13 21:51:28 +02:00
f26e24bba1  cargo fmt                                                          2024-05-12 18:58:39 +02:00
49c72e8930  parse type annotations to ast                                      2024-05-12 18:56:10 +02:00
f54f630b38  adapt Abstraction variant of LTExpr to allow multiple parameters   2024-05-12 04:22:37 +02:00
            This avoids unnecessary recursive chaining and also allows
            abstractions with zero parameters. (A sketch of the new calling
            convention follows below.)
7 changed files with 846 additions and 577 deletions
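
For orientation, here is a minimal sketch of what the last commit means for callers of LTExpr::abstraction. The expression is hypothetical and only assumes the constructor signatures visible in the expr diff further down (abstraction(args: Vec<(&str, &str)>, body: LTExpr), application, symbol):

use crate::expr::LTExpr;

fn two_param_example() -> LTExpr {
    // One Abstraction now carries the whole parameter list; the empty
    // strings stand for omitted type annotations (the helper currently
    // ignores them and stores None).
    LTExpr::abstraction(
        vec![("x", ""), ("y", "")],
        LTExpr::application(
            LTExpr::symbol("i+"),
            vec![LTExpr::symbol("x"), LTExpr::symbol("y")],
        ),
    )
    // Before f54f630b38 the same function had to be written as two nested
    // single-parameter abstractions, abstraction("x", "", abstraction("y", "", ...)),
    // and an abstraction with zero parameters could not be expressed directly.
}
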

View file

@ -1,92 +1,104 @@
use {
std::{
use std::{
boxed::Box,
sync::{Arc, RwLock}
}
sync::{Arc, RwLock},
};
#[derive(Clone, Debug)]
pub enum Statement {
Assignment {
var_id: String,
val_expr: LTExpr
val_expr: LTExpr,
},
LetAssign {
typ: Option<TypeTag>,
var_id: String,
val_expr: LTExpr,
},
WhileLoop {
condition: LTExpr,
body: Vec<Statement>
body: Vec<Statement>,
},
Return(LTExpr),
Expr(LTExpr)
Expr(LTExpr),
}
#[derive(Clone, Debug)]
pub enum TypeError {
ParseError(laddertypes::parser::ParseError),
Mismatch {
expected: laddertypes::TypeTerm,
received: laddertypes::TypeTerm,
},
}
pub type TypeTag = Result<laddertypes::TypeTerm, TypeError>;
#[derive(Clone, Debug)]
pub enum LTExpr {
Literal {
typ: Option< laddertypes::TypeTerm >,
val: tisc::VM_Word
typ: Option<TypeTag>,
val: tisc::VM_Word,
},
Symbol {
typ: Option< laddertypes::TypeTerm >,
typ: Option<TypeTag>,
symbol: String,
},
Application {
typ: Option<TypeTag>,
head: Box<LTExpr>,
body: Vec<LTExpr>
body: Vec<LTExpr>,
},
Abstraction {
arg_id: String,
arg_type: Option< laddertypes::TypeTerm >,
val_expr: Box<LTExpr>
args: Vec<(String, Option<TypeTag>)>,
body: Box<LTExpr>,
},
Branch {
condition: Box<LTExpr>,
if_expr: Box<LTExpr>,
else_expr: Box<LTExpr>
else_expr: Box<LTExpr>,
},
Block {
statements: Vec<Statement>
}
statements: Vec<Statement>,
},
}
impl LTExpr {
pub fn symbol(str: &str) -> Self {
LTExpr::Symbol {
typ: None, //typectx.write().unwrap().parse("<Ref memory::Word>~Symbol~<Seq Char>").expect("parse typeterm"),
symbol: String::from(str)
symbol: String::from(str),
}
}
pub fn lit_uint(val: u64) -> Self {
LTExpr::Literal {
typ: None, //typectx.write().unwrap().parse("_2^64~machine::UInt64~machine::Word").expect("parse typeterm"),
val: val as tisc::VM_Word
val: val as tisc::VM_Word,
}
}
pub fn abstraction(arg_id: &str, arg_typ: &str, val_expr: LTExpr) -> LTExpr {
pub fn abstraction(args: Vec<(&str, &str)>, body: LTExpr) -> LTExpr {
LTExpr::Abstraction {
arg_id: String::from(arg_id),
arg_type: None,//typectx.write().unwrap().parse(arg_typ).expect("parse typeterm"),
val_expr: Box::new(val_expr)
args: args
.into_iter()
.map(
|(arg_name, arg_type)| (arg_name.into(), None), //typectx.write().unwrap().parse(t).expect("parse typeterm")
)
.collect(),
body: Box::new(body),
}
}
pub fn application(head: LTExpr, body: Vec<LTExpr>) -> Self {
LTExpr::Application {
typ: None,
head: Box::new(head),
body: body
body: body,
}
}
pub fn block(body: Vec<Statement>) -> Self {
LTExpr::Block {
statements: body
}
LTExpr::Block { statements: body }
}
}
@ -94,7 +106,7 @@ impl Statement {
pub fn while_loop(cond: LTExpr, body: Vec<Statement>) -> Self {
Statement::WhileLoop {
condition: cond,
body
body,
}
}
}
@ -141,4 +153,3 @@ impl LTExpr {
}
}
*/

View file

@ -1,4 +1,3 @@
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum LTIRToken {
Symbol(String),
@ -8,9 +7,9 @@ pub enum LTIRToken {
// SingleQuote(String),
// DoubleQuote(String),
// TripleQuote(String),
Lambda,
AssignType,
LambdaBody,
AssignType(String),
AssignValue,
ExprOpen,
@ -24,37 +23,41 @@ pub enum LTIRToken {
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum LexError {
InvalidDigit,
InvalidChar
InvalidChar,
}
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum LexerState {
Any,
TypeTerm(String),
Sym(String),
Num(i64),
Char( Option<char> )
Char(Option<char>),
}
impl LexerState {
fn into_token(self) -> Option<LTIRToken> {
match self {
LexerState::Any => None,
LexerState::TypeTerm(s) => Some(LTIRToken::AssignType(s)),
LexerState::Sym(s) => Some(LTIRToken::Symbol(s)),
LexerState::Num(n) => Some(LTIRToken::Num(n)),
LexerState::Char(c) => Some(LTIRToken::Char(c?))
LexerState::Char(c) => Some(LTIRToken::Char(c?)),
}
}
}
pub struct LTIRLexer<It>
where It: std::iter::Iterator<Item = char>
where
It: std::iter::Iterator<Item = char>,
{
chars: std::iter::Peekable<It>,
position: usize
}
impl<It> LTIRLexer<It>
where It: Iterator<Item = char>
where
It: Iterator<Item = char>,
{
pub fn into_inner(self) -> std::iter::Peekable<It> {
self.chars
@ -62,98 +65,196 @@ where It: Iterator<Item = char>
}
impl<It> From<It> for LTIRLexer<It>
where It: Iterator<Item = char>
where
It: Iterator<Item = char>,
{
fn from(chars: It) -> Self {
LTIRLexer {
chars: chars.peekable()
chars: chars.peekable(),
position: 0,
}
}
}
#[derive(Clone, Debug)]
pub struct InputRegionTag {
begin: usize,
end: usize
}
impl InputRegionTag {
pub fn max( a: InputRegionTag, b: InputRegionTag ) -> InputRegionTag {
InputRegionTag {
begin: usize::min( a.begin, b.begin ),
end: usize::max( a.end, b.end )
}
}
}
impl<It> Iterator for LTIRLexer<It>
where It: Iterator<Item = char>
where
It: Iterator<Item = char>,
{
type Item = Result<LTIRToken, LexError>;
type Item = (InputRegionTag, Result<LTIRToken, LexError>);
fn next(&mut self) -> Option<Self::Item> {
let mut state = LexerState::Any;
let mut region = InputRegionTag{
begin: self.position,
end: self.position
};
while let Some(c) = self.chars.peek() {
match &mut state {
// determine token type
LexerState::Any => {
match c {
'λ' => { self.chars.next(); return Some(Ok(LTIRToken::Lambda)); },
'(' => { self.chars.next(); return Some(Ok(LTIRToken::ExprOpen)); },
')' => { self.chars.next(); return Some(Ok(LTIRToken::ExprClose)); },
'{' => { self.chars.next(); return Some(Ok(LTIRToken::BlockOpen)); },
'}' => { self.chars.next(); return Some(Ok(LTIRToken::BlockClose)); },
':' => { self.chars.next(); return Some(Ok(LTIRToken::AssignType)); },
'=' => { self.chars.next(); return Some(Ok(LTIRToken::AssignValue)); },
';' => { self.chars.next(); return Some(Ok(LTIRToken::StatementSep)); },
'\'' => { self.chars.next(); state = LexerState::Char(None); },
LexerState::Any => match c {
'λ' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::Lambda)));
}
'.' | '↦' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::LambdaBody)));
}
'(' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::ExprOpen)));
}
')' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::ExprClose)));
}
'{' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::BlockOpen)));
}
'}' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::BlockClose)));
}
':' => {
self.chars.next();
self.position += 1;
region.end += 1;
state = LexerState::TypeTerm(String::new());
}
'=' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::AssignValue)));
}
';' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::StatementSep)));
}
'\'' => {
self.chars.next();
self.position += 1;
region.end += 1;
state = LexerState::Char(None);
}
c => {
if c.is_whitespace() {
self.chars.next();
self.position += 1;
region.begin += 1;
region.end += 1;
} else if c.is_digit(10) {
state = LexerState::Num(0);
} else {
state = LexerState::Sym(String::new());
}
}
}
}
},
LexerState::Char(val) => {
*val = Some(
match self.chars.next() {
self.position += 2;
region.end += 2;
*val = Some(match self.chars.next() {
Some('\\') => {
self.position += 1;
region.end += 1;
match self.chars.next() {
Some('0') => '\0',
Some('n') => '\n',
Some('t') => '\t',
Some(c) => c,
None => {
return Some(Err(LexError::InvalidChar));
}
return Some((region, Err(LexError::InvalidChar)));
}
}
},
Some(c) => c,
None => {
return Some(Err(LexError::InvalidChar));
return Some((region, Err(LexError::InvalidChar)));
}
});
match self.chars.next() {
Some('\'') => {
if let Some(token) = state.clone().into_token() {
return Some(Ok(token));
return Some((region, Ok(token)));
}
}
_ => {
return Some(Err(LexError::InvalidChar));
return Some((region, Err(LexError::InvalidChar)));
}
}
}
LexerState::TypeTerm(s) => {
if *c == '=' || *c == '.' {
if let Some(token) = state.clone().into_token() {
return Some((region, Ok(token)));
}
} else {
if let Some(c) = self.chars.next() {
self.position += 1;
region.end += 1;
s.push(c);
}
}
}
_ => {
if c.is_whitespace()
|| *c == '(' || *c == ')'
|| *c == '{' || *c == '}'
|| *c == ';' || *c == '=' || *c == ':'
|| *c == '('
|| *c == ')'
|| *c == '{'
|| *c == '}'
|| *c == ';'
|| *c == '='
|| *c == ':'
|| *c == '.'
|| *c == '↦'
{
// finish the current token
if let Some(token) = state.clone().into_token() {
return Some(Ok(token));
return Some((region, Ok(token)));
}
} else {
// append to the current token
let c = self.chars.next().unwrap();
self.position += 1;
region.end += 1;
match &mut state {
LexerState::Sym(s) => {
@ -164,7 +265,7 @@ where It: Iterator<Item = char>
if let Some(d) = c.to_digit(10) {
*n = (*n) * 10 + d as i64;
} else {
return Some(Err(LexError::InvalidDigit));
return Some((region, Err(LexError::InvalidDigit)));
}
}
@ -176,11 +277,31 @@ where It: Iterator<Item = char>
}
if let Some(token) = state.into_token() {
Some(Ok(token))
Some((region, Ok(token)))
} else {
None
}
}
}
mod tests {
#[test]
fn test_lexer() {
let mut lexer = crate::lexer::LTIRLexer::from(
"let var1:=123;
let square =λx.* x x;
let sqrt = λx:~machine::Float64~machine::Word.(f64-sqrt x);
let magnitude =
λx:
.λy:
.sqrt (+ (* x x) (* y y));
"
.chars(),
);
for (range, token) in lexer {
eprintln!("[{:?}] {:?}", range, token);
}
}
}
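
Since the lexer now yields (InputRegionTag, Result<LTIRToken, LexError>) pairs, the regions of several tokens can be merged with InputRegionTag::max, for example to report a source span covering a whole statement. A minimal sketch in the style of the test above (illustrative only, not part of the diff):

#[test]
fn merge_token_regions() {
    // Fold the per-token input regions of one statement into a single
    // covering region via InputRegionTag::max (begin = min, end = max).
    let lexer = crate::lexer::LTIRLexer::from("let x = 5;".chars());

    let mut whole: Option<crate::lexer::InputRegionTag> = None;
    for (region, _token) in lexer {
        whole = Some(match whole {
            Some(acc) => crate::lexer::InputRegionTag::max(acc, region),
            None => region,
        });
    }

    // Expected to cover the full "let x = 5;" input.
    eprintln!("covering region = {:?}", whole);
}
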

View file

@ -1,30 +1,34 @@
use {
std::{boxed::{Box}, ops::Deref},
std::collections::HashMap,
std::sync::{Arc, RwLock},
std::{boxed::Box, ops::Deref},
};
mod expr;
mod symbols;
mod procedure_compiler;
mod runtime;
mod lexer;
mod parser;
mod procedure_compiler;
mod runtime;
mod symbols;
use crate::{
expr::{LTExpr, Statement},
symbols::{Scope},
procedure_compiler::ProcedureCompiler
procedure_compiler::ProcedureCompiler,
symbols::Scope,
};
fn compile(scope: &Arc<RwLock<Scope>>, name: &str, source: &str) -> Vec< tisc::assembler::AssemblyWord > {
fn compile(
scope: &Arc<RwLock<Scope>>,
name: &str,
source: &str,
) -> Vec<tisc::assembler::AssemblyWord> {
ProcedureCompiler::new(scope)
.compile(
&parser::parse_expr(
&mut lexer::LTIRLexer::from(
source.chars().peekable()
).peekable()
).expect("syntax error")
&scope.read().unwrap().typectx,
&mut lexer::LTIRLexer::from(source.chars().peekable()).peekable(),
)
.expect("syntax error"),
)
.into_asm(&name.into())
}
@ -40,39 +44,54 @@ fn main() {
/* define type of the symbol
*/
main_scope.write().unwrap()
.declare_static_parse(
main_scope.write().unwrap().declare_static_parse(
"hello-string",
"<Seq Char
~Ascii
~machine::Word>
~<NullTerminatedSeq machine::Word>"
~<NullTerminatedSeq machine::Word>",
);
main_scope.write().unwrap()
.declare_static_parse(
main_scope.write().unwrap().declare_static_parse(
"pfxstr",
"<Seq Char
~Ascii
~machine::Word>
~<LengthPrefixedSeq machine::Word>"
~<LengthPrefixedSeq machine::Word>",
);
/* link assembly-program to symbols
*/
linker.add_procedure("main", compile(&main_scope,
linker.add_procedure(
"main",
compile(
&main_scope,
"main",
"{
let print-nullterm = λstr {
let print-nullterm =
λ str : <Ref <Seq Char~Ascii~machine::Word>>
~ <Ref <NullTerminatedArray machine::Word>>
~ machine::Address
~ machine::Word
.
{
while (@ str) {
emit (@ str);
! str (i+ str 1);
}
};
let print-lenprefix = λstr {
let len = (@ str);
! str (i+ str 1);
let print-len =
λ len : _2^64
~ machine::UInt64
~ machine::Word
.
λ str : <Ref <Seq Char~Ascii~machine::Word>>
~ <Ref <Array machine::Word>>
~ machine::Address
~ machine::Word
.
{
let end = (i+ str len);
while (i- str end) {
emit (@ str);
@ -80,32 +99,79 @@ fn main() {
}
};
let hello = λ _ {
let print-lenprefix =
λ str : <Ref <Seq Char~Ascii~machine::Word>>
~ <Ref <LenPrefixArray machine::Word>>
~ <Ref <Struct
<len _2^64
~machine::UInt64
~machine::Word>
<data <Array machine::Word>>
>>
~ machine::Address
~ machine::Word
.
{
let len = (@ str);
! str (i+ str 1);
print-len len str;
};
let hello = λ.{
print-nullterm hello-string;
print-lenprefix pfxstr;
};
let isquare = λx (i* x x);
hello 'X';
let isquare = λx:. i* x x;
let imagnitude2 = λx:.λy:. i+ (isquare x) (isquare y);
let factorial = λn:.
if( n ){ i* n (factorial (i- n 1)); }
else { 1; };
factorial 20;
if ( i- (imagnitude2 10 20) 500 ) {
emit '?';
} else {
emit '!';
};
emit '\n';
emit (i+ '0' (isquare 3));
emit '\n';
}"));
};
linker.add_static("hello-string",
hello;
}",
),
);
linker.add_static(
"hello-string",
"Hallo Welt!\n\0"
.chars()
.map(|c| (c as u8) as tisc::VM_Word)
.collect());
linker.add_static("pfxstr",
vec![ 3, 'a' as tisc::VM_Word, 'b' as tisc::VM_Word, 'c' as tisc::VM_Word, 'd' as tisc::VM_Word ]
.collect(),
);
let main_addr = linker.get_link_addr(&"main".into()).expect("'main' not linked");
linker.add_static(
"pfxstr",
vec![
3,
'a' as tisc::VM_Word,
'b' as tisc::VM_Word,
'c' as tisc::VM_Word,
'd' as tisc::VM_Word,
],
);
let main_addr = linker
.get_link_addr(&"main".into())
.expect("'main' not linked");
vm.load(linker.link_total().expect("could not link"));
vm.execute(main_addr);
eprintln!("\n====\nVM execution finished\ndatastack = {:?}\n====", vm.data_stack);
eprintln!(
"\n====\nVM execution finished\ndatastack = {:?}\n====",
vm.data_stack
);
}

View file

@ -1,9 +1,12 @@
use {
std::iter::Peekable,
crate::{
expr::{LTExpr, Statement, TypeError, TypeTag},
lexer::{LTIRLexer, LTIRToken, LexError},
expr::{LTExpr, Statement}
}
},
std::{
iter::Peekable,
sync::{Arc, RwLock},
},
};
#[derive(Clone, Debug)]
@ -11,47 +14,72 @@ pub enum ParseError {
LexError(LexError),
UnexpectedClose,
UnexpectedEnd,
UnexpectedToken
UnexpectedToken,
}
pub fn parse_expect<It>(
tokens: &mut Peekable<LTIRLexer<It>>,
expected_token: LTIRToken
expected_token: LTIRToken,
) -> Result<(), ParseError>
where It: Iterator<Item = char>
where
It: Iterator<Item = char>,
{
match tokens.next() {
Some(Ok(t)) => {
Some((region, Ok(t))) => {
if t == expected_token {
Ok(())
} else {
Err(ParseError::UnexpectedToken)
}
},
Some(Err(err)) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd)
}
}
pub fn parse_symbol<It>(
tokens: &mut Peekable<LTIRLexer<It>>
) -> Result< String, ParseError >
where It: Iterator<Item = char>
{
match tokens.next() {
Some(Ok(LTIRToken::Symbol(name))) => Ok(name),
Some(Ok(_)) => Err(ParseError::UnexpectedToken),
Some(Err(err)) => Err(ParseError::LexError(err)),
Some((region, Err(err))) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd),
}
}
pub fn parse_statement<It>(
tokens: &mut Peekable<LTIRLexer<It>>
) -> Result< crate::expr::Statement, ParseError >
where It: Iterator<Item = char>
pub fn parse_symbol<It>(tokens: &mut Peekable<LTIRLexer<It>>) -> Result<String, ParseError>
where
It: Iterator<Item = char>,
{
if let Some(peektok) = tokens.peek() {
match tokens.next() {
Some((region, Ok(LTIRToken::Symbol(name)))) => Ok(name),
Some((region, Ok(_))) => Err(ParseError::UnexpectedToken),
Some((region, Err(err))) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd),
}
}
pub fn parse_type_tag<It>(
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Option<TypeTag>
where
It: Iterator<Item = char>,
{
if let Some((region, peektok)) = tokens.peek().clone() {
match peektok.clone() {
Ok(LTIRToken::AssignType(typeterm_str)) => {
tokens.next();
match typectx.write().unwrap().parse(typeterm_str.as_str()) {
Ok(typeterm) => Some(Ok(typeterm)),
Err(parse_error) => Some(Err(TypeError::ParseError(parse_error))),
}
}
_ => None,
}
} else {
None
}
}
pub fn parse_statement<It>(
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::Statement, ParseError>
where
It: Iterator<Item = char>,
{
if let Some((region, peektok)) = tokens.peek() {
match peektok {
Ok(LTIRToken::Symbol(sym)) => {
match sym.as_str() {
@ -59,55 +87,57 @@ where It: Iterator<Item = char>
tokens.next();
// todo accept address-expression instead of symbol
let name = parse_symbol(tokens)?;
let val_expr = parse_expr(tokens)?;
let val_expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Assignment {
var_id: name,
val_expr
val_expr,
})
}
"let" => {
tokens.next();
let name = parse_symbol(tokens)?;
let typ = parse_type_tag(typectx, tokens);
let _ = parse_expect(tokens, LTIRToken::AssignValue);
let val_expr = parse_expr(tokens)?;
let val_expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::LetAssign {
typ,
var_id: name,
val_expr
val_expr,
})
}
"while" => {
tokens.next();
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
let cond = parse_expr(tokens)?;
let cond = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
Ok(Statement::WhileLoop {
condition: cond,
body: parse_block(tokens)?
body: parse_block(typectx, tokens)?,
})
}
"return" => {
tokens.next();
let expr = parse_expr(tokens)?;
let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Return(parse_expr(tokens)?))
Ok(Statement::Return(parse_expr(typectx, tokens)?))
}
_ => {
let expr = parse_expr(tokens)?;
let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Expr(expr))
}
}
}
Ok(_) => {
let expr = parse_expr(tokens)?;
let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Expr(expr))
},
Err(err) => Err(ParseError::LexError(err.clone()))
}
Err(err) => Err(ParseError::LexError(err.clone())),
}
} else {
Err(ParseError::UnexpectedEnd)
@ -115,21 +145,27 @@ where It: Iterator<Item = char>
}
pub fn parse_block<It>(
tokens: &mut Peekable<LTIRLexer<It>>
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<Vec<Statement>, ParseError>
where It: Iterator<Item = char>
where
It: Iterator<Item = char>,
{
let _ = parse_expect(tokens, LTIRToken::BlockOpen)?;
let mut statements = Vec::new();
while let Some(peektok) = tokens.peek() {
while let Some((region, peektok)) = tokens.peek() {
match peektok {
Ok(LTIRToken::BlockClose) => {
tokens.next();
return Ok(statements)
return Ok(statements);
}
Ok(_) => {
statements.push(parse_statement(typectx, tokens)?);
}
Err(err) => {
return Err(ParseError::LexError(err.clone()));
}
Ok(_) => { statements.push( parse_statement(tokens)? ); }
Err(err) => { return Err(ParseError::LexError(err.clone())); }
}
}
@ -137,51 +173,47 @@ where It: Iterator<Item = char>
}
pub fn parse_atom<It>(
tokens: &mut Peekable<LTIRLexer<It>>
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::LTExpr, ParseError>
where It: Iterator<Item = char>
where
It: Iterator<Item = char>,
{
match tokens.next() {
Some(Ok(LTIRToken::Symbol(sym))) => {
Ok(LTExpr::symbol(sym.as_str()))
}
Some(Ok(LTIRToken::Char(c))) => {
Ok(LTExpr::lit_uint(c as u64))
}
Some(Ok(LTIRToken::Num(n))) => {
Ok(LTExpr::lit_uint(n as u64))
}
Some(Ok(_)) => {
Err(ParseError::UnexpectedToken)
}
Some(Err(err)) => {
Err(ParseError::LexError(err))
}
None => {
Err(ParseError::UnexpectedEnd)
}
Some((region, Ok(LTIRToken::Symbol(sym)))) => Ok(LTExpr::symbol(sym.as_str())),
Some((region, Ok(LTIRToken::Char(c)))) => Ok(LTExpr::lit_uint(c as u64)),
Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::lit_uint(n as u64)),
Some((region, Ok(_))) => Err(ParseError::UnexpectedToken),
Some((region, Err(err))) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd),
}
}
pub fn parse_expr<It>(
tokens: &mut Peekable<LTIRLexer<It>>
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::LTExpr, ParseError>
where It: Iterator<Item = char>
where
It: Iterator<Item = char>,
{
let mut children = Vec::new();
while let Some(tok) = tokens.peek() {
while let Some((region, tok)) = tokens.peek() {
match tok {
Ok(LTIRToken::Lambda) => {
if children.len() == 0 {
tokens.next();
let name = parse_symbol(tokens)?;
let body = parse_expr(tokens)?;
let mut args = Vec::new();
while let Some((region, Ok(LTIRToken::Symbol(_)))) = tokens.peek() {
args.push((parse_symbol(tokens)?, parse_type_tag(typectx, tokens)));
}
let _ = parse_expect(tokens, LTIRToken::LambdaBody);
let body = parse_expr(typectx, tokens)?;
return Ok(LTExpr::Abstraction {
arg_id: name,
arg_type: None,
val_expr: Box::new(body)
args,
body: Box::new(body),
});
} else {
return Err(ParseError::UnexpectedToken);
@ -189,7 +221,7 @@ where It: Iterator<Item = char>
}
Ok(LTIRToken::ExprOpen) => {
tokens.next();
while let Some(peektok) = tokens.peek() {
while let Some((region, peektok)) = tokens.peek() {
match peektok {
Ok(LTIRToken::ExprClose) => {
tokens.next();
@ -197,28 +229,35 @@ where It: Iterator<Item = char>
}
_ => {}
}
children.push(parse_expr(tokens)?);
children.push(parse_expr(typectx, tokens)?);
}
},
Ok(LTIRToken::ExprClose) => { break; }
Ok(LTIRToken::BlockOpen) => { children.push( LTExpr::block(parse_block(tokens)?)); }
Ok(LTIRToken::BlockClose) => { break; }
Ok(LTIRToken::StatementSep) => { break; }
Ok(LTIRToken::Symbol(name)) => {
match name.as_str() {
}
Ok(LTIRToken::ExprClose) => {
break;
}
Ok(LTIRToken::BlockOpen) => {
children.push(LTExpr::block(parse_block(typectx, tokens)?));
}
Ok(LTIRToken::BlockClose) => {
break;
}
Ok(LTIRToken::StatementSep) => {
break;
}
Ok(LTIRToken::Symbol(name)) => match name.as_str() {
"if" => {
tokens.next();
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
let cond = parse_expr(tokens)?;
let cond = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
let if_expr = LTExpr::block(parse_block(tokens)?);
let if_expr = LTExpr::block(parse_block(typectx, tokens)?);
let mut else_expr = LTExpr::block(vec![]);
if let Some(peektok) = tokens.peek() {
if let Some((region, peektok)) = tokens.peek() {
if let Ok(LTIRToken::Symbol(name)) = peektok {
if name == "else" {
tokens.next();
else_expr = parse_expr(tokens)?;
else_expr = parse_expr(typectx, tokens)?;
}
}
}
@ -226,27 +265,30 @@ where It: Iterator<Item = char>
children.push(LTExpr::Branch {
condition: Box::new(cond),
if_expr: Box::new(if_expr),
else_expr: Box::new(else_expr)
else_expr: Box::new(else_expr),
});
}
name => {
children.push(parse_atom(tokens)?);
}
},
Ok(atom) => {
children.push(parse_atom(tokens)?);
}
Err(err) => {
return Err(ParseError::LexError(err.clone()));
}
Ok(atom) => { children.push(parse_atom(tokens)?); }
Err(err) => { return Err(ParseError::LexError(err.clone())); }
}
}
if children.len() > 0 {
let head = children.remove(0);
Ok(LTExpr::Application {
typ: None,
head: Box::new(head),
body: children
body: children,
})
} else {
Err(ParseError::UnexpectedEnd)
}
}

View file

@ -1,17 +1,13 @@
use {
std::{
sync::{Arc, RwLock},
ops::Deref,
},
tisc::{
assembler::AssemblyWord,
linker::LinkAddr
},
crate::{
expr::{LTExpr, Statement},
symbols::{Scope, SymbolDef}
}
symbols::{Scope, SymbolDef},
},
std::{
ops::Deref,
sync::{Arc, RwLock},
},
tisc::{assembler::AssemblyWord, linker::LinkAddr},
};
pub struct ProcedureCompiler {
@ -36,22 +32,26 @@ impl ProcedureCompiler {
let body = self.asm.build();
self.linker.add_procedure("__procedure_body__", body);
let body_addr = self.linker.get_link_addr(&"__procedure_body__".into()).unwrap();
let subroutines = self.linker.link_relative(&"__subroutines__".into()).expect("link error");
let body_addr = self
.linker
.get_link_addr(&"__procedure_body__".into())
.unwrap();
let subroutines = self
.linker
.link_relative(&"__subroutines__".into())
.expect("link error");
let mut entry = tisc::Assembler::new();
if data_frame_size > 0 {
entry = entry
.lit(data_frame_size)
.call("data-frame-alloc");
entry = entry.lit(data_frame_size).call("data-frame-alloc");
}
entry = entry
.call_symbol( LinkAddr::Relative{ symbol: "__subroutines__".into(), offset: body_addr });
entry = entry.call_symbol(LinkAddr::Relative {
symbol: "__subroutines__".into(),
offset: body_addr,
});
if data_frame_size > 0 {
entry = entry
.lit(data_frame_size)
.call("data-frame-drop");
entry = entry.lit(data_frame_size).call("data-frame-drop");
}
let mut superlink = tisc::Linker::new();
@ -79,17 +79,21 @@ impl ProcedureCompiler {
match self.symbols.read().unwrap().get(var_id) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm
.lit(stack_ref)
.call("data-frame-set");
self.asm = self.asm.lit(stack_ref).call("data-frame-set");
}
Some(SymbolDef::StaticRef { typ, link_addr }) => {
self.asm = self.asm
self.asm = self
.asm
.static_ref(var_id.as_str())
.inst(tisc::VM_Instruction::Store);
}
Some(SymbolDef::Procedure{ in_types, out_types, link_addr }) => {
self.asm = self.asm
Some(SymbolDef::Procedure {
in_types,
out_types,
link_addr,
}) => {
self.asm = self
.asm
.call(var_id.as_str())
.inst(tisc::VM_Instruction::Store);
}
@ -98,39 +102,33 @@ impl ProcedureCompiler {
}
}
}
Statement::LetAssign{ var_id, val_expr } => {
match val_expr {
LTExpr::Abstraction { arg_id:_, arg_type:_, val_expr:_ } => {
self.symbols.write().unwrap()
.declare_proc(
var_id.clone(),
vec![],vec![]
);
let lambda_procedure =
ProcedureCompiler::new(&self.symbols)
Statement::LetAssign {
typ,
var_id,
val_expr,
} => match val_expr {
LTExpr::Abstraction { args: _, body: _ } => {
self.symbols
.write()
.unwrap()
.declare_proc(var_id.clone(), vec![], vec![]);
let lambda_procedure = ProcedureCompiler::new(&self.symbols)
.compile(val_expr)
.into_asm(var_id);
self.linker.add_procedure(
var_id,
lambda_procedure
);
self.linker.add_procedure(var_id, lambda_procedure);
}
_ => {
self.symbols.write().unwrap()
.declare_var(
var_id.clone(),
laddertypes::TypeTerm::unit()
);
self = self.compile_statement(
&Statement::Assignment {
self.symbols
.write()
.unwrap()
.declare_var(var_id.clone(), laddertypes::TypeTerm::unit());
self = self.compile_statement(&Statement::Assignment {
var_id: var_id.clone(),
val_expr: val_expr.clone()
}
);
}
}
val_expr: val_expr.clone(),
});
}
},
Statement::WhileLoop { condition, body } => {
let asm = self.asm;
@ -159,47 +157,53 @@ impl ProcedureCompiler {
pub fn compile(mut self, expr: &LTExpr) -> Self {
match expr {
LTExpr::Symbol { typ, symbol } => {
match self.symbols.read().unwrap().get(symbol) {
LTExpr::Symbol { typ, symbol } => match self.symbols.read().unwrap().get(symbol) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm
.lit( stack_ref )
.call("data-frame-get");
self.asm = self.asm.lit(stack_ref).call("data-frame-get");
}
Some(SymbolDef::StaticRef { typ, link_addr }) => {
self.asm = self.asm.static_ref(symbol.as_str());
}
Some(SymbolDef::Procedure{ in_types, out_types, link_addr }) => {
Some(SymbolDef::Procedure {
in_types,
out_types,
link_addr,
}) => {
self.asm = self.asm.call(symbol.as_str());
}
None => {
eprintln!("undefined symbol '{}'!", symbol);
}
}
}
},
LTExpr::Literal { typ, val } => {
self.asm = self.asm.lit(*val);
}
LTExpr::Application { head, body } => {
LTExpr::Application { typ, head, body } => {
for arg in body.iter().rev() {
self = self.compile(arg);
}
self = self.compile(head);
}
LTExpr::Abstraction { arg_id: arg_name, arg_type, val_expr } => {
let id = self.symbols
.write().unwrap()
.declare_var(
arg_name.clone(),
laddertypes::TypeTerm::unit());
self.asm = self.asm
.lit( id )
.call("data-frame-set");
self = self.compile(val_expr);
LTExpr::Abstraction { args, body } => {
for (arg_name, arg_type) in args.iter() {
if let Some(Ok(typeterm)) = arg_type {
let id = self
.symbols
.write()
.unwrap()
.declare_var(arg_name.clone(), typeterm.clone());
self.asm = self.asm.lit(id).call("data-frame-set");
} else {
eprintln!("invalid type {:?} for argument {}", arg_type, arg_name);
}
LTExpr::Branch { condition, if_expr, else_expr } => {
}
self = self.compile(body);
}
LTExpr::Branch {
condition,
if_expr,
else_expr,
} => {
self = self.compile(condition);
let asm = self.asm;
@ -221,4 +225,3 @@ impl ProcedureCompiler {
self
}
}

View file

@ -1,11 +1,7 @@
use {
crate::{expr::LTExpr, procedure_compiler::ProcedureCompiler, symbols::Scope},
std::sync::{Arc, RwLock},
crate::{
expr::LTExpr,
symbols::Scope,
procedure_compiler::ProcedureCompiler
},
tisc::linker::Linker
tisc::linker::Linker,
};
pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
@ -19,12 +15,15 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
"dup",
vec!["T"],
vec!["T~machine::Word"],
vec![ "T~machine::Word",
"T~machine::Word" ]
vec!["T~machine::Word", "T~machine::Word"],
);
linker.add_procedure("dup", tisc::Assembler::new().inst( tisc::VM_Instruction::Dup ).build());
linker.add_procedure(
"dup",
tisc::Assembler::new()
.inst(tisc::VM_Instruction::Dup)
.build(),
);
/* Put a single Ascii character on stdout
*/
@ -32,11 +31,15 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
"emit",
vec![],
vec!["Char~Ascii~machine::Word"],
vec![]
vec![],
);
linker.add_procedure("emit", tisc::Assembler::new().inst( tisc::VM_Instruction::Emit ).build());
linker.add_procedure(
"emit",
tisc::Assembler::new()
.inst(tisc::VM_Instruction::Emit)
.build(),
);
/* The top two items must be native u64 integers,
* which are replaced by their sum.
@ -46,87 +49,92 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse(
"i+",
vec![],
vec![ "_2^64~machine::UInt64~machine::Word",
"_2^64~machine::UInt64~machine::Word" ],
vec![ "_2^64~machine::UInt64~machine::Word" ]
vec![
"_2^64~machine::UInt64~machine::Word",
"_2^64~machine::UInt64~machine::Word",
],
vec!["_2^64~machine::UInt64~machine::Word"],
);
linker.add_procedure(
"i+",
tisc::Assembler::new()
.inst( tisc::VM_Instruction::Add )
.build()
.inst(tisc::VM_Instruction::IntAdd)
.build(),
);
/* Floating-point Additionu
/* Floating-point Addition
*/
symbols.write().unwrap().declare_proc_parse(
"f+",
vec![],
vec![ "~machine::f64~machine::Word",
"~machine::f64~machine::Word" ],
vec![ "~machine::f64~machine::Word" ]
vec![
"~machine::f64~machine::Word",
"~machine::f64~machine::Word",
],
vec!["~machine::f64~machine::Word"],
);
linker.add_procedure(
"f+",
tisc::Assembler::new()
.inst( tisc::VM_Instruction::Addf )
.build()
.inst(tisc::VM_Instruction::FltAdd)
.build(),
);
/* Integer Subtraction
*/
symbols.write().unwrap().declare_proc_parse(
"i-",
vec![],
vec![ "_2^64~machine::UInt64~machine::Word",
"_2^64~machine::UInt64~machine::Word" ],
vec![ "_2^64~machine::UInt64~machine::Word" ]
vec![
"_2^64~machine::UInt64~machine::Word",
"_2^64~machine::UInt64~machine::Word",
],
vec!["_2^64~machine::UInt64~machine::Word"],
);
linker.add_procedure(
"i-",
tisc::Assembler::new()
.inst(tisc::VM_Instruction::BitwiseNot)
.inst(tisc::VM_Instruction::Swap)
.inst(tisc::VM_Instruction::BitNeg)
.lit(1)
.inst(tisc::VM_Instruction::Add)
.inst(tisc::VM_Instruction::Add)
.build()
.inst(tisc::VM_Instruction::IntAdd)
.inst(tisc::VM_Instruction::IntAdd)
.build(),
);
/* Fetch memory address
*/
symbols.write().unwrap().declare_proc_parse("@",
symbols.write().unwrap().declare_proc_parse(
"@",
vec![],
vec!["<MutRef T~machine::Word>~machine::Address~machine::Word"],
vec![ "T~machine::Word" ]
vec!["T~machine::Word"],
);
linker.add_procedure(
"@",
tisc::Assembler::new()
.inst(tisc::VM_Instruction::Fetch)
.build()
.build(),
);
/* Store to memory
*/
symbols.write().unwrap().declare_proc_parse("!",
symbols.write().unwrap().declare_proc_parse(
"!",
vec![],
vec![
"<MutRef T~machine::Word>~machine::Address~machine::Word",
"T~machine::Word"
"T~machine::Word",
],
vec![]
vec![],
);
linker.add_procedure(
"!",
tisc::Assembler::new()
.inst(tisc::VM_Instruction::Store)
.build()
.build(),
);
/*
@ -142,121 +150,115 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse(
"i*",
vec![],
vec![ "_2^64~machine::UInt64~machine::Word",
"_2^64~machine::UInt64~machine::Word" ],
vec![ "_2^64~machine::UInt64~machine::Word" ]);
vec![
"_2^64~machine::UInt64~machine::Word",
"_2^64~machine::UInt64~machine::Word",
],
vec!["_2^64~machine::UInt64~machine::Word"],
);
linker.add_procedure(
"i*",
tisc::Assembler::new()
.lit(0) // [ a b ] -- [ a b sum ]
.while_loop(
// condition
tisc::Assembler::new()
// [ a b sum ] -- [ a b sum b ]
.lit(2)
.inst(tisc::VM_Instruction::Pick),
// body
tisc::Assembler::new()
// [ a b sum ] -- [ a b sum a ]
.lit(3)
.inst(tisc::VM_Instruction::Pick)
// [ a b sum a -- a b (sum+a) ]
.inst( tisc::VM_Instruction::Add )
.inst(tisc::VM_Instruction::IntAdd)
// [ a b sum -- a sum b ]
.inst(tisc::VM_Instruction::Swap)
// [ a sum b -- a sum b 1 ]
.lit(1)
// [ a sum b -- a sum (b-1) ]
.call( "i-" )
// [ a sum b -- a b sum ]
.inst(tisc::VM_Instruction::Swap)
.call("i-")
// [ a sum b -- a b sum ]
.inst(tisc::VM_Instruction::Swap),
)
// [ a b sum -- a sum b ]
.lit(2).inst(tisc::VM_Instruction::Roll)
.lit(2)
.inst(tisc::VM_Instruction::Roll)
// [ a sum b -- a sum ]
.inst(tisc::VM_Instruction::Drop)
// [ a sum -- sum a ]
.lit(2).inst(tisc::VM_Instruction::Roll)
.lit(2)
.inst(tisc::VM_Instruction::Roll)
// [ sum a -- sum ]
.inst(tisc::VM_Instruction::Drop)
.build()
.build(),
);
symbols.write().unwrap().declare_static_parse(
"data-frame-ptr",
"<MutRef <Seq machine::Word>>~machine::Address~machine::Word"
"<MutRef <Seq machine::Word>>~machine::Address~machine::Word",
);
linker.add_static("data-frame-ptr", vec![0x1000]);
symbols.write().unwrap().declare_proc_parse(
"data-frame-set",
vec!["T"],
vec!["T~machine::Word",
"<RefMut T~machine::Word>~LocalVariableId~machine::UInt64~machine::Word"],
vec![]);
vec![
"T~machine::Word",
"<RefMut T~machine::Word>~LocalVariableId~machine::UInt64~machine::Word",
],
vec![],
);
linker.add_procedure(
"data-frame-set",
tisc::Assembler::new()
.static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Fetch)
.inst( tisc::VM_Instruction::Add )
.inst(tisc::VM_Instruction::IntAdd)
.inst(tisc::VM_Instruction::Store)
.build()
.build(),
);
symbols.write().unwrap().declare_proc_parse(
"data-frame-get",
vec!["T"],
vec!["<Ref T~machine::Word>~DataFrameRef~machine::UInt64~machine::Word"],
vec!["T~machine::Word"]
vec!["T~machine::Word"],
);
linker.add_procedure(
"data-frame-get",
tisc::Assembler::new()
.static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Fetch)
.inst( tisc::VM_Instruction::Add )
.inst(tisc::VM_Instruction::IntAdd)
.inst(tisc::VM_Instruction::Fetch)
.build()
.build(),
);
symbols.write().unwrap().declare_proc_parse(
"data-frame-alloc",
vec![],
vec![],
vec![]);
symbols
.write()
.unwrap()
.declare_proc_parse("data-frame-alloc", vec![], vec![], vec![]);
linker.add_procedure(
"data-frame-alloc",
tisc::Assembler::new()
.static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Fetch)
.inst( tisc::VM_Instruction::Swap )
.call("i-")
.static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Store)
.build()
.build(),
);
symbols.write().unwrap().declare_proc_parse(
"data-frame-drop",
vec![],
vec![],
vec![]);
symbols
.write()
.unwrap()
.declare_proc_parse("data-frame-drop", vec![], vec![], vec![]);
linker.add_procedure(
"data-frame-drop",
@ -266,9 +268,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
.call("i+")
.static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Store)
.build()
.build(),
);
symbols
}
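
In the runtime diff above, i- is rebuilt on the renamed instruction set: the subtrahend is bit-negated (BitNeg), incremented via lit(1)/IntAdd, and then added to the other operand with a second IntAdd, i.e. subtraction by adding the two's complement. A quick stand-alone Rust check of that identity (illustrative only, not code from the repository):

fn main() {
    // a - b == a + (!b + 1) on wrapping machine words (two's complement).
    for (a, b) in [(20u64, 1u64), (500, 300), (3, 7)] {
        let diff = a.wrapping_add((!b).wrapping_add(1));
        assert_eq!(diff, a.wrapping_sub(b));
        println!("{} - {} = {}", a, b, diff as i64);
    }
}
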

View file

@ -1,9 +1,9 @@
use {
crate::expr::LTExpr,
std::{
collections::HashMap,
sync::{Arc, RwLock},
},
crate::expr::LTExpr
};
#[derive(Clone, Debug)]
@ -14,27 +14,36 @@ pub enum SymbolDef {
},
StaticRef {
typ: laddertypes::TypeTerm,
link_addr: Option< tisc::VM_Word >
link_addr: Option<tisc::VM_Word>,
},
Procedure {
in_types: Vec<laddertypes::TypeTerm>,
out_types: Vec<laddertypes::TypeTerm>,
link_addr: Option< tisc::VM_Word >
}
link_addr: Option<tisc::VM_Word>,
},
}
impl SymbolDef {
pub fn get_type(&self, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>) -> laddertypes::TypeTerm {
pub fn get_type(
&self,
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
) -> laddertypes::TypeTerm {
match self {
SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(),
SymbolDef::StaticRef { typ, link_addr: _ } => typ.clone(),
SymbolDef::Procedure { in_types, out_types, link_addr: _ } => {
laddertypes::TypeTerm::App(vec![
typectx.write().unwrap().parse("Fn").expect("parse typeterm"),
SymbolDef::Procedure {
in_types,
out_types,
link_addr: _,
} => laddertypes::TypeTerm::App(vec![
typectx
.write()
.unwrap()
.parse("Fn")
.expect("parse typeterm"),
laddertypes::TypeTerm::App(in_types.clone()),
laddertypes::TypeTerm::App( out_types.clone() )
])
}
laddertypes::TypeTerm::App(out_types.clone()),
]),
}
}
}
@ -58,7 +67,7 @@ pub struct Scope {
/* parent scope whose all
* symbols are inherited
*/
parent: Option< Arc<RwLock<Scope>> >
parent: Option<Arc<RwLock<Scope>>>,
}
impl Scope {
@ -67,7 +76,7 @@ impl Scope {
symbols: HashMap::new(),
typectx: Arc::new(RwLock::new(laddertypes::dict::TypeDict::new())),
frame_size: 0,
parent: None
parent: None,
}))
}
@ -95,12 +104,11 @@ impl Scope {
None => {
if let Some(parent) = self.parent.as_ref() {
match parent.read().unwrap().get(name) {
Some(SymbolDef::FrameRef {
typ, stack_ref
}) => Some(SymbolDef::FrameRef {
typ: typ.clone(), stack_ref: stack_ref + self.get_frame_size() as i64
Some(SymbolDef::FrameRef { typ, stack_ref }) => Some(SymbolDef::FrameRef {
typ: typ.clone(),
stack_ref: stack_ref + self.get_frame_size() as i64,
}),
x => x.clone()
x => x.clone(),
}
} else {
None
@ -124,11 +132,12 @@ impl Scope {
*/
//<><><><><><>
pub fn declare_proc_parse(&mut self,
pub fn declare_proc_parse(
&mut self,
name: &str,
type_vars: Vec<&str>,
in_types: Vec<&str>,
out_types: Vec<&str>
out_types: Vec<&str>,
) {
for v in type_vars {
self.typectx.write().unwrap().add_varname(v.into());
@ -136,70 +145,86 @@ impl Scope {
self.declare_proc(
String::from(name),
in_types.into_iter().map(|t| self.typectx.write().unwrap().parse(t).expect("parse typeterm")).collect(),
out_types.into_iter().map(|t| self.typectx.write().unwrap().parse(t).expect("parse typeterm")).collect()
in_types
.into_iter()
.map(|t| {
self.typectx
.write()
.unwrap()
.parse(t)
.expect("parse typeterm")
})
.collect(),
out_types
.into_iter()
.map(|t| {
self.typectx
.write()
.unwrap()
.parse(t)
.expect("parse typeterm")
})
.collect(),
);
}
pub fn declare_proc(&mut self,
pub fn declare_proc(
&mut self,
name: String,
in_types: Vec<laddertypes::TypeTerm>,
out_types: Vec< laddertypes::TypeTerm >
out_types: Vec<laddertypes::TypeTerm>,
) {
self.symbols.insert(name, SymbolDef::Procedure {
self.symbols.insert(
name,
SymbolDef::Procedure {
link_addr: None, //LinkAddr::Relative{ name, offset: 0 },
in_types,
out_types
});
out_types,
},
);
}
//<><><><><>
pub fn declare_var_parse(&mut self,
name: &str,
typ: &str
) {
let typ = self.typectx.write().unwrap().parse(typ).expect("parse typeterm");
self.declare_var(
String::from(name),
typ
);
pub fn declare_var_parse(&mut self, name: &str, typ: &str) {
let typ = self
.typectx
.write()
.unwrap()
.parse(typ)
.expect("parse typeterm");
self.declare_var(String::from(name), typ);
}
pub fn declare_var(&mut self, name: String, typ: laddertypes::TypeTerm) -> tisc::VM_Word {
let stack_ref = self.frame_size as tisc::VM_Word;
self.frame_size += 1;
self.symbols.insert(name, SymbolDef::FrameRef {
typ,
stack_ref
});
self.symbols
.insert(name, SymbolDef::FrameRef { typ, stack_ref });
stack_ref
}
//<><><><><><>
pub fn declare_static_parse(
&mut self,
name: &str,
typ: &str
) {
let typ = self.typectx
.write().unwrap()
.parse(typ).expect("parse typeterm");
pub fn declare_static_parse(&mut self, name: &str, typ: &str) {
let typ = self
.typectx
.write()
.unwrap()
.parse(typ)
.expect("parse typeterm");
self.declare_static(String::from(name), typ);
}
pub fn declare_static(
&mut self,
name: String,
typ: laddertypes::TypeTerm
) {
self.symbols.insert(name, SymbolDef::StaticRef {
pub fn declare_static(&mut self, name: String, typ: laddertypes::TypeTerm) {
self.symbols.insert(
name,
SymbolDef::StaticRef {
typ,
link_addr: None
});
link_addr: None,
},
);
}
}