Compare commits

...

5 commits

SHA1  Message  Date
34a129d101  lexer: add input region for each token  2024-05-13 22:55:24 +02:00
c910265531  runtime: adapt to new instruction set  2024-05-13 21:51:28 +02:00
f26e24bba1  cargo fmt  2024-05-12 18:58:39 +02:00
49c72e8930  parse type annotations to ast  2024-05-12 18:56:10 +02:00
f54f630b38  adapt Abstraction variant of LTExpr to allow multiple parameters  2024-05-12 04:22:37 +02:00
            This avoids unnecessary recursive chaining and also allows abstractions with zero parameters.
7 changed files with 846 additions and 577 deletions
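The last commit above replaces the single-parameter Abstraction variant with a parameter list. A minimal stand-alone sketch of the change in shape, using a simplified stand-in enum (the real LTExpr::Abstraction also carries an optional TypeTag per parameter and the enum has more variants):

    // simplified stand-in for the new LTExpr::Abstraction shape
    #[derive(Debug)]
    enum Expr {
        Symbol(String),
        Abstraction { args: Vec<String>, body: Box<Expr> },
    }

    fn main() {
        // λ x y ↦ body is one node instead of a chain of two single-parameter nodes
        let two_params = Expr::Abstraction {
            args: vec!["x".into(), "y".into()],
            body: Box::new(Expr::Symbol("body".into())),
        };
        // a zero-parameter abstraction (λ.{ ... }) becomes representable as well
        let thunk = Expr::Abstraction {
            args: vec![],
            body: Box::new(Expr::Symbol("body".into())),
        };
        println!("{:?}\n{:?}", two_params, thunk);
    }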

View file

@ -1,92 +1,104 @@
use std::{
use {
std::{
boxed::Box, boxed::Box,
sync::{Arc, RwLock} sync::{Arc, RwLock},
}
}; };
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum Statement { pub enum Statement {
Assignment { Assignment {
var_id: String, var_id: String,
val_expr: LTExpr val_expr: LTExpr,
}, },
LetAssign { LetAssign {
typ: Option<TypeTag>,
var_id: String, var_id: String,
val_expr: LTExpr, val_expr: LTExpr,
}, },
WhileLoop { WhileLoop {
condition: LTExpr, condition: LTExpr,
body: Vec<Statement> body: Vec<Statement>,
}, },
Return(LTExpr), Return(LTExpr),
Expr(LTExpr) Expr(LTExpr),
} }
#[derive(Clone, Debug)]
pub enum TypeError {
ParseError(laddertypes::parser::ParseError),
Mismatch {
expected: laddertypes::TypeTerm,
received: laddertypes::TypeTerm,
},
}
pub type TypeTag = Result<laddertypes::TypeTerm, TypeError>;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum LTExpr { pub enum LTExpr {
Literal { Literal {
typ: Option< laddertypes::TypeTerm >, typ: Option<TypeTag>,
val: tisc::VM_Word val: tisc::VM_Word,
}, },
Symbol { Symbol {
typ: Option< laddertypes::TypeTerm >, typ: Option<TypeTag>,
symbol: String, symbol: String,
}, },
Application { Application {
typ: Option<TypeTag>,
head: Box<LTExpr>, head: Box<LTExpr>,
body: Vec<LTExpr> body: Vec<LTExpr>,
}, },
Abstraction { Abstraction {
arg_id: String, args: Vec<(String, Option<TypeTag>)>,
arg_type: Option< laddertypes::TypeTerm >, body: Box<LTExpr>,
val_expr: Box<LTExpr>
}, },
Branch { Branch {
condition: Box<LTExpr>, condition: Box<LTExpr>,
if_expr: Box<LTExpr>, if_expr: Box<LTExpr>,
else_expr: Box<LTExpr> else_expr: Box<LTExpr>,
}, },
Block { Block {
statements: Vec<Statement> statements: Vec<Statement>,
} },
} }
impl LTExpr { impl LTExpr {
pub fn symbol(str: &str) -> Self { pub fn symbol(str: &str) -> Self {
LTExpr::Symbol { LTExpr::Symbol {
typ: None, //typectx.write().unwrap().parse("<Ref memory::Word>~Symbol~<Seq Char>").expect("parse typeterm"), typ: None, //typectx.write().unwrap().parse("<Ref memory::Word>~Symbol~<Seq Char>").expect("parse typeterm"),
symbol: String::from(str) symbol: String::from(str),
} }
} }
pub fn lit_uint(val: u64) -> Self { pub fn lit_uint(val: u64) -> Self {
LTExpr::Literal { LTExpr::Literal {
typ: None, //typectx.write().unwrap().parse("_2^64~machine::UInt64~machine::Word").expect("parse typeterm"), typ: None, //typectx.write().unwrap().parse("_2^64~machine::UInt64~machine::Word").expect("parse typeterm"),
val: val as tisc::VM_Word val: val as tisc::VM_Word,
} }
} }
pub fn abstraction(arg_id: &str, arg_typ: &str, val_expr: LTExpr) -> LTExpr { pub fn abstraction(args: Vec<(&str, &str)>, body: LTExpr) -> LTExpr {
LTExpr::Abstraction { LTExpr::Abstraction {
arg_id: String::from(arg_id), args: args
arg_type: None,//typectx.write().unwrap().parse(arg_typ).expect("parse typeterm"), .into_iter()
val_expr: Box::new(val_expr) .map(
|(arg_name, arg_type)| (arg_name.into(), None), //typectx.write().unwrap().parse(t).expect("parse typeterm")
)
.collect(),
body: Box::new(body),
} }
} }
pub fn application(head: LTExpr, body: Vec<LTExpr>) -> Self { pub fn application(head: LTExpr, body: Vec<LTExpr>) -> Self {
LTExpr::Application { LTExpr::Application {
typ: None,
head: Box::new(head), head: Box::new(head),
body: body body: body,
} }
} }
pub fn block(body: Vec<Statement>) -> Self { pub fn block(body: Vec<Statement>) -> Self {
LTExpr::Block { LTExpr::Block { statements: body }
statements: body
}
} }
} }
@ -94,7 +106,7 @@ impl Statement {
pub fn while_loop(cond: LTExpr, body: Vec<Statement>) -> Self { pub fn while_loop(cond: LTExpr, body: Vec<Statement>) -> Self {
Statement::WhileLoop { Statement::WhileLoop {
condition: cond, condition: cond,
body body,
} }
} }
} }
@ -141,4 +153,3 @@ impl LTExpr {
} }
} }
*/ */
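The new TypeTag alias introduced in this file is Result<laddertypes::TypeTerm, TypeError>, so a type annotation that fails to parse is recorded on the expression node instead of aborting compilation. A rough sketch of that pattern with hypothetical stand-ins (a plain string plays the role of laddertypes::TypeTerm and of its parser):

    #[derive(Clone, Debug)]
    struct TypeTerm(String); // stand-in for laddertypes::TypeTerm

    #[derive(Clone, Debug)]
    enum TypeError {
        ParseError(String),
    }

    type TypeTag = Result<TypeTerm, TypeError>;

    // keep the outcome of parsing an annotation (ok or error) attached to the node
    fn parse_type_tag(src: &str) -> TypeTag {
        if src.trim().is_empty() {
            Err(TypeError::ParseError("empty type annotation".into()))
        } else {
            Ok(TypeTerm(src.trim().to_string()))
        }
    }

    fn main() {
        let good: Option<TypeTag> = Some(parse_type_tag("_2^64~machine::UInt64~machine::Word"));
        let bad: Option<TypeTag> = Some(parse_type_tag("   "));
        println!("{:?}\n{:?}", good, bad);
    }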

View file

@ -1,4 +1,3 @@
#[derive(PartialEq, Eq, Clone, Debug)] #[derive(PartialEq, Eq, Clone, Debug)]
pub enum LTIRToken { pub enum LTIRToken {
Symbol(String), Symbol(String),
@ -8,9 +7,9 @@ pub enum LTIRToken {
// SingleQuote(String), // SingleQuote(String),
// DoubleQuote(String), // DoubleQuote(String),
// TripleQuote(String), // TripleQuote(String),
Lambda, Lambda,
AssignType, LambdaBody,
AssignType(String),
AssignValue, AssignValue,
ExprOpen, ExprOpen,
@ -24,37 +23,41 @@ pub enum LTIRToken {
#[derive(PartialEq, Eq, Clone, Debug)] #[derive(PartialEq, Eq, Clone, Debug)]
pub enum LexError { pub enum LexError {
InvalidDigit, InvalidDigit,
InvalidChar InvalidChar,
} }
#[derive(PartialEq, Eq, Clone, Debug)] #[derive(PartialEq, Eq, Clone, Debug)]
pub enum LexerState { pub enum LexerState {
Any, Any,
TypeTerm(String),
Sym(String), Sym(String),
Num(i64), Num(i64),
Char( Option<char> ) Char(Option<char>),
} }
impl LexerState { impl LexerState {
fn into_token(self) -> Option<LTIRToken> { fn into_token(self) -> Option<LTIRToken> {
match self { match self {
LexerState::Any => None, LexerState::Any => None,
LexerState::TypeTerm(s) => Some(LTIRToken::AssignType(s)),
LexerState::Sym(s) => Some(LTIRToken::Symbol(s)), LexerState::Sym(s) => Some(LTIRToken::Symbol(s)),
LexerState::Num(n) => Some(LTIRToken::Num(n)), LexerState::Num(n) => Some(LTIRToken::Num(n)),
LexerState::Char(c) => Some(LTIRToken::Char(c?)) LexerState::Char(c) => Some(LTIRToken::Char(c?)),
} }
} }
} }
pub struct LTIRLexer<It> pub struct LTIRLexer<It>
where It: std::iter::Iterator<Item = char> where
It: std::iter::Iterator<Item = char>,
{ {
chars: std::iter::Peekable<It>, chars: std::iter::Peekable<It>,
position: usize
} }
impl<It> LTIRLexer<It> impl<It> LTIRLexer<It>
where It: Iterator<Item = char> where
It: Iterator<Item = char>,
{ {
pub fn into_inner(self) -> std::iter::Peekable<It> { pub fn into_inner(self) -> std::iter::Peekable<It> {
self.chars self.chars
@ -62,98 +65,196 @@ where It: Iterator<Item = char>
} }
impl<It> From<It> for LTIRLexer<It> impl<It> From<It> for LTIRLexer<It>
where It: Iterator<Item = char> where
It: Iterator<Item = char>,
{ {
fn from(chars: It) -> Self { fn from(chars: It) -> Self {
LTIRLexer { LTIRLexer {
chars: chars.peekable() chars: chars.peekable(),
position: 0,
}
}
}
#[derive(Clone, Debug)]
pub struct InputRegionTag {
begin: usize,
end: usize
}
impl InputRegionTag {
pub fn max( a: InputRegionTag, b: InputRegionTag ) -> InputRegionTag {
InputRegionTag {
begin: usize::min( a.begin, b.begin ),
end: usize::max( a.end, b.end )
} }
} }
} }
impl<It> Iterator for LTIRLexer<It> impl<It> Iterator for LTIRLexer<It>
where It: Iterator<Item = char> where
It: Iterator<Item = char>,
{ {
type Item = Result<LTIRToken, LexError>; type Item = (InputRegionTag, Result<LTIRToken, LexError>);
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let mut state = LexerState::Any; let mut state = LexerState::Any;
let mut region = InputRegionTag{
begin: self.position,
end: self.position
};
while let Some(c) = self.chars.peek() { while let Some(c) = self.chars.peek() {
match &mut state { match &mut state {
// determine token type // determine token type
LexerState::Any => { LexerState::Any => match c {
match c { 'λ' => {
'λ' => { self.chars.next(); return Some(Ok(LTIRToken::Lambda)); }, self.chars.next();
'(' => { self.chars.next(); return Some(Ok(LTIRToken::ExprOpen)); }, self.position += 1;
')' => { self.chars.next(); return Some(Ok(LTIRToken::ExprClose)); }, region.end += 1;
'{' => { self.chars.next(); return Some(Ok(LTIRToken::BlockOpen)); }, return Some((region, Ok(LTIRToken::Lambda)));
'}' => { self.chars.next(); return Some(Ok(LTIRToken::BlockClose)); }, }
':' => { self.chars.next(); return Some(Ok(LTIRToken::AssignType)); }, '.' | '↦' => {
'=' => { self.chars.next(); return Some(Ok(LTIRToken::AssignValue)); }, self.chars.next();
';' => { self.chars.next(); return Some(Ok(LTIRToken::StatementSep)); }, self.position += 1;
'\'' => { self.chars.next(); state = LexerState::Char(None); }, region.end += 1;
return Some((region, Ok(LTIRToken::LambdaBody)));
}
'(' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::ExprOpen)));
}
')' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::ExprClose)));
}
'{' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::BlockOpen)));
}
'}' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::BlockClose)));
}
':' => {
self.chars.next();
self.position += 1;
region.end += 1;
state = LexerState::TypeTerm(String::new());
}
'=' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::AssignValue)));
}
';' => {
self.chars.next();
self.position += 1;
region.end += 1;
return Some((region, Ok(LTIRToken::StatementSep)));
}
'\'' => {
self.chars.next();
self.position += 1;
region.end += 1;
state = LexerState::Char(None);
}
c => { c => {
if c.is_whitespace() { if c.is_whitespace() {
self.chars.next(); self.chars.next();
self.position += 1;
region.begin += 1;
region.end += 1;
} else if c.is_digit(10) { } else if c.is_digit(10) {
state = LexerState::Num(0); state = LexerState::Num(0);
} else { } else {
state = LexerState::Sym(String::new()); state = LexerState::Sym(String::new());
} }
} }
} },
}
LexerState::Char(val) => { LexerState::Char(val) => {
*val = Some( self.position += 2;
match self.chars.next() { region.end += 2;
*val = Some(match self.chars.next() {
Some('\\') => { Some('\\') => {
self.position += 1;
region.end += 1;
match self.chars.next() { match self.chars.next() {
Some('0') => '\0', Some('0') => '\0',
Some('n') => '\n', Some('n') => '\n',
Some('t') => '\t', Some('t') => '\t',
Some(c) => c, Some(c) => c,
None => { None => {
return Some(Err(LexError::InvalidChar)); return Some((region, Err(LexError::InvalidChar)));
}
} }
} }
},
Some(c) => c, Some(c) => c,
None => { None => {
return Some(Err(LexError::InvalidChar)); return Some((region, Err(LexError::InvalidChar)));
} }
}); });
match self.chars.next() { match self.chars.next() {
Some('\'') => { Some('\'') => {
if let Some(token) = state.clone().into_token() { if let Some(token) = state.clone().into_token() {
return Some(Ok(token)); return Some((region, Ok(token)));
} }
} }
_ => { _ => {
return Some(Err(LexError::InvalidChar)); return Some((region, Err(LexError::InvalidChar)));
}
}
}
LexerState::TypeTerm(s) => {
if *c == '=' || *c == '.' {
if let Some(token) = state.clone().into_token() {
return Some((region, Ok(token)));
}
} else {
if let Some(c) = self.chars.next() {
self.position += 1;
region.end += 1;
s.push(c);
} }
} }
} }
_ => { _ => {
if c.is_whitespace() if c.is_whitespace()
|| *c == '(' || *c == ')' || *c == '('
|| *c == '{' || *c == '}' || *c == ')'
|| *c == ';' || *c == '=' || *c == ':' || *c == '{'
|| *c == '}'
|| *c == ';'
|| *c == '='
|| *c == ':'
|| *c == '.'
|| *c == '↦'
{ {
// finish the current token // finish the current token
if let Some(token) = state.clone().into_token() { if let Some(token) = state.clone().into_token() {
return Some(Ok(token)); return Some((region, Ok(token)));
} }
} else { } else {
// append to the current token // append to the current token
let c = self.chars.next().unwrap(); let c = self.chars.next().unwrap();
self.position += 1;
region.end += 1;
match &mut state { match &mut state {
LexerState::Sym(s) => { LexerState::Sym(s) => {
@ -164,7 +265,7 @@ where It: Iterator<Item = char>
if let Some(d) = c.to_digit(10) { if let Some(d) = c.to_digit(10) {
*n = (*n) * 10 + d as i64; *n = (*n) * 10 + d as i64;
} else { } else {
return Some(Err(LexError::InvalidDigit)); return Some((region, Err(LexError::InvalidDigit)));
} }
} }
@ -176,11 +277,31 @@ where It: Iterator<Item = char>
} }
if let Some(token) = state.into_token() { if let Some(token) = state.into_token() {
Some(Ok(token)) Some((region, Ok(token)))
} else { } else {
None None
} }
} }
} }
mod tests {
#[test]
fn test_lexer() {
let mut lexer = crate::lexer::LTIRLexer::from(
"let var1:=123;
let square =λx.* x x;
let sqrt = λx:~machine::Float64~machine::Word.(f64-sqrt x);
let magnitude =
λx:
.λy:
.sqrt (+ (* x x) (* y y));
"
.chars(),
);
for (range, token) in lexer {
eprintln!("[{:?}] {:?}", range, token);
}
}
}
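After this change the lexer yields (InputRegionTag, Result<LTIRToken, LexError>) pairs, and InputRegionTag::max merges two regions into the smallest one covering both. A small self-contained sketch of that merge, mirroring the struct from the diff (fields are constructed directly here, which the real module may not expose):

    #[derive(Clone, Copy, Debug)]
    struct InputRegionTag {
        begin: usize,
        end: usize,
    }

    impl InputRegionTag {
        // smallest region covering both inputs, as in the lexer above
        fn max(a: InputRegionTag, b: InputRegionTag) -> InputRegionTag {
            InputRegionTag {
                begin: usize::min(a.begin, b.begin),
                end: usize::max(a.end, b.end),
            }
        }
    }

    fn main() {
        // e.g. the regions of `let` and `square` in `let square = ...`
        let kw = InputRegionTag { begin: 0, end: 3 };
        let name = InputRegionTag { begin: 4, end: 10 };
        println!("{:?}", InputRegionTag::max(kw, name)); // begin: 0, end: 10
    }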

View file

@ -1,30 +1,34 @@
use { use {
std::{boxed::{Box}, ops::Deref},
std::collections::HashMap, std::collections::HashMap,
std::sync::{Arc, RwLock}, std::sync::{Arc, RwLock},
std::{boxed::Box, ops::Deref},
}; };
mod expr; mod expr;
mod symbols;
mod procedure_compiler;
mod runtime;
mod lexer; mod lexer;
mod parser; mod parser;
mod procedure_compiler;
mod runtime;
mod symbols;
use crate::{ use crate::{
expr::{LTExpr, Statement}, expr::{LTExpr, Statement},
symbols::{Scope}, procedure_compiler::ProcedureCompiler,
procedure_compiler::ProcedureCompiler symbols::Scope,
}; };
fn compile(scope: &Arc<RwLock<Scope>>, name: &str, source: &str) -> Vec< tisc::assembler::AssemblyWord > { fn compile(
scope: &Arc<RwLock<Scope>>,
name: &str,
source: &str,
) -> Vec<tisc::assembler::AssemblyWord> {
ProcedureCompiler::new(scope) ProcedureCompiler::new(scope)
.compile( .compile(
&parser::parse_expr( &parser::parse_expr(
&mut lexer::LTIRLexer::from( &scope.read().unwrap().typectx,
source.chars().peekable() &mut lexer::LTIRLexer::from(source.chars().peekable()).peekable(),
).peekable() )
).expect("syntax error") .expect("syntax error"),
) )
.into_asm(&name.into()) .into_asm(&name.into())
} }
@ -40,39 +44,54 @@ fn main() {
/* define type of the symbol /* define type of the symbol
*/ */
main_scope.write().unwrap() main_scope.write().unwrap().declare_static_parse(
.declare_static_parse(
"hello-string", "hello-string",
"<Seq Char "<Seq Char
~Ascii ~Ascii
~machine::Word> ~machine::Word>
~<NullTerminatedSeq machine::Word>" ~<NullTerminatedSeq machine::Word>",
); );
main_scope.write().unwrap() main_scope.write().unwrap().declare_static_parse(
.declare_static_parse(
"pfxstr", "pfxstr",
"<Seq Char "<Seq Char
~Ascii ~Ascii
~machine::Word> ~machine::Word>
~<LengthPrefixedSeq machine::Word>" ~<LengthPrefixedSeq machine::Word>",
); );
/* link assembly-program to symbols /* link assembly-program to symbols
*/ */
linker.add_procedure("main", compile(&main_scope, linker.add_procedure(
"main",
compile(
&main_scope,
"main", "main",
"{ "{
let print-nullterm = λstr { let print-nullterm =
λ str : <Ref <Seq Char~Ascii~machine::Word>>
~ <Ref <NullTerminatedArray machine::Word>>
~ machine::Address
~ machine::Word
.
{
while (@ str) { while (@ str) {
emit (@ str); emit (@ str);
! str (i+ str 1); ! str (i+ str 1);
} }
}; };
let print-lenprefix = λstr { let print-len =
let len = (@ str); λ len : _2^64
! str (i+ str 1); ~ machine::UInt64
~ machine::Word
.
λ str : <Ref <Seq Char~Ascii~machine::Word>>
~ <Ref <Array machine::Word>>
~ machine::Address
~ machine::Word
.
{
let end = (i+ str len); let end = (i+ str len);
while (i- str end) { while (i- str end) {
emit (@ str); emit (@ str);
@ -80,32 +99,79 @@ fn main() {
} }
}; };
let hello = λ _ { let print-lenprefix =
λ str : <Ref <Seq Char~Ascii~machine::Word>>
~ <Ref <LenPrefixArray machine::Word>>
~ <Ref <Struct
<len _2^64
~machine::UInt64
~machine::Word>
<data <Array machine::Word>>
>>
~ machine::Address
~ machine::Word
.
{
let len = (@ str);
! str (i+ str 1);
print-len len str;
};
let hello = λ.{
print-nullterm hello-string; print-nullterm hello-string;
print-lenprefix pfxstr; print-lenprefix pfxstr;
};
let isquare = λx (i* x x);
hello 'X'; let isquare = λx:. i* x x;
let imagnitude2 = λx:.λy:. i+ (isquare x) (isquare y);
let factorial = λn:.
if( n ){ i* n (factorial (i- n 1)); }
else { 1; };
factorial 20;
if ( i- (imagnitude2 10 20) 500 ) {
emit '?';
} else {
emit '!';
};
emit '\n'; emit '\n';
emit (i+ '0' (isquare 3)); emit (i+ '0' (isquare 3));
emit '\n'; emit '\n';
}")); };
linker.add_static("hello-string", hello;
}",
),
);
linker.add_static(
"hello-string",
"Hallo Welt!\n\0" "Hallo Welt!\n\0"
.chars() .chars()
.map(|c| (c as u8) as tisc::VM_Word) .map(|c| (c as u8) as tisc::VM_Word)
.collect()); .collect(),
linker.add_static("pfxstr",
vec![ 3, 'a' as tisc::VM_Word, 'b' as tisc::VM_Word, 'c' as tisc::VM_Word, 'd' as tisc::VM_Word ]
); );
let main_addr = linker.get_link_addr(&"main".into()).expect("'main' not linked"); linker.add_static(
"pfxstr",
vec![
3,
'a' as tisc::VM_Word,
'b' as tisc::VM_Word,
'c' as tisc::VM_Word,
'd' as tisc::VM_Word,
],
);
let main_addr = linker
.get_link_addr(&"main".into())
.expect("'main' not linked");
vm.load(linker.link_total().expect("could not link")); vm.load(linker.link_total().expect("could not link"));
vm.execute(main_addr); vm.execute(main_addr);
eprintln!("\n====\nVM execution finished\ndatastack = {:?}\n====", vm.data_stack); eprintln!(
"\n====\nVM execution finished\ndatastack = {:?}\n====",
vm.data_stack
);
} }

View file

@ -1,9 +1,12 @@
use { use {
std::iter::Peekable,
crate::{ crate::{
expr::{LTExpr, Statement, TypeError, TypeTag},
lexer::{LTIRLexer, LTIRToken, LexError}, lexer::{LTIRLexer, LTIRToken, LexError},
expr::{LTExpr, Statement} },
} std::{
iter::Peekable,
sync::{Arc, RwLock},
},
}; };
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -11,47 +14,72 @@ pub enum ParseError {
LexError(LexError), LexError(LexError),
UnexpectedClose, UnexpectedClose,
UnexpectedEnd, UnexpectedEnd,
UnexpectedToken UnexpectedToken,
} }
pub fn parse_expect<It>( pub fn parse_expect<It>(
tokens: &mut Peekable<LTIRLexer<It>>, tokens: &mut Peekable<LTIRLexer<It>>,
expected_token: LTIRToken expected_token: LTIRToken,
) -> Result<(), ParseError> ) -> Result<(), ParseError>
where It: Iterator<Item = char> where
It: Iterator<Item = char>,
{ {
match tokens.next() { match tokens.next() {
Some(Ok(t)) => { Some((region, Ok(t))) => {
if t == expected_token { if t == expected_token {
Ok(()) Ok(())
} else { } else {
Err(ParseError::UnexpectedToken) Err(ParseError::UnexpectedToken)
} }
},
Some(Err(err)) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd)
} }
} Some((region, Err(err))) => Err(ParseError::LexError(err)),
pub fn parse_symbol<It>(
tokens: &mut Peekable<LTIRLexer<It>>
) -> Result< String, ParseError >
where It: Iterator<Item = char>
{
match tokens.next() {
Some(Ok(LTIRToken::Symbol(name))) => Ok(name),
Some(Ok(_)) => Err(ParseError::UnexpectedToken),
Some(Err(err)) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd), None => Err(ParseError::UnexpectedEnd),
} }
} }
pub fn parse_statement<It>( pub fn parse_symbol<It>(tokens: &mut Peekable<LTIRLexer<It>>) -> Result<String, ParseError>
tokens: &mut Peekable<LTIRLexer<It>> where
) -> Result< crate::expr::Statement, ParseError > It: Iterator<Item = char>,
where It: Iterator<Item = char>
{ {
if let Some(peektok) = tokens.peek() { match tokens.next() {
Some((region, Ok(LTIRToken::Symbol(name)))) => Ok(name),
Some((region, Ok(_))) => Err(ParseError::UnexpectedToken),
Some((region, Err(err))) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd),
}
}
pub fn parse_type_tag<It>(
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Option<TypeTag>
where
It: Iterator<Item = char>,
{
if let Some((region, peektok)) = tokens.peek().clone() {
match peektok.clone() {
Ok(LTIRToken::AssignType(typeterm_str)) => {
tokens.next();
match typectx.write().unwrap().parse(typeterm_str.as_str()) {
Ok(typeterm) => Some(Ok(typeterm)),
Err(parse_error) => Some(Err(TypeError::ParseError(parse_error))),
}
}
_ => None,
}
} else {
None
}
}
pub fn parse_statement<It>(
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::Statement, ParseError>
where
It: Iterator<Item = char>,
{
if let Some((region, peektok)) = tokens.peek() {
match peektok { match peektok {
Ok(LTIRToken::Symbol(sym)) => { Ok(LTIRToken::Symbol(sym)) => {
match sym.as_str() { match sym.as_str() {
@ -59,55 +87,57 @@ where It: Iterator<Item = char>
tokens.next(); tokens.next();
// todo accept address-expression instead of symbol // todo accept address-expression instead of symbol
let name = parse_symbol(tokens)?; let name = parse_symbol(tokens)?;
let val_expr = parse_expr(tokens)?; let val_expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Assignment { Ok(Statement::Assignment {
var_id: name, var_id: name,
val_expr val_expr,
}) })
} }
"let" => { "let" => {
tokens.next(); tokens.next();
let name = parse_symbol(tokens)?; let name = parse_symbol(tokens)?;
let typ = parse_type_tag(typectx, tokens);
let _ = parse_expect(tokens, LTIRToken::AssignValue); let _ = parse_expect(tokens, LTIRToken::AssignValue);
let val_expr = parse_expr(tokens)?; let val_expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::LetAssign { Ok(Statement::LetAssign {
typ,
var_id: name, var_id: name,
val_expr val_expr,
}) })
} }
"while" => { "while" => {
tokens.next(); tokens.next();
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?; let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
let cond = parse_expr(tokens)?; let cond = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::ExprClose)?; let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
Ok(Statement::WhileLoop { Ok(Statement::WhileLoop {
condition: cond, condition: cond,
body: parse_block(tokens)? body: parse_block(typectx, tokens)?,
}) })
} }
"return" => { "return" => {
tokens.next(); tokens.next();
let expr = parse_expr(tokens)?; let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Return(parse_expr(tokens)?)) Ok(Statement::Return(parse_expr(typectx, tokens)?))
} }
_ => { _ => {
let expr = parse_expr(tokens)?; let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Expr(expr)) Ok(Statement::Expr(expr))
} }
} }
} }
Ok(_) => { Ok(_) => {
let expr = parse_expr(tokens)?; let expr = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::StatementSep)?; let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
Ok(Statement::Expr(expr)) Ok(Statement::Expr(expr))
}, }
Err(err) => Err(ParseError::LexError(err.clone())) Err(err) => Err(ParseError::LexError(err.clone())),
} }
} else { } else {
Err(ParseError::UnexpectedEnd) Err(ParseError::UnexpectedEnd)
@ -115,21 +145,27 @@ where It: Iterator<Item = char>
} }
pub fn parse_block<It>( pub fn parse_block<It>(
tokens: &mut Peekable<LTIRLexer<It>> typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<Vec<Statement>, ParseError> ) -> Result<Vec<Statement>, ParseError>
where It: Iterator<Item = char> where
It: Iterator<Item = char>,
{ {
let _ = parse_expect(tokens, LTIRToken::BlockOpen)?; let _ = parse_expect(tokens, LTIRToken::BlockOpen)?;
let mut statements = Vec::new(); let mut statements = Vec::new();
while let Some(peektok) = tokens.peek() { while let Some((region, peektok)) = tokens.peek() {
match peektok { match peektok {
Ok(LTIRToken::BlockClose) => { Ok(LTIRToken::BlockClose) => {
tokens.next(); tokens.next();
return Ok(statements) return Ok(statements);
}
Ok(_) => {
statements.push(parse_statement(typectx, tokens)?);
}
Err(err) => {
return Err(ParseError::LexError(err.clone()));
} }
Ok(_) => { statements.push( parse_statement(tokens)? ); }
Err(err) => { return Err(ParseError::LexError(err.clone())); }
} }
} }
@ -137,51 +173,47 @@ where It: Iterator<Item = char>
} }
pub fn parse_atom<It>( pub fn parse_atom<It>(
tokens: &mut Peekable<LTIRLexer<It>> tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::LTExpr, ParseError> ) -> Result<crate::expr::LTExpr, ParseError>
where It: Iterator<Item = char> where
It: Iterator<Item = char>,
{ {
match tokens.next() { match tokens.next() {
Some(Ok(LTIRToken::Symbol(sym))) => { Some((region, Ok(LTIRToken::Symbol(sym)))) => Ok(LTExpr::symbol(sym.as_str())),
Ok(LTExpr::symbol(sym.as_str())) Some((region, Ok(LTIRToken::Char(c)))) => Ok(LTExpr::lit_uint(c as u64)),
} Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::lit_uint(n as u64)),
Some(Ok(LTIRToken::Char(c))) => { Some((region, Ok(_))) => Err(ParseError::UnexpectedToken),
Ok(LTExpr::lit_uint(c as u64)) Some((region, Err(err))) => Err(ParseError::LexError(err)),
} None => Err(ParseError::UnexpectedEnd),
Some(Ok(LTIRToken::Num(n))) => {
Ok(LTExpr::lit_uint(n as u64))
}
Some(Ok(_)) => {
Err(ParseError::UnexpectedToken)
}
Some(Err(err)) => {
Err(ParseError::LexError(err))
}
None => {
Err(ParseError::UnexpectedEnd)
}
} }
} }
pub fn parse_expr<It>( pub fn parse_expr<It>(
tokens: &mut Peekable<LTIRLexer<It>> typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::LTExpr, ParseError> ) -> Result<crate::expr::LTExpr, ParseError>
where It: Iterator<Item = char> where
It: Iterator<Item = char>,
{ {
let mut children = Vec::new(); let mut children = Vec::new();
while let Some(tok) = tokens.peek() { while let Some((region, tok)) = tokens.peek() {
match tok { match tok {
Ok(LTIRToken::Lambda) => { Ok(LTIRToken::Lambda) => {
if children.len() == 0 { if children.len() == 0 {
tokens.next(); tokens.next();
let name = parse_symbol(tokens)?;
let body = parse_expr(tokens)?; let mut args = Vec::new();
while let Some((region, Ok(LTIRToken::Symbol(_)))) = tokens.peek() {
args.push((parse_symbol(tokens)?, parse_type_tag(typectx, tokens)));
}
let _ = parse_expect(tokens, LTIRToken::LambdaBody);
let body = parse_expr(typectx, tokens)?;
return Ok(LTExpr::Abstraction { return Ok(LTExpr::Abstraction {
arg_id: name, args,
arg_type: None, body: Box::new(body),
val_expr: Box::new(body)
}); });
} else { } else {
return Err(ParseError::UnexpectedToken); return Err(ParseError::UnexpectedToken);
@ -189,7 +221,7 @@ where It: Iterator<Item = char>
} }
Ok(LTIRToken::ExprOpen) => { Ok(LTIRToken::ExprOpen) => {
tokens.next(); tokens.next();
while let Some(peektok) = tokens.peek() { while let Some((region, peektok)) = tokens.peek() {
match peektok { match peektok {
Ok(LTIRToken::ExprClose) => { Ok(LTIRToken::ExprClose) => {
tokens.next(); tokens.next();
@ -197,28 +229,35 @@ where It: Iterator<Item = char>
} }
_ => {} _ => {}
} }
children.push(parse_expr(tokens)?); children.push(parse_expr(typectx, tokens)?);
} }
}, }
Ok(LTIRToken::ExprClose) => { break; } Ok(LTIRToken::ExprClose) => {
Ok(LTIRToken::BlockOpen) => { children.push( LTExpr::block(parse_block(tokens)?)); } break;
Ok(LTIRToken::BlockClose) => { break; } }
Ok(LTIRToken::StatementSep) => { break; } Ok(LTIRToken::BlockOpen) => {
Ok(LTIRToken::Symbol(name)) => { children.push(LTExpr::block(parse_block(typectx, tokens)?));
match name.as_str() { }
Ok(LTIRToken::BlockClose) => {
break;
}
Ok(LTIRToken::StatementSep) => {
break;
}
Ok(LTIRToken::Symbol(name)) => match name.as_str() {
"if" => { "if" => {
tokens.next(); tokens.next();
let _ = parse_expect(tokens, LTIRToken::ExprOpen)?; let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
let cond = parse_expr(tokens)?; let cond = parse_expr(typectx, tokens)?;
let _ = parse_expect(tokens, LTIRToken::ExprClose)?; let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
let if_expr = LTExpr::block(parse_block(tokens)?); let if_expr = LTExpr::block(parse_block(typectx, tokens)?);
let mut else_expr = LTExpr::block(vec![]); let mut else_expr = LTExpr::block(vec![]);
if let Some(peektok) = tokens.peek() { if let Some((region, peektok)) = tokens.peek() {
if let Ok(LTIRToken::Symbol(name)) = peektok { if let Ok(LTIRToken::Symbol(name)) = peektok {
if name == "else" { if name == "else" {
tokens.next(); tokens.next();
else_expr = parse_expr(tokens)?; else_expr = parse_expr(typectx, tokens)?;
} }
} }
} }
@ -226,27 +265,30 @@ where It: Iterator<Item = char>
children.push(LTExpr::Branch { children.push(LTExpr::Branch {
condition: Box::new(cond), condition: Box::new(cond),
if_expr: Box::new(if_expr), if_expr: Box::new(if_expr),
else_expr: Box::new(else_expr) else_expr: Box::new(else_expr),
}); });
} }
name => { name => {
children.push(parse_atom(tokens)?); children.push(parse_atom(tokens)?);
} }
},
Ok(atom) => {
children.push(parse_atom(tokens)?);
} }
Err(err) => {
return Err(ParseError::LexError(err.clone()));
} }
Ok(atom) => { children.push(parse_atom(tokens)?); }
Err(err) => { return Err(ParseError::LexError(err.clone())); }
} }
} }
if children.len() > 0 { if children.len() > 0 {
let head = children.remove(0); let head = children.remove(0);
Ok(LTExpr::Application { Ok(LTExpr::Application {
typ: None,
head: Box::new(head), head: Box::new(head),
body: children body: children,
}) })
} else { } else {
Err(ParseError::UnexpectedEnd) Err(ParseError::UnexpectedEnd)
} }
} }
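The parser changes thread the shared TypeDict through every function and add parse_type_tag, which consumes a type annotation only if the next token is one. A stand-alone sketch of that peek-then-consume pattern over a Peekable iterator (simplified token type and return value; the real function takes the typectx and returns Option<TypeTag>):

    use std::iter::Peekable;

    // simplified token; the real lexer yields (InputRegionTag, Result<LTIRToken, LexError>)
    #[derive(Clone, Debug, PartialEq)]
    enum Tok {
        Symbol(String),
        AssignType(String),
        AssignValue,
    }

    // consume the optional annotation only when the next token actually is one
    fn parse_type_tag<I>(tokens: &mut Peekable<I>) -> Option<String>
    where
        I: Iterator<Item = Tok>,
    {
        if let Some(Tok::AssignType(_)) = tokens.peek() {
            if let Some(Tok::AssignType(t)) = tokens.next() {
                return Some(t);
            }
        }
        None
    }

    fn main() {
        // `let square : <Fn UInt64 UInt64> = ...`: symbol, optional annotation, then `=`
        let mut toks = vec![
            Tok::Symbol("square".into()),
            Tok::AssignType("<Fn UInt64 UInt64>".into()),
            Tok::AssignValue,
        ]
        .into_iter()
        .peekable();

        toks.next(); // the caller has already consumed the symbol
        println!("{:?}", parse_type_tag(&mut toks)); // Some("<Fn UInt64 UInt64>")
        println!("{:?}", toks.next()); // Some(AssignValue)
    }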

View file

@ -1,17 +1,13 @@
use { use {
std::{
sync::{Arc, RwLock},
ops::Deref,
},
tisc::{
assembler::AssemblyWord,
linker::LinkAddr
},
crate::{ crate::{
expr::{LTExpr, Statement}, expr::{LTExpr, Statement},
symbols::{Scope, SymbolDef} symbols::{Scope, SymbolDef},
} },
std::{
ops::Deref,
sync::{Arc, RwLock},
},
tisc::{assembler::AssemblyWord, linker::LinkAddr},
}; };
pub struct ProcedureCompiler { pub struct ProcedureCompiler {
@ -36,22 +32,26 @@ impl ProcedureCompiler {
let body = self.asm.build(); let body = self.asm.build();
self.linker.add_procedure("__procedure_body__", body); self.linker.add_procedure("__procedure_body__", body);
let body_addr = self.linker.get_link_addr(&"__procedure_body__".into()).unwrap(); let body_addr = self
let subroutines = self.linker.link_relative(&"__subroutines__".into()).expect("link error"); .linker
.get_link_addr(&"__procedure_body__".into())
.unwrap();
let subroutines = self
.linker
.link_relative(&"__subroutines__".into())
.expect("link error");
let mut entry = tisc::Assembler::new(); let mut entry = tisc::Assembler::new();
if data_frame_size > 0 { if data_frame_size > 0 {
entry = entry entry = entry.lit(data_frame_size).call("data-frame-alloc");
.lit(data_frame_size)
.call("data-frame-alloc");
} }
entry = entry entry = entry.call_symbol(LinkAddr::Relative {
.call_symbol( LinkAddr::Relative{ symbol: "__subroutines__".into(), offset: body_addr }); symbol: "__subroutines__".into(),
offset: body_addr,
});
if data_frame_size > 0 { if data_frame_size > 0 {
entry = entry entry = entry.lit(data_frame_size).call("data-frame-drop");
.lit(data_frame_size)
.call("data-frame-drop");
} }
let mut superlink = tisc::Linker::new(); let mut superlink = tisc::Linker::new();
@ -79,17 +79,21 @@ impl ProcedureCompiler {
match self.symbols.read().unwrap().get(var_id) { match self.symbols.read().unwrap().get(var_id) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => { Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm self.asm = self.asm.lit(stack_ref).call("data-frame-set");
.lit(stack_ref)
.call("data-frame-set");
} }
Some(SymbolDef::StaticRef { typ, link_addr }) => { Some(SymbolDef::StaticRef { typ, link_addr }) => {
self.asm = self.asm self.asm = self
.asm
.static_ref(var_id.as_str()) .static_ref(var_id.as_str())
.inst(tisc::VM_Instruction::Store); .inst(tisc::VM_Instruction::Store);
} }
Some(SymbolDef::Procedure{ in_types, out_types, link_addr }) => { Some(SymbolDef::Procedure {
self.asm = self.asm in_types,
out_types,
link_addr,
}) => {
self.asm = self
.asm
.call(var_id.as_str()) .call(var_id.as_str())
.inst(tisc::VM_Instruction::Store); .inst(tisc::VM_Instruction::Store);
} }
@ -98,39 +102,33 @@ impl ProcedureCompiler {
} }
} }
} }
Statement::LetAssign{ var_id, val_expr } => { Statement::LetAssign {
match val_expr { typ,
LTExpr::Abstraction { arg_id:_, arg_type:_, val_expr:_ } => { var_id,
self.symbols.write().unwrap() val_expr,
.declare_proc( } => match val_expr {
var_id.clone(), LTExpr::Abstraction { args: _, body: _ } => {
vec![],vec![] self.symbols
); .write()
let lambda_procedure = .unwrap()
ProcedureCompiler::new(&self.symbols) .declare_proc(var_id.clone(), vec![], vec![]);
let lambda_procedure = ProcedureCompiler::new(&self.symbols)
.compile(val_expr) .compile(val_expr)
.into_asm(var_id); .into_asm(var_id);
self.linker.add_procedure( self.linker.add_procedure(var_id, lambda_procedure);
var_id,
lambda_procedure
);
} }
_ => { _ => {
self.symbols.write().unwrap() self.symbols
.declare_var( .write()
var_id.clone(), .unwrap()
laddertypes::TypeTerm::unit() .declare_var(var_id.clone(), laddertypes::TypeTerm::unit());
); self = self.compile_statement(&Statement::Assignment {
self = self.compile_statement(
&Statement::Assignment {
var_id: var_id.clone(), var_id: var_id.clone(),
val_expr: val_expr.clone() val_expr: val_expr.clone(),
} });
);
}
}
} }
},
Statement::WhileLoop { condition, body } => { Statement::WhileLoop { condition, body } => {
let asm = self.asm; let asm = self.asm;
@ -159,47 +157,53 @@ impl ProcedureCompiler {
pub fn compile(mut self, expr: &LTExpr) -> Self { pub fn compile(mut self, expr: &LTExpr) -> Self {
match expr { match expr {
LTExpr::Symbol { typ, symbol } => { LTExpr::Symbol { typ, symbol } => match self.symbols.read().unwrap().get(symbol) {
match self.symbols.read().unwrap().get(symbol) {
Some(SymbolDef::FrameRef { typ, stack_ref }) => { Some(SymbolDef::FrameRef { typ, stack_ref }) => {
self.asm = self.asm self.asm = self.asm.lit(stack_ref).call("data-frame-get");
.lit( stack_ref )
.call("data-frame-get");
} }
Some(SymbolDef::StaticRef { typ, link_addr }) => { Some(SymbolDef::StaticRef { typ, link_addr }) => {
self.asm = self.asm.static_ref(symbol.as_str()); self.asm = self.asm.static_ref(symbol.as_str());
} }
Some(SymbolDef::Procedure{ in_types, out_types, link_addr }) => { Some(SymbolDef::Procedure {
in_types,
out_types,
link_addr,
}) => {
self.asm = self.asm.call(symbol.as_str()); self.asm = self.asm.call(symbol.as_str());
} }
None => { None => {
eprintln!("undefined symbol '{}'!", symbol); eprintln!("undefined symbol '{}'!", symbol);
} }
} },
}
LTExpr::Literal { typ, val } => { LTExpr::Literal { typ, val } => {
self.asm = self.asm.lit(*val); self.asm = self.asm.lit(*val);
} }
LTExpr::Application { head, body } => { LTExpr::Application { typ, head, body } => {
for arg in body.iter().rev() { for arg in body.iter().rev() {
self = self.compile(arg); self = self.compile(arg);
} }
self = self.compile(head); self = self.compile(head);
} }
LTExpr::Abstraction { arg_id: arg_name, arg_type, val_expr } => { LTExpr::Abstraction { args, body } => {
let id = self.symbols for (arg_name, arg_type) in args.iter() {
.write().unwrap() if let Some(Ok(typeterm)) = arg_type {
.declare_var( let id = self
arg_name.clone(), .symbols
laddertypes::TypeTerm::unit()); .write()
.unwrap()
self.asm = self.asm .declare_var(arg_name.clone(), typeterm.clone());
.lit( id ) self.asm = self.asm.lit(id).call("data-frame-set");
.call("data-frame-set"); } else {
eprintln!("invalid type {:?} for argument {}", arg_type, arg_name);
self = self.compile(val_expr);
} }
LTExpr::Branch { condition, if_expr, else_expr } => { }
self = self.compile(body);
}
LTExpr::Branch {
condition,
if_expr,
else_expr,
} => {
self = self.compile(condition); self = self.compile(condition);
let asm = self.asm; let asm = self.asm;
@ -221,4 +225,3 @@ impl ProcedureCompiler {
self self
} }
} }
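With the multi-parameter Abstraction, the compiler above declares one data-frame slot per annotated argument and emits a lit of the slot id followed by a call to data-frame-set for each, before compiling the body. A rough mini-model of that binding order in plain Rust (the emitted words are collected as strings here; the real code uses the tisc assembler and only binds arguments whose type annotation parsed successfully):

    // hypothetical mini-model: allocate a frame slot per argument and record
    // the words that would be emitted for it
    fn bind_args(args: &[&str]) -> (Vec<(String, i64)>, Vec<String>) {
        let mut slots = Vec::new();
        let mut asm = Vec::new();
        for (i, name) in args.iter().enumerate() {
            let id = i as i64; // next free slot in the data frame
            slots.push((name.to_string(), id));
            asm.push(format!("lit {}", id));
            asm.push("call data-frame-set".to_string());
        }
        (slots, asm)
    }

    fn main() {
        let (slots, asm) = bind_args(&["x", "y"]);
        println!("{:?}", slots); // [("x", 0), ("y", 1)]
        println!("{:?}", asm);
    }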

View file

@ -1,11 +1,7 @@
use { use {
crate::{expr::LTExpr, procedure_compiler::ProcedureCompiler, symbols::Scope},
std::sync::{Arc, RwLock}, std::sync::{Arc, RwLock},
crate::{ tisc::linker::Linker,
expr::LTExpr,
symbols::Scope,
procedure_compiler::ProcedureCompiler
},
tisc::linker::Linker
}; };
pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> { pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
@ -19,12 +15,15 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
"dup", "dup",
vec!["T"], vec!["T"],
vec!["T~machine::Word"], vec!["T~machine::Word"],
vec![ "T~machine::Word", vec!["T~machine::Word", "T~machine::Word"],
"T~machine::Word" ]
); );
linker.add_procedure("dup", tisc::Assembler::new().inst( tisc::VM_Instruction::Dup ).build()); linker.add_procedure(
"dup",
tisc::Assembler::new()
.inst(tisc::VM_Instruction::Dup)
.build(),
);
/* Put a single Ascii character on stdout /* Put a single Ascii character on stdout
*/ */
@ -32,11 +31,15 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
"emit", "emit",
vec![], vec![],
vec!["Char~Ascii~machine::Word"], vec!["Char~Ascii~machine::Word"],
vec![] vec![],
); );
linker.add_procedure("emit", tisc::Assembler::new().inst( tisc::VM_Instruction::Emit ).build()); linker.add_procedure(
"emit",
tisc::Assembler::new()
.inst(tisc::VM_Instruction::Emit)
.build(),
);
/* The top two items must be native u64 integers, /* The top two items must be native u64 integers,
* which are replaced by their sum. * which are replaced by their sum.
@ -46,87 +49,92 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"i+", "i+",
vec![], vec![],
vec![ "_2^64~machine::UInt64~machine::Word", vec![
"_2^64~machine::UInt64~machine::Word" ], "_2^64~machine::UInt64~machine::Word",
vec![ "_2^64~machine::UInt64~machine::Word" ] "_2^64~machine::UInt64~machine::Word",
],
vec!["_2^64~machine::UInt64~machine::Word"],
); );
linker.add_procedure( linker.add_procedure(
"i+", "i+",
tisc::Assembler::new() tisc::Assembler::new()
.inst( tisc::VM_Instruction::Add ) .inst(tisc::VM_Instruction::IntAdd)
.build() .build(),
); );
/* Floating-point Addition
/* Floating-point Additionu
*/ */
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"f+", "f+",
vec![], vec![],
vec![ "~machine::f64~machine::Word", vec![
"~machine::f64~machine::Word" ], "~machine::f64~machine::Word",
vec![ "~machine::f64~machine::Word" ] "~machine::f64~machine::Word",
],
vec!["~machine::f64~machine::Word"],
); );
linker.add_procedure( linker.add_procedure(
"f+", "f+",
tisc::Assembler::new() tisc::Assembler::new()
.inst( tisc::VM_Instruction::Addf ) .inst(tisc::VM_Instruction::FltAdd)
.build() .build(),
); );
/* Integer Subtraction /* Integer Subtraction
*/ */
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"i-", "i-",
vec![], vec![],
vec![ "_2^64~machine::UInt64~machine::Word", vec![
"_2^64~machine::UInt64~machine::Word" ], "_2^64~machine::UInt64~machine::Word",
vec![ "_2^64~machine::UInt64~machine::Word" ] "_2^64~machine::UInt64~machine::Word",
],
vec!["_2^64~machine::UInt64~machine::Word"],
); );
linker.add_procedure( linker.add_procedure(
"i-", "i-",
tisc::Assembler::new() tisc::Assembler::new()
.inst(tisc::VM_Instruction::BitwiseNot) .inst(tisc::VM_Instruction::Swap)
.inst(tisc::VM_Instruction::BitNeg)
.lit(1) .lit(1)
.inst(tisc::VM_Instruction::Add) .inst(tisc::VM_Instruction::IntAdd)
.inst(tisc::VM_Instruction::Add) .inst(tisc::VM_Instruction::IntAdd)
.build() .build(),
); );
/* Fetch memory address /* Fetch memory address
*/ */
symbols.write().unwrap().declare_proc_parse("@", symbols.write().unwrap().declare_proc_parse(
"@",
vec![], vec![],
vec!["<MutRef T~machine::Word>~machine::Address~machine::Word"], vec!["<MutRef T~machine::Word>~machine::Address~machine::Word"],
vec![ "T~machine::Word" ] vec!["T~machine::Word"],
); );
linker.add_procedure( linker.add_procedure(
"@", "@",
tisc::Assembler::new() tisc::Assembler::new()
.inst(tisc::VM_Instruction::Fetch) .inst(tisc::VM_Instruction::Fetch)
.build() .build(),
); );
/* Store to memory /* Store to memory
*/ */
symbols.write().unwrap().declare_proc_parse("!", symbols.write().unwrap().declare_proc_parse(
"!",
vec![], vec![],
vec![ vec![
"<MutRef T~machine::Word>~machine::Address~machine::Word", "<MutRef T~machine::Word>~machine::Address~machine::Word",
"T~machine::Word" "T~machine::Word",
], ],
vec![] vec![],
); );
linker.add_procedure( linker.add_procedure(
"!", "!",
tisc::Assembler::new() tisc::Assembler::new()
.inst(tisc::VM_Instruction::Store) .inst(tisc::VM_Instruction::Store)
.build() .build(),
); );
/* /*
@ -142,121 +150,115 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"i*", "i*",
vec![], vec![],
vec![ "_2^64~machine::UInt64~machine::Word", vec![
"_2^64~machine::UInt64~machine::Word" ], "_2^64~machine::UInt64~machine::Word",
vec![ "_2^64~machine::UInt64~machine::Word" ]); "_2^64~machine::UInt64~machine::Word",
],
vec!["_2^64~machine::UInt64~machine::Word"],
);
linker.add_procedure( linker.add_procedure(
"i*", "i*",
tisc::Assembler::new() tisc::Assembler::new()
.lit(0) // [ a b ] -- [ a b sum ] .lit(0) // [ a b ] -- [ a b sum ]
.while_loop( .while_loop(
// condition // condition
tisc::Assembler::new() tisc::Assembler::new()
// [ a b sum ] -- [ a b sum b ] // [ a b sum ] -- [ a b sum b ]
.lit(2) .lit(2)
.inst(tisc::VM_Instruction::Pick), .inst(tisc::VM_Instruction::Pick),
// body // body
tisc::Assembler::new() tisc::Assembler::new()
// [ a b sum ] -- [ a b sum a ] // [ a b sum ] -- [ a b sum a ]
.lit(3) .lit(3)
.inst(tisc::VM_Instruction::Pick) .inst(tisc::VM_Instruction::Pick)
// [ a b sum a -- a b (sum+a) ] // [ a b sum a -- a b (sum+a) ]
.inst( tisc::VM_Instruction::Add ) .inst(tisc::VM_Instruction::IntAdd)
// [ a b sum -- a sum b ] // [ a b sum -- a sum b ]
.inst(tisc::VM_Instruction::Swap) .inst(tisc::VM_Instruction::Swap)
// [ a sum b -- a sum b 1 ] // [ a sum b -- a sum b 1 ]
.lit(1) .lit(1)
// [ a sum b -- a sum (b-1) ] // [ a sum b -- a sum (b-1) ]
.call( "i-" )
// [ a sum b -- a b sum ]
.inst(tisc::VM_Instruction::Swap) .inst(tisc::VM_Instruction::Swap)
.call("i-")
// [ a sum b -- a b sum ]
.inst(tisc::VM_Instruction::Swap),
) )
// [ a b sum -- a sum b ] // [ a b sum -- a sum b ]
.lit(2).inst(tisc::VM_Instruction::Roll) .lit(2)
.inst(tisc::VM_Instruction::Roll)
// [ a sum b -- a sum ] // [ a sum b -- a sum ]
.inst(tisc::VM_Instruction::Drop) .inst(tisc::VM_Instruction::Drop)
// [ a sum -- sum a ] // [ a sum -- sum a ]
.lit(2).inst(tisc::VM_Instruction::Roll) .lit(2)
.inst(tisc::VM_Instruction::Roll)
// [ sum a -- sum ] // [ sum a -- sum ]
.inst(tisc::VM_Instruction::Drop) .inst(tisc::VM_Instruction::Drop)
.build(),
.build()
); );
symbols.write().unwrap().declare_static_parse( symbols.write().unwrap().declare_static_parse(
"data-frame-ptr", "data-frame-ptr",
"<MutRef <Seq machine::Word>>~machine::Address~machine::Word" "<MutRef <Seq machine::Word>>~machine::Address~machine::Word",
); );
linker.add_static("data-frame-ptr", vec![0x1000]); linker.add_static("data-frame-ptr", vec![0x1000]);
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"data-frame-set", "data-frame-set",
vec!["T"], vec!["T"],
vec!["T~machine::Word", vec![
"<RefMut T~machine::Word>~LocalVariableId~machine::UInt64~machine::Word"], "T~machine::Word",
vec![]); "<RefMut T~machine::Word>~LocalVariableId~machine::UInt64~machine::Word",
],
vec![],
);
linker.add_procedure( linker.add_procedure(
"data-frame-set", "data-frame-set",
tisc::Assembler::new() tisc::Assembler::new()
.static_ref("data-frame-ptr") .static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Fetch) .inst(tisc::VM_Instruction::Fetch)
.inst( tisc::VM_Instruction::Add ) .inst(tisc::VM_Instruction::IntAdd)
.inst(tisc::VM_Instruction::Store) .inst(tisc::VM_Instruction::Store)
.build() .build(),
); );
symbols.write().unwrap().declare_proc_parse( symbols.write().unwrap().declare_proc_parse(
"data-frame-get", "data-frame-get",
vec!["T"], vec!["T"],
vec!["<Ref T~machine::Word>~DataFrameRef~machine::UInt64~machine::Word"], vec!["<Ref T~machine::Word>~DataFrameRef~machine::UInt64~machine::Word"],
vec!["T~machine::Word"] vec!["T~machine::Word"],
); );
linker.add_procedure( linker.add_procedure(
"data-frame-get", "data-frame-get",
tisc::Assembler::new() tisc::Assembler::new()
.static_ref("data-frame-ptr") .static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Fetch) .inst(tisc::VM_Instruction::Fetch)
.inst( tisc::VM_Instruction::Add ) .inst(tisc::VM_Instruction::IntAdd)
.inst(tisc::VM_Instruction::Fetch) .inst(tisc::VM_Instruction::Fetch)
.build() .build(),
); );
symbols
symbols.write().unwrap().declare_proc_parse( .write()
"data-frame-alloc", .unwrap()
vec![], .declare_proc_parse("data-frame-alloc", vec![], vec![], vec![]);
vec![],
vec![]);
linker.add_procedure( linker.add_procedure(
"data-frame-alloc", "data-frame-alloc",
tisc::Assembler::new() tisc::Assembler::new()
.static_ref("data-frame-ptr") .static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Fetch) .inst(tisc::VM_Instruction::Fetch)
.inst( tisc::VM_Instruction::Swap )
.call("i-") .call("i-")
.static_ref("data-frame-ptr") .static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Store) .inst(tisc::VM_Instruction::Store)
.build() .build(),
); );
symbols.write().unwrap().declare_proc_parse( symbols
"data-frame-drop", .write()
vec![], .unwrap()
vec![], .declare_proc_parse("data-frame-drop", vec![], vec![], vec![]);
vec![]);
linker.add_procedure( linker.add_procedure(
"data-frame-drop", "data-frame-drop",
@ -266,9 +268,8 @@ pub fn init_runtime(linker: &mut Linker) -> Arc<RwLock<Scope>> {
.call("i+") .call("i+")
.static_ref("data-frame-ptr") .static_ref("data-frame-ptr")
.inst(tisc::VM_Instruction::Store) .inst(tisc::VM_Instruction::Store)
.build() .build(),
); );
symbols symbols
} }
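The i* procedure above multiplies by repeated addition, stepping the stack through the states noted in its comments ([ a b sum ] and so on). A short host-language simulation of that loop in ordinary Rust (not tisc assembly), to make the algorithm concrete:

    // simulate the i* loop: keep (a, b, sum) and repeat sum += a; b -= 1 while b > 0
    fn i_mul(a: i64, b: i64) -> i64 {
        let (mut b, mut sum) = (b, 0i64);
        while b > 0 {
            // body: [ a b sum ] -> [ a (b-1) (sum+a) ]
            sum += a;
            b -= 1;
        }
        // drop a and b, leave sum on top
        sum
    }

    fn main() {
        assert_eq!(i_mul(3, 4), 12);
        assert_eq!(i_mul(7, 0), 0);
        println!("ok");
    }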

View file

@ -1,9 +1,9 @@
use { use {
crate::expr::LTExpr,
std::{ std::{
collections::HashMap, collections::HashMap,
sync::{Arc, RwLock}, sync::{Arc, RwLock},
}, },
crate::expr::LTExpr
}; };
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -14,27 +14,36 @@ pub enum SymbolDef {
}, },
StaticRef { StaticRef {
typ: laddertypes::TypeTerm, typ: laddertypes::TypeTerm,
link_addr: Option< tisc::VM_Word > link_addr: Option<tisc::VM_Word>,
}, },
Procedure { Procedure {
in_types: Vec<laddertypes::TypeTerm>, in_types: Vec<laddertypes::TypeTerm>,
out_types: Vec<laddertypes::TypeTerm>, out_types: Vec<laddertypes::TypeTerm>,
link_addr: Option< tisc::VM_Word > link_addr: Option<tisc::VM_Word>,
} },
} }
impl SymbolDef { impl SymbolDef {
pub fn get_type(&self, typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>) -> laddertypes::TypeTerm { pub fn get_type(
&self,
typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
) -> laddertypes::TypeTerm {
match self { match self {
SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(), SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(),
SymbolDef::StaticRef { typ, link_addr: _ } => typ.clone(), SymbolDef::StaticRef { typ, link_addr: _ } => typ.clone(),
SymbolDef::Procedure { in_types, out_types, link_addr: _ } => { SymbolDef::Procedure {
laddertypes::TypeTerm::App(vec![ in_types,
typectx.write().unwrap().parse("Fn").expect("parse typeterm"), out_types,
link_addr: _,
} => laddertypes::TypeTerm::App(vec![
typectx
.write()
.unwrap()
.parse("Fn")
.expect("parse typeterm"),
laddertypes::TypeTerm::App(in_types.clone()), laddertypes::TypeTerm::App(in_types.clone()),
laddertypes::TypeTerm::App( out_types.clone() ) laddertypes::TypeTerm::App(out_types.clone()),
]) ]),
}
} }
} }
} }
@ -58,7 +67,7 @@ pub struct Scope {
/* parent scope whose all /* parent scope whose all
* symbols are inherited * symbols are inherited
*/ */
parent: Option< Arc<RwLock<Scope>> > parent: Option<Arc<RwLock<Scope>>>,
} }
impl Scope { impl Scope {
@ -67,7 +76,7 @@ impl Scope {
symbols: HashMap::new(), symbols: HashMap::new(),
typectx: Arc::new(RwLock::new(laddertypes::dict::TypeDict::new())), typectx: Arc::new(RwLock::new(laddertypes::dict::TypeDict::new())),
frame_size: 0, frame_size: 0,
parent: None parent: None,
})) }))
} }
@ -95,12 +104,11 @@ impl Scope {
None => { None => {
if let Some(parent) = self.parent.as_ref() { if let Some(parent) = self.parent.as_ref() {
match parent.read().unwrap().get(name) { match parent.read().unwrap().get(name) {
Some(SymbolDef::FrameRef { Some(SymbolDef::FrameRef { typ, stack_ref }) => Some(SymbolDef::FrameRef {
typ, stack_ref typ: typ.clone(),
}) => Some(SymbolDef::FrameRef { stack_ref: stack_ref + self.get_frame_size() as i64,
typ: typ.clone(), stack_ref: stack_ref + self.get_frame_size() as i64
}), }),
x => x.clone() x => x.clone(),
} }
} else { } else {
None None
@ -124,11 +132,12 @@ impl Scope {
*/ */
//<><><><><><> //<><><><><><>
pub fn declare_proc_parse(&mut self, pub fn declare_proc_parse(
&mut self,
name: &str, name: &str,
type_vars: Vec<&str>, type_vars: Vec<&str>,
in_types: Vec<&str>, in_types: Vec<&str>,
out_types: Vec<&str> out_types: Vec<&str>,
) { ) {
for v in type_vars { for v in type_vars {
self.typectx.write().unwrap().add_varname(v.into()); self.typectx.write().unwrap().add_varname(v.into());
@ -136,70 +145,86 @@ impl Scope {
self.declare_proc( self.declare_proc(
String::from(name), String::from(name),
in_types.into_iter().map(|t| self.typectx.write().unwrap().parse(t).expect("parse typeterm")).collect(), in_types
out_types.into_iter().map(|t| self.typectx.write().unwrap().parse(t).expect("parse typeterm")).collect() .into_iter()
.map(|t| {
self.typectx
.write()
.unwrap()
.parse(t)
.expect("parse typeterm")
})
.collect(),
out_types
.into_iter()
.map(|t| {
self.typectx
.write()
.unwrap()
.parse(t)
.expect("parse typeterm")
})
.collect(),
); );
} }
pub fn declare_proc(&mut self, pub fn declare_proc(
&mut self,
name: String, name: String,
in_types: Vec<laddertypes::TypeTerm>, in_types: Vec<laddertypes::TypeTerm>,
out_types: Vec< laddertypes::TypeTerm > out_types: Vec<laddertypes::TypeTerm>,
) { ) {
self.symbols.insert(name, SymbolDef::Procedure { self.symbols.insert(
name,
SymbolDef::Procedure {
link_addr: None, //LinkAddr::Relative{ name, offset: 0 }, link_addr: None, //LinkAddr::Relative{ name, offset: 0 },
in_types, in_types,
out_types out_types,
}); },
);
} }
//<><><><><> //<><><><><>
pub fn declare_var_parse(&mut self, pub fn declare_var_parse(&mut self, name: &str, typ: &str) {
name: &str, let typ = self
typ: &str .typectx
) { .write()
let typ = self.typectx.write().unwrap().parse(typ).expect("parse typeterm"); .unwrap()
self.declare_var( .parse(typ)
String::from(name), .expect("parse typeterm");
typ self.declare_var(String::from(name), typ);
);
} }
pub fn declare_var(&mut self, name: String, typ: laddertypes::TypeTerm) -> tisc::VM_Word { pub fn declare_var(&mut self, name: String, typ: laddertypes::TypeTerm) -> tisc::VM_Word {
let stack_ref = self.frame_size as tisc::VM_Word; let stack_ref = self.frame_size as tisc::VM_Word;
self.frame_size += 1; self.frame_size += 1;
self.symbols.insert(name, SymbolDef::FrameRef { self.symbols
typ, .insert(name, SymbolDef::FrameRef { typ, stack_ref });
stack_ref
});
stack_ref stack_ref
} }
//<><><><><><> //<><><><><><>
pub fn declare_static_parse( pub fn declare_static_parse(&mut self, name: &str, typ: &str) {
&mut self, let typ = self
name: &str, .typectx
typ: &str .write()
) { .unwrap()
let typ = self.typectx .parse(typ)
.write().unwrap() .expect("parse typeterm");
.parse(typ).expect("parse typeterm");
self.declare_static(String::from(name), typ); self.declare_static(String::from(name), typ);
} }
pub fn declare_static( pub fn declare_static(&mut self, name: String, typ: laddertypes::TypeTerm) {
&mut self, self.symbols.insert(
name: String, name,
typ: laddertypes::TypeTerm SymbolDef::StaticRef {
) {
self.symbols.insert(name, SymbolDef::StaticRef {
typ, typ,
link_addr: None link_addr: None,
}); },
);
} }
} }
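Scope::get above shifts an inherited FrameRef by the child scope's own frame size, so a parent-scope local still resolves to the right slot relative to the child's frame pointer. A brief stand-alone sketch of that offset logic with simplified types (plain Box instead of Arc<RwLock<...>>, and only the FrameRef case):

    use std::collections::HashMap;

    // simplified stand-in for SymbolDef::FrameRef { typ, stack_ref }
    #[derive(Clone, Debug)]
    struct FrameRef {
        stack_ref: i64,
    }

    struct Scope {
        symbols: HashMap<String, FrameRef>,
        frame_size: usize,
        parent: Option<Box<Scope>>,
    }

    impl Scope {
        fn get(&self, name: &str) -> Option<FrameRef> {
            match self.symbols.get(name) {
                Some(f) => Some(f.clone()),
                None => self.parent.as_ref().and_then(|p| {
                    p.get(name).map(|f| FrameRef {
                        // offset past this scope's own frame to reach the parent's slot
                        stack_ref: f.stack_ref + self.frame_size as i64,
                    })
                }),
            }
        }
    }

    fn main() {
        let parent = Scope {
            symbols: HashMap::from([("x".to_string(), FrameRef { stack_ref: 0 })]),
            frame_size: 1,
            parent: None,
        };
        let child = Scope {
            symbols: HashMap::new(),
            frame_size: 2, // two locals of its own
            parent: Some(Box::new(parent)),
        };
        println!("{:?}", child.get("x")); // Some(FrameRef { stack_ref: 2 })
    }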