adapt to TypeDict trait

Michael Sippel 2024-10-04 02:14:54 +02:00
parent 0cbbcd5b24
commit 8fd59f04ee
Signed by: senvas
GPG key ID: F96CF119C34B64A6
3 changed files with 112 additions and 65 deletions
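For orientation: the TypeDict trait that this commit adapts the code to can be sketched roughly as below, inferred from the methods the new impl for Scope provides further down. The actual trait in laddertypes may differ in details; parse/unparse come from the separate ParseLadderType / UnparseLadderType traits that are now imported alongside it.

    // Sketch only, inferred from this commit's `impl TypeDict for Scope`:
    trait TypeDict {
        fn insert(&mut self, name: String, id: TypeID);
        fn add_varname(&mut self, vn: String) -> TypeID;
        fn add_typename(&mut self, tn: String) -> TypeID;
        fn get_typeid(&self, tn: &String) -> Option<TypeID>;
        fn get_typename(&self, tid: &TypeID) -> Option<String>;
    }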

Changed file 1 of 3:

@@ -3,6 +3,11 @@ use {
         expr::{LTExpr, Statement, TypeError, TypeTag},
         lexer::{LTIRLexer, LTIRToken, LexError, InputRegionTag},
     },
+    laddertypes::{
+        dict::TypeDict,
+        parser::ParseLadderType,
+        unparser::UnparseLadderType
+    },
     std::{
         iter::Peekable,
         sync::{Arc, RwLock},
@@ -54,7 +59,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
  * `: T`
  */
 pub fn parse_type_tag<It>(
-    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
+    typectx: &mut impl TypeDict,
     tokens: &mut Peekable<It>,
 ) -> Result<Option<(InputRegionTag, laddertypes::TypeTerm)>, (InputRegionTag, ParseError)>
 where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@@ -64,7 +69,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
     match peektok {
         Ok(LTIRToken::AssignType(typeterm_str)) => {
             tokens.next();
-            match typectx.write().unwrap().parse(typeterm_str.as_str()) {
+            match typectx.parse(typeterm_str.as_str()) {
                 Ok(typeterm) => Ok(Some((region, typeterm))),
                 Err(parse_error) => Err((region, ParseError::TypeParseError(parse_error))),
             }
@@ -109,7 +114,7 @@ impl VariableBinding {
  * or `x : T`
  */
 pub fn parse_binding_expr<It>(
-    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
+    typectx: &mut impl TypeDict,
     tokens: &mut Peekable<It>,
 ) -> Result< VariableBinding, (InputRegionTag, ParseError)>
 where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@@ -142,7 +147,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
  * `{ x:T; y:U; ... }`
  */
 pub fn parse_binding_block<It>(
-    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
+    typectx: &mut impl TypeDict,
     tokens: &mut Peekable<It>,
 ) -> Result< Vec<VariableBinding>, (InputRegionTag, ParseError)>
 where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@@ -175,7 +180,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
 }
 
 pub fn parse_statement<It>(
-    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
+    typectx: &mut impl TypeDict,
     tokens: &mut Peekable<It>,
 ) -> Result<crate::expr::Statement, (InputRegionTag, ParseError)>
 where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@@ -253,7 +258,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
 }
 
 pub fn parse_statement_block<It>(
-    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
+    typectx: &mut impl TypeDict,
     tokens: &mut Peekable<It>,
 ) -> Result<Vec<Statement>, (InputRegionTag, ParseError)>
 where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@@ -280,6 +285,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
 }
 
 pub fn parse_atom<It>(
+    typectx: &mut impl TypeDict,
     tokens: &mut Peekable<It>,
 ) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
 where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@@ -295,7 +301,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
 }
 
 pub fn parse_expr<It>(
-    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
+    typectx: &mut impl TypeDict,
     tokens: &mut Peekable<It>,
 ) -> Result<crate::expr::LTExpr, (InputRegionTag, ParseError)>
 where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
@@ -354,9 +360,9 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
                 children.push(LTExpr::StringLiteral{ region, value });
             }
             Ok(LTIRToken::Ascend(type_str)) => {
-                let region = region.clone();
+                let mut region = region.clone();
                 let typ =
-                    match typectx.write().unwrap().parse(type_str) {
+                    match typectx.parse(type_str) {
                         Ok(t) => Ok(t),
                         Err(e) => Err(TypeError::ParseError(e))
                     };
@@ -374,7 +380,7 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
             Ok(LTIRToken::Descend(type_str)) => {
                 let region = region.clone();
                 let typ =
-                    match typectx.write().unwrap().parse(type_str) {
+                    match typectx.parse(type_str) {
                         Ok(t) => Ok(t),
                         Err(e) => Err(TypeError::ParseError(e))
                     };
@@ -425,11 +431,11 @@ where It: Iterator<Item = (InputRegionTag, Result<LTIRToken, LexError>)>
                     });
                 },
                 name => {
-                    children.push(parse_atom(tokens)?);
+                    children.push(parse_atom(typectx, tokens)?);
                 }
             },
             Ok(atom) => {
-                children.push(parse_atom(tokens)?);
+                children.push(parse_atom(typectx, tokens)?);
             }
             Err(err) => {
                 return Err((*region, ParseError::LexError(err.clone())));
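All parser entry points above now take `&mut impl TypeDict` instead of a shared `&Arc<RwLock<laddertypes::dict::TypeDict>>`, and `parse_atom` gains the dictionary parameter so that the recursive `parse_atom(typectx, tokens)` calls at the end of the file still compile. Roughly, a caller can now hand in any dictionary implementation; a sketch, assuming `BimapTypeDict` implements the trait and with `tokens` standing for the peekable lexer iterator built as in main.rs below:

    let mut dict = laddertypes::dict::BimapTypeDict::new();
    let expr = ltcore::parser::parse_expr(&mut dict, &mut tokens)?;

    // ...or any other implementor, e.g. a Scope (see the impl in the next file):
    let expr = ltcore::parser::parse_expr(&mut main_scope, &mut tokens)?;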

Changed file 2 of 3:

@@ -1,5 +1,9 @@
 use {
     crate::expr::LTExpr,
+    laddertypes::{
+        TypeDict, TypeID,
+        parser::ParseLadderType
+    },
     std::{
         collections::HashMap,
         sync::{Arc, RwLock},
@@ -28,7 +32,7 @@ pub enum SymbolDef {
 impl SymbolDef {
     pub fn get_type(
         &self,
-        typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
+        typedict: &mut impl laddertypes::dict::TypeDict,
     ) -> laddertypes::TypeTerm {
         match self {
             SymbolDef::FrameRef { typ, stack_ref: _ } => typ.clone(),
@@ -38,23 +42,31 @@ impl SymbolDef {
                 out_types,
                 link_addr: _,
                 export: _,
-            } => laddertypes::TypeTerm::App(
-                std::iter::once(
-                    typectx
-                        .write()
-                        .unwrap()
-                        .parse("Func")
-                        .expect("parse typeterm")
-                ).chain(
-                    in_types.clone().into_iter()
-                ).chain(
-                    std::iter::once(
-                        typectx.write().unwrap().parse("Struct").expect("parse typeterm")
-                    ).chain(
-                        out_types.clone().into_iter()
-                    )
-                ).collect()
-            ),
+            } => {
+                let mut out_types = out_types.clone();
+                let out_type =
+                    if out_types.len() == 1 {
+                        out_types.pop().unwrap()
+                    } else {
+                        laddertypes::TypeTerm::App(
+                            std::iter::once(
+                                typedict.parse("Struct").unwrap()
+                            ).chain(
+                                out_types.into_iter()
+                            ).collect()
+                        )
+                    };
+
+                laddertypes::TypeTerm::App(
+                    std::iter::once(
+                        typedict.parse("Func").expect("parse typeterm")
+                    ).chain(
+                        in_types.clone().into_iter()
+                    ).chain(
+                        std::iter::once(out_type)
+                    ).collect()
+                )
+            },
         }
     }
 }
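The reworked get_type only wraps the output side in a Struct application when a procedure has more than one result. Informally, in the angle-bracket application syntax used by laddertypes (shown here as an illustration, not verbatim output):

    in_types = [A, B], out_types = [C]     ->  <Func A B C>
    in_types = [A, B], out_types = [C, D]  ->  <Func A B <Struct C D>>

A single output type is used as-is as the final Func argument; previously a "Struct" type was inserted in front of the output types unconditionally.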
@@ -68,7 +80,7 @@ pub struct Scope {
 
     /* type symbols
      */
-    pub typectx: Arc<RwLock<laddertypes::TypeDict>>,
+    typedict: Arc<RwLock<laddertypes::BimapTypeDict>>,
 
     /* number of words required for
      * the stack frame of this scope
@@ -81,28 +93,58 @@ pub struct Scope {
     parent: Option<Arc<RwLock<Scope>>>,
 }
 
+impl TypeDict for Scope {
+    fn insert(&mut self, name: String, id: TypeID) {
+        self.typedict.write().unwrap().insert(name,id)
+    }
+    fn add_varname(&mut self, vn: String) -> TypeID {
+        self.typedict.add_varname(vn)
+    }
+    fn add_typename(&mut self, tn: String) -> TypeID {
+        if let Some(parent) = self.parent.as_mut() {
+            parent.add_typename(tn)
+        } else {
+            self.typedict.add_typename(tn)
+        }
+    }
+    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
+        if let Some(id) = self.typedict.get_typeid(tn) {
+            Some(id)
+        } else {
+            if let Some(parent) = self.parent.as_ref() {
+                parent.get_typeid(tn)
+            } else {
+                None
+            }
+        }
+    }
+    fn get_typename(&self, tid: &TypeID) -> Option<String> {
+        if let Some(name) = self.typedict.get_typename(tid) {
+            Some(name)
+        } else {
+            if let Some(parent) = self.parent.as_ref() {
+                parent.get_typename(tid)
+            } else {
+                None
+            }
+        }
+    }
+}
+
 impl Scope {
     pub fn new() -> Arc<RwLock<Self>> {
         Arc::new(RwLock::new(Scope {
             symbols: HashMap::new(),
-            typectx: Arc::new(RwLock::new(laddertypes::dict::TypeDict::new())),
+            typedict: Arc::new(RwLock::new(laddertypes::dict::BimapTypeDict::new())),
             frame_size: 0,
             parent: None,
         }))
     }
 
     pub fn with_parent(parent: &Arc<RwLock<Scope>>) -> Arc<RwLock<Self>> {
-        let s = Scope {
-            symbols: HashMap::new(),
-
-            // todo: create proper child scope
-            typectx: parent.read().unwrap().typectx.clone(),
-
-            frame_size: 0,
-            parent: Some(parent.clone()),
-        };
-        Arc::new(RwLock::new(s))
+        let mut s = Scope::new();
+        s.write().unwrap().parent = Some(parent.clone());
+        s
     }
 
     pub fn export(self) -> Vec<(String, SymbolDef)> {
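Scope now implements TypeDict itself: it owns an Arc<RwLock<BimapTypeDict>>, falls back to its parent scope when a type name or id is not known locally, and forwards add_typename to the outermost scope. A rough usage sketch; it assumes, as the delegating calls above already do, that laddertypes also provides a TypeDict impl for Arc<RwLock<T>> so the parent handle can be used directly:

    use laddertypes::TypeDict;

    let root = Scope::new();
    root.write().unwrap().add_typename("Seq".into());

    let child = Scope::with_parent(&root);
    // the lookup misses in the child's own dictionary and falls through to the parent:
    assert!(child.read().unwrap().get_typeid(&"Seq".to_string()).is_some());

Note that with_parent no longer shares the parent's dictionary handle (the old "todo: create proper child scope" path); the child gets its own BimapTypeDict and reaches the parent only through the fallback above.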
@@ -146,6 +188,14 @@ impl Scope {
         }
     }
 
+    pub fn get_type(&mut self, name: &str) -> Option<laddertypes::TypeTerm> {
+        if let Some(sdef) = self.get(name) {
+            Some(sdef.get_type( &mut self.typedict ))
+        } else {
+            None
+        }
+    }
+
     /// takes the link-addresses from a Linker
     /// and updates the symbol table to relative addresses
     /// based on the next super-label
@@ -184,29 +234,26 @@ impl Scope {
         out_types: Vec<&str>,
     ) {
         for v in type_vars {
-            self.typectx.write().unwrap().add_varname(v.into());
+            self.add_varname(v.into());
         }
 
+        let mut td = self.typedict.clone();
         self.declare_proc(
             String::from(name),
             in_types
                 .into_iter()
-                .map(|t| {
-                    self.typectx
-                        .write()
-                        .unwrap()
-                        .parse(t)
-                        .expect("parse typeterm")
+                .map(move |t| {
+                    td.parse(t).expect("parse typeterm")
                 })
                 .collect(),
             out_types
                 .into_iter()
-                .map(|t| {
-                    self.typectx
-                        .write()
-                        .unwrap()
-                        .parse(t)
-                        .expect("parse typeterm")
+                .map({
+                    let mut td = self.typedict.clone();
+                    move |t| {
+                        td.parse(t).expect("parse typeterm")
+                    }
                 })
                 .collect(),
             false
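The cloned `td` handles here are presumably what keeps the borrow checker happy: calling `self.parse(t)` inside the map closures would need a mutable borrow of `self` while `self.declare_proc(..)` is itself being called on `self`, so each closure instead moves in its own clone of the Arc<RwLock<BimapTypeDict>> and parses through that.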
@@ -235,9 +282,6 @@ impl Scope {
 
     pub fn declare_var_parse(&mut self, name: &str, typ: &str) {
         let typ = self
-            .typectx
-            .write()
-            .unwrap()
             .parse(typ)
             .expect("parse typeterm");
         self.declare_var(String::from(name), typ);
@@ -257,9 +301,6 @@ impl Scope {
 
     pub fn declare_static_parse(&mut self, name: &str, typ: &str) {
         let typ = self
-            .typectx
-            .write()
-            .unwrap()
             .parse(typ)
             .expect("parse typeterm");
         self.declare_static(String::from(name), typ);
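In both declare_*_parse methods, `self.parse(typ)` now resolves through the trait machinery: Scope implements TypeDict, and parse is presumably supplied by the ParseLadderType extension trait for any TypeDict implementor (an assumption based on the imports in this commit). A hypothetical call on a scope held behind the usual Arc<RwLock<..>>:

    // `my_scope` and the "<Seq Char>" term are illustrative only
    let typ = my_scope.write().unwrap().parse("<Seq Char>").expect("parse typeterm");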

Changed file 3 of 3:

@@ -6,6 +6,7 @@ use {
     std::{boxed::Box, ops::Deref},
     std::io::Write,
     tiny_ansi::TinyAnsi,
+    laddertypes::dict::TypeDict,
     ltcore::{
         lexer::InputRegionTag,
         expr::{LTExpr, Statement},
@@ -32,8 +33,7 @@ fn main() {
     let mut linker = tisc::Linker::new();
     let root_scope = ltcore::runtime::init_runtime(&mut linker);
 
-    let main_scope = Scope::with_parent(&root_scope);
-    let typectx = main_scope.read().unwrap().typectx.clone();
+    let mut main_scope = Scope::with_parent(&root_scope);
 
     for path in args.sources {
         let iter_chars = iterate_text::file::characters::IterateFileCharacters::new(path.clone());
@@ -49,8 +49,8 @@ fn main() {
             })
             .peekable();
 
-        match ltcore::parser::parse_expr( &typectx, &mut program_tokens ) {
-            Ok( ast ) => {
+        match ltcore::parser::parse_expr( &mut main_scope, &mut program_tokens ) {
+            Ok( mut ast ) => {
                 let (exports, diagnostics, proc_code) = ProcedureCompiler::new(&main_scope)
                     .compile(&ast)
                     .into_asm(&path);
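Since Scope now implements TypeDict, main no longer extracts a separate `typectx` handle from the scope: the scope itself is passed as `&mut main_scope` to parse_expr, so any type names registered during parsing land in the scope's own dictionary and, through the parent fallback of add_typename, in the root runtime scope.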