use {
    crate::{
        expr::{LTExpr, Statement, TypeError, TypeTag},
        lexer::{LTIRLexer, LTIRToken, LexError},
    },
    std::{
        iter::Peekable,
        sync::{Arc, RwLock},
    },
};

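/// Errors that can be reported while parsing a token stream.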
#[derive(Clone, Debug)]
pub enum ParseError {
    LexError(LexError),
    UnexpectedClose,
    UnexpectedEnd,
    UnexpectedToken,
}

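/// Consume the next token and succeed only if it equals `expected_token`.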
pub fn parse_expect<It>(
    tokens: &mut Peekable<LTIRLexer<It>>,
    expected_token: LTIRToken,
) -> Result<(), ParseError>
where
    It: Iterator<Item = char>,
{
    match tokens.next() {
        Some((region, Ok(t))) => {
            if t == expected_token {
                Ok(())
            } else {
                Err(ParseError::UnexpectedToken)
            }
        }
        Some((region, Err(err))) => Err(ParseError::LexError(err)),
        None => Err(ParseError::UnexpectedEnd),
    }
}

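/// Consume the next token and return its name, which must be a `Symbol`.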
pub fn parse_symbol<It>(tokens: &mut Peekable<LTIRLexer<It>>) -> Result<String, ParseError>
where
    It: Iterator<Item = char>,
{
    match tokens.next() {
        Some((region, Ok(LTIRToken::Symbol(name)))) => Ok(name),
        Some((region, Ok(_))) => Err(ParseError::UnexpectedToken),
        Some((region, Err(err))) => Err(ParseError::LexError(err)),
        None => Err(ParseError::UnexpectedEnd),
    }
}

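/// Try to parse an optional type annotation. Returns `None` when the next
/// token is not an `AssignType` token; otherwise the annotation string is
/// parsed with the shared `TypeDict`.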
pub fn parse_type_tag<It>(
    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
    tokens: &mut Peekable<LTIRLexer<It>>,
) -> Option<TypeTag>
where
    It: Iterator<Item = char>,
{
    if let Some((region, peektok)) = tokens.peek().clone() {
        match peektok.clone() {
            Ok(LTIRToken::AssignType(typeterm_str)) => {
                tokens.next();
                match typectx.write().unwrap().parse(typeterm_str.as_str()) {
                    Ok(typeterm) => Some(Ok(typeterm)),
                    Err(parse_error) => Some(Err(TypeError::ParseError(parse_error))),
                }
            }
            _ => None,
        }
    } else {
        None
    }
}

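/// Parse a single statement: an assignment introduced by `!`, a `let`
/// binding, a `while` loop, a `return`, or a bare expression statement.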
pub fn parse_statement<It>(
    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
    tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::Statement, ParseError>
where
    It: Iterator<Item = char>,
{
    if let Some((region, peektok)) = tokens.peek() {
        match peektok {
            Ok(LTIRToken::Symbol(sym)) => {
                match sym.as_str() {
                    "!" => {
                        tokens.next();
                        // todo: accept address-expression instead of symbol
                        let name = parse_symbol(tokens)?;
                        let val_expr = parse_expr(typectx, tokens)?;
                        let _ = parse_expect(tokens, LTIRToken::StatementSep)?;

                        Ok(Statement::Assignment {
                            var_id: name,
                            val_expr,
                        })
                    }
                    "let" => {
                        tokens.next();
                        let name = parse_symbol(tokens)?;
                        let typ = parse_type_tag(typectx, tokens);
                        let _ = parse_expect(tokens, LTIRToken::AssignValue)?;
                        let val_expr = parse_expr(typectx, tokens)?;
                        let _ = parse_expect(tokens, LTIRToken::StatementSep)?;

                        Ok(Statement::LetAssign {
                            typ,
                            var_id: name,
                            val_expr,
                        })
                    }
                    "while" => {
                        tokens.next();
                        let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
                        let cond = parse_expr(typectx, tokens)?;
                        let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
                        Ok(Statement::WhileLoop {
                            condition: cond,
                            body: parse_block(typectx, tokens)?,
                        })
                    }
                    "return" => {
                        tokens.next();
                        let expr = parse_expr(typectx, tokens)?;
                        let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
                        Ok(Statement::Return(expr))
                    }
                    _ => {
                        let expr = parse_expr(typectx, tokens)?;
                        let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
                        Ok(Statement::Expr(expr))
                    }
                }
            }
            Ok(_) => {
                let expr = parse_expr(typectx, tokens)?;
                let _ = parse_expect(tokens, LTIRToken::StatementSep)?;
                Ok(Statement::Expr(expr))
            }
            Err(err) => Err(ParseError::LexError(err.clone())),
        }
    } else {
        Err(ParseError::UnexpectedEnd)
    }
}

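/// Parse a `{ … }` block into a list of statements.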
pub fn parse_block<It>(
    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
    tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<Vec<Statement>, ParseError>
where
    It: Iterator<Item = char>,
{
    let _ = parse_expect(tokens, LTIRToken::BlockOpen)?;

    let mut statements = Vec::new();
    while let Some((region, peektok)) = tokens.peek() {
        match peektok {
            Ok(LTIRToken::BlockClose) => {
                tokens.next();
                return Ok(statements);
            }
            Ok(_) => {
                statements.push(parse_statement(typectx, tokens)?);
            }
            Err(err) => {
                return Err(ParseError::LexError(err.clone()));
            }
        }
    }

    Err(ParseError::UnexpectedEnd)
}

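/// Parse an atomic expression: a symbol, a character literal, or a numeric
/// literal (both literal forms become unsigned integers).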
pub fn parse_atom<It>(
    tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::LTExpr, ParseError>
where
    It: Iterator<Item = char>,
{
    match tokens.next() {
        Some((region, Ok(LTIRToken::Symbol(sym)))) => Ok(LTExpr::symbol(sym.as_str())),
        Some((region, Ok(LTIRToken::Char(c)))) => Ok(LTExpr::lit_uint(c as u64)),
        Some((region, Ok(LTIRToken::Num(n)))) => Ok(LTExpr::lit_uint(n as u64)),
        Some((region, Ok(_))) => Err(ParseError::UnexpectedToken),
        Some((region, Err(err))) => Err(ParseError::LexError(err)),
        None => Err(ParseError::UnexpectedEnd),
    }
}

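/// Parse an expression. Sub-expressions are collected into `children` and
/// folded into an `Application` whose head is the first child; lambda
/// abstractions, parenthesized expressions, blocks and `if`/`else` branches
/// are handled as special cases.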
pub fn parse_expr<It>(
    typectx: &Arc<RwLock<laddertypes::dict::TypeDict>>,
    tokens: &mut Peekable<LTIRLexer<It>>,
) -> Result<crate::expr::LTExpr, ParseError>
where
    It: Iterator<Item = char>,
{
    let mut children = Vec::new();

    while let Some((region, tok)) = tokens.peek() {
        match tok {
            Ok(LTIRToken::Lambda) => {
                // a lambda is only valid at the head of an expression
                if children.is_empty() {
                    tokens.next();

                    let mut args = Vec::new();
                    while let Some((region, Ok(LTIRToken::Symbol(_)))) = tokens.peek() {
                        args.push((parse_symbol(tokens)?, parse_type_tag(typectx, tokens)));
                    }

                    let _ = parse_expect(tokens, LTIRToken::LambdaBody)?;
                    let body = parse_expr(typectx, tokens)?;

                    return Ok(LTExpr::Abstraction {
                        args,
                        body: Box::new(body),
                    });
                } else {
                    return Err(ParseError::UnexpectedToken);
                }
            }
            Ok(LTIRToken::ExprOpen) => {
                tokens.next();
                while let Some((region, peektok)) = tokens.peek() {
                    match peektok {
                        Ok(LTIRToken::ExprClose) => {
                            tokens.next();
                            break;
                        }
                        _ => {}
                    }
                    children.push(parse_expr(typectx, tokens)?);
                }
            }
            Ok(LTIRToken::ExprClose) => {
                break;
            }
            Ok(LTIRToken::BlockOpen) => {
                children.push(LTExpr::block(parse_block(typectx, tokens)?));
            }
            Ok(LTIRToken::BlockClose) => {
                break;
            }
            Ok(LTIRToken::StatementSep) => {
                break;
            }
            Ok(LTIRToken::Symbol(name)) => match name.as_str() {
                "if" => {
                    tokens.next();
                    let _ = parse_expect(tokens, LTIRToken::ExprOpen)?;
                    let cond = parse_expr(typectx, tokens)?;
                    let _ = parse_expect(tokens, LTIRToken::ExprClose)?;
                    let if_expr = LTExpr::block(parse_block(typectx, tokens)?);
                    let mut else_expr = LTExpr::block(vec![]);

                    if let Some((region, peektok)) = tokens.peek() {
                        if let Ok(LTIRToken::Symbol(name)) = peektok {
                            if name == "else" {
                                tokens.next();
                                else_expr = parse_expr(typectx, tokens)?;
                            }
                        }
                    }

                    children.push(LTExpr::Branch {
                        condition: Box::new(cond),
                        if_expr: Box::new(if_expr),
                        else_expr: Box::new(else_expr),
                    });
                }
                _ => {
                    children.push(parse_atom(tokens)?);
                }
            },
            Ok(_) => {
                children.push(parse_atom(tokens)?);
            }
            Err(err) => {
                return Err(ParseError::LexError(err.clone()));
            }
        }
    }

    if !children.is_empty() {
        let head = children.remove(0);
        Ok(LTExpr::Application {
            typ: None,
            head: Box::new(head),
            body: children,
        })
    } else {
        Err(ParseError::UnexpectedEnd)
    }
}
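
// A minimal usage sketch, not part of the original module. It assumes that
// `LTIRLexer` can be built from a char iterator via `From` and that
// `laddertypes::dict::TypeDict::new()` exists, and it guesses the surface
// syntax (`=` for `AssignValue`, `;` for `StatementSep`, braces for blocks)
// from the token names. Adjust to the real lexer API before relying on it.
#[cfg(test)]
mod parse_sketch {
    use super::*;
    use crate::lexer::LTIRLexer;
    use std::sync::{Arc, RwLock};

    #[test]
    fn parse_let_and_return() {
        let typectx = Arc::new(RwLock::new(laddertypes::dict::TypeDict::new()));

        // Hypothetical source text; the concrete lexemes are assumptions.
        let src = "{ let x = 5; return x; }";

        // All parse functions expect the lexer wrapped in `Peekable`.
        let mut tokens = LTIRLexer::from(src.chars()).peekable();

        // `parse_block` consumes the surrounding braces and yields the statements.
        let block = parse_block(&typectx, &mut tokens).expect("block should parse");
        assert_eq!(block.len(), 2);
    }
}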