parser: remove FromStr & improve parse() function

Michael Sippel 2023-10-02 15:05:25 +02:00
parent 2d3b234e6e
commit e60c60cbff
Signed by: senvas
GPG key ID: F96CF119C34B64A6
3 changed files with 55 additions and 68 deletions

View file

@@ -20,24 +20,23 @@ pub enum ParseError {
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl std::str::FromStr for TypeTerm {
-    type Err = ParseError;
-
-    fn from_str(s : &str) -> Result<Self, Self::Err> {
-        // creating a new context every time is not that useful..
-        let mut dict = TypeDict::new();
-        dict.parse(&mut LadderTypeLexer::from(s.chars()).peekable())
-    }
-}
-
-//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-
 impl TypeDict {
-    pub fn type_from_str(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
-        self.parse(&mut LadderTypeLexer::from(s.chars()).peekable())
+    pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
+        let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
+        match self.parse_ladder(&mut tokens) {
+            Ok(t) => {
+                if let Some(_tok) = tokens.peek() {
+                    Err(ParseError::UnexpectedToken)
+                } else {
+                    Ok(t)
+                }
+            }
+            Err(err) => Err(err)
+        }
     }
 
-    fn parse_app<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
     where It: Iterator<Item = char>
     {
         let mut args = Vec::new();
@@ -48,7 +47,7 @@ impl TypeDict {
                 return Ok(TypeTerm::App(args));
             }
             _ => {
-                match self.parse_partial(tokens) {
+                match self.parse_ladder(tokens) {
                     Ok(a) => { args.push(a); }
                     Err(err) => { return Err(err); }
                 }
@@ -58,7 +57,7 @@ impl TypeDict {
         Err(ParseError::UnexpectedEnd)
     }
 
-    fn parse_rung<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
     where It: Iterator<Item = char>
     {
         match tokens.next() {
@@ -79,8 +78,8 @@ impl TypeDict {
             None => Err(ParseError::UnexpectedEnd)
         }
     }
 
-    fn parse_partial<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
     where It: Iterator<Item = char>
     {
         let mut rungs = Vec::new();
@@ -119,22 +118,6 @@ impl TypeDict {
             _ => Ok(TypeTerm::Ladder(rungs)),
         }
     }
-
-    pub fn parse<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
-    {
-        match self.parse_partial(tokens) {
-            Ok(t) => {
-                if let Some(_tok) = tokens.peek() {
-                    Err(ParseError::UnexpectedToken)
-                } else {
-                    Ok(t)
-                }
-            }
-            Err(err) => Err(err)
-        }
-    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
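In short, this file folds the old token-based parse() and type_from_str() into a single TypeDict::parse(&mut self, s: &str) that builds the lexer itself and rejects trailing input, and drops the FromStr impl that silently created a throwaway TypeDict. A minimal sketch of the new call pattern, mirroring the updated tests in this commit (assumes TypeDict, TypeTerm and ParseError are in scope as there):

    let mut dict = TypeDict::new();

    // parse() now drives the lexer internally and returns the finished TypeTerm.
    let t = dict.parse("<A B>~C").unwrap();

    // Leftover tokens after a complete term are reported as an error.
    assert_eq!(dict.parse("A B"), Err(ParseError::UnexpectedToken));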

View file

@@ -7,51 +7,55 @@ use {
 #[test]
 fn test_curry() {
+    let mut dict = TypeDict::new();
+
     assert_eq!(
-        TypeTerm::from_str("<A B C>").unwrap().curry(),
-        TypeTerm::from_str("<<A B> C>").unwrap()
+        dict.parse("<A B C>").unwrap().curry(),
+        dict.parse("<<A B> C>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<A B C D>").unwrap().curry(),
-        TypeTerm::from_str("<<<A B> C> D>").unwrap()
+        dict.parse("<A B C D>").unwrap().curry(),
+        dict.parse("<<<A B> C> D>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<A B C D E F G H I J K>").unwrap().curry(),
-        TypeTerm::from_str("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
+        dict.parse("<A B C D E F G H I J K>").unwrap().curry(),
+        dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<A~X B C>").unwrap().curry(),
-        TypeTerm::from_str("<<A~X B> C>").unwrap()
+        dict.parse("<A~X B C>").unwrap().curry(),
+        dict.parse("<<A~X B> C>").unwrap()
    );
     assert_eq!(
-        TypeTerm::from_str("<A B C~Y~Z> ~ K").unwrap().curry(),
-        TypeTerm::from_str("< <A B> C~Y~Z > ~ K").unwrap()
+        dict.parse("<A B C~Y~Z> ~ K").unwrap().curry(),
+        dict.parse("< <A B> C~Y~Z > ~ K").unwrap()
     );
 }
 
 #[test]
 fn test_decurry() {
+    let mut dict = TypeDict::new();
+
     assert_eq!(
-        TypeTerm::from_str("<<A B> C>").unwrap().decurry(),
-        TypeTerm::from_str("<A B C>").unwrap()
+        dict.parse("<<A B> C>").unwrap().decurry(),
+        dict.parse("<A B C>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<<<A B> C> D>").unwrap().decurry(),
-        TypeTerm::from_str("<A B C D>").unwrap(),
+        dict.parse("<<<A B> C> D>").unwrap().decurry(),
+        dict.parse("<A B C D>").unwrap(),
     );
     assert_eq!(
-        TypeTerm::from_str("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
-        TypeTerm::from_str("<A B C D E F G H I J K>").unwrap()
+        dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
+        dict.parse("<A B C D E F G H I J K>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<<A~X B> C>").unwrap().decurry(),
-        TypeTerm::from_str("<A~X B C>").unwrap()
+        dict.parse("<<A~X B> C>").unwrap().decurry(),
+        dict.parse("<A~X B C>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<<A~X B> C~Y>~K").unwrap().decurry(),
-        TypeTerm::from_str("<A~X B C~Y> ~K").unwrap()
+        dict.parse("<<A~X B> C~Y>~K").unwrap().decurry(),
+        dict.parse("<A~X B C~Y> ~K").unwrap()
     );
 }
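These tests now route every string through one shared dict per test function instead of calling TypeTerm::from_str, which built a fresh dictionary on every call. Sharing the dictionary keeps the TypeIDs assigned to A, B, C, ... consistent on both sides of each assert_eq!. A small illustrative sketch of that behaviour, with the concrete Fun(..) indices taken from the parser tests in the next file:

    let mut dict = TypeDict::new();
    let ab = dict.parse("A~B").unwrap(); // A -> TypeID::Fun(0), B -> TypeID::Fun(1)
    let ba = dict.parse("B~A").unwrap(); // the same names resolve to the same IDs again
    assert_ne!(ab, ba);                  // Ladder([Fun(0), Fun(1)]) vs Ladder([Fun(1), Fun(0)])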

View file

@@ -10,7 +10,7 @@ use {
 fn test_parser_id() {
     assert_eq!(
         Ok(TypeTerm::TypeID(TypeID::Fun(0))),
-        TypeTerm::from_str("A")
+        TypeDict::new().parse("A")
     );
 }
 
@@ -18,7 +18,7 @@ fn test_parser_id() {
 fn test_parser_num() {
     assert_eq!(
         Ok(TypeTerm::Num(1234)),
-        TypeTerm::from_str("1234")
+        TypeDict::new().parse("1234")
     );
 }
 
@@ -26,21 +26,21 @@ fn test_parser_num() {
 fn test_parser_char() {
     assert_eq!(
         Ok(TypeTerm::Char('x')),
-        TypeTerm::from_str("'x'")
+        TypeDict::new().parse("'x'")
     );
 }
 
 #[test]
 fn test_parser_app() {
     assert_eq!(
-        TypeTerm::from_str("<A B>"),
+        TypeDict::new().parse("<A B>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeTerm::from_str("<A B C>"),
+        TypeDict::new().parse("<A B C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -52,7 +52,7 @@ fn test_parser_app() {
 #[test]
 fn test_parser_unexpected_close() {
     assert_eq!(
-        TypeTerm::from_str(">"),
+        TypeDict::new().parse(">"),
         Err(ParseError::UnexpectedClose)
     );
 }
@@ -60,7 +60,7 @@ fn test_parser_unexpected_close() {
 #[test]
 fn test_parser_unexpected_token() {
     assert_eq!(
-        TypeTerm::from_str("A B"),
+        TypeDict::new().parse("A B"),
         Err(ParseError::UnexpectedToken)
     );
 }
@@ -68,14 +68,14 @@ fn test_parser_unexpected_token() {
 #[test]
 fn test_parser_ladder() {
     assert_eq!(
-        TypeTerm::from_str("A~B"),
+        TypeDict::new().parse("A~B"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeTerm::from_str("A~B~C"),
+        TypeDict::new().parse("A~B~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -87,7 +87,7 @@ fn test_parser_ladder() {
 #[test]
 fn test_parser_ladder_outside() {
     assert_eq!(
-        TypeTerm::from_str("<A B>~C"),
+        TypeDict::new().parse("<A B>~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::App(vec![
                 TypeTerm::TypeID(TypeID::Fun(0)),
@@ -101,7 +101,7 @@ fn test_parser_ladder_outside() {
 #[test]
 fn test_parser_ladder_inside() {
     assert_eq!(
-        TypeTerm::from_str("<A B~C>"),
+        TypeDict::new().parse("<A B~C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -115,7 +115,7 @@ fn test_parser_ladder_inside() {
 #[test]
 fn test_parser_ladder_between() {
     assert_eq!(
-        TypeTerm::from_str("<A B~<C D>>"),
+        TypeDict::new().parse("<A B~<C D>>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -133,7 +133,7 @@ fn test_parser_ladder_between() {
 #[test]
 fn test_parser_ladder_large() {
     assert_eq!(
-        TypeTerm::from_str(
+        TypeDict::new().parse(
             "<Seq Date
                   ~<TimeSince UnixEpoch>
                   ~<Duration Seconds>