parser: remove FromStr & improve parse() function
parent 2d3b234e6e
commit e60c60cbff
3 changed files with 55 additions and 68 deletions
@@ -20,24 +20,23 @@ pub enum ParseError {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl std::str::FromStr for TypeTerm {
-    type Err = ParseError;
-
-    fn from_str(s : &str) -> Result<Self, Self::Err> {
-        // creating a new context every time is not that useful..
-        let mut dict = TypeDict::new();
-        dict.parse(&mut LadderTypeLexer::from(s.chars()).peekable())
-    }
-}
-
-//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-
 impl TypeDict {
-    pub fn type_from_str(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
-        self.parse(&mut LadderTypeLexer::from(s.chars()).peekable())
+    pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
+        let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
+
+        match self.parse_ladder(&mut tokens) {
+            Ok(t) => {
+                if let Some(_tok) = tokens.peek() {
+                    Err(ParseError::UnexpectedToken)
+                } else {
+                    Ok(t)
+                }
+            }
+            Err(err) => Err(err)
+        }
     }
 
-    fn parse_app<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
     where It: Iterator<Item = char>
    {
         let mut args = Vec::new();
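The string-based parse above replaces both the FromStr impl and the old token-level parse method. A minimal usage sketch of the new entry point, assuming the names from the hunk are in scope (this snippet is not part of the diff):

    // Build a dictionary and parse a type term directly from a string;
    // tokenization now happens inside parse() itself.
    let mut dict = TypeDict::new();
    assert!(dict.parse("<A B>").is_ok());

    // Trailing input after a complete term is rejected.
    assert_eq!(dict.parse("A B"), Err(ParseError::UnexpectedToken));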
@@ -48,7 +47,7 @@ impl TypeDict {
                     return Ok(TypeTerm::App(args));
                 }
                 _ => {
-                    match self.parse_partial(tokens) {
+                    match self.parse_ladder(tokens) {
                         Ok(a) => { args.push(a); }
                         Err(err) => { return Err(err); }
                     }
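Each argument of an application is parsed as a full ladder (formerly via parse_partial, now the renamed parse_ladder), which is what lets rungs appear inside an application. A hedged example mirroring test_parser_ladder_inside further down; the exact TypeID numbering is an assumption, not shown in this diff:

    // "<A B~C>" is an application whose second argument is itself a ladder.
    let mut dict = TypeDict::new();
    assert_eq!(
        dict.parse("<A B~C>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::Ladder(vec![
                TypeTerm::TypeID(TypeID::Fun(1)),
                TypeTerm::TypeID(TypeID::Fun(2)), // numbering assumed
            ])
        ]))
    );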
@@ -58,7 +57,7 @@ impl TypeDict {
         Err(ParseError::UnexpectedEnd)
     }
 
-    fn parse_rung<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
     where It: Iterator<Item = char>
     {
         match tokens.next() {
@@ -79,8 +78,8 @@ impl TypeDict {
             None => Err(ParseError::UnexpectedEnd)
         }
     }
 
-    fn parse_partial<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
     where It: Iterator<Item = char>
     {
         let mut rungs = Vec::new();
@@ -119,22 +118,6 @@ impl TypeDict {
             _ => Ok(TypeTerm::Ladder(rungs)),
         }
     }
-
-    pub fn parse<It>( &mut self, tokens: &mut Peekable<LadderTypeLexer<It>> ) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
-    {
-        match self.parse_partial(tokens) {
-            Ok(t) => {
-                if let Some(_tok) = tokens.peek() {
-                    Err(ParseError::UnexpectedToken)
-                } else {
-                    Ok(t)
-                }
-            }
-            Err(err) => Err(err)
-        }
-    }
-
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -7,51 +7,55 @@ use {
 
 #[test]
 fn test_curry() {
+    let mut dict = TypeDict::new();
+
     assert_eq!(
-        TypeTerm::from_str("<A B C>").unwrap().curry(),
-        TypeTerm::from_str("<<A B> C>").unwrap()
+        dict.parse("<A B C>").unwrap().curry(),
+        dict.parse("<<A B> C>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<A B C D>").unwrap().curry(),
-        TypeTerm::from_str("<<<A B> C> D>").unwrap()
+        dict.parse("<A B C D>").unwrap().curry(),
+        dict.parse("<<<A B> C> D>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<A B C D E F G H I J K>").unwrap().curry(),
-        TypeTerm::from_str("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
+        dict.parse("<A B C D E F G H I J K>").unwrap().curry(),
+        dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
     );
 
     assert_eq!(
-        TypeTerm::from_str("<A~X B C>").unwrap().curry(),
-        TypeTerm::from_str("<<A~X B> C>").unwrap()
+        dict.parse("<A~X B C>").unwrap().curry(),
+        dict.parse("<<A~X B> C>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<A B C~Y~Z> ~ K").unwrap().curry(),
-        TypeTerm::from_str("< <A B> C~Y~Z > ~ K").unwrap()
+        dict.parse("<A B C~Y~Z> ~ K").unwrap().curry(),
+        dict.parse("< <A B> C~Y~Z > ~ K").unwrap()
     );
 }
 
 #[test]
 fn test_decurry() {
+    let mut dict = TypeDict::new();
+
     assert_eq!(
-        TypeTerm::from_str("<<A B> C>").unwrap().decurry(),
-        TypeTerm::from_str("<A B C>").unwrap()
+        dict.parse("<<A B> C>").unwrap().decurry(),
+        dict.parse("<A B C>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<<<A B> C> D>").unwrap().decurry(),
-        TypeTerm::from_str("<A B C D>").unwrap(),
+        dict.parse("<<<A B> C> D>").unwrap().decurry(),
+        dict.parse("<A B C D>").unwrap(),
     );
     assert_eq!(
-        TypeTerm::from_str("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
-        TypeTerm::from_str("<A B C D E F G H I J K>").unwrap()
+        dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
+        dict.parse("<A B C D E F G H I J K>").unwrap()
     );
 
     assert_eq!(
-        TypeTerm::from_str("<<A~X B> C>").unwrap().decurry(),
-        TypeTerm::from_str("<A~X B C>").unwrap()
+        dict.parse("<<A~X B> C>").unwrap().decurry(),
+        dict.parse("<A~X B C>").unwrap()
     );
     assert_eq!(
-        TypeTerm::from_str("<<A~X B> C~Y>~K").unwrap().decurry(),
-        TypeTerm::from_str("<A~X B C~Y> ~K").unwrap()
+        dict.parse("<<A~X B> C~Y>~K").unwrap().decurry(),
+        dict.parse("<A~X B C~Y> ~K").unwrap()
     );
 }
 
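Both tests now share a single dict for all assertions, so an identifier like A or X keeps the same TypeID everywhere it occurs in the test. A small sketch of that pattern (not part of the diff):

    // Every parse in the test goes through the same TypeDict, so "A",
    // "B" and "C" map to consistent TypeIDs across all compared terms.
    let mut dict = TypeDict::new();
    let lhs = dict.parse("<A B C>").unwrap().curry();
    let rhs = dict.parse("<<A B> C>").unwrap();
    assert_eq!(lhs, rhs);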
@@ -10,7 +10,7 @@ use {
 fn test_parser_id() {
     assert_eq!(
         Ok(TypeTerm::TypeID(TypeID::Fun(0))),
-        TypeTerm::from_str("A")
+        TypeDict::new().parse("A")
     );
 }
 
@@ -18,7 +18,7 @@ fn test_parser_id() {
 fn test_parser_num() {
     assert_eq!(
         Ok(TypeTerm::Num(1234)),
-        TypeTerm::from_str("1234")
+        TypeDict::new().parse("1234")
     );
 }
 
@@ -26,21 +26,21 @@ fn test_parser_num() {
 fn test_parser_char() {
     assert_eq!(
         Ok(TypeTerm::Char('x')),
-        TypeTerm::from_str("'x'")
+        TypeDict::new().parse("'x'")
     );
 }
 
 #[test]
 fn test_parser_app() {
     assert_eq!(
-        TypeTerm::from_str("<A B>"),
+        TypeDict::new().parse("<A B>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeTerm::from_str("<A B C>"),
+        TypeDict::new().parse("<A B C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -52,7 +52,7 @@ fn test_parser_app() {
 #[test]
 fn test_parser_unexpected_close() {
     assert_eq!(
-        TypeTerm::from_str(">"),
+        TypeDict::new().parse(">"),
         Err(ParseError::UnexpectedClose)
     );
 }
|
@ -60,7 +60,7 @@ fn test_parser_unexpected_close() {
|
|||
#[test]
|
||||
fn test_parser_unexpected_token() {
|
||||
assert_eq!(
|
||||
TypeTerm::from_str("A B"),
|
||||
TypeDict::new().parse("A B"),
|
||||
Err(ParseError::UnexpectedToken)
|
||||
);
|
||||
}
|
||||
|
@ -68,14 +68,14 @@ fn test_parser_unexpected_token() {
|
|||
#[test]
|
||||
fn test_parser_ladder() {
|
||||
assert_eq!(
|
||||
TypeTerm::from_str("A~B"),
|
||||
TypeDict::new().parse("A~B"),
|
||||
Ok(TypeTerm::Ladder(vec![
|
||||
TypeTerm::TypeID(TypeID::Fun(0)),
|
||||
TypeTerm::TypeID(TypeID::Fun(1)),
|
||||
]))
|
||||
);
|
||||
assert_eq!(
|
||||
TypeTerm::from_str("A~B~C"),
|
||||
TypeDict::new().parse("A~B~C"),
|
||||
Ok(TypeTerm::Ladder(vec![
|
||||
TypeTerm::TypeID(TypeID::Fun(0)),
|
||||
TypeTerm::TypeID(TypeID::Fun(1)),
|
||||
|
@ -87,7 +87,7 @@ fn test_parser_ladder() {
|
|||
#[test]
|
||||
fn test_parser_ladder_outside() {
|
||||
assert_eq!(
|
||||
TypeTerm::from_str("<A B>~C"),
|
||||
TypeDict::new().parse("<A B>~C"),
|
||||
Ok(TypeTerm::Ladder(vec![
|
||||
TypeTerm::App(vec![
|
||||
TypeTerm::TypeID(TypeID::Fun(0)),
|
||||
|
@ -101,7 +101,7 @@ fn test_parser_ladder_outside() {
|
|||
#[test]
|
||||
fn test_parser_ladder_inside() {
|
||||
assert_eq!(
|
||||
TypeTerm::from_str("<A B~C>"),
|
||||
TypeDict::new().parse("<A B~C>"),
|
||||
Ok(TypeTerm::App(vec![
|
||||
TypeTerm::TypeID(TypeID::Fun(0)),
|
||||
TypeTerm::Ladder(vec![
|
||||
|
@ -115,7 +115,7 @@ fn test_parser_ladder_inside() {
|
|||
#[test]
|
||||
fn test_parser_ladder_between() {
|
||||
assert_eq!(
|
||||
TypeTerm::from_str("<A B~<C D>>"),
|
||||
TypeDict::new().parse("<A B~<C D>>"),
|
||||
Ok(TypeTerm::App(vec![
|
||||
TypeTerm::TypeID(TypeID::Fun(0)),
|
||||
TypeTerm::Ladder(vec![
|
||||
|
@ -133,7 +133,7 @@ fn test_parser_ladder_between() {
|
|||
#[test]
|
||||
fn test_parser_ladder_large() {
|
||||
assert_eq!(
|
||||
TypeTerm::from_str(
|
||||
TypeDict::new().parse(
|
||||
"<Seq Date
|
||||
~<TimeSince UnixEpoch>
|
||||
~<Duration Seconds>
|
||||
|
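These parser tests build a throwaway TypeDict for every call, so TypeIDs appear to be assigned by order of first occurrence within the parsed string, which is why A is expected as TypeID::Fun(0) throughout. A minimal restatement of that assumption (not part of the diff):

    // Fresh dict per call: "A" is the first identifier encountered,
    // so it is expected to be assigned Fun(0).
    assert_eq!(
        TypeDict::new().parse("A"),
        Ok(TypeTerm::TypeID(TypeID::Fun(0)))
    );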