diff --git a/Cargo.toml b/Cargo.toml
index 0a57fd3..0ab224b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,3 +4,9 @@ edition = "2018"
 name = "laddertypes"
 version = "0.1.0"
 
+#[lib]
+#proc-macro = true
+
+[dependencies]
+laddertype-macro = { path = "./laddertype-macro" }
+
diff --git a/src/parser.rs b/src/parser.rs
index 85ff9b4..7292c2e 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -20,10 +20,14 @@ pub enum ParseError {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
+impl TypeDict {
     pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
-        let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
+        let mut tokens = LadderTypeLexer::from(s.chars());
+        self.parse_tokens( tokens.peekable() )
+    }
 
+    pub fn parse_tokens<It>(&mut self, mut tokens: Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>> {
         match self.parse_ladder(&mut tokens) {
             Ok(t) => {
                 if let Some(_tok) = tokens.peek() {
@@ -36,8 +40,8 @@ impl TypeDict {
         }
     }
 
-    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         let mut args = Vec::new();
         while let Some(tok) = tokens.peek() {
@@ -57,8 +61,8 @@
         Err(ParseError::UnexpectedEnd)
     }
 
-    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
    {
         match tokens.next() {
             Some(Ok(LadderTypeToken::Open)) => self.parse_app(tokens),
@@ -79,8 +83,8 @@
         }
     }
 
-    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         let mut rungs = Vec::new();
 
diff --git a/src/test/lexer.rs b/src/test/lexer.rs
index a7ce90b..61bf9ee 100644
--- a/src/test/lexer.rs
+++ b/src/test/lexer.rs
@@ -153,4 +153,3 @@ fn test_lexer_large() {
 
     assert_eq!( lex.next(), None );
 }
-
diff --git a/src/test/parser.rs b/src/test/parser.rs
index 1166229..dd17604 100644
--- a/src/test/parser.rs
+++ b/src/test/parser.rs
@@ -143,7 +143,7 @@ fn test_parser_ladder_large() {
     assert_eq!( TypeDict::new().parse( " + ~ ~ ~ℕ ~
@@ -204,3 +204,54 @@ fn test_parser_ladder_large() {
     );
 }
 
+// helper macro: recursively translate ladder-type source tokens
+// (`<`, `>`, `~`, symbols) into a Vec of lexer tokens
+macro_rules! lt_tokenize {
+    () => { Vec::new() };
+    (< $($rest:tt)*) => {{
+        let mut v = vec![ Ok(crate::lexer::LadderTypeToken::Open) ];
+        v.extend( lt_tokenize!($($rest)*) );
+        v
+    }};
+    (> $($rest:tt)*) => {{
+        let mut v = vec![ Ok(crate::lexer::LadderTypeToken::Close) ];
+        v.extend( lt_tokenize!($($rest)*) );
+        v
+    }};
+    (~ $($rest:tt)*) => {{
+        let mut v = vec![ Ok(crate::lexer::LadderTypeToken::Ladder) ];
+        v.extend( lt_tokenize!($($rest)*) );
+        v
+    }};
+    ($symbol:ident $($rest:tt)*) => {{
+        let mut v = vec![ Ok(crate::lexer::LadderTypeToken::Symbol( stringify!($symbol).into() )) ];
+        v.extend( lt_tokenize!($($rest)*) );
+        v
+    }};
+}
+
+// helper macro: tokenize and parse in one step
+macro_rules! lt_parse {
+    ($dict:ident, $($tokens:tt)*) => {
+        $dict.parse_tokens(
+            lt_tokenize!( $($tokens)* )
+                .into_iter().peekable()
+        )
+    }
+}
+
+
+#[test]
+fn test_proc_macro() {
+    use laddertype_macro::laddertype;
+    use crate::lexer::LadderTypeToken;
+
+    let mut dict = TypeDict::new();
+
+    let t1 = dict.parse_tokens(vec![
+        Ok(crate::lexer::LadderTypeToken::Open),
+        Ok(crate::lexer::LadderTypeToken::Symbol("Seq".into())),
+        Ok(crate::lexer::LadderTypeToken::Symbol("Char".into())),
+        Ok(crate::lexer::LadderTypeToken::Close)
+    ].into_iter().peekable());
+
+    let t2 = dict.parse_tokens(
+        lt_tokenize!{ <Seq Char> }.into_iter().peekable()
+    );
+    //lt_parse!( dict, <Seq Char> );
+
+    assert_eq!(t1, t2);
+}
+