Compare commits
1 commit
dev...topic-marc

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 5b45f164fb |  |

4 changed files with 70 additions and 10 deletions
@@ -4,3 +4,9 @@ edition = "2018"
 name = "laddertypes"
 version = "0.1.0"
 
+#[lib]
+#proc-macro = true
+
+[dependencies]
+laddertype-macro = { path = "./laddertype-macro" }
+
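Note: this compare view does not include the sources of the new laddertype-macro crate itself, only the path dependency above (the #[lib] / #proc-macro lines are still commented out). Judging from the `use laddertype_macro::laddertype;` import in the test added at the bottom of this diff, the sub-crate presumably exposes a function-like procedural macro; the following is a purely hypothetical skeleton, not code from this commit:

    // Hypothetical skeleton of the laddertype-macro crate (assumption, not shown in this diff).
    use proc_macro::TokenStream;

    #[proc_macro]
    pub fn laddertype(input: TokenStream) -> TokenStream {
        // Placeholder: the real expansion logic is not visible in this commit;
        // pass the tokens through unchanged.
        input
    }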
@@ -20,10 +20,14 @@ pub enum ParseError {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 impl TypeDict {
     pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
-        let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
-
+        let mut tokens = LadderTypeLexer::from(s.chars());
+        self.parse_tokens( tokens.peekable() )
+    }
+
+    pub fn parse_tokens<It>(&mut self, mut tokens: Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>> {
         match self.parse_ladder(&mut tokens) {
             Ok(t) => {
                 if let Some(_tok) = tokens.peek() {
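Note: with this split, parse() becomes a thin wrapper over the new parse_tokens(), which accepts any peekable stream of Result<LadderTypeToken, LexError>. A minimal usage sketch, mirroring the test_proc_macro test added at the bottom of this diff (TypeDict, LadderTypeToken and LexError are the crate's existing types):

    use crate::lexer::LadderTypeToken;

    let mut dict = TypeDict::new();

    // Hand-built token stream instead of running LadderTypeLexer over a &str.
    let t = dict.parse_tokens(vec![
        Ok(LadderTypeToken::Open),
        Ok(LadderTypeToken::Symbol("Seq".into())),
        Ok(LadderTypeToken::Symbol("Char".into())),
        Ok(LadderTypeToken::Close),
    ].into_iter().peekable());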
@@ -36,8 +40,8 @@ impl TypeDict {
         }
     }
 
-    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         let mut args = Vec::new();
         while let Some(tok) = tokens.peek() {
@@ -57,8 +61,8 @@ impl TypeDict {
         Err(ParseError::UnexpectedEnd)
     }
 
-    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         match tokens.next() {
             Some(Ok(LadderTypeToken::Open)) => self.parse_app(tokens),
@@ -79,8 +83,8 @@ impl TypeDict {
         }
     }
 
-    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         let mut rungs = Vec::new();
 
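Note: the three hunks above make the same change to parse_app(), parse_rung() and parse_ladder(): instead of the concrete Peekable<LadderTypeLexer<It>> over chars, they now accept any Iterator<Item = Result<LadderTypeToken, LexError>>. The token source can therefore be assembled freely before parsing; a sketch under the assumptions that crate::lexer exports LadderTypeLexer and that the lexer turns " Char>" into Symbol("Char") and Close:

    use crate::lexer::{LadderTypeLexer, LadderTypeToken};

    let mut dict = TypeDict::new();

    // Hypothetical adapter: prepend two hand-made tokens to the lexer's output.
    let prefix = vec![
        Ok(LadderTypeToken::Open),
        Ok(LadderTypeToken::Symbol("Seq".into())),
    ];
    let rest = LadderTypeLexer::from(" Char>".chars());

    // Both sources yield Result<LadderTypeToken, LexError>, so they can be chained.
    let t = dict.parse_tokens(prefix.into_iter().chain(rest).peekable());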
@@ -153,4 +153,3 @@ fn test_lexer_large() {
 
     assert_eq!( lex.next(), None );
 }
-
@@ -143,7 +143,7 @@ fn test_parser_ladder_large() {
     assert_eq!(
         TypeDict::new().parse(
             "<Seq Date
-                  ~<TimeSince UnixEpoch>
+                  ~<TimeSince UnixEpoch>
                   ~<Duration Seconds>
                   ~ℕ
                   ~<PosInt 10 BigEndian>
@@ -204,3 +204,54 @@ fn test_parser_ladder_large() {
     );
 }
 
+macro_rules! lt_tokenize {
+    ($symbol:ident) => {
+        crate::lexer::LadderTypeToken::Symbol( "$symbol".into() )
+    }
+    (< $rest::tt) => {
+        crate::lexer::LadderTypeToken::Open,
+        lt_tokenize!($rest)
+    }
+    (> $rest::tt) => {
+        crate::lexer::LadderTypeToken::Close,
+        lt_tokenize!($rest)
+    }
+    (~ $rest::tt) => {
+        crate::lexer::LadderTypeToken::Ladder,
+        lt_tokenize!($rest)
+    }
+}
+
+macro_rules! lt_parse {
+    ($dict:ident, $tokens:tt*) => {
+        $dict.parse_tokens(
+            vec![
+                lt_tokenize!($tokens)
+            ].into_iter().peekable()
+        )
+    }
+}
+
+
+#[test]
+fn test_proc_macro() {
+    use laddertype_macro::laddertype;
+    use crate::lexer::LadderTypeToken;
+
+    let mut dict = TypeDict::new();
+
+    let t1 = dict.parse_tokens(vec![
+        Ok(crate::lexer::LadderTypeToken::Open),
+        Ok(crate::lexer::LadderTypeToken::Symbol("Seq".into())),
+        Ok(crate::lexer::LadderTypeToken::Symbol("Char".into())),
+        Ok(crate::lexer::LadderTypeToken::Close)
+    ].into_iter().peekable());
+
+    let t2 = dict.parse_tokens(vec![
+        lt_tokenize!{ <Seq Char> }
+    ].into_iter().peekable());
+    //lt_parse!( dict, <Seq Char> );
+
+    assert_eq!(t1, t2);
+}
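Note: as committed, lt_tokenize! and lt_parse! look like work in progress: `$rest::tt` is not valid fragment syntax (a fragment binding is written `$rest:tt`, and consuming several trailing tokens needs `$($rest:tt)*`), the rules are missing `;` separators, `"$symbol"` expands to the literal string "$symbol" rather than the matched identifier, and a macro call in expression position cannot expand to several comma-separated vec![] elements, which is presumably why the lt_parse! invocation in the test is still commented out. A tt-muncher that builds the whole token vector itself would avoid those issues; the following is only a sketch against the assumed crate::lexer types (LadderTypeToken, LexError), not the committed code:

    // Sketch, not the committed macro: build the complete Vec<Result<LadderTypeToken, LexError>>
    // inside the macro so it can be handed straight to TypeDict::parse_tokens().
    macro_rules! lt_tokenize {
        // Internal rules: push one token, then recurse on the remaining input.
        (@push $v:ident) => {};
        (@push $v:ident < $($rest:tt)*) => {
            $v.push(Ok(crate::lexer::LadderTypeToken::Open));
            lt_tokenize!(@push $v $($rest)*);
        };
        (@push $v:ident > $($rest:tt)*) => {
            $v.push(Ok(crate::lexer::LadderTypeToken::Close));
            lt_tokenize!(@push $v $($rest)*);
        };
        (@push $v:ident ~ $($rest:tt)*) => {
            $v.push(Ok(crate::lexer::LadderTypeToken::Ladder));
            lt_tokenize!(@push $v $($rest)*);
        };
        (@push $v:ident $sym:ident $($rest:tt)*) => {
            $v.push(Ok(crate::lexer::LadderTypeToken::Symbol(stringify!($sym).into())));
            lt_tokenize!(@push $v $($rest)*);
        };
        // Entry point: expand to a block that evaluates to the token vector.
        ($($t:tt)*) => {{
            let mut v: Vec<Result<crate::lexer::LadderTypeToken, crate::lexer::LexError>> = Vec::new();
            lt_tokenize!(@push v $($t)*);
            v
        }};
    }

    // Usage sketch, equivalent to the hand-written t1 in the test above:
    let mut dict = TypeDict::new();
    let toks = lt_tokenize!{ <Seq Char> };
    let t = dict.parse_tokens(toks.into_iter().peekable());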