Compare commits


1 commit

Commit 5b45f164fb ("macro wip"), 2024-02-27 03:37:09 +01:00
9 changed files with 70 additions and 223 deletions

View file

@@ -4,3 +4,9 @@ edition = "2018"
name = "laddertypes"
version = "0.1.0"
#[lib]
#proc-macro = true
[dependencies]
laddertype-macro = { path = "./laddertype-macro" }

View file

@@ -5,8 +5,6 @@ Rust Implementation of Ladder-Types (parsing, unification, rewriting, etc)
## Ladder Types
### Motivation
In order to implement complex data structures and algorithms, usually
many layers of abstraction are built on top of each other.
Consequently, higher-level data types are encoded into lower-level data
@@ -59,48 +57,6 @@ this:
1696093021:1696093039:1528324679:1539892301:1638141920:1688010253
```
### Syntax
In their core form, type-terms can be one of the following:
- (**Atomic Type**) | `SomeTypeName`
- (**Literal Integer**) | `0` | `1` | `2` | ...
- (**Literal Character**) | `'a'` | `'b'` | `'c'` | ...
- (**Literal String**) | `"abc"`
- (**Parameter Application**) | `<T1 T2>` given `T1` and `T2` are type-terms
- (**Ladder**) | `T1 ~ T2` given `T1` and `T2` are type-terms
On top of that, the following syntax sugar is defined (a short desugaring example follows these lists):
#### Complex Types
- `[ T ]` <===> `<Seq T>`
- `{ a:A b:B }` <===> `<Struct <"a" A> <"b" B>>`
- `a:A | b:B` <===> `<Enum <"a" A> <"b" B>>`
#### Function Types
- `A -> B` <===> `<Fn A B>`
#### Reference Types
- `*A` <===> `<Ptr A>`
- `&A` <===> `<ConstRef A>`
- `&!A` <===> `<MutRef A>`
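For example, combining the sugar above, `[ <Digit 10> ]` is shorthand for `<Seq <Digit 10>>`, and a struct such as `{ x:<Digit 10> y:[Char] }` desugars to `<Struct <"x" <Digit 10>> <"y" <Seq Char>>>` (the field names `x` and `y` are only an illustration).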
### Equivalences
#### Currying
`<<A B> C>` <===> `<A B C>`
#### Ladder-Normal-Form
exhaustively apply `<A B~C>` ===> `<A B>~<A C>`
e.g. `[<Digit 10>]~[Char]~[Ascii]` is in **LNF**
#### Parameter-Normal-Form
exhaustively apply `<A B>~<A C>` ===> `<A B~C>`
e.g. `[<Digit 10>~Char~Ascii]` is in **PNF**
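As a concrete sketch of computing PNF with this crate (mirroring a case from `test_param_normalize` further down in this diff; the test name `pnf_example` below is only illustrative):
```rust
use crate::dict::TypeDict;

#[test]
fn pnf_example() {
    let mut dict = TypeDict::new();

    // the LNF term <Seq <Digit 10>>~<Seq Char> rewrites to the PNF term <Seq <Digit 10>~Char>
    assert_eq!(
        dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse error").param_normalize(),
    );
}
```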
## How to use this crate
```rust
@@ -117,19 +73,6 @@ fn main() {
}
```
## Roadmap
- [x] (Un-)Parsing
- [x] (De-)Currying
- [x] Unification
- [x] Ladder-Normal-Form
- [x] Parameter-Normal-Form
- [ ] (De-)Sugaring
- [ ] Seq
- [ ] Enum
- [ ] Struct
- [ ] References
- [ ] Function
## License
[GPLv3](COPYING)

View file

@@ -7,7 +7,6 @@ pub mod parser;
pub mod unparser;
pub mod curry;
pub mod lnf;
pub mod pnf;
pub mod subtype;
pub mod unification;

View file

@@ -20,10 +20,14 @@ pub enum ParseError {
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl TypeDict {
    pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
        let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
        let mut tokens = LadderTypeLexer::from(s.chars());
        self.parse_tokens( tokens.peekable() )
    }

    pub fn parse_tokens<It>(&mut self, mut tokens: Peekable<It>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = Result<LadderTypeToken, LexError>> {
        match self.parse_ladder(&mut tokens) {
            Ok(t) => {
                if let Some(_tok) = tokens.peek() {
@@ -36,8 +40,8 @@ impl TypeDict {
        }
    }

    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = char>
    fn parse_app<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
    {
        let mut args = Vec::new();
        while let Some(tok) = tokens.peek() {
@@ -57,8 +61,8 @@ impl TypeDict {
        Err(ParseError::UnexpectedEnd)
    }

    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = char>
    fn parse_rung<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
    {
        match tokens.next() {
            Some(Ok(LadderTypeToken::Open)) => self.parse_app(tokens),
@@ -79,8 +83,8 @@ impl TypeDict {
        }
    }

    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = char>
    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
    {
        let mut rungs = Vec::new();
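With this change the parser no longer has to be driven by `LadderTypeLexer`: `parse_tokens` accepts any peekable iterator over `Result<LadderTypeToken, LexError>`. A minimal sketch of calling it with a hand-built token stream, modelled on the `test_proc_macro` test further down in this diff (the test name here is only illustrative):
```rust
use crate::dict::TypeDict;
use crate::lexer::LadderTypeToken;

#[test]
fn parse_tokens_example() {
    let mut dict = TypeDict::new();

    // token stream for "<Seq Char>", built by hand instead of running the lexer
    let t = dict.parse_tokens(vec![
        Ok(LadderTypeToken::Open),
        Ok(LadderTypeToken::Symbol("Seq".into())),
        Ok(LadderTypeToken::Symbol("Char".into())),
        Ok(LadderTypeToken::Close),
    ].into_iter().peekable());

    assert_eq!(t, dict.parse("<Seq Char>"));
}
```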

View file

@@ -1,113 +0,0 @@
use crate::term::TypeTerm;

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

impl TypeTerm {
    /// transmute type into Parameter-Normal-Form (PNF)
    ///
    /// Example:
    /// ```ignore
    /// <Seq <Digit 10>>~<Seq Char>
    /// ⇒ <Seq <Digit 10>~Char>
    /// ```
    pub fn param_normalize(self) -> Self {
        match self {
            TypeTerm::Ladder(mut rungs) => {
                if rungs.len() > 0 {
                    // normalize all rungs separately
                    for r in rungs.iter_mut() {
                        *r = r.clone().param_normalize();
                    }

                    // take top-rung
                    match rungs.remove(0) {
                        TypeTerm::App(params_top) => {
                            let mut params_ladders = Vec::new();
                            let mut tail : Vec<TypeTerm> = Vec::new();

                            // append all other rungs to ladders inside
                            // the application
                            for p in params_top {
                                params_ladders.push(vec![ p ]);
                            }

                            for r in rungs {
                                match r {
                                    TypeTerm::App(mut params_rung) => {
                                        if params_rung.len() > 0 {
                                            let mut first_param = params_rung.remove(0);
                                            if first_param == params_ladders[0][0] {
                                                for (l, p) in params_ladders.iter_mut().skip(1).zip(params_rung) {
                                                    l.push(p.param_normalize());
                                                }
                                            } else {
                                                params_rung.insert(0, first_param);
                                                tail.push(TypeTerm::App(params_rung));
                                            }
                                        }
                                    }

                                    TypeTerm::Ladder(mut rs) => {
                                        for r in rs {
                                            tail.push(r.param_normalize());
                                        }
                                    }

                                    atomic => {
                                        tail.push(atomic);
                                    }
                                }
                            }

                            let head = TypeTerm::App(
                                params_ladders.into_iter()
                                    .map(
                                        |mut l| {
                                            l.dedup();
                                            match l.len() {
                                                0 => TypeTerm::unit(),
                                                1 => l.remove(0),
                                                _ => TypeTerm::Ladder(l).param_normalize()
                                            }
                                        }
                                    )
                                    .collect()
                            );

                            if tail.len() > 0 {
                                tail.insert(0, head);
                                TypeTerm::Ladder(tail)
                            } else {
                                head
                            }
                        }

                        TypeTerm::Ladder(mut r) => {
                            r.append(&mut rungs);
                            TypeTerm::Ladder(r)
                        }

                        atomic => {
                            rungs.insert(0, atomic);
                            TypeTerm::Ladder(rungs)
                        }
                    }
                } else {
                    TypeTerm::unit()
                }
            }

            TypeTerm::App(params) => {
                TypeTerm::App(
                    params.into_iter()
                        .map(|p| p.param_normalize())
                        .collect())
            }

            atomic => atomic
        }
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
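For reference, the `param_normalize` implementation removed above turns each parameter of the top rung into its own ladder and then pushes the corresponding parameters of every following rung onto those ladders, provided the rung's head matches; rungs that do not match are collected into a tail. A worked trace of that behaviour (derived from the code above and the README's LNF/PNF examples):
```rust
// Input:  <Seq <Digit 10>> ~ <Seq Char> ~ <Seq Ascii>
//
// top rung <Seq <Digit 10>>            ->  ladders: [Seq]  [<Digit 10>]
// rung <Seq Char>   (head Seq matches) ->  ladders: [Seq]  [<Digit 10>, Char]
// rung <Seq Ascii>  (head Seq matches) ->  ladders: [Seq]  [<Digit 10>, Char, Ascii]
//
// Result: <Seq <Digit 10>~Char~Ascii>   (the tail stays empty in this example)
```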

View file

@@ -153,4 +153,3 @@ fn test_lexer_large() {
    assert_eq!( lex.next(), None );
}

View file

@@ -3,7 +3,6 @@ pub mod lexer;
pub mod parser;
pub mod curry;
pub mod lnf;
pub mod pnf;
pub mod subtype;
pub mod substitution;
pub mod unification;

View file

@@ -143,7 +143,7 @@ fn test_parser_ladder_large() {
assert_eq!(
TypeDict::new().parse(
"<Seq Date
~<TimeSince UnixEpoch>
~<TimeSince UnixEpoch>
~<Duration Seconds>
~
~<PosInt 10 BigEndian>
@@ -204,3 +204,54 @@ fn test_parser_ladder_large() {
);
}
macro_rules! lt_tokenize {
    ($symbol:ident) => {
        crate::lexer::LadderTypeToken::Symbol( "$symbol".into() )
    }
    (< $rest::tt) => {
        crate::lexer::LadderTypeToken::Open,
        lt_tokenize!($rest)
    }
    (> $rest::tt) => {
        crate::lexer::LadderTypeToken::Close,
        lt_tokenize!($rest)
    }
    (~ $rest::tt) => {
        crate::lexer::LadderTypeToken::Ladder,
        lt_tokenize!($rest)
    }
}

macro_rules! lt_parse {
    ($dict:ident, $tokens:tt*) => {
        $dict.parse_tokens(
            vec![
                lt_tokenize!($tokens)
            ].into_iter().peekable()
        )
    }
}

#[test]
fn test_proc_macro() {
    use laddertype_macro::laddertype;
    use crate::lexer::LadderTypeToken;

    let mut dict = TypeDict::new();

    let t1 = dict.parse_tokens(vec![
        Ok(crate::lexer::LadderTypeToken::Open),
        Ok(crate::lexer::LadderTypeToken::Symbol("Seq".into())),
        Ok(crate::lexer::LadderTypeToken::Symbol("Char".into())),
        Ok(crate::lexer::LadderTypeToken::Close)
    ].into_iter().peekable());

    let t2 = dict.parse_tokens(vec![
        lt_tokenize!{ <Seq Char> }
    ].into_iter().peekable());

    //lt_parse!( dict, <Seq Char> );

    assert_eq!(t1, t2);
}
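The `lt_tokenize!` / `lt_parse!` macros above are still WIP and do not compile as written: macro_rules! arms must be separated by `;`, repetition is spelled `$($rest:tt)*` rather than `$rest::tt`, `"$symbol"` does not interpolate the matched identifier, and a macro expanding to a comma-separated list cannot be spliced into `vec![…]` from expression position. One way the same idea could be completed is sketched below; this is a hypothetical rewrite, and the name `lt_tokens!` and its accumulator style are not part of the crate:
```rust
use crate::dict::TypeDict;
use crate::lexer::LadderTypeToken;

// Hypothetical replacement for the WIP `lt_tokenize!`: it builds the whole
// Vec of lexer tokens itself via an internal accumulator, so the invocation
// is a single expression.
macro_rules! lt_tokens {
    // done: emit the accumulated tokens as a vector
    (@acc [$($out:expr),*]) => {
        vec![ $($out),* ]
    };
    (@acc [$($out:expr),*] < $($rest:tt)*) => {
        lt_tokens!(@acc [$($out,)* Ok(LadderTypeToken::Open)] $($rest)*)
    };
    (@acc [$($out:expr),*] > $($rest:tt)*) => {
        lt_tokens!(@acc [$($out,)* Ok(LadderTypeToken::Close)] $($rest)*)
    };
    (@acc [$($out:expr),*] ~ $($rest:tt)*) => {
        lt_tokens!(@acc [$($out,)* Ok(LadderTypeToken::Ladder)] $($rest)*)
    };
    (@acc [$($out:expr),*] $sym:ident $($rest:tt)*) => {
        lt_tokens!(@acc [$($out,)* Ok(LadderTypeToken::Symbol(stringify!($sym).into()))] $($rest)*)
    };
    // entry point: start with an empty accumulator
    ($($t:tt)*) => {
        lt_tokens!(@acc [] $($t)*)
    };
}

#[test]
fn test_tokens_macro() {
    let mut dict = TypeDict::new();

    let t1 = dict.parse("<Seq Char>");
    let t2 = dict.parse_tokens(lt_tokens!{ <Seq Char> }.into_iter().peekable());

    assert_eq!(t1, t2);
}
```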

View file

@@ -1,41 +0,0 @@
use crate::dict::TypeDict;

#[test]
fn test_param_normalize() {
    let mut dict = TypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error"),
        dict.parse("A~B~C").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A B>~C").expect("parse error"),
        dict.parse("<A B>~C").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A B~C>").expect("parse error"),
        dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A B~C D~E>").expect("parse error"),
        dict.parse("<A B D>~<A C D>~<A C E>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
        dict.parse("<A <B C> F H H>
                   ~<A <B D> F H H>
                   ~<A <B E> F H H>
                   ~<A <B E> G H H>").expect("parse error")
            .param_normalize(),
    );
}
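To see how the last case above works out: every rung shares the head `A`, so the remaining parameters are grouped position-wise into ladders, each ladder is deduplicated and recursively normalized, and single-element ladders collapse. A trace (derived from the removed `param_normalize` code earlier in this diff):
```rust
// Input rungs: <A <B C> F H H>, <A <B D> F H H>, <A <B E> F H H>, <A <B E> G H H>
//
// parameter ladders per position (after dedup):
//   position 1: <B C>, <B D>, <B E>   ->  normalizes to <B C~D~E>
//   position 2: F, G                  ->  F~G
//   position 3: H                     ->  H
//   position 4: H                     ->  H
//
// Result: <A <B C~D~E> F~G H H>
```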