From 5b45f164fba4b309a21feec631701838ca70c651 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 27 Feb 2024 03:37:09 +0100
Subject: [PATCH 01/33] macro wip

---
 Cargo.toml         |  6 ++++++
 src/parser.rs      | 20 ++++++++++-------
 src/test/lexer.rs  |  1 -
 src/test/parser.rs | 53 +++++++++++++++++++++++++++++++++++++++++++++-
 4 files changed, 70 insertions(+), 10 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 0a57fd3..0ab224b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,3 +4,9 @@ edition = "2018"
 name = "laddertypes"
 version = "0.1.0"
 
+#[lib]
+#proc-macro = true
+
+[dependencies]
+laddertype-macro = { path = "./laddertype-macro" }
+
diff --git a/src/parser.rs b/src/parser.rs
index 85ff9b4..7292c2e 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -20,10 +20,14 @@ pub enum ParseError {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
+impl TypeDict {    
     pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
-        let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
+        let mut tokens = LadderTypeLexer::from(s.chars());
+        self.parse_tokens( tokens.peekable() )
+    }
 
+    pub fn parse_tokens<It>(&mut self, mut tokens: Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>> {
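+        // parse one complete ladder expression and require that
+        // no tokens remain in the stream afterwards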
         match self.parse_ladder(&mut tokens) {
             Ok(t) => {
                 if let Some(_tok) = tokens.peek() {
@@ -36,8 +40,8 @@ impl TypeDict {
         }
     }
 
-    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         let mut args = Vec::new();
         while let Some(tok) = tokens.peek() {
@@ -57,8 +61,8 @@ impl TypeDict {
         Err(ParseError::UnexpectedEnd)
     }
 
-    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         match tokens.next() {
             Some(Ok(LadderTypeToken::Open)) => self.parse_app(tokens),
@@ -79,8 +83,8 @@ impl TypeDict {
         }
     }
 
-    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
-    where It: Iterator<Item = char>
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<It>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = Result<LadderTypeToken, LexError>>
     {
         let mut rungs = Vec::new();
 
diff --git a/src/test/lexer.rs b/src/test/lexer.rs
index a7ce90b..61bf9ee 100644
--- a/src/test/lexer.rs
+++ b/src/test/lexer.rs
@@ -153,4 +153,3 @@ fn test_lexer_large() {
 
     assert_eq!( lex.next(), None );
 }
-
diff --git a/src/test/parser.rs b/src/test/parser.rs
index 1166229..dd17604 100644
--- a/src/test/parser.rs
+++ b/src/test/parser.rs
@@ -143,7 +143,7 @@ fn test_parser_ladder_large() {
     assert_eq!(
         TypeDict::new().parse(
             "<Seq Date
-                  ~<TimeSince UnixEpoch>
+                   ~<TimeSince UnixEpoch>
                   ~<Duration Seconds>
                   ~ℕ
                   ~<PosInt 10 BigEndian>
@@ -204,3 +204,54 @@ fn test_parser_ladder_large() {
     );
 }
 
+macro_rules! lt_tokenize {
+    ($symbol:ident) => {
+        crate::lexer::LadderTypeToken::Symbol( stringify!($symbol).into() )
+    };
+    (< $rest:tt) => {
+        crate::lexer::LadderTypeToken::Open,
+        lt_tokenize!($rest)
+    };
+    (> $rest:tt) => {
+        crate::lexer::LadderTypeToken::Close,
+        lt_tokenize!($rest)
+    };
+    (~ $rest:tt) => {
+        crate::lexer::LadderTypeToken::Ladder,
+        lt_tokenize!($rest)
+    };
+}
+
+macro_rules! lt_parse {
+    ($dict:ident, $($tokens:tt)*) => {
+        $dict.parse_tokens(
+            vec![
+                lt_tokenize!($($tokens)*)
+            ].into_iter().peekable()
+        )
+    }
+}
+
+
+#[test]
+fn test_proc_macro() {
+    use laddertype_macro::laddertype;
+    use crate::lexer::LadderTypeToken;
+
+    let mut dict = TypeDict::new();
+
+    let t1 = dict.parse_tokens(vec![
+        Ok(crate::lexer::LadderTypeToken::Open),
+        Ok(crate::lexer::LadderTypeToken::Symbol("Seq".into())),
+        Ok(crate::lexer::LadderTypeToken::Symbol("Char".into())),
+        Ok(crate::lexer::LadderTypeToken::Close)
+    ].into_iter().peekable());
+
+    let t2 = dict.parse_tokens(vec![
+        lt_tokenize!{ <Seq Char> }
+    ].into_iter().peekable());
+        //lt_parse!( dict, <Seq Char> );
+
+    assert_eq!(t1, t2);
+}
+

From 02d8815acd9c29934cd2c95180cd5da7214d9595 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 1 May 2024 15:10:29 +0200
Subject: [PATCH 02/33] add param_normalize() to get Parameter-Normal-Form
 (PNF)

---
 src/lib.rs      |   1 +
 src/pnf.rs      | 113 ++++++++++++++++++++++++++++++++++++++++++++++++
 src/test/mod.rs |   1 +
 src/test/pnf.rs |  41 ++++++++++++++++++
 4 files changed, 156 insertions(+)
 create mode 100644 src/pnf.rs
 create mode 100644 src/test/pnf.rs

diff --git a/src/lib.rs b/src/lib.rs
index 1a270cc..970c555 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,6 +7,7 @@ pub mod parser;
 pub mod unparser;
 pub mod curry;
 pub mod lnf;
+pub mod pnf;
 pub mod subtype;
 pub mod unification;
 
diff --git a/src/pnf.rs b/src/pnf.rs
new file mode 100644
index 0000000..4576be5
--- /dev/null
+++ b/src/pnf.rs
@@ -0,0 +1,113 @@
+use crate::term::TypeTerm;
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+impl TypeTerm {
+    /// transmute type into Parameter-Normal-Form (PNF)
+    ///
+    /// Example:
+    /// ```ignore
+    /// <Seq <Digit 10>>~<Seq Char>
+    /// ⇒ <Seq <Digit 10>~Char>
+    /// ```
+    pub fn param_normalize(self) -> Self {
+        match self {
+            TypeTerm::Ladder(mut rungs) => {
+                if rungs.len() > 0 {
+                    // normalize all rungs separately
+                    for r in rungs.iter_mut() {
+                        *r = r.clone().param_normalize();
+                    }
+
+                    // take top-rung
+                    match rungs.remove(0) {
+                        TypeTerm::App(params_top) => {
+                            let mut params_ladders = Vec::new();
+                            let mut tail : Vec<TypeTerm> = Vec::new();
+
+                            // append all other rungs to ladders inside
+                            // the application
+                            for p in params_top {
+                                params_ladders.push(vec![ p ]);
+                            }
+
+                            for r in rungs {
+                                match r {
+                                    TypeTerm::App(mut params_rung) => {
+                                        if params_rung.len() > 0 {
+                                            let mut first_param = params_rung.remove(0); 
+
+                                            if first_param == params_ladders[0][0] {
+                                               for (l, p) in params_ladders.iter_mut().skip(1).zip(params_rung) {
+                                                   l.push(p.param_normalize());
+                                               }
+                                            } else {
+                                               params_rung.insert(0, first_param);
+                                               tail.push(TypeTerm::App(params_rung));
+                                            }
+                                        }
+                                    }
+
+                                    TypeTerm::Ladder(mut rs) => {
+                                        for r in rs {
+                                            tail.push(r.param_normalize());
+                                        }
+                                    }
+
+                                    atomic => {
+                                        tail.push(atomic);
+                                    }
+                                }
+                            }
+
+                            let head = TypeTerm::App(
+                                params_ladders.into_iter()
+                                    .map(
+                                        |mut l| {
+                                            l.dedup();
+                                            match l.len() {
+                                                0 => TypeTerm::unit(),
+                                                1 => l.remove(0),
+                                                _ => TypeTerm::Ladder(l).param_normalize()
+                                            }
+                                        }
+                                    )
+                                    .collect()
+                            );
+
+                            if tail.len() > 0 {
+                                tail.insert(0, head);
+                                TypeTerm::Ladder(tail)
+                            } else {
+                                head
+                            }
+                        }
+
+                        TypeTerm::Ladder(mut r) => {
+                            r.append(&mut rungs);
+                            TypeTerm::Ladder(r)
+                        }
+
+                        atomic => {
+                            rungs.insert(0, atomic);
+                            TypeTerm::Ladder(rungs)
+                        }
+                    }
+                } else {
+                    TypeTerm::unit()
+                }
+            }
+
+            TypeTerm::App(params) => {
+                TypeTerm::App(
+                    params.into_iter()
+                        .map(|p| p.param_normalize())
+                        .collect())
+            }
+
+            atomic => atomic
+        }
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
diff --git a/src/test/mod.rs b/src/test/mod.rs
index d116412..29c14bc 100644
--- a/src/test/mod.rs
+++ b/src/test/mod.rs
@@ -3,6 +3,7 @@ pub mod lexer;
 pub mod parser;
 pub mod curry;
 pub mod lnf;
+pub mod pnf;
 pub mod subtype;
 pub mod substitution;
 pub mod unification;
diff --git a/src/test/pnf.rs b/src/test/pnf.rs
new file mode 100644
index 0000000..2303b3e
--- /dev/null
+++ b/src/test/pnf.rs
@@ -0,0 +1,41 @@
+use crate::dict::TypeDict;
+
+#[test]
+fn test_param_normalize() {
+    let mut dict = TypeDict::new();
+
+    assert_eq!(
+        dict.parse("A~B~C").expect("parse error"),
+        dict.parse("A~B~C").expect("parse error").param_normalize(),
+    );
+
+    assert_eq!(
+        dict.parse("<A B>~C").expect("parse error"),
+        dict.parse("<A B>~C").expect("parse error").param_normalize(),
+    );
+
+    assert_eq!(
+        dict.parse("<A B~C>").expect("parse error"),
+        dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
+    );
+
+    assert_eq!(
+        dict.parse("<A B~C D~E>").expect("parse error"),
+        dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").param_normalize(),
+    );
+
+    assert_eq!(
+        dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
+        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
+    );
+
+    assert_eq!(
+        dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
+        dict.parse("<A <B C> F H H>
+                   ~<A <B D> F H H>
+                   ~<A <B E> F H H>
+                   ~<A <B E> G H H>").expect("parse error")
+               .param_normalize(),
+    );
+}
+

From 658134d56a291c9200382d828ec4105797730a56 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 1 May 2024 17:07:47 +0200
Subject: [PATCH 03/33] readme: add syntax description and roadmap

---
 README.md | 57 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 57 insertions(+)

diff --git a/README.md b/README.md
index d4dc395..fa7cd86 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,8 @@ Rust Implementation of Ladder-Types (parsing, unification, rewriting, etc)
 
 ## Ladder Types
 
+### Motivation
+
 In order to implement complex data structures and algorithms, usually
 many layers of abstraction are built on top of each other.
 Consequently, higher-level data types are encoded into lower-level data
@@ -57,6 +59,48 @@ this:
 1696093021:1696093039:1528324679:1539892301:1638141920:1688010253
 ```
 
+### Syntax
+
+In their core form, type-terms can be one of the following:
+- (**Atomic Type**)        |   `SomeTypeName`
+- (**Literal Integer**)    |   `0` | `1` | `2` | ...
+- (**Literal Character**)  |   `'a'` | `'b'` | `'c'` | ...
+- (**Literal String**)     |   `"abc"`
+- (**Parameter Application**) | `<T1 T2>` given `T1` and `T2` are type-terms
+- (**Ladder**)                | `T1 ~ T2` given `T1` and `T2` are type-terms
+
+On top of that, the following syntax sugar is defined:
+
+#### Complex Types
+- `[ T ]` <===> `<Seq T>`
+- `{ a:A b:B }` <===> `<Struct <"a" A> <"b" B>>`
+- `a:A | b:B` <===> `<Enum <"a" A> <"b" B>>`
+
+#### Function Types
+- `A -> B` <===> `<Fn A B>`
+
+#### Reference Types
+- `*A`  <===> `<Ptr A>`
+- `&A`  <===> `<ConstRef A>`
+- `&!A` <===> `<MutRef A>`
+
+
+### Equivalences
+
+#### Currying
+`<<A B> C>` <===> `<A B C>`
+
+#### Ladder-Normal-Form
+exhaustively apply `<A B~C>` ===> `<A B>~<A C>`
+
+e.g. `[<Digit 10>]~[Char]~[Ascii]` is in **LNF**
+
+#### Parameter-Normal-Form
+exhaustively apply `<A B>~<A C>` ===> `<A B~C>`
+
+e.g. `[<Digit 10>~Char~Ascii]` is in **PNF**
+
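+A small sketch, adapted from the crate's test-suite in `src/test/pnf.rs`,
+of how the Parameter-Normal-Form can be computed programmatically:
+
+```rust
+let mut dict = laddertypes::TypeDict::new();
+
+let t = dict.parse("<Seq <Digit 10>>~<Seq Char>").unwrap();
+assert_eq!(
+    t.param_normalize(),
+    dict.parse("<Seq <Digit 10>~Char>").unwrap()
+);
+```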
+
 ## How to use this crate
 
 ```rust
@@ -73,6 +117,19 @@ fn main() {
 }
 ```
 
+## Roadmap
+
+- [x] (Un-)Parsing
+- [x] (De-)Currying
+- [x] Unification
+- [x] Ladder-Normal-Form
+- [x] Parameter-Normal-Form
+- [ ] (De-)Sugaring
+  - [ ] Seq
+  - [ ] Enum
+  - [ ] Struct
+  - [ ] References
+  - [ ] Function
 
 ## License
 [GPLv3](COPYING)

From c03db48fd27756880affd288afe031328d305d7a Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 4 Aug 2024 23:18:37 +0200
Subject: [PATCH 04/33] TypeID: add Copy trait

---
 src/dict.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/dict.rs b/src/dict.rs
index 83b63ee..93bb6ec 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -2,7 +2,7 @@ use crate::bimap::Bimap;
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-#[derive(Eq, PartialEq, Hash, Clone, Debug)]
+#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
 pub enum TypeID {
     Fun(u64),
     Var(u64)

From c6bad6046a3914a9978050786d35596c4d36c108 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 2 Oct 2024 22:37:06 +0200
Subject: [PATCH 05/33] add sugared terms & pretty printing

---
 Cargo.toml    |   5 ++
 src/dict.rs   |   4 ++
 src/lib.rs    |   8 ++-
 src/pretty.rs | 148 ++++++++++++++++++++++++++++++++++++++++++++++++++
 src/sugar.rs  |  95 ++++++++++++++++++++++++++++++++
 src/term.rs   |   2 +-
 6 files changed, 259 insertions(+), 3 deletions(-)
 create mode 100644 src/pretty.rs
 create mode 100644 src/sugar.rs

diff --git a/Cargo.toml b/Cargo.toml
index 0a57fd3..429541c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,3 +4,8 @@ edition = "2018"
 name = "laddertypes"
 version = "0.1.0"
 
+[dependencies]
+tiny-ansi = { version = "0.1.0", optional = true }
+
+[features]
+pretty = ["dep:tiny-ansi"]
diff --git a/src/dict.rs b/src/dict.rs
index 93bb6ec..419d599 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -54,6 +54,10 @@ impl TypeDict {
     pub fn get_typeid(&self, tn: &String) -> Option<TypeID> {
         self.typenames.mλ.get(tn).cloned()
     }
+
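+    /// lookup the name of a type-variable by its id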
+    pub fn get_varname(&self, var_id: u64) -> Option<String> {
+        self.typenames.my.get(&TypeID::Var(var_id)).cloned()
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
diff --git a/src/lib.rs b/src/lib.rs
index 970c555..2e9a163 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -5,6 +5,7 @@ pub mod term;
 pub mod lexer;
 pub mod parser;
 pub mod unparser;
+pub mod sugar;
 pub mod curry;
 pub mod lnf;
 pub mod pnf;
@@ -14,9 +15,12 @@ pub mod unification;
 #[cfg(test)]
 mod test;
 
+#[cfg(feature = "pretty")]
+mod pretty;
+
 pub use {
     dict::*,
     term::*,
-    unification::*
+    sugar::*,
+    unification::*,
 };
-
diff --git a/src/pretty.rs b/src/pretty.rs
new file mode 100644
index 0000000..1a4aa60
--- /dev/null
+++ b/src/pretty.rs
@@ -0,0 +1,148 @@
+use {
+    crate::TypeDict,
+    crate::sugar::SugaredTypeTerm,
+    tiny_ansi::TinyAnsi
+};
+
+impl SugaredTypeTerm {
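+    /// render this sugared type-term as a colored, indented string;
+    /// type- and variable-names are looked up in `dict`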
+    pub fn pretty(&self, dict: &TypeDict, indent: u64) -> String {
+        let indent_width = 4;
+        match self {
+            SugaredTypeTerm::TypeID(id) => {
+                format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_blue()
+            },
+
+            SugaredTypeTerm::Num(n) => {
+                format!("{}", n).bright_cyan()
+            }
+
+            SugaredTypeTerm::Char(c) => {
+                format!("'{}'", c)
+            }
+
+            SugaredTypeTerm::Univ(t) => {
+                format!("{} {} . {}",
+                    "∀".yellow().bold(),
+                    dict.get_varname(0).unwrap_or("??".into()).bright_blue(),
+                    t.pretty(dict,indent)
+                )
+            }
+
+            SugaredTypeTerm::Spec(args) => {
+                let mut s = String::new();
+                s.push_str(&"<".yellow().bold());
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    if i > 0 {
+                        s.push(' ');
+                    }
+                    s.push_str( &arg.pretty(dict,indent+1) );
+                }
+                s.push_str(&">".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Struct(args) => {
+                let mut s = String::new();
+                s.push_str(&"{".yellow().bold());
+                for arg in args {
+                    s.push('\n');
+                    for x in 0..(indent+1)*indent_width {
+                        s.push(' ');
+                    }
+                    s.push_str(&arg.pretty(dict, indent + 1));
+                    s.push_str(&";\n".bright_yellow());
+                }
+
+                s.push('\n');
+                for x in 0..indent*indent_width {
+                    s.push(' ');
+                }
+                s.push_str(&"}".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Enum(args) => {
+                let mut s = String::new();
+                s.push_str(&"(".yellow().bold());
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    s.push('\n');
+                    for x in 0..(indent+1)*indent_width {
+                        s.push(' ');
+                    }
+                    if i > 0 {
+                        s.push_str(&"| ".yellow().bold());
+                    }
+                    s.push_str(&arg.pretty(dict, indent + 1));
+                }
+
+                s.push('\n');
+                for x in 0..indent*indent_width {
+                    s.push(' ');
+                }
+                s.push_str(&")".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Seq(args) => {
+                let mut s = String::new();
+                s.push_str(&"[ ".yellow().bold());
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    if i > 0 {
+                        s.push(' ');
+                    }
+                    s.push_str(&arg.pretty(dict, indent+1));
+                }
+                s.push_str(&" ]".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Morph(args) => {
+                let mut s = String::new();
+                for arg in args {
+                    s.push_str(&"  ~~morph~~>  ".bright_yellow());
+                    s.push_str(&arg.pretty(dict, indent));
+                }
+                s
+            }
+
+            SugaredTypeTerm::Func(args) => {
+                let mut s = String::new();
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    if i > 0{
+                        s.push('\n');
+                        for x in 0..(indent*indent_width) {
+                            s.push(' ');
+                        }                
+                        s.push_str(&"-->  ".bright_yellow());
+                    } else {
+//                        s.push_str("   ");
+                    }
+                    s.push_str(&arg.pretty(dict, indent));
+                }
+                s
+            }
+
+            SugaredTypeTerm::Ladder(rungs) => {
+                let mut s = String::new();
+                for i in 0..rungs.len() {
+                    let rung = &rungs[i];
+                    if i > 0{
+                        s.push('\n');
+                        for x in 0..(indent*indent_width) {
+                            s.push(' ');
+                        }
+                        s.push_str(&"~ ".yellow());
+                    }
+                    s.push_str(&rung.pretty(dict, indent));
+                }
+                s
+            }
+        }
+    }
+}
+
+
diff --git a/src/sugar.rs b/src/sugar.rs
new file mode 100644
index 0000000..4d13f78
--- /dev/null
+++ b/src/sugar.rs
@@ -0,0 +1,95 @@
+use {
+    crate::{TypeTerm, TypeID}
+};
+
+pub enum SugaredTypeTerm {
+    TypeID(TypeID),
+    Num(i64),
+    Char(char),
+    Univ(Box< SugaredTypeTerm >),
+    Spec(Vec< SugaredTypeTerm >),
+    Func(Vec< SugaredTypeTerm >),
+    Morph(Vec< SugaredTypeTerm >),
+    Ladder(Vec< SugaredTypeTerm >),
+    Struct(Vec< SugaredTypeTerm >),
+    Enum(Vec< SugaredTypeTerm >),
+    Seq(Vec< SugaredTypeTerm >)
+}
+
+impl TypeTerm {
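+    /// convert a plain type-term into its sugared representation,
+    /// recognizing the head symbols Func, Morph, Struct, Enum, Seq,
+    /// Spec and Univ (each resolved through `dict`)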
+    pub fn sugar(self: TypeTerm, dict: &mut crate::TypeDict) -> SugaredTypeTerm {
+        match self {
+            TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
+            TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
+            TypeTerm::Char(c) => SugaredTypeTerm::Char(c),
+            TypeTerm::App(args) => if let Some(first) = args.first() {
+                if first == &dict.parse("Func").unwrap() {
+                    SugaredTypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Morph").unwrap() {
+                    SugaredTypeTerm::Morph( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Struct").unwrap() {
+                    SugaredTypeTerm::Struct( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Enum").unwrap() {
+                    SugaredTypeTerm::Enum( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Seq").unwrap() {
+                    SugaredTypeTerm::Seq( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Spec").unwrap() {
+                    SugaredTypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Univ").unwrap() {
+                    SugaredTypeTerm::Univ(Box::new(
+                        SugaredTypeTerm::Spec(
+                            args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
+                        )
+                    ))
+                }
+                else {
+                    SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
+                }
+            } else {
+                SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
+            },
+            TypeTerm::Ladder(rungs) =>            
+               SugaredTypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
+        }
+    }
+}
+
+impl SugaredTypeTerm {
+    pub fn desugar(self, dict: &mut crate::TypeDict) -> TypeTerm {
+        match self {
+            SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
+            SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
+            SugaredTypeTerm::Char(c) => TypeTerm::Char(c),
+            SugaredTypeTerm::Univ(t) => t.desugar(dict),
+            SugaredTypeTerm::Spec(ts) => TypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
+            SugaredTypeTerm::Ladder(ts) => TypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
+            SugaredTypeTerm::Func(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Func").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Morph(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Morph").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Struct(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Struct").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Enum(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Enum").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Seq(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Seq").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+        }
+    }
+}
+
diff --git a/src/term.rs b/src/term.rs
index f5e8f5f..29c7d27 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -57,7 +57,7 @@ impl TypeTerm {
     pub fn repr_as(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
         match self {
             TypeTerm::Ladder(rungs) => {
-                rungs.push(t.into());                
+                rungs.push(t.into());
             }
 
             _ => {

From 4a6a35a89752f4f6843ceb9f58aa9d5311b2544c Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 4 Feb 2025 15:15:09 +0100
Subject: [PATCH 06/33] pnf: add test for collapsing first application argument

---
 src/test/pnf.rs | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/test/pnf.rs b/src/test/pnf.rs
index 2303b3e..00daa3a 100644
--- a/src/test/pnf.rs
+++ b/src/test/pnf.rs
@@ -29,6 +29,11 @@ fn test_param_normalize() {
         dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
     );
 
+    assert_eq!(
+        dict.parse("<A~Y B>").expect("parse error"),
+        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
+    );
+
     assert_eq!(
         dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
         dict.parse("<A <B C> F H H>

From a9a35aed1b3fc69a6cda95d43f9ae27bbd0f9ccd Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 5 Feb 2025 11:06:49 +0100
Subject: [PATCH 07/33] rewrite param_normalize()

---
 src/pnf.rs      | 151 ++++++++++++++++++++++++++++--------------------
 src/test/pnf.rs |  23 ++++++--
 2 files changed, 106 insertions(+), 68 deletions(-)

diff --git a/src/pnf.rs b/src/pnf.rs
index 4576be5..d3a6b20 100644
--- a/src/pnf.rs
+++ b/src/pnf.rs
@@ -2,6 +2,20 @@ use crate::term::TypeTerm;
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm >  ) -> Vec< TypeTerm > {
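+    // if some rung of `upper` equals the first rung of `lower`,
+    // keep only the rungs of `upper` above the match and append `lower`;
+    // otherwise simply concatenate the two ladders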
+    for i in 0 .. upper.len() {
+        if upper[i] == lower[0] {
+            let mut result_ladder = Vec::<TypeTerm>::new();
+            result_ladder.append(&mut upper[0..i].iter().cloned().collect());
+            result_ladder.append(&mut lower);
+            return result_ladder;
+        }
+    }
+
+    upper.append(&mut lower);
+    upper
+}
+
 impl TypeTerm {
     /// transmute type into Parameter-Normal-Form (PNF)
     ///
@@ -10,88 +24,99 @@ impl TypeTerm {
     /// <Seq <Digit 10>>~<Seq Char>
     /// ⇒ <Seq <Digit 10>~Char>
     /// ```
-    pub fn param_normalize(self) -> Self {
+    pub fn param_normalize(mut self) -> Self {
         match self {
             TypeTerm::Ladder(mut rungs) => {
                 if rungs.len() > 0 {
-                    // normalize all rungs separately
-                    for r in rungs.iter_mut() {
-                        *r = r.clone().param_normalize();
-                    }
+                    let mut new_rungs = Vec::new();
+                    while let Some(bottom) = rungs.pop() {
+                        if let Some(last_but) = rungs.last_mut() {
+                            match (bottom, last_but) {
+                                (TypeTerm::App(bot_args), TypeTerm::App(last_args)) => {
+                                    if bot_args.len() == last_args.len() {
+                                        let mut new_rung_params = Vec::new();
+                                        let mut require_break = false;
 
-                    // take top-rung
-                    match rungs.remove(0) {
-                        TypeTerm::App(params_top) => {
-                            let mut params_ladders = Vec::new();
-                            let mut tail : Vec<TypeTerm> = Vec::new();
+                                        if bot_args.len() > 0 {
+                                            if let Ok(_idx) =  last_args[0].is_syntactic_subtype_of(&bot_args[0]) {
+                                                for i in 0 .. bot_args.len() {
 
-                            // append all other rungs to ladders inside
-                            // the application
-                            for p in params_top {
-                                params_ladders.push(vec![ p ]);
-                            }
+                                                    let spliced_type_ladder = splice_ladders(
+                                                        last_args[i].clone().get_lnf_vec(),
+                                                        bot_args[i].clone().get_lnf_vec()
+                                                    );
+                                                    let spliced_type =
+                                                        if spliced_type_ladder.len() == 1 {
+                                                            spliced_type_ladder[0].clone()
+                                                        } else if spliced_type_ladder.len() > 1 {
+                                                            TypeTerm::Ladder(spliced_type_ladder)
+                                                        } else {
+                                                            TypeTerm::unit()
+                                                        };
 
-                            for r in rungs {
-                                match r {
-                                    TypeTerm::App(mut params_rung) => {
-                                        if params_rung.len() > 0 {
-                                            let mut first_param = params_rung.remove(0); 
+                                                    new_rung_params.push( spliced_type.param_normalize() );
+                                                }
 
-                                            if first_param == params_ladders[0][0] {
-                                               for (l, p) in params_ladders.iter_mut().skip(1).zip(params_rung) {
-                                                   l.push(p.param_normalize());
-                                               }
                                             } else {
-                                               params_rung.insert(0, first_param);
-                                               tail.push(TypeTerm::App(params_rung));
+                                                new_rung_params.push(
+                                                    TypeTerm::Ladder(vec![
+                                                        last_args[0].clone(),
+                                                        bot_args[0].clone()
+                                                    ]).normalize()
+                                                );
+
+                                                for i in 1 .. bot_args.len() {
+                                                    if let Ok(_idx) = last_args[i].is_syntactic_subtype_of(&bot_args[i]) {
+                                                        let spliced_type_ladder = splice_ladders(
+                                                            last_args[i].clone().get_lnf_vec(),
+                                                            bot_args[i].clone().get_lnf_vec()
+                                                        );
+                                                        let spliced_type =
+                                                            if spliced_type_ladder.len() == 1 {
+                                                                spliced_type_ladder[0].clone()
+                                                            } else if spliced_type_ladder.len() > 1 {
+                                                                TypeTerm::Ladder(spliced_type_ladder)
+                                                            } else {
+                                                                TypeTerm::unit()
+                                                            };
+
+                                                        new_rung_params.push( spliced_type.param_normalize() );
+                                                    } else {
+                                                        new_rung_params.push( bot_args[i].clone() );
+                                                        require_break = true;
+                                                    }
+                                                }
                                             }
                                         }
-                                    }
 
-                                    TypeTerm::Ladder(mut rs) => {
-                                        for r in rs {
-                                            tail.push(r.param_normalize());
+                                        if require_break {
+                                            new_rungs.push( TypeTerm::App(new_rung_params) );
+                                        } else {
+                                            rungs.pop();
+                                            rungs.push(TypeTerm::App(new_rung_params));
                                         }
-                                    }
 
-                                    atomic => {
-                                        tail.push(atomic);
+                                    } else {
+                                        new_rungs.push( TypeTerm::App(bot_args) );
                                     }
                                 }
+                                (bottom, _last_but) => {
+                                    new_rungs.push( bottom );
+                                }
                             }
-
-                            let head = TypeTerm::App(
-                                params_ladders.into_iter()
-                                    .map(
-                                        |mut l| {
-                                            l.dedup();
-                                            match l.len() {
-                                                0 => TypeTerm::unit(),
-                                                1 => l.remove(0),
-                                                _ => TypeTerm::Ladder(l).param_normalize()
-                                            }
-                                        }
-                                    )
-                                    .collect()
-                            );
-
-                            if tail.len() > 0 {
-                                tail.insert(0, head);
-                                TypeTerm::Ladder(tail)
-                            } else {
-                                head
-                            }
+                        } else {
+                            new_rungs.push( bottom );
                         }
+                    }
 
-                        TypeTerm::Ladder(mut r) => {
-                            r.append(&mut rungs);
-                            TypeTerm::Ladder(r)
-                        }
+                    new_rungs.reverse();
 
-                        atomic => {
-                            rungs.insert(0, atomic);
-                            TypeTerm::Ladder(rungs)
-                        }
+                    if new_rungs.len() > 1 {
+                        TypeTerm::Ladder(new_rungs)
+                    } else if new_rungs.len() == 1 {
+                        new_rungs[0].clone()
+                    } else {
+                        TypeTerm::unit()
                     }
                 } else {
                     TypeTerm::unit()
diff --git a/src/test/pnf.rs b/src/test/pnf.rs
index 00daa3a..e668849 100644
--- a/src/test/pnf.rs
+++ b/src/test/pnf.rs
@@ -19,27 +19,40 @@ fn test_param_normalize() {
         dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
     );
 
+    assert_eq!(
+        dict.parse("<A~Y B>").expect("parse error"),
+        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
+    );
+
     assert_eq!(
         dict.parse("<A B~C D~E>").expect("parse error"),
         dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").param_normalize(),
     );
 
+    assert_eq!(
+        dict.parse("<A~X B~C D~E>").expect("parse error"),
+        dict.parse("<A B D>~<A B~C E>~<X C E>").expect("parse errror").param_normalize(),
+    );
+
     assert_eq!(
         dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
         dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
     );
 
     assert_eq!(
-        dict.parse("<A~Y B>").expect("parse error"),
-        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
+        dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~x86.UInt8>").expect("parse error").param_normalize(),
+        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
+    );
+    assert_eq!(
+        dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> x86.UInt8>").expect("parse error").param_normalize(),
+        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
     );
 
     assert_eq!(
-        dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
+        dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
         dict.parse("<A <B C> F H H>
                    ~<A <B D> F H H>
-                   ~<A <B E> F H H>
-                   ~<A <B E> G H H>").expect("parse error")
+                   ~<A~Y <B E> F H H>").expect("parse error")
                .param_normalize(),
     );
 }

From e53edd23b96163b0f55b4ff98cdbdab2068f2f49 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sat, 8 Feb 2025 17:05:33 +0100
Subject: [PATCH 08/33] unification: remove unreachable pattern

---
 src/unification.rs | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/src/unification.rs b/src/unification.rs
index abbc1fe..ac7ec19 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -67,10 +67,6 @@ impl UnificationProblem {
                 }
             }
 
-            (TypeTerm::Ladder(l1), TypeTerm::Ladder(l2)) => {
-                Err(UnificationError{ addr, t1: lhs, t2: rhs })
-            }
-
             _ => Err(UnificationError{ addr, t1: lhs, t2: rhs})
         }
     }

From e17a1a9462f8757fe4c5e4127becaf997a078e28 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 9 Feb 2025 16:58:58 +0100
Subject: [PATCH 09/33] add subtype unification

---
 src/test/unification.rs |  57 +++++++++++++++++++++++
 src/unification.rs      | 101 +++++++++++++++++++++++++++++++++++++---
 2 files changed, 151 insertions(+), 7 deletions(-)

diff --git a/src/test/unification.rs b/src/test/unification.rs
index 34d355d..239b384 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -116,3 +116,60 @@ fn test_unification() {
     );
 }
 
+
+#[test]
+fn test_subtype_unification() {
+    let mut dict = TypeDict::new();
+
+    dict.add_varname(String::from("T"));
+    dict.add_varname(String::from("U"));
+    dict.add_varname(String::from("V"));
+    dict.add_varname(String::from("W"));
+
+    assert_eq!(
+        UnificationProblem::new(vec![
+            (dict.parse("<Seq~T <Digit 10> ~ Char>").unwrap(),
+                dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap()),
+        ]).solve_subtype(),
+        Ok(
+            vec![
+                // T
+                (TypeID::Var(0), dict.parse("<LengthPrefix x86.UInt64>").unwrap())
+            ].into_iter().collect()
+        )
+    );
+
+    assert_eq!(
+        UnificationProblem::new(vec![
+            (dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
+            (dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
+        ]).solve_subtype(),
+        Ok(
+            vec![
+                // T
+                (TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),
+
+                // U
+                (TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
+            ].into_iter().collect()
+        )
+    );
+
+    assert_eq!(
+        UnificationProblem::new(vec![
+            (dict.parse("<Seq T>").unwrap(),
+                dict.parse("<Seq W~<Seq Char>>").unwrap()),
+            (dict.parse("<Seq ℕ~<PosInt 10 BigEndian>>").unwrap(),
+                dict.parse("<Seq~<LengthPrefix x86.UInt64> W>").unwrap()),
+        ]).solve_subtype(),
+        Ok(
+            vec![
+                // W
+                (TypeID::Var(3), dict.parse("ℕ~<PosInt 10 BigEndian>").unwrap()),
+
+                // T
+                (TypeID::Var(0), dict.parse("ℕ~<PosInt 10 BigEndian>~<Seq Char>").unwrap())
+            ].into_iter().collect()
+        )
+    );
+}
diff --git a/src/unification.rs b/src/unification.rs
index ac7ec19..443a9a2 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -25,6 +25,86 @@ impl UnificationProblem {
         }
     }
 
+    pub fn eval_subtype(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<(), UnificationError> {
+        match (lhs.clone(), rhs.clone()) {
+            (TypeTerm::TypeID(TypeID::Var(varid)), t) |
+            (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
+                self.σ.insert(TypeID::Var(varid), t.clone());
+
+                // update all values in substitution
+                let mut new_σ = HashMap::new();
+                for (v, tt) in self.σ.iter() {
+                    let mut tt = tt.clone().normalize();
+                    tt.apply_substitution(&|v| self.σ.get(v).cloned());
+                    eprintln!("update σ : {:?} --> {:?}", v, tt);
+                    new_σ.insert(v.clone(), tt);
+                }
+                self.σ = new_σ;
+
+                Ok(())
+            }
+
+            (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
+                if a1 == a2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+            }
+            (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
+                if n1 == n2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+            }
+            (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
+                if c1 == c2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+            }
+
+            (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
+                eprintln!("unification: check two ladders");
+                for i in 0..a1.len() {
+                    if let Some((_, _)) = a1[i].is_semantic_subtype_of( &a2[0] ) {
+                        for j in 0..a2.len() {
+                            if i+j < a1.len() {
+                                let mut new_addr = addr.clone();
+                                new_addr.push(i+j);
+                                self.eqs.push((a1[i+j].clone(), a2[j].clone(), new_addr))
+                            }
+                        }
+                        return Ok(())
+                    }
+                }
+
+                Err(UnificationError{ addr, t1: lhs, t2: rhs })
+            },
+
+            (t, TypeTerm::Ladder(a1)) => {
+                if let Some((idx, τ)) = TypeTerm::Ladder(a1.clone()).is_semantic_subtype_of(&t) {
+                    Ok(())
+                } else {
+                    Err(UnificationError{ addr, t1: TypeTerm::Ladder(a1), t2: t })
+                }
+            }
+
+            (TypeTerm::Ladder(a1), t) => {
+                if let Some((idx, τ)) = TypeTerm::Ladder(a1.clone()).is_semantic_subtype_of(&t) {
+                    Ok(())
+                } else {
+                    Err(UnificationError{ addr, t1: TypeTerm::Ladder(a1), t2: t })
+                }
+            }
+
+            (TypeTerm::App(a1), TypeTerm::App(a2)) => {
+                if a1.len() == a2.len() {
+                    for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
+                        let mut new_addr = addr.clone();
+                        new_addr.push(i);
+                        self.eqs.push((x, y, new_addr));
+                    }
+                    Ok(())
+                } else {
+                    Err(UnificationError{ addr, t1: lhs, t2: rhs })
+                }
+            }
+
+            _ => Err(UnificationError{ addr, t1: lhs, t2: rhs})
+        }
+    }
+
     pub fn eval_equation(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<(), UnificationError> {
         match (lhs.clone(), rhs.clone()) {
             (TypeTerm::TypeID(TypeID::Var(varid)), t) |
@@ -72,14 +152,22 @@ impl UnificationProblem {
     }
 
     pub fn solve(mut self) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
-        while self.eqs.len() > 0 {
-            while let Some( (mut lhs,mut rhs,addr) ) = self.eqs.pop() {
-                lhs.apply_substitution(&|v| self.σ.get(v).cloned());
-                rhs.apply_substitution(&|v| self.σ.get(v).cloned());
-                self.eval_equation(lhs, rhs, addr)?;
-            }
+        while let Some( (mut lhs,mut rhs,addr) ) = self.eqs.pop() {
+            lhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            rhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            self.eval_equation(lhs, rhs, addr)?;
         }
+        Ok(self.σ)
+    }
 
+
+    pub fn solve_subtype(mut self) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
+        while let Some( (mut lhs,mut rhs,addr) ) = self.eqs.pop() {
+            lhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            rhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            eprintln!("eval subtype LHS={:?} || RHS={:?}", lhs, rhs);
+            self.eval_subtype(lhs, rhs, addr)?;
+        }
         Ok(self.σ)
     }
 }
@@ -93,4 +181,3 @@ pub fn unify(
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-

From f05ef075896b66f25b3997a7ae2b4ea1dc048936 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Thu, 13 Feb 2025 12:27:48 +0100
Subject: [PATCH 10/33] subtype unification

---
 src/unification.rs | 149 +++++++++++++++++++++++++++++++++++++--------
 1 file changed, 125 insertions(+), 24 deletions(-)

diff --git a/src/unification.rs b/src/unification.rs
index 443a9a2..1c0d2d9 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -25,7 +25,20 @@ impl UnificationProblem {
         }
     }
 
-    pub fn eval_subtype(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<(), UnificationError> {
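+    /// re-apply the current substitution to its own right-hand sides,
+    /// so that variables assigned earlier pick up later assignments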
+    pub fn reapply_subst(&mut self) {
+        // update all values in substitution
+        let mut new_σ = HashMap::new();
+        for (v, tt) in self.σ.iter() {
+            let mut tt = tt.clone().normalize();
+            tt.apply_substitution(&|v| self.σ.get(v).cloned());
+            tt = tt.normalize();
+            //eprintln!("update σ : {:?} --> {:?}", v, tt);
+            new_σ.insert(v.clone(), tt);
+        }
+        self.σ = new_σ;
+    }
+
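+    /// evaluate a single subtype constraint `lhs ≤ rhs`;
+    /// on success, return the "halo", i.e. the parts of `lhs`
+    /// that lie above `rhs` in the representational ladder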
+    pub fn eval_subtype(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<Vec<TypeTerm>, UnificationError> {
         match (lhs.clone(), rhs.clone()) {
             (TypeTerm::TypeID(TypeID::Var(varid)), t) |
             (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
@@ -45,44 +58,56 @@ impl UnificationProblem {
             }
 
             (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
-                if a1 == a2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if a1 == a2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
             }
             (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
-                if n1 == n2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if n1 == n2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
             }
             (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
-                if c1 == c2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if c1 == c2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
             }
 
             (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
-                eprintln!("unification: check two ladders");
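+                // walk down the LHS ladder until a rung subtype-unifies
+                // with the top of the RHS ladder; the rungs skipped on
+                // the way down form the "halo" returned to the caller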
+                let mut halo = Vec::new();
                 for i in 0..a1.len() {
-                    if let Some((_, _)) = a1[i].is_semantic_subtype_of( &a2[0] ) {
+                    if let Ok((r_halo, σ)) = subtype_unify( &a1[i], &a2[0] ) {
+                        //eprintln!("unified ladders at {}, r_halo = {:?}", i, r_halo);
+                        for (k,v) in σ.iter() {
+                            self.σ.insert(k.clone(),v.clone());
+                        }
+
                         for j in 0..a2.len() {
                             if i+j < a1.len() {
                                 let mut new_addr = addr.clone();
                                 new_addr.push(i+j);
-                                self.eqs.push((a1[i+j].clone(), a2[j].clone(), new_addr))
+                                self.eqs.push((a1[i+j].clone().apply_substitution(&|k| σ.get(k).cloned()).clone(),
+                                    a2[j].clone().apply_substitution(&|k| σ.get(k).cloned()).clone(),
+                                    new_addr));
                             }
                         }
-                        return Ok(())
+                        return Ok(halo)
+                    } else {
+                        halo.push(a1[i].clone());
+                        //eprintln!("could not unify ladders");
                     }
                 }
 
                 Err(UnificationError{ addr, t1: lhs, t2: rhs })
             },
 
-            (t, TypeTerm::Ladder(a1)) => {
-                if let Some((idx, τ)) = TypeTerm::Ladder(a1.clone()).is_semantic_subtype_of(&t) {
-                    Ok(())
+            (t, TypeTerm::Ladder(mut a1)) => {
+                if let Ok(mut halo) = self.eval_subtype( t.clone(), a1.first().unwrap().clone(), addr.clone() ) {
+                    a1.append(&mut halo);
+                    Ok(a1)
                 } else {
-                    Err(UnificationError{ addr, t1: TypeTerm::Ladder(a1), t2: t })
+                    Err(UnificationError{ addr, t1: t, t2: TypeTerm::Ladder(a1) })
                 }
             }
 
-            (TypeTerm::Ladder(a1), t) => {
-                if let Some((idx, τ)) = TypeTerm::Ladder(a1.clone()).is_semantic_subtype_of(&t) {
-                    Ok(())
+            (TypeTerm::Ladder(mut a1), t) => {
+                if let Ok(mut halo) = self.eval_subtype( a1.pop().unwrap(), t.clone(), addr.clone() ) {
+                    a1.append(&mut halo);
+                    Ok(a1)
                 } else {
                     Err(UnificationError{ addr, t1: TypeTerm::Ladder(a1), t2: t })
                 }
@@ -90,12 +115,31 @@ impl UnificationProblem {
 
             (TypeTerm::App(a1), TypeTerm::App(a2)) => {
                 if a1.len() == a2.len() {
+                    let mut halo_args = Vec::new();
+                    let mut halo_required = false;
+
                     for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
                         let mut new_addr = addr.clone();
                         new_addr.push(i);
-                        self.eqs.push((x, y, new_addr));
+                        //self.eqs.push((x, y, new_addr));
+
+                        if let Ok(halo) = self.eval_subtype( x.clone(), y.clone(), new_addr ) {
+                            if halo.len() == 0 {
+                                halo_args.push(y.get_lnf_vec().first().unwrap().clone());
+                            } else {
+                                halo_args.push(TypeTerm::Ladder(halo));
+                                halo_required = true;
+                            }
+                        } else {
+                            return Err(UnificationError{ addr, t1: x, t2: y })
+                        }
+                    }
+
+                    if halo_required {
+                        Ok(vec![ TypeTerm::App(halo_args) ])
+                    } else {
+                        Ok(vec![])
                     }
-                    Ok(())
                 } else {
                     Err(UnificationError{ addr, t1: lhs, t2: rhs })
                 }
@@ -118,7 +162,9 @@ impl UnificationProblem {
                     tt.apply_substitution(&|v| self.σ.get(v).cloned());
                     new_σ.insert(v.clone(), tt);
                 }
-                self.σ = new_σ;
+
+                self.σ.insert(TypeID::Var(varid), t.clone());
+                self.reapply_subst();
 
                 Ok(())
             }
@@ -160,16 +206,63 @@ impl UnificationProblem {
         Ok(self.σ)
     }
 
+    pub fn solve_subtype(mut self) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
 
-    pub fn solve_subtype(mut self) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
-        while let Some( (mut lhs,mut rhs,addr) ) = self.eqs.pop() {
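+        /// insert `halo_type` into `t` at the sub-term addressed by `addr`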
+        pub fn insert_halo_at(
+            t: &mut TypeTerm,
+            mut addr: Vec<usize>,
+            halo_type: TypeTerm
+        ) {
+            match t {
+                TypeTerm::Ladder(rungs) => {
+                    if let Some(idx) = addr.pop() {
+                        for i in rungs.len()..idx+1 {
+                            rungs.push(TypeTerm::unit());
+                        }
+                        insert_halo_at( &mut rungs[idx], addr, halo_type );
+                    } else {
+                        rungs.push(halo_type);
+                    }
+                },
+
+                TypeTerm::App(args) => {
+                    if let Some(idx) = addr.pop() {
+                        insert_halo_at( &mut args[idx], addr, halo_type );
+                    } else {
+                        *t = TypeTerm::Ladder(vec![
+                            halo_type,
+                            t.clone()
+                        ]);
+                    }
+                }
+
+                atomic => {
+
+                }
+            }
+        }
+
+        //let mut halo_type = TypeTerm::unit();
+        let mut halo_rungs = Vec::new();
+
+        while let Some( (mut lhs, mut rhs, mut addr) ) = self.eqs.pop() {
             lhs.apply_substitution(&|v| self.σ.get(v).cloned());
             rhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            eprintln!("eval subtype LHS={:?} || RHS={:?}", lhs, rhs);
-            self.eval_subtype(lhs, rhs, addr)?;
+            //eprintln!("eval subtype\n\tLHS={:?}\n\tRHS={:?}\n", lhs, rhs);
+            let mut new_halo = self.eval_subtype(lhs, rhs, addr.clone())?;
+            if new_halo.len() > 0 {
+                //insert_halo_at( &mut halo_type, addr, TypeTerm::Ladder(new_halo) );
+                if addr.len() == 0 {
+                    halo_rungs.push(TypeTerm::Ladder(new_halo))
+                }
+            }
         }
-        Ok(self.σ)
-    }
+
+        let mut halo_type = TypeTerm::Ladder(halo_rungs);
+        halo_type = halo_type.normalize();
+        halo_type = halo_type.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
+
+        Ok((halo_type.param_normalize(), self.σ))
 }
 
 pub fn unify(
@@ -180,4 +273,12 @@ pub fn unify(
     unification.solve()
 }
 
+pub fn subtype_unify(
+    t1: &TypeTerm,
+    t2: &TypeTerm
+) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
+    let mut unification = UnificationProblem::new(vec![ (t1.clone(), t2.clone()) ]);
+    unification.solve_subtype()
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

From b502b62479b42806b8076c23881a4744fda42359 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sat, 15 Feb 2025 17:21:12 +0100
Subject: [PATCH 11/33] unification: reject non-identity loops & add test cases
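
A rough sketch of the new check, written in the style of the test modules
(illustrative only; test name and imports are not part of this patch):

    use crate::dict::*;

    #[test]
    fn occurs_check_sketch() {
        let mut dict = TypeDict::new();
        dict.add_varname(String::from("T"));   // T becomes TypeID::Var(0)

        // contains_var() finds T anywhere inside a term ...
        assert!(   dict.parse("<Seq <Seq T>>").unwrap().contains_var(0) );
        assert!( ! dict.parse("<Seq Char>").unwrap().contains_var(0) );

        // ... so a non-identity loop like T = <Seq T> is rejected now
        assert!( crate::unify(
            &dict.parse("T").unwrap(),
            &dict.parse("<Seq T>").unwrap()
        ).is_err() );
    }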

---
 src/term.rs             | 16 +++++++++++++++
 src/test/unification.rs | 30 ++++++++++++++++++++++------
 src/unification.rs      | 44 +++++++++++++++++------------------------
 3 files changed, 58 insertions(+), 32 deletions(-)

diff --git a/src/term.rs b/src/term.rs
index 29c7d27..240996f 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -79,6 +79,22 @@ impl TypeTerm {
         self.arg(TypeTerm::Char(c))
     }
 
+    pub fn contains_var(&self, var_id: u64) -> bool {
+        match self {
+            TypeTerm::TypeID(TypeID::Var(v)) => (&var_id == v),
+            TypeTerm::App(args) |
+            TypeTerm::Ladder(args) => {
+                for a in args.iter() {
+                    if a.contains_var(var_id) {
+                        return true;
+                    }
+                }
+                false
+            }
+            _ => false
+        }
+    }
+
     /// recursively apply substitution to all subterms,
     /// which will replace all occurences of variables which map
     /// some type-term in `subst`
diff --git a/src/test/unification.rs b/src/test/unification.rs
index 239b384..6c55a80 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -61,6 +61,19 @@ fn test_unification_error() {
             t2: dict.parse("B").unwrap()
         })
     );
+
+    assert_eq!(
+        crate::unify(
+            &dict.parse("T").unwrap(),
+            &dict.parse("<Seq T>").unwrap()
+        ),
+
+        Err(UnificationError {
+            addr: vec![],
+            t1: dict.parse("T").unwrap(),
+            t2: dict.parse("<Seq T>").unwrap()
+        })
+    );
 }
 
 #[test]
@@ -131,12 +144,13 @@ fn test_subtype_unification() {
             (dict.parse("<Seq~T <Digit 10> ~ Char>").unwrap(),
                 dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap()),
         ]).solve_subtype(),
-        Ok(
+        Ok((
+            dict.parse("<Seq <Digit 10>>").unwrap(),
             vec![
                 // T
                 (TypeID::Var(0), dict.parse("<LengthPrefix x86.UInt64>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 
     assert_eq!(
@@ -144,7 +158,8 @@ fn test_subtype_unification() {
             (dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
             (dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
         ]).solve_subtype(),
-        Ok(
+        Ok((
+            TypeTerm::unit(),
             vec![
                 // T
                 (TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),
@@ -152,7 +167,7 @@ fn test_subtype_unification() {
                 // U
                 (TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 
     assert_eq!(
@@ -162,7 +177,10 @@ fn test_subtype_unification() {
             (dict.parse("<Seq ℕ~<PosInt 10 BigEndian>>").unwrap(),
                 dict.parse("<Seq~<LengthPrefix x86.UInt64> W>").unwrap()),
         ]).solve_subtype(),
-        Ok(
+        Ok((
+            dict.parse("
+                <Seq~<LengthPrefix x86.UInt64> ℕ~<PosInt 10 BigEndian>>
+            ").unwrap(),
             vec![
                 // W
                 (TypeID::Var(3), dict.parse("ℕ~<PosInt 10 BigEndian>").unwrap()),
@@ -170,6 +188,6 @@ fn test_subtype_unification() {
                 // T
                 (TypeID::Var(0), dict.parse("ℕ~<PosInt 10 BigEndian>~<Seq Char>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 }
diff --git a/src/unification.rs b/src/unification.rs
index 1c0d2d9..e605af4 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -42,19 +42,15 @@ impl UnificationProblem {
         match (lhs.clone(), rhs.clone()) {
             (TypeTerm::TypeID(TypeID::Var(varid)), t) |
             (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-                self.σ.insert(TypeID::Var(varid), t.clone());
-
-                // update all values in substitution
-                let mut new_σ = HashMap::new();
-                for (v, tt) in self.σ.iter() {
-                    let mut tt = tt.clone().normalize();
-                    tt.apply_substitution(&|v| self.σ.get(v).cloned());
-                    eprintln!("update σ : {:?} --> {:?}", v, tt);
-                    new_σ.insert(v.clone(), tt);
+                if ! t.contains_var( varid ) {
+                    self.σ.insert(TypeID::Var(varid), t.clone());
+                    self.reapply_subst();
+                    Ok(vec![])
+                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
+                    Ok(vec![])
+                } else {
+                    Err(UnificationError{ addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
                 }
-                self.σ = new_σ;
-
-                Ok(())
             }
 
             (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
@@ -153,20 +149,15 @@ impl UnificationProblem {
         match (lhs.clone(), rhs.clone()) {
             (TypeTerm::TypeID(TypeID::Var(varid)), t) |
             (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-                self.σ.insert(TypeID::Var(varid), t.clone());
-
-                // update all values in substitution
-                let mut new_σ = HashMap::new();
-                for (v, tt) in self.σ.iter() {
-                    let mut tt = tt.clone();
-                    tt.apply_substitution(&|v| self.σ.get(v).cloned());
-                    new_σ.insert(v.clone(), tt);
+                if ! t.contains_var( varid ) {
+                    self.σ.insert(TypeID::Var(varid), t.clone());
+                    self.reapply_subst();
+                    Ok(())
+                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
+                    Ok(())
+                } else {
+                    Err(UnificationError{ addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
                 }
-
-                self.σ.insert(TypeID::Var(varid), t.clone());
-                self.reapply_subst();
-
-                Ok(())
             }
 
             (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
@@ -182,7 +173,7 @@ impl UnificationProblem {
             (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) |
             (TypeTerm::App(a1), TypeTerm::App(a2)) => {
                 if a1.len() == a2.len() {
-                    for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
+                    for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
                         let mut new_addr = addr.clone();
                         new_addr.push(i);
                         self.eqs.push((x, y, new_addr));
@@ -263,6 +254,7 @@ impl UnificationProblem {
         halo_type = halo_type.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
 
         Ok((halo_type.param_normalize(), self.σ))
+    }
 }
 
 pub fn unify(

From 1b8768894e53af37d3e0d8c1a619dc24be5cee3a Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 25 Feb 2025 22:57:50 +0100
Subject: [PATCH 12/33] fix unification test

---
 src/test/unification.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/test/unification.rs b/src/test/unification.rs
index 6c55a80..2d03d73 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -174,12 +174,12 @@ fn test_subtype_unification() {
         UnificationProblem::new(vec![
             (dict.parse("<Seq T>").unwrap(),
                 dict.parse("<Seq W~<Seq Char>>").unwrap()),
-            (dict.parse("<Seq ℕ~<PosInt 10 BigEndian>>").unwrap(),
-                dict.parse("<Seq~<LengthPrefix x86.UInt64> W>").unwrap()),
+            (dict.parse("<Seq~<LengthPrefix x86.UInt64> ℕ~<PosInt 10 BigEndian>>").unwrap(),
+                dict.parse("<<LengthPrefix x86.UInt64> W>").unwrap()),
         ]).solve_subtype(),
         Ok((
             dict.parse("
-                <Seq~<LengthPrefix x86.UInt64> ℕ~<PosInt 10 BigEndian>>
+                <Seq ℕ~<PosInt 10 BigEndian>>
             ").unwrap(),
             vec![
                 // W

From 099ba9b0dfd39e3ce26c98cf4299dcaee2549565 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 4 Feb 2025 14:26:37 +0100
Subject: [PATCH 13/33] pretty: output escape characters for \0 and \n

---
 src/pretty.rs | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/pretty.rs b/src/pretty.rs
index 1a4aa60..c5edf3d 100644
--- a/src/pretty.rs
+++ b/src/pretty.rs
@@ -17,7 +17,11 @@ impl SugaredTypeTerm {
             }
 
             SugaredTypeTerm::Char(c) => {
-                format!("'{}'", c)
+                match c {
+                    '\0' => format!("'\\0'"),
+                    '\n' => format!("'\\n'"),
+                    _ => format!("'{}'", c)
+                }
             }
 
             SugaredTypeTerm::Univ(t) => {
@@ -116,7 +120,7 @@ impl SugaredTypeTerm {
                         s.push('\n');
                         for x in 0..(indent*indent_width) {
                             s.push(' ');
-                        }                
+                        }
                         s.push_str(&"-->  ".bright_yellow());
                     } else {
 //                        s.push_str("   ");
@@ -144,5 +148,3 @@ impl SugaredTypeTerm {
         }
     }
 }
-
-

From af595bffdec3662536c44153f577838c9725aa56 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Fri, 14 Feb 2025 20:55:02 +0100
Subject: [PATCH 14/33] pretty format: use different colors for variables

---
 src/pretty.rs | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/src/pretty.rs b/src/pretty.rs
index c5edf3d..40a7541 100644
--- a/src/pretty.rs
+++ b/src/pretty.rs
@@ -1,5 +1,5 @@
 use {
-    crate::TypeDict,
+    crate::{TypeDict, dict::TypeID},
     crate::sugar::SugaredTypeTerm,
     tiny_ansi::TinyAnsi
 };
@@ -9,11 +9,18 @@ impl SugaredTypeTerm {
         let indent_width = 4;
         match self {
             SugaredTypeTerm::TypeID(id) => {
-                format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_blue()
+                match id {
+                    TypeID::Var(varid) => {
+                        format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_magenta()
+                    },
+                    TypeID::Fun(funid) => {
+                        format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).blue().bold()
+                    }
+                }
             },
 
             SugaredTypeTerm::Num(n) => {
-                format!("{}", n).bright_cyan()
+                format!("{}", n).green().bold()
             }
 
             SugaredTypeTerm::Char(c) => {
@@ -34,7 +41,7 @@ impl SugaredTypeTerm {
 
             SugaredTypeTerm::Spec(args) => {
                 let mut s = String::new();
-                s.push_str(&"<".yellow().bold());
+                s.push_str(&"<".yellow());
                 for i in 0..args.len() {
                     let arg = &args[i];
                     if i > 0 {
@@ -42,7 +49,7 @@ impl SugaredTypeTerm {
                     }
                     s.push_str( &arg.pretty(dict,indent+1) );
                 }
-                s.push_str(&">".yellow().bold());
+                s.push_str(&">".yellow());
                 s
             }
 

From 25649084ab7be6a7cfd0e4b2a4d9338da02018d2 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Thu, 3 Oct 2024 23:40:04 +0200
Subject: [PATCH 15/33] make TypeDict a trait & BimapTypeDict an impl

- add TypeDict trait
- impl TypeDict for BimapTypeDict
- add Debug for Bimap & BimapTypeDict
- adapt tests
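
A rough usage sketch of the new setup (illustrative only, in the style of the
test modules; the shared dict relies on the blanket impl for Arc<RwLock<T>>):

    use crate::{dict::*, parser::*, unparser::*};
    use std::sync::{Arc, RwLock};

    #[test]
    fn typedict_trait_sketch() {
        // any T: TypeDict gets parse() / unparse() through the new traits
        let mut dict = BimapTypeDict::new();
        let t = dict.parse("<Seq Char>").expect("parse error");
        eprintln!("{}", dict.unparse(&t));

        // dictionaries can also be shared
        let mut shared = Arc::new(RwLock::new(BimapTypeDict::new()));
        shared.add_varname("T".into());
        let u = shared.parse("<Seq T>").expect("parse error");
        eprintln!("{}", shared.unparse(&u));
    }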
---
 src/bimap.rs             |  1 +
 src/dict.rs              | 77 ++++++++++++++++++++++++++++++----------
 src/parser.rs            | 17 +++++++--
 src/sugar.rs             |  7 ++--
 src/test/curry.rs        |  8 ++---
 src/test/lnf.rs          |  7 ++--
 src/test/parser.rs       | 33 +++++++++--------
 src/test/pnf.rs          |  5 ++-
 src/test/substitution.rs |  5 ++-
 src/test/subtype.rs      | 21 ++++++-----
 src/test/unification.rs  | 11 +++---
 src/unparser.rs          |  8 +++--
 12 files changed, 126 insertions(+), 74 deletions(-)

diff --git a/src/bimap.rs b/src/bimap.rs
index 9ea311a..9d0a96c 100644
--- a/src/bimap.rs
+++ b/src/bimap.rs
@@ -2,6 +2,7 @@ use std::{collections::HashMap, hash::Hash};
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Debug)]
 pub struct Bimap<V: Eq + Hash, Λ: Eq + Hash> {
     pub mλ: HashMap<V, Λ>,
     pub my: HashMap<Λ, V>,
diff --git a/src/dict.rs b/src/dict.rs
index 419d599..67e22b3 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -8,9 +8,28 @@ pub enum TypeID {
     Var(u64)
 }
 
+pub trait TypeDict {
+    fn insert(&mut self, name: String, id: TypeID);
+    fn add_varname(&mut self, vn: String) -> TypeID;
+    fn add_typename(&mut self, tn: String) -> TypeID;
+    fn get_typeid(&self, tn: &String) -> Option<TypeID>;
+    fn get_typename(&self, tid: &TypeID) -> Option<String>;
+
+    fn get_varname(&self, var_id: u64) -> Option<String> {
+        self.get_typename(&TypeID::Var(var_id))
+    }
+
+    fn add_synonym(&mut self, new: String, old: String) {
+        if let Some(tyid) = self.get_typeid(&old) {
+            self.insert(new, tyid);
+        }
+    }
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-pub struct TypeDict {
+#[derive(Debug)]
+pub struct BimapTypeDict {
     typenames: Bimap<String, TypeID>,
     type_lit_counter: u64,
     type_var_counter: u64,
@@ -18,46 +37,66 @@ pub struct TypeDict {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
+impl BimapTypeDict {
     pub fn new() -> Self {
-        TypeDict {
+        BimapTypeDict {
             typenames: Bimap::new(),
             type_lit_counter: 0,
             type_var_counter: 0,
         }
     }
+}
 
-    pub fn add_varname(&mut self, tn: String) -> TypeID {
+impl TypeDict for BimapTypeDict {
+    fn insert(&mut self, name: String, id: TypeID) {
+        self.typenames.insert(name, id);
+    }
+
+    fn add_varname(&mut self, tn: String) -> TypeID {
         let tyid = TypeID::Var(self.type_var_counter);
         self.type_var_counter += 1;
-        self.typenames.insert(tn, tyid.clone());
+        self.insert(tn, tyid.clone());
         tyid
     }
 
-    pub fn add_typename(&mut self, tn: String) -> TypeID {
+    fn add_typename(&mut self, tn: String) -> TypeID {
         let tyid = TypeID::Fun(self.type_lit_counter);
         self.type_lit_counter += 1;
-        self.typenames.insert(tn, tyid.clone());
+        self.insert(tn, tyid.clone());
         tyid
     }
 
-    pub fn add_synonym(&mut self, new: String, old: String) {
-        if let Some(tyid) = self.get_typeid(&old) {
-            self.typenames.insert(new, tyid);
-        }
-    }
-
-    pub fn get_typename(&self, tid: &TypeID) -> Option<String> {
+    fn get_typename(&self, tid: &TypeID) -> Option<String> {
         self.typenames.my.get(tid).cloned()
     }
 
-    pub fn get_typeid(&self, tn: &String) -> Option<TypeID> {
+    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
         self.typenames.mλ.get(tn).cloned()
     }
-
-    pub fn get_varname(&self, var_id: u64) -> Option<String> {
-        self.typenames.my.get(&TypeID::Var(var_id)).cloned()
-    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
+
+use std::sync::Arc;
+use std::ops::{Deref, DerefMut};
+use std::sync::RwLock;
+
+impl<T: TypeDict> TypeDict for Arc<RwLock<T>> {
+    fn insert(&mut self, name: String, id: TypeID) {
+        self.write().unwrap().insert(name, id);
+    }
+    fn add_varname(&mut self, vn: String) -> TypeID {
+        self.write().unwrap().add_varname(vn)
+    }
+    fn add_typename(&mut self, tn: String) -> TypeID {
+        self.write().unwrap().add_typename(tn)
+    }
+    fn get_typename(&self, tid: &TypeID)-> Option<String> {
+        self.read().unwrap().get_typename(tid)
+    }
+    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
+        self.read().unwrap().get_typeid(tn)
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
diff --git a/src/parser.rs b/src/parser.rs
index 85ff9b4..6160ca6 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -18,10 +18,23 @@ pub enum ParseError {
     UnexpectedToken
 }
 
+pub trait ParseLadderType {
+    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError>;
+    
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
-    pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
+impl<T: TypeDict> ParseLadderType for T {
+    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
         let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
 
         match self.parse_ladder(&mut tokens) {
diff --git a/src/sugar.rs b/src/sugar.rs
index 4d13f78..dea73ba 100644
--- a/src/sugar.rs
+++ b/src/sugar.rs
@@ -1,7 +1,8 @@
 use {
-    crate::{TypeTerm, TypeID}
+    crate::{TypeTerm, TypeID, parser::ParseLadderType}
 };
 
+#[derive(Clone)]
 pub enum SugaredTypeTerm {
     TypeID(TypeID),
     Num(i64),
@@ -17,7 +18,7 @@ pub enum SugaredTypeTerm {
 }
 
 impl TypeTerm {
-    pub fn sugar(self: TypeTerm, dict: &mut crate::TypeDict) -> SugaredTypeTerm {
+    pub fn sugar(self: TypeTerm, dict: &mut impl crate::TypeDict) -> SugaredTypeTerm {
         match self {
             TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
             TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
@@ -61,7 +62,7 @@ impl TypeTerm {
 }
 
 impl SugaredTypeTerm {
-    pub fn desugar(self, dict: &mut crate::TypeDict) -> TypeTerm {
+    pub fn desugar(self, dict: &mut impl crate::TypeDict) -> TypeTerm {
         match self {
             SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
             SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
diff --git a/src/test/curry.rs b/src/test/curry.rs
index c728a37..a814ab2 100644
--- a/src/test/curry.rs
+++ b/src/test/curry.rs
@@ -1,12 +1,12 @@
 use {
-    crate::{dict::*}
+    crate::{dict::*, parser::*}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 #[test]
 fn test_curry() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("<A B C>").unwrap().curry(),
@@ -33,7 +33,7 @@ fn test_curry() {
 
 #[test]
 fn test_decurry() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("<<A B> C>").unwrap().decurry(),
@@ -47,7 +47,7 @@ fn test_decurry() {
         dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
         dict.parse("<A B C D E F G H I J K>").unwrap()
     );
-    
+
     assert_eq!(
         dict.parse("<<A~X B> C>").unwrap().decurry(),
         dict.parse("<A~X B C>").unwrap()
diff --git a/src/test/lnf.rs b/src/test/lnf.rs
index 1c81a55..4b2a7c2 100644
--- a/src/test/lnf.rs
+++ b/src/test/lnf.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::{BimapTypeDict}, parser::*};
 
 #[test]
 fn test_flat() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert!( dict.parse("A").expect("parse error").is_flat() );
     assert!( dict.parse("10").expect("parse error").is_flat() );
@@ -17,7 +17,7 @@ fn test_flat() {
 
 #[test]
 fn test_normalize() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error").normalize(),
@@ -54,4 +54,3 @@ fn test_normalize() {
     );
 
 }
-
diff --git a/src/test/parser.rs b/src/test/parser.rs
index 1166229..f650ae3 100644
--- a/src/test/parser.rs
+++ b/src/test/parser.rs
@@ -7,7 +7,7 @@ use {
 
 #[test]
 fn test_parser_id() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname("T".into());
 
@@ -26,7 +26,7 @@ fn test_parser_id() {
 fn test_parser_num() {
     assert_eq!(
         Ok(TypeTerm::Num(1234)),
-        TypeDict::new().parse("1234")
+        BimapTypeDict::new().parse("1234")
     );
 }
 
@@ -34,21 +34,21 @@ fn test_parser_num() {
 fn test_parser_char() {
     assert_eq!(
         Ok(TypeTerm::Char('x')),
-        TypeDict::new().parse("'x'")
+        BimapTypeDict::new().parse("'x'")
     );
 }
 
 #[test]
 fn test_parser_app() {
     assert_eq!(
-        TypeDict::new().parse("<A B>"),
+        BimapTypeDict::new().parse("<A B>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeDict::new().parse("<A B C>"),
+        BimapTypeDict::new().parse("<A B C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -60,7 +60,7 @@ fn test_parser_app() {
 #[test]
 fn test_parser_unexpected_close() {
     assert_eq!(
-        TypeDict::new().parse(">"),
+        BimapTypeDict::new().parse(">"),
         Err(ParseError::UnexpectedClose)
     );
 }
@@ -68,7 +68,7 @@ fn test_parser_unexpected_close() {
 #[test]
 fn test_parser_unexpected_token() {
     assert_eq!(
-        TypeDict::new().parse("A B"),
+        BimapTypeDict::new().parse("A B"),
         Err(ParseError::UnexpectedToken)
     );
 }
@@ -76,14 +76,14 @@ fn test_parser_unexpected_token() {
 #[test]
 fn test_parser_ladder() {
     assert_eq!(
-        TypeDict::new().parse("A~B"),
+        BimapTypeDict::new().parse("A~B"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeDict::new().parse("A~B~C"),
+        BimapTypeDict::new().parse("A~B~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -95,7 +95,7 @@ fn test_parser_ladder() {
 #[test]
 fn test_parser_ladder_outside() {
     assert_eq!(
-        TypeDict::new().parse("<A B>~C"),
+        BimapTypeDict::new().parse("<A B>~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::App(vec![
                 TypeTerm::TypeID(TypeID::Fun(0)),
@@ -103,13 +103,13 @@ fn test_parser_ladder_outside() {
             ]),
             TypeTerm::TypeID(TypeID::Fun(2)),
         ]))
-    );    
+    );
 }
 
 #[test]
 fn test_parser_ladder_inside() {
     assert_eq!(
-        TypeDict::new().parse("<A B~C>"),
+        BimapTypeDict::new().parse("<A B~C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -117,13 +117,13 @@ fn test_parser_ladder_inside() {
                 TypeTerm::TypeID(TypeID::Fun(2)),
             ])
         ]))
-    );    
+    );
 }
 
 #[test]
 fn test_parser_ladder_between() {
     assert_eq!(
-        TypeDict::new().parse("<A B~<C D>>"),
+        BimapTypeDict::new().parse("<A B~<C D>>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -134,14 +134,14 @@ fn test_parser_ladder_between() {
                 ])
             ])
         ]))
-    );    
+    );
 }
 
 
 #[test]
 fn test_parser_ladder_large() {
     assert_eq!(
-        TypeDict::new().parse(
+        BimapTypeDict::new().parse(
             "<Seq Date
                   ~<TimeSince UnixEpoch>
                   ~<Duration Seconds>
@@ -203,4 +203,3 @@ fn test_parser_ladder_large() {
         )
     );
 }
-
diff --git a/src/test/pnf.rs b/src/test/pnf.rs
index e668849..a1d5a33 100644
--- a/src/test/pnf.rs
+++ b/src/test/pnf.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::BimapTypeDict, parser::*};
 
 #[test]
 fn test_param_normalize() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error"),
@@ -56,4 +56,3 @@ fn test_param_normalize() {
                .param_normalize(),
     );
 }
-
diff --git a/src/test/substitution.rs b/src/test/substitution.rs
index 7959b08..e8906b9 100644
--- a/src/test/substitution.rs
+++ b/src/test/substitution.rs
@@ -1,6 +1,6 @@
 
 use {
-    crate::{dict::*, term::*},
+    crate::{dict::*, term::*, parser::*, unparser::*},
     std::iter::FromIterator
 };
 
@@ -8,7 +8,7 @@ use {
 
 #[test]
 fn test_subst() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     let mut σ = std::collections::HashMap::new();
 
@@ -29,4 +29,3 @@ fn test_subst() {
         dict.parse("<Seq ℕ~<Seq Char>>").unwrap()
     );
 }
-
diff --git a/src/test/subtype.rs b/src/test/subtype.rs
index 08cc5c7..c993063 100644
--- a/src/test/subtype.rs
+++ b/src/test/subtype.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::BimapTypeDict, parser::*, unparser::*};
 
 #[test]
 fn test_semantic_subtype() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
@@ -19,11 +19,11 @@ fn test_semantic_subtype() {
             ),
         Some((0, dict.parse("A~B1~C1").expect("parse errror")))
     );
-    
+
     assert_eq!(
         dict.parse("A~B~C1").expect("parse error")
             .is_semantic_subtype_of(
-                &dict.parse("B~C2").expect("parse errror")  
+                &dict.parse("B~C2").expect("parse errror")
             ),
         Some((1, dict.parse("B~C1").expect("parse errror")))
     );
@@ -31,12 +31,12 @@ fn test_semantic_subtype() {
 
 #[test]
 fn test_syntactic_subtype() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("A~B~C").expect("parse errror")  
+                &dict.parse("A~B~C").expect("parse errror")
             ),
         Ok(0)
     );
@@ -44,7 +44,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("B~C").expect("parse errror")  
+                &dict.parse("B~C").expect("parse errror")
             ),
         Ok(1)
     );
@@ -52,7 +52,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("C~D").expect("parse errror")  
+                &dict.parse("C~D").expect("parse errror")
             ),
         Ok(2)
     );
@@ -60,7 +60,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("C~G").expect("parse errror")  
+                &dict.parse("C~G").expect("parse errror")
             ),
         Err((2,3))
     );
@@ -68,7 +68,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("G~F~K").expect("parse errror")  
+                &dict.parse("G~F~K").expect("parse errror")
             ),
         Err((0,0))
     );
@@ -94,4 +94,3 @@ fn test_syntactic_subtype() {
         Ok(4)
     );
 }
-
diff --git a/src/test/unification.rs b/src/test/unification.rs
index 2d03d73..7811647 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -1,13 +1,13 @@
 
 use {
-    crate::{dict::*, term::*, unification::*},
+    crate::{dict::*, parser::*, unparser::*, term::*, unification::*},
     std::iter::FromIterator
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
     dict.add_varname(String::from("V"));
@@ -33,7 +33,7 @@ fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
 
 #[test]
 fn test_unification_error() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
     dict.add_varname(String::from("T"));
 
     assert_eq!(
@@ -89,7 +89,7 @@ fn test_unification() {
         true
     );
 
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
@@ -129,10 +129,9 @@ fn test_unification() {
     );
 }
 
-
 #[test]
 fn test_subtype_unification() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
diff --git a/src/unparser.rs b/src/unparser.rs
index ccf754d..b78494e 100644
--- a/src/unparser.rs
+++ b/src/unparser.rs
@@ -2,8 +2,12 @@ use crate::{dict::*, term::*};
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
-    pub fn unparse(&self, t: &TypeTerm) -> String {
+pub trait UnparseLadderType {
+    fn unparse(&self, t: &TypeTerm) -> String;
+}
+
+impl<T: TypeDict> UnparseLadderType for T {
+    fn unparse(&self, t: &TypeTerm) -> String {
         match t {
             TypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
             TypeTerm::Num(n) => format!("{}", n),

From fb2b54059d4c1b733ef4cf91384ce020ce1e0484 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 28 Oct 2024 20:00:11 +0100
Subject: [PATCH 16/33] add Send+Sync trait bound to TypeDict

---
 src/dict.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/dict.rs b/src/dict.rs
index 67e22b3..333f8dd 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -8,7 +8,7 @@ pub enum TypeID {
     Var(u64)
 }
 
-pub trait TypeDict {
+pub trait TypeDict : Send + Sync {
     fn insert(&mut self, name: String, id: TypeID);
     fn add_varname(&mut self, vn: String) -> TypeID;
     fn add_typename(&mut self, tn: String) -> TypeID;

From e15db9d1f35e5c4fe74ab36fb59c3259e3ddcbaa Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 24 Mar 2025 10:11:16 +0100
Subject: [PATCH 17/33] type dict: add get_typeid_creat()
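
A rough usage sketch (illustrative only; test name is not part of this patch):

    use crate::dict::*;

    #[test]
    fn get_typeid_creat_sketch() {
        let mut dict = BimapTypeDict::new();

        // the first call registers a fresh TypeID, later calls return the same one
        let a = dict.get_typeid_creat(&"Char".into());
        let b = dict.get_typeid_creat(&"Char".into());
        assert_eq!(a, b);
    }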

---
 src/dict.rs | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/src/dict.rs b/src/dict.rs
index 333f8dd..e5cb464 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -24,6 +24,14 @@ pub trait TypeDict : Send + Sync {
             self.insert(new, tyid);
         }
     }
+
+    fn get_typeid_creat(&mut self, tn: &String) -> TypeID {
+        if let Some(id) = self.get_typeid(tn) {
+            id
+        } else {
+            self.add_typename(tn.clone())
+        }
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

From c4a26d11da9793c88f37aaa1116b700a9e9b55de Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 6 Oct 2024 14:39:05 +0200
Subject: [PATCH 18/33] term: add check if term is empty
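
A rough sketch of the intended semantics (illustrative only):

    use crate::sugar::SugaredTypeTerm;

    #[test]
    fn is_empty_sketch() {
        // a term is "empty" if it contains no atoms (ids, numbers, chars) at all
        assert!(   SugaredTypeTerm::Ladder(vec![]).is_empty() );
        assert!(   SugaredTypeTerm::Struct(vec![]).is_empty() );
        assert!( ! SugaredTypeTerm::Num(1).is_empty() );
    }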

---
 src/sugar.rs | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/src/sugar.rs b/src/sugar.rs
index dea73ba..77c04cb 100644
--- a/src/sugar.rs
+++ b/src/sugar.rs
@@ -92,5 +92,23 @@ impl SugaredTypeTerm {
                 ).collect()),
         }
     }
+
+    pub fn is_empty(&self) -> bool {
+        match self {
+            SugaredTypeTerm::TypeID(_) => false,
+            SugaredTypeTerm::Num(_) => false,
+            SugaredTypeTerm::Char(_) => false,
+            SugaredTypeTerm::Univ(t) => t.is_empty(),
+            SugaredTypeTerm::Spec(ts) |
+            SugaredTypeTerm::Ladder(ts) |
+            SugaredTypeTerm::Func(ts) |
+            SugaredTypeTerm::Morph(ts) |
+            SugaredTypeTerm::Struct(ts) |
+            SugaredTypeTerm::Enum(ts) |
+            SugaredTypeTerm::Seq(ts) => {
+                ts.iter().fold(true, |s,t|s&&t.is_empty())
+            }
+        }
+    }
 }
 

From aa675201842656ec15b3470af40473983e85ef3f Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Thu, 6 Mar 2025 23:35:29 +0100
Subject: [PATCH 19/33] term: add strip() to flatten ladders
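
A rough sketch (illustrative only, in the style of the test modules):

    use crate::{dict::*, parser::*, term::*};

    #[test]
    fn strip_sketch() {
        let mut dict = BimapTypeDict::new();

        // empty rungs disappear and singleton ladders are unwrapped
        assert_eq!(
            TypeTerm::Ladder(vec![
                TypeTerm::unit(),
                dict.parse("A").expect("parse error")
            ]).strip(),
            dict.parse("A").expect("parse error")
        );
    }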

---
 src/term.rs | 44 ++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 42 insertions(+), 2 deletions(-)

diff --git a/src/term.rs b/src/term.rs
index 240996f..14b886a 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -14,7 +14,7 @@ pub enum TypeTerm {
     Num(i64),
     Char(char),
 
-    
+
 
     /* Complex Terms */
 
@@ -47,7 +47,7 @@ impl TypeTerm {
                 *self = TypeTerm::App(vec![
                     self.clone(),
                     t.into()
-                ])                
+                ])
             }
         }
 
@@ -124,6 +124,46 @@ impl TypeTerm {
 
         self
     }
+
+    /* strip away empty ladders
+     * & unwrap singletons
+     */
+    pub fn strip(self) -> Self {
+        match self {
+            TypeTerm::Ladder(rungs) => {
+                let mut rungs :Vec<_> = rungs.into_iter()
+                    .filter_map(|mut r| {
+                        r = r.strip();
+                        if r != TypeTerm::unit() {
+                            Some(match r {
+                                TypeTerm::Ladder(r) => r,
+                                a => vec![ a ]
+                            })
+                        }
+                        else { None }
+                    })
+                .flatten()
+                .collect();
+
+                if rungs.len() == 1 {
+                    rungs.pop().unwrap()
+                } else {
+                    TypeTerm::Ladder(rungs)
+                }
+            },
+            TypeTerm::App(args) => {
+                let mut args :Vec<_> = args.into_iter().map(|arg| arg.strip()).collect();
+                if args.len() == 0 {
+                    TypeTerm::unit()
+                } else if args.len() == 1 {
+                    args.pop().unwrap()
+                } else {
+                    TypeTerm::App(args)
+                }
+            }
+            atom => atom
+        }
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

From deb097acd33152dc2eeec524a5221cd9144a5ae6 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 25 Mar 2025 16:31:04 +0100
Subject: [PATCH 20/33] term: add get_interface_type() to get the top rung of a
 ladder
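
A rough sketch (illustrative only, in the style of the test modules):

    use crate::{dict::*, parser::*};

    #[test]
    fn interface_type_sketch() {
        let mut dict = BimapTypeDict::new();

        // only the top rung of each ladder is kept, recursively
        assert_eq!(
            dict.parse("<Seq~List <Digit 10>~Char>").expect("parse error")
                .get_interface_type(),
            dict.parse("<Seq <Digit 10>>").expect("parse error")
        );
    }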

---
 src/term.rs | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/src/term.rs b/src/term.rs
index 14b886a..94156b1 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -164,6 +164,24 @@ impl TypeTerm {
             atom => atom
         }
     }
+
+
+
+    pub fn get_interface_type(&self) -> TypeTerm {
+        match self {
+            TypeTerm::Ladder(rungs) => {
+                if let Some(top) = rungs.first() {
+                    top.get_interface_type()
+                } else {
+                    TypeTerm::unit()
+                }
+            }
+            TypeTerm::App(args) => {
+                TypeTerm::App(args.iter().map(|a| a.get_interface_type()).collect())
+            }
+            atom => atom.clone()
+        }
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

From 82a6a20a31a32bbfc0237d28973343de9ede7769 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Thu, 6 Mar 2025 14:01:57 +0100
Subject: [PATCH 21/33] unification: allow subtype and equality constraints in
 one problem, solve both at the same time
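
A rough sketch of the combined interface (illustrative only; the exact halo and
substitution values are pinned down by the updated tests):

    use crate::{dict::*, parser::*, unification::*};

    #[test]
    fn combined_constraints_sketch() {
        let mut dict = BimapTypeDict::new();
        dict.add_varname("T".into());

        let (halos, σ) = UnificationProblem::new(
            // equality constraints
            vec![ (dict.parse("T").expect(""), dict.parse("<Seq Char>").expect("")) ],
            // subtype constraints -- one halo type is reported per pair
            vec![ (dict.parse("<Seq <Digit 10>~Char>").expect(""), dict.parse("<Seq Char>").expect("")) ],
            // trait constraints (not used yet)
            vec![]
        ).solve().expect("not unifiable");

        assert_eq!( halos.len(), 1 );
        eprintln!("T = {:?}", σ.get(&dict.get_typeid(&"T".into()).unwrap()));
    }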

---
 src/term.rs             |   2 -
 src/test/unification.rs | 119 +++++++--
 src/unification.rs      | 548 +++++++++++++++++++++++++++-------------
 3 files changed, 469 insertions(+), 200 deletions(-)

diff --git a/src/term.rs b/src/term.rs
index 94156b1..7f6d87a 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -165,8 +165,6 @@ impl TypeTerm {
         }
     }
 
-
-
     pub fn get_interface_type(&self) -> TypeTerm {
         match self {
             TypeTerm::Ladder(rungs) => {
diff --git a/src/test/unification.rs b/src/test/unification.rs
index 7811647..7a1acb3 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -97,11 +97,12 @@ fn test_unification() {
     dict.add_varname(String::from("W"));
 
     assert_eq!(
-        UnificationProblem::new(vec![
+        UnificationProblem::new_eq(vec![
             (dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
             (dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
         ]).solve(),
-        Ok(
+        Ok((
+            vec![],
             vec![
                 // T
                 (TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),
@@ -109,15 +110,16 @@ fn test_unification() {
                 // U
                 (TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 
     assert_eq!(
-        UnificationProblem::new(vec![
+        UnificationProblem::new_eq(vec![
             (dict.parse("<Seq T>").unwrap(), dict.parse("<Seq W~<Seq Char>>").unwrap()),
             (dict.parse("<Seq ℕ>").unwrap(), dict.parse("<Seq W>").unwrap()),
         ]).solve(),
-        Ok(
+        Ok((
+            vec![],
             vec![
                 // W
                 (TypeID::Var(3), dict.parse("ℕ").unwrap()),
@@ -125,7 +127,7 @@ fn test_unification() {
                 // T
                 (TypeID::Var(0), dict.parse("ℕ~<Seq Char>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 }
 
@@ -139,12 +141,14 @@ fn test_subtype_unification() {
     dict.add_varname(String::from("W"));
 
     assert_eq!(
-        UnificationProblem::new(vec![
+        UnificationProblem::new_sub(vec![
             (dict.parse("<Seq~T <Digit 10> ~ Char>").unwrap(),
                 dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap()),
-        ]).solve_subtype(),
+        ]).solve(),
         Ok((
-            dict.parse("<Seq <Digit 10>>").unwrap(),
+            vec![
+                dict.parse("<Seq <Digit 10>>").unwrap()
+            ],
             vec![
                 // T
                 (TypeID::Var(0), dict.parse("<LengthPrefix x86.UInt64>").unwrap())
@@ -153,12 +157,15 @@ fn test_subtype_unification() {
     );
 
     assert_eq!(
-        UnificationProblem::new(vec![
+        UnificationProblem::new_sub(vec![
             (dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
             (dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
-        ]).solve_subtype(),
+        ]).solve(),
         Ok((
-            TypeTerm::unit(),
+            vec![
+                TypeTerm::unit(),
+                TypeTerm::unit(),
+            ],
             vec![
                 // T
                 (TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),
@@ -170,16 +177,17 @@ fn test_subtype_unification() {
     );
 
     assert_eq!(
-        UnificationProblem::new(vec![
+        UnificationProblem::new_sub(vec![
             (dict.parse("<Seq T>").unwrap(),
                 dict.parse("<Seq W~<Seq Char>>").unwrap()),
             (dict.parse("<Seq~<LengthPrefix x86.UInt64> ℕ~<PosInt 10 BigEndian>>").unwrap(),
                 dict.parse("<<LengthPrefix x86.UInt64> W>").unwrap()),
-        ]).solve_subtype(),
+        ]).solve(),
         Ok((
-            dict.parse("
-                <Seq ℕ~<PosInt 10 BigEndian>>
-            ").unwrap(),
+            vec![
+                TypeTerm::unit(),
+                dict.parse("<Seq ℕ>").unwrap(),
+            ],
             vec![
                 // W
                 (TypeID::Var(3), dict.parse("ℕ~<PosInt 10 BigEndian>").unwrap()),
@@ -189,4 +197,81 @@ fn test_subtype_unification() {
             ].into_iter().collect()
         ))
     );
+
+    assert_eq!(
+        subtype_unify(
+            &dict.parse("<Seq~List~Vec <Digit 16>~Char>").expect(""),
+            &dict.parse("<List~Vec Char>").expect("")
+        ),
+        Ok((
+            dict.parse("<Seq~List <Digit 16>>").expect(""),
+            vec![].into_iter().collect()
+        ))
+    );
+
+    assert_eq!(
+        subtype_unify(
+            &dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq~List~Vec <Digit 16>~Char>").expect(""),
+            &dict.parse("<List~Vec Char>").expect("")
+        ),
+        Ok((
+            dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq~List <Digit 16>>").expect(""),
+            vec![].into_iter().collect()
+        ))
+    );
+}
+
+
+#[test]
+pub fn test_subtype_delim() {
+    let mut dict = BimapTypeDict::new();
+
+    dict.add_varname(String::from("T"));
+    dict.add_varname(String::from("Delim"));
+
+    assert_eq!(
+        UnificationProblem::new_sub(vec![
+
+            (
+                //given type
+                dict.parse("
+                  < Seq <Seq <Digit 10>~Char~Ascii~UInt8> >
+                ~ < ValueSep ':' Char~Ascii~UInt8 >
+                ~ < Seq~<LengthPrefix UInt64> Char~Ascii~UInt8 >
+                ").expect(""),
+
+                //expected type
+                dict.parse("
+                  < Seq <Seq T> >
+                ~ < ValueSep Delim T >
+                ~ < Seq~<LengthPrefix UInt64> T >
+                ").expect("")
+            ),
+
+            // subtype bounds
+            (
+                dict.parse("T").expect(""),
+                dict.parse("UInt8").expect("")
+            ),
+            /* todo
+            (
+                dict.parse("<TypeOf Delim>").expect(""),
+                dict.parse("T").expect("")
+            ),
+            */
+        ]).solve(),
+        Ok((
+            // halo types for each rhs in the sub-equations
+            vec![
+                dict.parse("<Seq <Seq <Digit 10>>>").expect(""),
+                dict.parse("Char~Ascii").expect(""),
+            ],
+
+            // variable substitution
+            vec![
+                (dict.get_typeid(&"T".into()).unwrap(), dict.parse("Char~Ascii~UInt8").expect("")),
+                (dict.get_typeid(&"Delim".into()).unwrap(), TypeTerm::Char(':')),
+            ].into_iter().collect()
+        ))
+    );
 }
diff --git a/src/unification.rs b/src/unification.rs
index e605af4..03c3699 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -1,6 +1,5 @@
 use {
-    std::collections::HashMap,
-    crate::{term::*, dict::*}
+    crate::{dict::*, term::*}, std::{collections::HashMap, env::consts::ARCH}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -12,21 +11,71 @@ pub struct UnificationError {
     pub t2: TypeTerm
 }
 
+#[derive(Clone, Debug)]
+pub struct UnificationPair {
+    addr: Vec<usize>,
+    halo: TypeTerm,
+
+    lhs: TypeTerm,
+    rhs: TypeTerm,
+}
+
+#[derive(Debug)]
 pub struct UnificationProblem {
-    eqs: Vec<(TypeTerm, TypeTerm, Vec<usize>)>,
-    σ: HashMap<TypeID, TypeTerm>
+    σ: HashMap<TypeID, TypeTerm>,
+    upper_bounds: HashMap< u64, TypeTerm >,
+    lower_bounds: HashMap< u64, TypeTerm >,
+    equal_pairs: Vec<UnificationPair>,
+    subtype_pairs: Vec<UnificationPair>,
+    trait_pairs: Vec<UnificationPair>
 }
 
 impl UnificationProblem {
-    pub fn new(eqs: Vec<(TypeTerm, TypeTerm)>) -> Self {
+    pub fn new(
+        equal_pairs: Vec<(TypeTerm, TypeTerm)>,
+        subtype_pairs: Vec<(TypeTerm, TypeTerm)>,
+        trait_pairs: Vec<(TypeTerm, TypeTerm)>
+    ) -> Self {
         UnificationProblem {
-            eqs: eqs.iter().map(|(lhs,rhs)| (lhs.clone(),rhs.clone(),vec![])).collect(),
-            σ: HashMap::new()
+            σ: HashMap::new(),
+
+            equal_pairs: equal_pairs.into_iter().map(|(lhs,rhs)|
+                UnificationPair{
+                    lhs,rhs,
+                    halo: TypeTerm::unit(),
+                    addr: Vec::new()
+                }).collect(),
+
+            subtype_pairs: subtype_pairs.into_iter().map(|(lhs,rhs)|
+                UnificationPair{
+                    lhs,rhs,
+                    halo: TypeTerm::unit(),
+                    addr: Vec::new()
+                }).collect(),
+
+            trait_pairs: trait_pairs.into_iter().map(|(lhs,rhs)|
+                UnificationPair{
+                    lhs,rhs,
+                    halo: TypeTerm::unit(),
+                    addr: Vec::new()
+                }).collect(),
+
+            upper_bounds: HashMap::new(),
+            lower_bounds: HashMap::new(),
         }
     }
 
+    pub fn new_eq(eqs: Vec<(TypeTerm, TypeTerm)>) -> Self {
+        UnificationProblem::new( eqs, Vec::new(), Vec::new() )
+    }
+
+    pub fn new_sub(subs: Vec<(TypeTerm, TypeTerm)>) -> Self {
+        UnificationProblem::new( Vec::new(), subs, Vec::new() )
+    }
+
+
+    /// update all values in substitution
     pub fn reapply_subst(&mut self) {
-        // update all values in substitution
         let mut new_σ = HashMap::new();
         for (v, tt) in self.σ.iter() {
             let mut tt = tt.clone().normalize();
@@ -38,222 +87,359 @@ impl UnificationProblem {
         self.σ = new_σ;
     }
 
-    pub fn eval_subtype(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<Vec<TypeTerm>, UnificationError> {
-        match (lhs.clone(), rhs.clone()) {
+
+    pub fn eval_equation(&mut self, unification_pair: UnificationPair) -> Result<(), UnificationError> {
+        match (&unification_pair.lhs, &unification_pair.rhs) {
             (TypeTerm::TypeID(TypeID::Var(varid)), t) |
             (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-                if ! t.contains_var( varid ) {
-                    self.σ.insert(TypeID::Var(varid), t.clone());
+                if ! t.contains_var( *varid ) {
+                    self.σ.insert(TypeID::Var(*varid), t.clone());
                     self.reapply_subst();
-                    Ok(vec![])
-                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
-                    Ok(vec![])
+                    Ok(())
+                } else if t == &TypeTerm::TypeID(TypeID::Var(*varid)) {
+                    Ok(())
                 } else {
-                    Err(UnificationError{ addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
+                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(*varid)), t2: t.clone() })
                 }
             }
 
             (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
-                if a1 == a2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if a1 == a2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
             }
             (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
-                if n1 == n2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if n1 == n2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
             }
             (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
-                if c1 == c2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
-            }
-
-            (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
-                let mut halo = Vec::new();
-                for i in 0..a1.len() {
-                    if let Ok((r_halo, σ)) = subtype_unify( &a1[i], &a2[0] ) {
-                        //eprintln!("unified ladders at {}, r_halo = {:?}", i, r_halo);
-                        for (k,v) in σ.iter() {
-                            self.σ.insert(k.clone(),v.clone());
-                        }
-
-                        for j in 0..a2.len() {
-                            if i+j < a1.len() {
-                                let mut new_addr = addr.clone();
-                                new_addr.push(i+j);
-                                self.eqs.push((a1[i+j].clone().apply_substitution(&|k| σ.get(k).cloned()).clone(),
-                                    a2[j].clone().apply_substitution(&|k| σ.get(k).cloned()).clone(),
-                                    new_addr));
-                            }
-                        }
-                        return Ok(halo)
-                    } else {
-                        halo.push(a1[i].clone());
-                        //eprintln!("could not unify ladders");
-                    }
-                }
-
-                Err(UnificationError{ addr, t1: lhs, t2: rhs })
-            },
-
-            (t, TypeTerm::Ladder(mut a1)) => {
-                if let Ok(mut halo) = self.eval_subtype( t.clone(), a1.first().unwrap().clone(), addr.clone() ) {
-                    a1.append(&mut halo);
-                    Ok(a1)
-                } else {
-                    Err(UnificationError{ addr, t1: t, t2: TypeTerm::Ladder(a1) })
-                }
-            }
-
-            (TypeTerm::Ladder(mut a1), t) => {
-                if let Ok(mut halo) = self.eval_subtype( a1.pop().unwrap(), t.clone(), addr.clone() ) {
-                    a1.append(&mut halo);
-                    Ok(a1)
-                } else {
-                    Err(UnificationError{ addr, t1: TypeTerm::Ladder(a1), t2: t })
-                }
-            }
-
-            (TypeTerm::App(a1), TypeTerm::App(a2)) => {
-                if a1.len() == a2.len() {
-                    let mut halo_args = Vec::new();
-                    let mut halo_required = false;
-
-                    for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
-                        let mut new_addr = addr.clone();
-                        new_addr.push(i);
-                        //self.eqs.push((x, y, new_addr));
-
-                        if let Ok(halo) = self.eval_subtype( x.clone(), y.clone(), new_addr ) {
-                            if halo.len() == 0 {
-                                halo_args.push(y.get_lnf_vec().first().unwrap().clone());
-                            } else {
-                                halo_args.push(TypeTerm::Ladder(halo));
-                                halo_required = true;
-                            }
-                        } else {
-                            return Err(UnificationError{ addr, t1: x, t2: y })
-                        }
-                    }
-
-                    if halo_required {
-                        Ok(vec![ TypeTerm::App(halo_args) ])
-                    } else {
-                        Ok(vec![])
-                    }
-                } else {
-                    Err(UnificationError{ addr, t1: lhs, t2: rhs })
-                }
-            }
-
-            _ => Err(UnificationError{ addr, t1: lhs, t2: rhs})
-        }
-    }
-
-    pub fn eval_equation(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<(), UnificationError> {
-        match (lhs.clone(), rhs.clone()) {
-            (TypeTerm::TypeID(TypeID::Var(varid)), t) |
-            (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-                if ! t.contains_var( varid ) {
-                    self.σ.insert(TypeID::Var(varid), t.clone());
-                    self.reapply_subst();
-                    Ok(())
-                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
-                    Ok(())
-                } else {
-                    Err(UnificationError{ addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
-                }
-            }
-
-            (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
-                if a1 == a2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
-            }
-            (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
-                if n1 == n2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
-            }
-            (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
-                if c1 == c2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if c1 == c2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
             }
 
             (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) |
             (TypeTerm::App(a1), TypeTerm::App(a2)) => {
                 if a1.len() == a2.len() {
                     for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
-                        let mut new_addr = addr.clone();
+                        let mut new_addr = unification_pair.addr.clone();
                         new_addr.push(i);
-                        self.eqs.push((x, y, new_addr));
+                        self.equal_pairs.push(
+                            UnificationPair {
+                                lhs: x,
+                                rhs: y,
+                                halo: TypeTerm::unit(),
+                                addr: new_addr
+                            });
                     }
                     Ok(())
                 } else {
-                    Err(UnificationError{ addr, t1: lhs, t2: rhs })
+                    Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
                 }
             }
 
-            _ => Err(UnificationError{ addr, t1: lhs, t2: rhs})
+            _ => Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
         }
     }
 
-    pub fn solve(mut self) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
-        while let Some( (mut lhs,mut rhs,addr) ) = self.eqs.pop() {
-            lhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            rhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            self.eval_equation(lhs, rhs, addr)?;
-        }
-        Ok(self.σ)
-    }
+    pub fn eval_subtype(&mut self, unification_pair: UnificationPair) -> Result<
+        // ok: halo type
+        TypeTerm,
+        // error
+        UnificationError
+    > {
+        match (unification_pair.lhs.clone(), unification_pair.rhs.clone()) {
 
-    pub fn solve_subtype(mut self) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
+            /*
+             Variables
+            */
 
-        pub fn insert_halo_at(
-            t: &mut TypeTerm,
-            mut addr: Vec<usize>,
-            halo_type: TypeTerm
-        ) {
-            match t {
-                TypeTerm::Ladder(rungs) => {
-                    if let Some(idx) = addr.pop() {
-                        for i in rungs.len()..idx+1 {
-                            rungs.push(TypeTerm::unit());
+            (TypeTerm::TypeID(TypeID::Var(varid)), t) => {
+                eprintln!("variable <= t");
+
+                if let Some(upper_bound) = self.upper_bounds.get(&varid).cloned() {
+                    if let Ok(_halo) = self.eval_subtype(
+                        UnificationPair {
+                            lhs: t.clone(),
+                            rhs: upper_bound,
+
+                            halo: TypeTerm::unit(),
+                            addr: vec![]
                         }
-                        insert_halo_at( &mut rungs[idx], addr, halo_type );
-                    } else {
-                        rungs.push(halo_type);
+                    ) {
+                        // found a lower upper bound
+                        self.upper_bounds.insert(varid, t);
                     }
-                },
+                } else {
+                    self.upper_bounds.insert(varid, t);
+                }
+                Ok(TypeTerm::unit())
+            }
 
-                TypeTerm::App(args) => {
-                    if let Some(idx) = addr.pop() {
-                        insert_halo_at( &mut args[idx], addr, halo_type );
+            (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
+                eprintln!("t <= variable");
+                if ! t.contains_var( varid ) {
+                   // let x = self.σ.get(&TypeID::Var(varid)).cloned();
+                    if let Some(lower_bound) = self.lower_bounds.get(&varid).cloned() {
+                        eprintln!("var already exists. check max. type");
+                        if let Ok(halo) = self.eval_subtype(
+                            UnificationPair {
+                                lhs: lower_bound.clone(),
+                                rhs: t.clone(),
+                                halo: TypeTerm::unit(),
+                                addr: vec![]
+                            }
+                        ) {
+                            eprintln!("found more general lower bound");
+                            eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
+                            // generalize variable type to supertype
+                            self.lower_bounds.insert(varid, t.clone());
+                        } else if let Ok(halo) = self.eval_subtype(
+                            UnificationPair{
+                                lhs: t.clone(),
+                                rhs: lower_bound.clone(),
+                                halo: TypeTerm::unit(),
+                                addr: vec![]
+                            }
+                        ) {
+                            eprintln!("OK, is already larger type");
+                        } else {
+                            eprintln!("violated subtype restriction");
+                            return Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t });
+                        }
                     } else {
-                        *t = TypeTerm::Ladder(vec![
-                            halo_type,
-                            t.clone()
-                        ]);
+                        eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
+                        self.lower_bounds.insert(varid, t.clone());
+                    }
+                    self.reapply_subst();
+                    Ok(TypeTerm::unit())
+                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
+                    Ok(TypeTerm::unit())
+                } else {
+                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
+                }
+            }
+
+
+            /*
+             Atoms
+            */
+
+            (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
+                if a1 == a2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs}) }
+            }
+            (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
+                if n1 == n2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
+            }
+            (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
+                if c1 == c2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
+            }
+
+
+            /*
+             Ladders
+            */
+
+            (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
+                let mut halo = Vec::new();
+                for i in 0..a1.len() {
+                    let mut new_addr = unification_pair.addr.clone();
+                    new_addr.push(i);
+                    if let Ok(r_halo) = self.eval_subtype( UnificationPair {
+                        lhs: a1[i].clone(),
+                        rhs: a2[0].clone(),
+
+                        halo: TypeTerm::unit(),
+                        addr: new_addr
+                    }) {
+                        eprintln!("unified ladders at {}, r_halo = {:?}", i, r_halo);
+
+                        for j in 0..a2.len() {
+                            if i+j < a1.len() {
+                                let mut new_addr = unification_pair.addr.clone();
+                                new_addr.push(i+j);
+
+                                let lhs = a1[i+j].clone();//.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
+                                let rhs = a2[j].clone();//.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
+
+                                if let Ok(rung_halo) = self.eval_subtype(
+                                    UnificationPair {
+                                        lhs: lhs.clone(), rhs: rhs.clone(),
+                                        addr: new_addr.clone(),
+                                        halo: TypeTerm::unit()
+                                    }
+                                ) {
+                                    if rung_halo != TypeTerm::unit() {
+                                        halo.push(rung_halo);
+                                    }
+                                } else {
+                                    return Err(UnificationError{ addr: new_addr, t1: lhs, t2: rhs })
+                                }
+                            }
+                        }
+
+                        return Ok(
+                            if halo.len() == 1 {
+                                halo.pop().unwrap()
+                            } else {
+                                TypeTerm::Ladder(halo)
+                            });
+                    } else {
+                        halo.push(a1[i].clone());
+                        //eprintln!("could not unify ladders");
                     }
                 }
 
-                atomic => {
+                Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
+            },
 
+            (t, TypeTerm::Ladder(a1)) => {
+                Err(UnificationError{ addr: unification_pair.addr, t1: t, t2: TypeTerm::Ladder(a1) })
+            }
+
+            (TypeTerm::Ladder(mut a1), t) => {
+                let mut new_addr = unification_pair.addr.clone();
+                new_addr.push( a1.len() -1 );
+                if let Ok(halo) = self.eval_subtype(
+                    UnificationPair {
+                        lhs: a1.pop().unwrap(),
+                        rhs: t.clone(),
+                        halo: TypeTerm::unit(),
+                        addr: new_addr
+                    }
+                ) {
+                    a1.push(halo);
+                    if a1.len() == 1 {
+                        Ok(a1.pop().unwrap())
+                    } else {
+                        Ok(TypeTerm::Ladder(a1))
+                    }
+                } else {
+                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::Ladder(a1), t2: t })
                 }
             }
-        }
 
-        //let mut halo_type = TypeTerm::unit();
-        let mut halo_rungs = Vec::new();
 
-        while let Some( (mut lhs, mut rhs, mut addr) ) = self.eqs.pop() {
-            lhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            rhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            //eprintln!("eval subtype\n\tLHS={:?}\n\tRHS={:?}\n", lhs, rhs);
-            let mut new_halo = self.eval_subtype(lhs, rhs, addr.clone())?;
-            if new_halo.len() > 0 {
-                //insert_halo_at( &mut halo_type, addr, TypeTerm::Ladder(new_halo) );
-                if addr.len() == 0 {
-                    halo_rungs.push(TypeTerm::Ladder(new_halo))
+            /*
+             Application
+            */
+
+            (TypeTerm::App(a1), TypeTerm::App(a2)) => {
+                eprintln!("sub unify {:?}, {:?}", a1, a2);
+                if a1.len() == a2.len() {
+                    let mut halo_args = Vec::new();
+                    let mut n_halos_required = 0;
+
+                    for (i, (mut x, mut y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
+                        let mut new_addr = unification_pair.addr.clone();
+                        new_addr.push(i);
+
+                        x = x.strip();
+
+                        eprintln!("before strip: {:?}", y);
+                        y = y.strip();
+                        eprintln!("after strip: {:?}", y);
+
+                        eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);
+
+                        if let Ok(halo) = self.eval_subtype(
+                            UnificationPair {
+                                lhs: x.clone(),
+                                rhs: y.clone(),
+                                halo: TypeTerm::unit(),
+                                addr: new_addr,
+                            }
+                        ) {
+                            if halo == TypeTerm::unit() {
+                                let mut y = y.clone();
+                                y.apply_substitution(&|k| self.σ.get(k).cloned());
+                                y = y.strip();
+                                let mut top = y.get_lnf_vec().first().unwrap().clone();
+                                halo_args.push(top.clone());
+                                eprintln!("add top {:?}", top);
+                            } else {
+                                eprintln!("add halo {:?}", halo);
+                                if n_halos_required > 0 {
+                                    let x = &mut halo_args[n_halos_required-1];
+                                    if let TypeTerm::Ladder(argrs) = x {
+                                        let mut a = a2[n_halos_required-1].clone();
+                                        a.apply_substitution(&|k| self.σ.get(k).cloned());
+                                        a = a.get_lnf_vec().first().unwrap().clone();
+                                        argrs.push(a);
+                                    } else {
+                                        *x = TypeTerm::Ladder(vec![
+                                            x.clone(),
+                                            a2[n_halos_required-1].clone().get_lnf_vec().first().unwrap().clone()
+                                        ]);
+
+                                        x.apply_substitution(&|k| self.σ.get(k).cloned());
+                                    }
+                                }
+
+                                halo_args.push(halo);
+                                n_halos_required += 1;
+                            }
+                        } else {
+                            return Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
+                        }
+                    }
+
+                    if n_halos_required > 0 {
+                        eprintln!("halo args : {:?}", halo_args);
+                        Ok(TypeTerm::App(halo_args))
+                    } else {
+                        Ok(TypeTerm::unit())
+                    }
+                } else {
+                    Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
                 }
             }
+
+            _ => Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
+        }
+    }
+
+    pub fn solve(mut self) -> Result<(Vec<TypeTerm>, HashMap<TypeID, TypeTerm>), UnificationError> {
+        // solve equations
+        while let Some( mut equal_pair ) = self.equal_pairs.pop() {
+            equal_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            equal_pair.rhs.apply_substitution(&|v| self.σ.get(v).cloned());
+
+            self.eval_equation(equal_pair)?;
         }
 
-        let mut halo_type = TypeTerm::Ladder(halo_rungs);
-        halo_type = halo_type.normalize();
-        halo_type = halo_type.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
+        // solve subtypes
+        eprintln!("------ SOLVE SUBTYPES ---- ");
+        for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
+            subtype_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            subtype_pair.rhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            let _halo = self.eval_subtype( subtype_pair.clone() )?.strip();
+        }
 
-        Ok((halo_type.param_normalize(), self.σ))
+        // add variables from subtype bounds
+        for (var_id, t) in self.upper_bounds.iter() {
+            eprintln!("VAR {} upper bound {:?}", var_id, t);
+            self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
+        }
+
+        for (var_id, t) in self.lower_bounds.iter() {
+            eprintln!("VAR {} lower bound {:?}", var_id, t);
+            self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
+        }
+
+        self.reapply_subst();
+
+        eprintln!("------ MAKE HALOS -----");
+        let mut halo_types = Vec::new();
+        for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
+            subtype_pair.lhs = subtype_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned()).clone().strip();
+            subtype_pair.rhs = subtype_pair.rhs.apply_substitution(&|v| self.σ.get(v).cloned()).clone().strip();
+
+            let halo = self.eval_subtype( subtype_pair.clone() )?.strip();
+            halo_types.push(halo);
+        }
+
+        // solve traits
+        while let Some( trait_pair ) = self.trait_pairs.pop() {
+            unimplemented!();
+        }
+
+        Ok((halo_types, self.σ))
     }
 }
 
@@ -261,16 +447,16 @@ pub fn unify(
     t1: &TypeTerm,
     t2: &TypeTerm
 ) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
-    let mut unification = UnificationProblem::new(vec![ (t1.clone(), t2.clone()) ]);
-    unification.solve()
+    let unification = UnificationProblem::new_eq(vec![ (t1.clone(), t2.clone()) ]);
+    Ok(unification.solve()?.1)
 }
 
 pub fn subtype_unify(
     t1: &TypeTerm,
     t2: &TypeTerm
 ) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
-    let mut unification = UnificationProblem::new(vec![ (t1.clone(), t2.clone()) ]);
-    unification.solve_subtype()
+    let unification = UnificationProblem::new_sub(vec![ (t1.clone(), t2.clone()) ]);
+    unification.solve().map( |(halos,σ)| ( halos.first().cloned().unwrap_or(TypeTerm::unit()), σ) )
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

From 229c6193c40278c6645caf2bd9d49e8a8fff939b Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 9 Mar 2025 13:26:51 +0100
Subject: [PATCH 22/33] subtype satisfaction: move rhs-variable assignment up
 in match-priority
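
Rust tries match arms from top to bottom, so when both sides of a subtype
pair are variables, the arm listed first decides whether a lower or an
upper bound gets recorded. A minimal, self-contained illustration
(simplified stand-in types, not the crate's real ones):

    enum T { Var(u64), Atom(u64) }

    fn which_bound(pair: (T, T)) -> &'static str {
        match pair {
            // rhs-variable arm first: it wins for a (Var, Var) pair
            (_, T::Var(_)) => "lower bound on the rhs variable",
            (T::Var(_), _) => "upper bound on the lhs variable",
            _              => "structural case",
        }
    }

    fn main() {
        assert_eq!( which_bound((T::Var(0), T::Var(1))), "lower bound on the rhs variable" );
        assert_eq!( which_bound((T::Atom(0), T::Atom(1))), "structural case" );
    }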

---
 src/unification.rs | 114 ++++++++++++++++++++++-----------------------
 1 file changed, 56 insertions(+), 58 deletions(-)

diff --git a/src/unification.rs b/src/unification.rs
index 03c3699..ef8c0fd 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -149,15 +149,58 @@ impl UnificationProblem {
              Variables
             */
 
+            (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
+//                eprintln!("t <= variable");
+                if ! t.contains_var( varid ) {
+                   // let x = self.σ.get(&TypeID::Var(varid)).cloned();
+                    if let Some(lower_bound) = self.lower_bounds.get(&varid).cloned() {
+//                        eprintln!("var already exists. check max. type");
+                        if let Ok(halo) = self.eval_subtype(
+                            UnificationPair {
+                                lhs: lower_bound.clone(),
+                                rhs: t.clone(),
+                                halo: TypeTerm::unit(),
+                                addr: vec![]
+                            }
+                        ) {
+//                            eprintln!("found more general lower bound");
+//                            eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
+                            // generalize variable type to supertype
+                            self.lower_bounds.insert(varid, t.clone());
+                        } else if let Ok(halo) = self.eval_subtype(
+                            UnificationPair{
+                                lhs: t.clone(),
+                                rhs: lower_bound.clone(),
+                                halo: TypeTerm::unit(),
+                                addr: vec![]
+                            }
+                        ) {
+//                            eprintln!("OK, is already larger type");
+                        } else {
+//                            eprintln!("violated subtype restriction");
+                            return Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t });
+                        }
+                    } else {
+//                        eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
+                        self.lower_bounds.insert(varid, t.clone());
+                    }
+                    self.reapply_subst();
+                    Ok(TypeTerm::unit())
+                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
+                    Ok(TypeTerm::unit())
+                } else {
+                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
+                }
+            }
+
             (TypeTerm::TypeID(TypeID::Var(varid)), t) => {
-                eprintln!("variable <= t");
+//                eprintln!("variable <= t");
 
                 if let Some(upper_bound) = self.upper_bounds.get(&varid).cloned() {
                     if let Ok(_halo) = self.eval_subtype(
                         UnificationPair {
                             lhs: t.clone(),
                             rhs: upper_bound,
-
                             halo: TypeTerm::unit(),
                             addr: vec![]
                         }
@@ -171,49 +214,6 @@ impl UnificationProblem {
                 Ok(TypeTerm::unit())
             }
 
-            (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-                eprintln!("t <= variable");
-                if ! t.contains_var( varid ) {
-                   // let x = self.σ.get(&TypeID::Var(varid)).cloned();
-                    if let Some(lower_bound) = self.lower_bounds.get(&varid).cloned() {
-                        eprintln!("var already exists. check max. type");
-                        if let Ok(halo) = self.eval_subtype(
-                            UnificationPair {
-                                lhs: lower_bound.clone(),
-                                rhs: t.clone(),
-                                halo: TypeTerm::unit(),
-                                addr: vec![]
-                            }
-                        ) {
-                            eprintln!("found more general lower bound");
-                            eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
-                            // generalize variable type to supertype
-                            self.lower_bounds.insert(varid, t.clone());
-                        } else if let Ok(halo) = self.eval_subtype(
-                            UnificationPair{
-                                lhs: t.clone(),
-                                rhs: lower_bound.clone(),
-                                halo: TypeTerm::unit(),
-                                addr: vec![]
-                            }
-                        ) {
-                            eprintln!("OK, is already larger type");
-                        } else {
-                            eprintln!("violated subtype restriction");
-                            return Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t });
-                        }
-                    } else {
-                        eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
-                        self.lower_bounds.insert(varid, t.clone());
-                    }
-                    self.reapply_subst();
-                    Ok(TypeTerm::unit())
-                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
-                    Ok(TypeTerm::unit())
-                } else {
-                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
-                }
-            }
 
 
             /*
@@ -247,7 +247,7 @@ impl UnificationProblem {
                         halo: TypeTerm::unit(),
                         addr: new_addr
                     }) {
-                        eprintln!("unified ladders at {}, r_halo = {:?}", i, r_halo);
+//                        eprintln!("unified ladders at {}, r_halo = {:?}", i, r_halo);
 
                         for j in 0..a2.len() {
                             if i+j < a1.len() {
@@ -320,7 +320,6 @@ impl UnificationProblem {
             */
 
             (TypeTerm::App(a1), TypeTerm::App(a2)) => {
-                eprintln!("sub unify {:?}, {:?}", a1, a2);
                 if a1.len() == a2.len() {
                     let mut halo_args = Vec::new();
                     let mut n_halos_required = 0;
@@ -331,11 +330,10 @@ impl UnificationProblem {
 
                         x = x.strip();
 
-                        eprintln!("before strip: {:?}", y);
+//                        eprintln!("before strip: {:?}", y);
                         y = y.strip();
-                        eprintln!("after strip: {:?}", y);
-
-                        eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);
+//                        eprintln!("after strip: {:?}", y);
+//                        eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);
 
                         if let Ok(halo) = self.eval_subtype(
                             UnificationPair {
@@ -351,9 +349,9 @@ impl UnificationProblem {
                                 y = y.strip();
                                 let mut top = y.get_lnf_vec().first().unwrap().clone();
                                 halo_args.push(top.clone());
-                                eprintln!("add top {:?}", top);
+//                                eprintln!("add top {:?}", top);
                             } else {
-                                eprintln!("add halo {:?}", halo);
+//                                eprintln!("add halo {:?}", halo);
                                 if n_halos_required > 0 {
                                     let x = &mut halo_args[n_halos_required-1];
                                     if let TypeTerm::Ladder(argrs) = x {
@@ -380,7 +378,7 @@ impl UnificationProblem {
                     }
 
                     if n_halos_required > 0 {
-                        eprintln!("halo args : {:?}", halo_args);
+//                        eprintln!("halo args : {:?}", halo_args);
                         Ok(TypeTerm::App(halo_args))
                     } else {
                         Ok(TypeTerm::unit())
@@ -404,7 +402,7 @@ impl UnificationProblem {
         }
 
         // solve subtypes
-        eprintln!("------ SOLVE SUBTYPES ---- ");
+//        eprintln!("------ SOLVE SUBTYPES ---- ");
         for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
             subtype_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned());
             subtype_pair.rhs.apply_substitution(&|v| self.σ.get(v).cloned());
@@ -413,18 +411,18 @@ impl UnificationProblem {
 
         // add variables from subtype bounds
         for (var_id, t) in self.upper_bounds.iter() {
-            eprintln!("VAR {} upper bound {:?}", var_id, t);
+//            eprintln!("VAR {} upper bound {:?}", var_id, t);
             self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
         }
 
         for (var_id, t) in self.lower_bounds.iter() {
-            eprintln!("VAR {} lower bound {:?}", var_id, t);
+//            eprintln!("VAR {} lower bound {:?}", var_id, t);
             self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
         }
 
         self.reapply_subst();
 
-        eprintln!("------ MAKE HALOS -----");
+//        eprintln!("------ MAKE HALOS -----");
         let mut halo_types = Vec::new();
         for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
             subtype_pair.lhs = subtype_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned()).clone().strip();

From a2fc025eea33f156c94683e7e4bd1fd4e0c8b29e Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 10 Mar 2025 18:20:00 +0100
Subject: [PATCH 23/33] add (failing) tests for subtype-satisfaction

- these tests fail and uncover a bug in the subtype unification algorithm: a trait relationship is treated as a subtype relationship, which is not wanted (see the sketch of the intended semantics below)
- add a test with variable substitution
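
For illustration, a sketch of the intended semantics, written in the same
style as the tests below (BimapTypeDict / subtype_unify as already used in
src/test/unification.rs):

    let mut dict = BimapTypeDict::new();

    // extending the ladder at the *bottom* is a trait relationship,
    // not a subtype relationship, so this must fail ...
    assert!( subtype_unify( &dict.parse("A ~ B").unwrap(),
                            &dict.parse("A ~ B ~ C").unwrap() ).is_err() );

    // ... while matching a suffix of the ladder from above is a subtype.
    assert!( subtype_unify( &dict.parse("A ~ B ~ C").unwrap(),
                            &dict.parse("B ~ C").unwrap() ).is_ok() );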
---
 src/test/unification.rs | 49 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 49 insertions(+)

diff --git a/src/test/unification.rs b/src/test/unification.rs
index 7a1acb3..feb637d 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -222,6 +222,55 @@ fn test_subtype_unification() {
 }
 
 
+#[test]
+fn test_trait_not_subtype() {
+    let mut dict = BimapTypeDict::new();
+
+    assert_eq!(
+        subtype_unify(
+            &dict.parse("A ~ B").expect(""),
+            &dict.parse("A ~ B ~ C").expect("")
+        ),
+        Err(UnificationError {
+            addr: vec![],
+            t1: dict.parse("A ~ B").expect(""),
+            t2: dict.parse("A ~ B ~ C").expect("")
+        })
+    );
+
+    assert_eq!(
+        subtype_unify(
+            &dict.parse("<Seq~List~Vec <Digit 10>~Char>").expect(""),
+            &dict.parse("<Seq~List~Vec Char~ReprTree>").expect("")
+        ),
+        Err(UnificationError {
+            addr: vec![1],
+            t1: dict.parse("<Digit 10> ~ Char").expect(""),
+            t2: dict.parse("Char ~ ReprTree").expect("")
+        })
+    );
+}
+
+#[test]
+fn test_reprtree_list_subtype() {
+    let mut dict = BimapTypeDict::new();
+
+    dict.add_varname("Item".into());
+
+    assert_eq!(
+        subtype_unify(
+            &dict.parse("<List~Vec <Digit 10>~Char~ReprTree>").expect(""),
+            &dict.parse("<List~Vec Item~ReprTree>").expect("")
+        ),
+        Ok((
+            TypeTerm::unit(),
+            vec![
+                (dict.get_typeid(&"Item".into()).unwrap(), dict.parse("<Digit 10>~Char").unwrap())
+            ].into_iter().collect()
+        ))
+    );
+}
+
 #[test]
 pub fn test_subtype_delim() {
     let mut dict = BimapTypeDict::new();

From 63eb4798ac88dcc468f1f334730927910606fa75 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 12 Mar 2025 16:38:39 +0100
Subject: [PATCH 24/33] work on subtype satisfaction

- correctly propagate errors
- in the case of a subtype relation between two ladders, check that the matching sub-ladders end at the same bottom rung (to exclude trait types from subtypes)
- rewrite subtype satisfaction of ladders to work from the bottom up (see the sketch below)
- add more tests
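
Sketch of the bottom-up behaviour through the public subtype_unify wrapper
(mirrors the new test_subtype_unification1 assertions below): the rungs of
the right ladder are matched against the end of the left ladder, and the
unmatched upper rungs form the halo.

    let mut dict = BimapTypeDict::new();

    let (halo, _σ) = subtype_unify(
        &dict.parse("A ~ B ~ C ~ D").unwrap(),
        &dict.parse("C ~ D").unwrap()
    ).unwrap();

    // the rungs above the matched suffix remain as the halo
    assert_eq!( halo, dict.parse("A ~ B").unwrap() );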
---
 src/test/unification.rs |  70 +++++++++--
 src/unification.rs      | 267 +++++++++++++++++++++++++---------------
 2 files changed, 230 insertions(+), 107 deletions(-)

diff --git a/src/test/unification.rs b/src/test/unification.rs
index feb637d..6021dda 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -132,7 +132,61 @@ fn test_unification() {
 }
 
 #[test]
-fn test_subtype_unification() {
+fn test_subtype_unification1() {
+    let mut dict = BimapTypeDict::new();
+    dict.add_varname(String::from("T"));
+
+    assert_eq!(
+        UnificationProblem::new_sub(vec![
+            (dict.parse("A ~ B").unwrap(),
+                dict.parse("B").unwrap()),
+        ]).solve(),
+        Ok((
+            vec![ dict.parse("A").unwrap() ],
+            vec![].into_iter().collect()
+        ))
+    );
+
+    assert_eq!(
+        UnificationProblem::new_sub(vec![
+            (dict.parse("A ~ B ~ C ~ D").unwrap(),
+                dict.parse("C ~ D").unwrap()),
+        ]).solve(),
+        Ok((
+            vec![ dict.parse("A ~ B").unwrap() ],
+            vec![].into_iter().collect()
+        ))
+    );
+
+    assert_eq!(
+        UnificationProblem::new_sub(vec![
+            (dict.parse("A ~ B ~ C ~ D").unwrap(),
+                dict.parse("T ~ D").unwrap()),
+        ]).solve(),
+        Ok((
+            vec![ TypeTerm::unit() ],
+            vec![
+                (dict.get_typeid(&"T".into()).unwrap(), dict.parse("A ~ B ~ C").unwrap())
+            ].into_iter().collect()
+        ))
+    );
+
+    assert_eq!(
+        UnificationProblem::new_sub(vec![
+            (dict.parse("A ~ B ~ C ~ D").unwrap(),
+                dict.parse("B ~ T ~ D").unwrap()),
+        ]).solve(),
+        Ok((
+            vec![ dict.parse("A").unwrap() ],
+            vec![
+                (dict.get_typeid(&"T".into()).unwrap(), dict.parse("C").unwrap())
+            ].into_iter().collect()
+        ))
+    );
+}
+
+#[test]
+fn test_subtype_unification2() {
     let mut dict = BimapTypeDict::new();
 
     dict.add_varname(String::from("T"));
@@ -142,7 +196,7 @@ fn test_subtype_unification() {
 
     assert_eq!(
         UnificationProblem::new_sub(vec![
-            (dict.parse("<Seq~T <Digit 10> ~ Char>").unwrap(),
+            (dict.parse("<Seq~T <Digit 10> ~ Char ~ Ascii>").unwrap(),
                 dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap()),
         ]).solve(),
         Ok((
@@ -232,9 +286,9 @@ fn test_trait_not_subtype() {
             &dict.parse("A ~ B ~ C").expect("")
         ),
         Err(UnificationError {
-            addr: vec![],
-            t1: dict.parse("A ~ B").expect(""),
-            t2: dict.parse("A ~ B ~ C").expect("")
+            addr: vec![1],
+            t1: dict.parse("B").expect(""),
+            t2: dict.parse("C").expect("")
         })
     );
 
@@ -244,9 +298,9 @@ fn test_trait_not_subtype() {
             &dict.parse("<Seq~List~Vec Char~ReprTree>").expect("")
         ),
         Err(UnificationError {
-            addr: vec![1],
-            t1: dict.parse("<Digit 10> ~ Char").expect(""),
-            t2: dict.parse("Char ~ ReprTree").expect("")
+            addr: vec![1,1],
+            t1: dict.parse("Char").expect(""),
+            t2: dict.parse("ReprTree").expect("")
         })
     );
 }
diff --git a/src/unification.rs b/src/unification.rs
index ef8c0fd..b8ae779 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -137,85 +137,119 @@ impl UnificationProblem {
         }
     }
 
+
+
+    pub fn add_lower_subtype_bound(&mut self, v: u64, new_lower_bound: TypeTerm) -> Result<(),()> {
+
+        if new_lower_bound == TypeTerm::TypeID(TypeID::Var(v)) {
+            return Ok(());
+        }
+
+        if new_lower_bound.contains_var(v) {
+            // loop
+            return Err(());
+        }
+
+        if let Some(lower_bound) = self.lower_bounds.get(&v).cloned() {
+//                        eprintln!("var already exists. check max. type");
+            if let Ok(halo) = self.eval_subtype(
+                UnificationPair {
+                    lhs: lower_bound.clone(),
+                    rhs: new_lower_bound.clone(),
+                    halo: TypeTerm::unit(),
+                    addr: vec![]
+                }
+            ) {
+//                            eprintln!("found more general lower bound");
+//                            eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
+                // generalize variable type to supertype
+                self.lower_bounds.insert(v, new_lower_bound);
+                Ok(())
+            } else if let Ok(halo) = self.eval_subtype(
+                UnificationPair{
+                    lhs: new_lower_bound,
+                    rhs: lower_bound,
+                    halo: TypeTerm::unit(),
+                    addr: vec![]
+                }
+            ) {
+//                            eprintln!("OK, is already larger type");
+                 Ok(())
+            } else {
+//                            eprintln!("violated subtype restriction");
+                Err(())
+            }
+        } else {
+//                        eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
+            self.lower_bounds.insert(v, new_lower_bound);
+            Ok(())
+        }
+    }
+
+
+    pub fn add_upper_subtype_bound(&mut self, v: u64, new_upper_bound: TypeTerm) -> Result<(),()> {
+        if new_upper_bound == TypeTerm::TypeID(TypeID::Var(v)) {
+            return Ok(());
+        }
+
+        if new_upper_bound.contains_var(v) {
+            // loop
+            return Err(());
+        }
+
+        if let Some(upper_bound) = self.upper_bounds.get(&v).cloned() {
+            if let Ok(_halo) = self.eval_subtype(
+                UnificationPair {
+                    lhs: new_upper_bound.clone(),
+                    rhs: upper_bound,
+                    halo: TypeTerm::unit(),
+                    addr: vec![]
+                }
+            ) {
+                // found a lower upper bound
+                self.upper_bounds.insert(v, new_upper_bound);
+                Ok(())
+            } else {
+                Err(())
+            }
+        } else {
+            self.upper_bounds.insert(v, new_upper_bound);
+            Ok(())
+        }
+    }
+
     pub fn eval_subtype(&mut self, unification_pair: UnificationPair) -> Result<
         // ok: halo type
         TypeTerm,
         // error
         UnificationError
     > {
+        eprintln!("eval_subtype {:?} <=? {:?}", unification_pair.lhs, unification_pair.rhs);
         match (unification_pair.lhs.clone(), unification_pair.rhs.clone()) {
 
             /*
              Variables
             */
 
-            (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-//                eprintln!("t <= variable");
-                if ! t.contains_var( varid ) {
-                   // let x = self.σ.get(&TypeID::Var(varid)).cloned();
-                    if let Some(lower_bound) = self.lower_bounds.get(&varid).cloned() {
-//                        eprintln!("var already exists. check max. type");
-                        if let Ok(halo) = self.eval_subtype(
-                            UnificationPair {
-                                lhs: lower_bound.clone(),
-                                rhs: t.clone(),
-                                halo: TypeTerm::unit(),
-                                addr: vec![]
-                            }
-                        ) {
-//                            eprintln!("found more general lower bound");
-//                            eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
-                            // generalize variable type to supertype
-                            self.lower_bounds.insert(varid, t.clone());
-                        } else if let Ok(halo) = self.eval_subtype(
-                            UnificationPair{
-                                lhs: t.clone(),
-                                rhs: lower_bound.clone(),
-                                halo: TypeTerm::unit(),
-                                addr: vec![]
-                            }
-                        ) {
-//                            eprintln!("OK, is already larger type");
-                        } else {
-//                            eprintln!("violated subtype restriction");
-                            return Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t });
-                        }
-                    } else {
-//                        eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
-                        self.lower_bounds.insert(varid, t.clone());
-                    }
-                    self.reapply_subst();
-                    Ok(TypeTerm::unit())
-                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
+            (t, TypeTerm::TypeID(TypeID::Var(v))) => {
+                //eprintln!("t <= variable");
+                if self.add_lower_subtype_bound(v, t.clone()).is_ok() {
                     Ok(TypeTerm::unit())
                 } else {
-                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
+                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
                 }
             }
 
-            (TypeTerm::TypeID(TypeID::Var(varid)), t) => {
-//                eprintln!("variable <= t");
-
-                if let Some(upper_bound) = self.upper_bounds.get(&varid).cloned() {
-                    if let Ok(_halo) = self.eval_subtype(
-                        UnificationPair {
-                            lhs: t.clone(),
-                            rhs: upper_bound,
-                            halo: TypeTerm::unit(),
-                            addr: vec![]
-                        }
-                    ) {
-                        // found a lower upper bound
-                        self.upper_bounds.insert(varid, t);
-                    }
+            (TypeTerm::TypeID(TypeID::Var(v)), t) => {
+                //eprintln!("variable <= t");
+                if self.add_upper_subtype_bound(v, t.clone()).is_ok() {
+                    Ok(TypeTerm::unit())
                 } else {
-                    self.upper_bounds.insert(varid, t);
+                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
                 }
-                Ok(TypeTerm::unit())
             }
 
 
-
             /*
              Atoms
             */
@@ -236,56 +270,90 @@ impl UnificationProblem {
             */
 
             (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
-                let mut halo = Vec::new();
-                for i in 0..a1.len() {
-                    let mut new_addr = unification_pair.addr.clone();
-                    new_addr.push(i);
-                    if let Ok(r_halo) = self.eval_subtype( UnificationPair {
-                        lhs: a1[i].clone(),
-                        rhs: a2[0].clone(),
+                let mut l1_iter = a1.into_iter().enumerate().rev();
+                let mut l2_iter = a2.into_iter().rev();
 
-                        halo: TypeTerm::unit(),
-                        addr: new_addr
-                    }) {
-//                        eprintln!("unified ladders at {}, r_halo = {:?}", i, r_halo);
+                let mut halo_ladder = Vec::new();
 
-                        for j in 0..a2.len() {
-                            if i+j < a1.len() {
-                                let mut new_addr = unification_pair.addr.clone();
-                                new_addr.push(i+j);
+                while let Some(rhs) = l2_iter.next() {
+                    //eprintln!("take rhs = {:?}", rhs);
+                    if let Some((i, lhs)) = l1_iter.next() {
+                        //eprintln!("take lhs ({}) = {:?}", i, lhs);
+                        let mut addr = unification_pair.addr.clone();
+                        addr.push(i);
+                        //eprintln!("addr = {:?}", addr);
 
-                                let lhs = a1[i+j].clone();//.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
-                                let rhs = a2[j].clone();//.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
+                        match (lhs.clone(), rhs.clone()) {
+                            (t, TypeTerm::TypeID(TypeID::Var(v))) => {
 
-                                if let Ok(rung_halo) = self.eval_subtype(
-                                    UnificationPair {
-                                        lhs: lhs.clone(), rhs: rhs.clone(),
-                                        addr: new_addr.clone(),
-                                        halo: TypeTerm::unit()
+                                if self.add_upper_subtype_bound(v,t.clone()).is_ok() {
+                                    let mut new_upper_bound_ladder = vec![ t ];
+
+                                    if let Some(next_rhs) = l2_iter.next() {
+
+                                        // TODO
+
+                                    } else {
+                                        // take everything
+
+                                        while let Some((i,t)) = l1_iter.next() {
+                                            new_upper_bound_ladder.push(t);
+                                        }
                                     }
-                                ) {
-                                    if rung_halo != TypeTerm::unit() {
-                                        halo.push(rung_halo);
+
+                                    new_upper_bound_ladder.reverse();
+                                    if self.add_upper_subtype_bound(v, TypeTerm::Ladder(new_upper_bound_ladder)).is_ok() {
+                                        // ok
+                                    } else {
+                                        return Err(UnificationError {
+                                            addr,
+                                            t1: lhs,
+                                            t2: rhs
+                                        });
                                     }
                                 } else {
-                                    return Err(UnificationError{ addr: new_addr, t1: lhs, t2: rhs })
+                                    return Err(UnificationError {
+                                        addr,
+                                        t1: lhs,
+                                        t2: rhs
+                                    });
+                                }
+                            }
+                            (lhs, rhs) => {
+                                if let Ok(ψ) = self.eval_subtype(
+                                    UnificationPair {
+                                        lhs: lhs.clone(), rhs: rhs.clone(),
+                                        addr:addr.clone(), halo: TypeTerm::unit()
+                                    }
+                                ) {
+                                    // ok.
+                                    //eprintln!("rungs are subtypes. continue");
+                                    halo_ladder.push(ψ);
+                                } else {
+                                    return Err(UnificationError {
+                                        addr,
+                                        t1: lhs,
+                                        t2: rhs
+                                    });
                                 }
                             }
                         }
-
-                        return Ok(
-                            if halo.len() == 1 {
-                                halo.pop().unwrap()
-                            } else {
-                                TypeTerm::Ladder(halo)
-                            });
                     } else {
-                        halo.push(a1[i].clone());
-                        //eprintln!("could not unify ladders");
+                        // not a subtype,
+                        return Err(UnificationError {
+                            addr: vec![],
+                            t1: unification_pair.lhs,
+                            t2: unification_pair.rhs
+                        });
                     }
                 }
+                //eprintln!("left ladder fully consumed");
 
-                Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
+                for (i,t) in l1_iter {
+                    halo_ladder.push(t);
+                }
+                halo_ladder.reverse();
+                Ok(TypeTerm::Ladder(halo_ladder).strip().param_normalize())
             },
 
             (t, TypeTerm::Ladder(a1)) => {
@@ -335,7 +403,7 @@ impl UnificationProblem {
 //                        eprintln!("after strip: {:?}", y);
 //                        eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);
 
-                        if let Ok(halo) = self.eval_subtype(
+                        match self.eval_subtype(
                             UnificationPair {
                                 lhs: x.clone(),
                                 rhs: y.clone(),
@@ -343,15 +411,16 @@ impl UnificationProblem {
                                 addr: new_addr,
                             }
                         ) {
+                            Ok(halo) => {
                             if halo == TypeTerm::unit() {
                                 let mut y = y.clone();
                                 y.apply_substitution(&|k| self.σ.get(k).cloned());
                                 y = y.strip();
                                 let mut top = y.get_lnf_vec().first().unwrap().clone();
                                 halo_args.push(top.clone());
-//                                eprintln!("add top {:?}", top);
+                                //eprintln!("add top {:?}", top);
                             } else {
-//                                eprintln!("add halo {:?}", halo);
+                                //eprintln!("add halo {:?}", halo);
                                 if n_halos_required > 0 {
                                     let x = &mut halo_args[n_halos_required-1];
                                     if let TypeTerm::Ladder(argrs) = x {
@@ -372,13 +441,13 @@ impl UnificationProblem {
                                 halo_args.push(halo);
                                 n_halos_required += 1;
                             }
-                        } else {
-                            return Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
+                            },
+                            Err(err) => { return Err(err); }
                         }
                     }
 
                     if n_halos_required > 0 {
-//                        eprintln!("halo args : {:?}", halo_args);
+                        //eprintln!("halo args : {:?}", halo_args);
                         Ok(TypeTerm::App(halo_args))
                     } else {
                         Ok(TypeTerm::unit())

From 08a9bad0ad4d1e6073cb11767a11c24966fc23d3 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 24 Mar 2025 14:06:16 +0100
Subject: [PATCH 25/33] add Substitution trait
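
With this, a plain HashMap as well as any closure of shape
Fn(&TypeID) -> Option<TypeTerm> can be passed as a substitution. Sketch of
the intended usage, following the updated test in src/test/substitution.rs:

    let mut dict = BimapTypeDict::new();
    dict.add_varname("T".into());

    let σ: std::collections::HashMap<TypeID, TypeTerm> = vec![
        ( dict.get_typeid(&"T".into()).unwrap(), dict.parse("ℕ").unwrap() )
    ].into_iter().collect();

    let mut t = dict.parse("<Seq T>").unwrap();

    // pass the map directly ...
    t.apply_subst(&σ);

    // ... or wrap it in a closure
    t.apply_subst(&|tid: &TypeID| σ.get(tid).cloned());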

---
 src/lib.rs               |  3 ++
 src/substitution.rs      | 62 ++++++++++++++++++++++++++++++++++++++++
 src/term.rs              | 29 -------------------
 src/test/substitution.rs |  7 ++---
 src/test/unification.rs  |  4 +--
 src/unification.rs       | 20 ++++++-------
 6 files changed, 80 insertions(+), 45 deletions(-)
 create mode 100644 src/substitution.rs

diff --git a/src/lib.rs b/src/lib.rs
index 2e9a163..c9d2293 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,6 +2,8 @@
 pub mod bimap;
 pub mod dict;
 pub mod term;
+pub mod substitution;
+
 pub mod lexer;
 pub mod parser;
 pub mod unparser;
@@ -21,6 +23,7 @@ mod pretty;
 pub use {
     dict::*,
     term::*,
+    substitution::*,
     sugar::*,
     unification::*,
 };
diff --git a/src/substitution.rs b/src/substitution.rs
new file mode 100644
index 0000000..b0f70ff
--- /dev/null
+++ b/src/substitution.rs
@@ -0,0 +1,62 @@
+
+use crate::{
+    TypeID,
+    TypeTerm
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+pub trait Substitution {
+    fn get(&self, t: &TypeID) -> Option< TypeTerm >;
+}
+
+impl<S: Fn(&TypeID)->Option<TypeTerm>> Substitution for S {
+    fn get(&self, t: &TypeID) -> Option< TypeTerm > {
+        (self)(t)
+    }
+}
+
+impl Substitution for std::collections::HashMap< TypeID, TypeTerm > {
+    fn get(&self, t: &TypeID) -> Option< TypeTerm > {
+        (self as &std::collections::HashMap< TypeID, TypeTerm >).get(t).cloned()
+    }
+}
+
+impl TypeTerm {
+    /// recursively apply substitution to all subterms,
+    /// which will replace all occurrences of variables which map to
+    /// some type-term in `σ`
+    pub fn apply_substitution(
+        &mut self,
+        σ: &impl Substitution
+    ) -> &mut Self {
+        self.apply_subst(σ)
+    }
+
+    pub fn apply_subst(
+        &mut self,
+        σ: &impl Substitution
+    ) -> &mut Self {
+        match self {
+            TypeTerm::TypeID(typid) => {
+                if let Some(t) = σ.get(typid) {
+                    *self = t;
+                }
+            }
+
+            TypeTerm::Ladder(rungs) => {
+                for r in rungs.iter_mut() {
+                    r.apply_subst(σ);
+                }
+            }
+            TypeTerm::App(args) => {
+                for r in args.iter_mut() {
+                    r.apply_subst(σ);
+                }
+            }
+            _ => {}
+        }
+
+        self
+    }
+}
diff --git a/src/term.rs b/src/term.rs
index 7f6d87a..edbb18d 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -95,35 +95,6 @@ impl TypeTerm {
         }
     }
 
-    /// recursively apply substitution to all subterms,
-    /// which will replace all occurences of variables which map
-    /// some type-term in `subst`
-    pub fn apply_substitution(
-        &mut self,
-        subst: &impl Fn(&TypeID) -> Option<TypeTerm>
-    ) -> &mut Self {
-        match self {
-            TypeTerm::TypeID(typid) => {
-                if let Some(t) = subst(typid) {
-                    *self = t;
-                }
-            }
-
-            TypeTerm::Ladder(rungs) => {
-                for r in rungs.iter_mut() {
-                    r.apply_substitution(subst);
-                }
-            }
-            TypeTerm::App(args) => {
-                for r in args.iter_mut() {
-                    r.apply_substitution(subst);
-                }
-            }
-            _ => {}
-        }
-
-        self
-    }
 
     /* strip away empty ladders
      * & unwrap singletons
diff --git a/src/test/substitution.rs b/src/test/substitution.rs
index e8906b9..91aa810 100644
--- a/src/test/substitution.rs
+++ b/src/test/substitution.rs
@@ -1,7 +1,7 @@
 
 use {
-    crate::{dict::*, term::*, parser::*, unparser::*},
-    std::iter::FromIterator
+    crate::{dict::*, term::*, parser::*, unparser::*, substitution::*},
+    std::iter::FromIterator,
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -24,8 +24,7 @@ fn test_subst() {
 
 
     assert_eq!(
-        dict.parse("<Seq T~U>").unwrap()
-            .apply_substitution(&|typid|{ σ.get(typid).cloned() }).clone(),
+        dict.parse("<Seq T~U>").unwrap().apply_subst(&σ).clone(),
         dict.parse("<Seq ℕ~<Seq Char>>").unwrap()
     );
 }
diff --git a/src/test/unification.rs b/src/test/unification.rs
index 6021dda..99ea7ed 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -23,8 +23,8 @@ fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
         let σ = σ.unwrap();
 
         assert_eq!(
-            t1.apply_substitution(&|v| σ.get(v).cloned()),
-            t2.apply_substitution(&|v| σ.get(v).cloned())
+            t1.apply_subst(&σ),
+            t2.apply_subst(&σ)
         );
     } else {
         assert!(! σ.is_ok());
diff --git a/src/unification.rs b/src/unification.rs
index b8ae779..c1c0d0a 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -79,7 +79,7 @@ impl UnificationProblem {
         let mut new_σ = HashMap::new();
         for (v, tt) in self.σ.iter() {
             let mut tt = tt.clone().normalize();
-            tt.apply_substitution(&|v| self.σ.get(v).cloned());
+            tt.apply_subst(&self.σ);
             tt = tt.normalize();
             //eprintln!("update σ : {:?} --> {:?}", v, tt);
             new_σ.insert(v.clone(), tt);
@@ -414,7 +414,7 @@ impl UnificationProblem {
                             Ok(halo) => {
                             if halo == TypeTerm::unit() {
                                 let mut y = y.clone();
-                                y.apply_substitution(&|k| self.σ.get(k).cloned());
+                                y.apply_subst(&self.σ);
                                 y = y.strip();
                                 let mut top = y.get_lnf_vec().first().unwrap().clone();
                                 halo_args.push(top.clone());
@@ -425,7 +425,7 @@ impl UnificationProblem {
                                     let x = &mut halo_args[n_halos_required-1];
                                     if let TypeTerm::Ladder(argrs) = x {
                                         let mut a = a2[n_halos_required-1].clone();
-                                        a.apply_substitution(&|k| self.σ.get(k).cloned());
+                                        a.apply_subst(&self.σ);
                                         a = a.get_lnf_vec().first().unwrap().clone();
                                         argrs.push(a);
                                     } else {
@@ -434,7 +434,7 @@ impl UnificationProblem {
                                             a2[n_halos_required-1].clone().get_lnf_vec().first().unwrap().clone()
                                         ]);
 
-                                        x.apply_substitution(&|k| self.σ.get(k).cloned());
+                                        x.apply_subst(&self.σ);
                                     }
                                 }
 
@@ -464,8 +464,8 @@ impl UnificationProblem {
     pub fn solve(mut self) -> Result<(Vec<TypeTerm>, HashMap<TypeID, TypeTerm>), UnificationError> {
         // solve equations
         while let Some( mut equal_pair ) = self.equal_pairs.pop() {
-            equal_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            equal_pair.rhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            equal_pair.lhs.apply_subst(&self.σ);
+            equal_pair.rhs.apply_subst(&self.σ);
 
             self.eval_equation(equal_pair)?;
         }
@@ -473,8 +473,8 @@ impl UnificationProblem {
         // solve subtypes
 //        eprintln!("------ SOLVE SUBTYPES ---- ");
         for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
-            subtype_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            subtype_pair.rhs.apply_substitution(&|v| self.σ.get(v).cloned());
+            subtype_pair.lhs.apply_subst(&self.σ);
+            subtype_pair.rhs.apply_subst(&self.σ);
             let _halo = self.eval_subtype( subtype_pair.clone() )?.strip();
         }
 
@@ -494,8 +494,8 @@ impl UnificationProblem {
 //        eprintln!("------ MAKE HALOS -----");
         let mut halo_types = Vec::new();
         for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
-            subtype_pair.lhs = subtype_pair.lhs.apply_substitution(&|v| self.σ.get(v).cloned()).clone().strip();
-            subtype_pair.rhs = subtype_pair.rhs.apply_substitution(&|v| self.σ.get(v).cloned()).clone().strip();
+            subtype_pair.lhs = subtype_pair.lhs.apply_subst(&self.σ).clone().strip();
+            subtype_pair.rhs = subtype_pair.rhs.apply_subst(&self.σ).clone().strip();
 
             let halo = self.eval_subtype( subtype_pair.clone() )?.strip();
             halo_types.push(halo);

From ed9f3093060db4a8cf5a35020e46f273c682d37f Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 1 Jun 2025 16:22:46 +0200
Subject: [PATCH 26/33] add type alias for HashMapSubst

---
 src/substitution.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/substitution.rs b/src/substitution.rs
index b0f70ff..80de6bf 100644
--- a/src/substitution.rs
+++ b/src/substitution.rs
@@ -22,6 +22,8 @@ impl Substitution for std::collections::HashMap< TypeID, TypeTerm > {
     }
 }
 
+pub type HashMapSubst = std::collections::HashMap< TypeID, TypeTerm >;
+
 impl TypeTerm {
     /// recursively apply substitution to all subterms,
     /// which will replace all occurrences of variables which map to

From 7c4fbf92982090d3377c832906bdebf7619f6c8a Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 4 Aug 2024 23:23:20 +0200
Subject: [PATCH 27/33] initial implementation of morphism base & graph search

---
 src/lib.rs      |   1 +
 src/morphism.rs | 195 ++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 196 insertions(+)
 create mode 100644 src/morphism.rs

diff --git a/src/lib.rs b/src/lib.rs
index c9d2293..6a210a0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -13,6 +13,7 @@ pub mod lnf;
 pub mod pnf;
 pub mod subtype;
 pub mod unification;
+pub mod morphism;
 
 #[cfg(test)]
 mod test;
diff --git a/src/morphism.rs b/src/morphism.rs
new file mode 100644
index 0000000..e42d6bb
--- /dev/null
+++ b/src/morphism.rs
@@ -0,0 +1,195 @@
+use {
+    crate::{
+        TypeTerm, TypeID,
+        unification::UnificationProblem,
+    },
+    std::collections::HashMap
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+pub struct MorphismType {
+    pub src_type: TypeTerm,
+    pub dst_type: TypeTerm,
+}
+
+pub struct MorphismBase<Morphism: Clone> {
+    morphisms: Vec< (MorphismType, Morphism) >,
+    list_typeid: TypeID
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+impl MorphismType {
+    fn normalize(self) -> Self {
+        MorphismType {
+            src_type: self.src_type.normalize(),
+            dst_type: self.dst_type.normalize()
+        }
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+impl<Morphism: Clone> MorphismBase<Morphism> {
+    pub fn new() -> Self {
+        MorphismBase {
+            morphisms: Vec::new(),
+
+            // FIXME: magic number
+            list_typeid: TypeID::Fun(10)
+        }
+    }
+
+    pub fn add_morphism(&mut self, morph_type: MorphismType, morphism: Morphism) {
+        self.morphisms.push( (morph_type.normalize(), morphism) );
+    }
+
+    pub fn enum_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< (HashMap<TypeID, TypeTerm>, TypeTerm) >
+    {
+        let mut dst_types = Vec::new();
+
+        // first enumerate all "direct" morphisms,
+        for (ty,m) in self.morphisms.iter() {
+            if let Ok(σ) = crate::unification::unify(
+                &ty.src_type,
+                &src_type.clone().normalize()
+            ) {
+                let dst_type =
+                    ty.dst_type.clone()
+                        .apply_substitution(&σ)
+                        .clone();
+
+                dst_types.push( (σ, dst_type) );
+            }
+        }
+
+        // ..then all "list-map" morphisms.
+        // Check if we have a List type, and if so, see what the Item type is
+
+        // TODO: function for generating fresh variables
+        let item_variable = TypeID::Var(100);
+
+        if let Ok(σ) = crate::unification::unify(
+            &TypeTerm::App(vec![
+                TypeTerm::TypeID(self.list_typeid),
+                TypeTerm::TypeID(item_variable)
+            ]),
+            &src_type.clone().param_normalize(),
+        ) {
+            let src_item_type = σ.get(&item_variable).unwrap().clone();
+
+            for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
+                let dst_type =
+                        TypeTerm::App(vec![
+                            TypeTerm::TypeID(self.list_typeid),
+                            dst_item_type.clone()
+                                .apply_substitution(&γ).clone()
+                        ]).normalize();
+
+                dst_types.push( (γ.clone(), dst_type) );
+            }
+        }
+
+        dst_types
+    }
+
+    pub fn enum_morphisms_with_subtyping(&self, src_type: &TypeTerm)
+    -> Vec< (TypeTerm, TypeTerm) >
+    {
+        let mut src_lnf = src_type.clone().get_lnf_vec();
+        let mut halo_lnf = vec![];
+        let mut dst_types = Vec::new();
+
+        while src_lnf.len() > 0 {
+            let src_type = TypeTerm::Ladder( src_lnf.clone() );
+            let halo_type = TypeTerm::Ladder( halo_lnf.clone() );
+
+            for (σ, t) in self.enum_morphisms( &src_type ) {
+                dst_types.push(
+                    (halo_type.clone()
+                        .apply_substitution(&σ).clone(),
+                    t.clone()
+                        .apply_substitution(&σ).clone()
+                    )
+                );
+            }
+
+            // continue with next supertype
+            halo_lnf.push(src_lnf.remove(0));
+        }
+
+        dst_types
+    }
+
+    /* performs DFS to find a morphism-path for a given type
+     * will return the first matching path, not the shortest
+     */
+    pub fn find_morphism_path(&self, ty: MorphismType)
+    -> Option< Vec<TypeTerm> >
+    {
+        let ty = ty.normalize();
+        let mut visited = Vec::new();
+        let mut queue = vec![
+            vec![ ty.src_type.clone().normalize() ]
+        ];
+
+        while let Some(current_path) = queue.pop() {
+            let current_type = current_path.last().unwrap();
+
+            if ! visited.contains( current_type ) {
+                visited.push( current_type.clone() );
+
+                for (h, t) in self.enum_morphisms_with_subtyping(&current_type) {
+                    let tt = TypeTerm::Ladder( vec![ h, t ] ).normalize();
+
+                    if ! visited.contains( &tt ) {
+                        let unification_result = crate::unification::unify(&tt, &ty.dst_type);
+                        let mut new_path = current_path.clone();
+
+                        new_path.push( tt );
+
+                        if let Ok(σ) = unification_result {
+                            new_path = new_path.into_iter().map(
+                                |mut t: TypeTerm| t.apply_substitution(&σ).clone()
+                            ).collect::<Vec<TypeTerm>>();
+
+                            return Some(new_path);
+                        } else {
+                            queue.push( new_path );
+                        }
+                    }
+                }
+            }
+        }
+
+        None
+    }
+
+    pub fn find_morphism(&self, ty: &MorphismType)
+    -> Option< Morphism > {
+
+        // TODO
+
+        None
+    }
+
+    pub fn find_list_map_morphism(&self, item_ty: &MorphismType)
+    -> Option< Morphism > {
+
+        // TODO
+
+        None
+    }
+
+    pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
+    -> Option<( Morphism, TypeTerm, HashMap<TypeID, TypeTerm> )> {
+
+        // TODO
+
+        None
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

From 0d891e867703ddb73f3991e56f456eba8d84797d Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 4 Aug 2024 23:47:59 +0200
Subject: [PATCH 28/33] add test for find_morphism_path()

---
 src/morphism.rs      |  6 ++--
 src/test/mod.rs      |  1 +
 src/test/morphism.rs | 69 ++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 72 insertions(+), 4 deletions(-)
 create mode 100644 src/test/morphism.rs

diff --git a/src/morphism.rs b/src/morphism.rs
index e42d6bb..34b463b 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -32,12 +32,10 @@ impl MorphismType {
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 impl<Morphism: Clone> MorphismBase<Morphism> {
-    pub fn new() -> Self {
+    pub fn new(list_typeid: TypeID) -> Self {
         MorphismBase {
             morphisms: Vec::new(),
-
-            // FIXME: magic number
-            list_typeid: TypeID::Fun(10)
+            list_typeid
         }
     }
 
diff --git a/src/test/mod.rs b/src/test/mod.rs
index 29c14bc..41f5e71 100644
--- a/src/test/mod.rs
+++ b/src/test/mod.rs
@@ -7,4 +7,5 @@ pub mod pnf;
 pub mod subtype;
 pub mod substitution;
 pub mod unification;
+pub mod morphism;
 
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
new file mode 100644
index 0000000..5c992eb
--- /dev/null
+++ b/src/test/morphism.rs
@@ -0,0 +1,69 @@
+use {
+    crate::{dict::*, morphism::*, parser::*}
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[test]
+fn test_morphism_path() {
+    let mut dict = BimapTypeDict::new();
+    let mut base = MorphismBase::<u64>::new( dict.add_typename("Seq".into()) );
+
+    dict.add_varname("Radix".into());
+    dict.add_varname("SrcRadix".into());
+    dict.add_varname("DstRadix".into());
+
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
+            dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
+        },
+        11
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
+            dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
+        },
+        22
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
+        },
+        333
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
+        },
+        444
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
+        },
+        555
+    );
+
+
+    assert_eq!(
+        base.find_morphism_path(MorphismType {
+            src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+        }),
+        Some(
+            vec![
+                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
+            ]
+        )
+    );
+}

From aa7a39bf17bdd78891c286e8fb07744e80e05834 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 5 Aug 2024 02:54:35 +0200
Subject: [PATCH 29/33] improvements over initial graph search implementation

- turn Morphism into a trait and add a find_morphism() function
- morphism base: store a vec of seq-types
- morphism base: find shortest path instead of just some path
- fix returned halo type in find_morphism_with_subtyping()
- fix find_morphism_path:
    * also apply substitution from src-type match
    * get this substitution as result from `enum_morphisms_with_subtyping`
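
find_morphism_path now keeps a queue of (weight, path) pairs and always
expands the cheapest path first instead of doing plain DFS. As a minimal,
self-contained sketch of that queue strategy (not part of this patch; the
hypothetical `shortest_path` below works on plain u32 node ids and fixed
edge weights instead of TypeTerms and morphism weights):

    use std::collections::HashMap;

    /// Uniform-cost search over a weighted edge list, mirroring the
    /// sort-then-pop queue handling used by find_morphism_path().
    fn shortest_path(
        edges: &HashMap<u32, Vec<(u32, u64)>>,
        src: u32,
        dst: u32
    ) -> Option<Vec<u32>> {
        // queue of (accumulated weight, path so far)
        let mut queue = vec![ (0u64, vec![ src ]) ];

        while !queue.is_empty() {
            // sort descending by weight, so pop() yields the cheapest path
            queue.sort_by(|&(w1, _), &(w2, _)| w2.cmp(&w1));

            if let Some((weight, path)) = queue.pop() {
                let cur = *path.last().unwrap();
                if cur == dst {
                    return Some(path);
                }
                for &(next, w) in edges.get(&cur).into_iter().flatten() {
                    // avoid cycles: skip nodes already on this path
                    if !path.contains(&next) {
                        let mut new_path = path.clone();
                        new_path.push(next);
                        queue.push((weight + w, new_path));
                    }
                }
            }
        }
        None
    }

    fn main() {
        let mut edges = HashMap::new();
        edges.insert(0, vec![ (1, 1), (2, 5) ]);
        edges.insert(1, vec![ (2, 1) ]);
        // 0 -> 1 -> 2 (total weight 2) beats the direct 0 -> 2 (weight 5)
        assert_eq!(shortest_path(&edges, 0, 2), Some(vec![ 0, 1, 2 ]));
    }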
---
 src/lib.rs           |   1 +
 src/morphism.rs      | 184 ++++++++++++++++++++++++++++++-------------
 src/test/morphism.rs |  93 +++++++++++++++++-----
 3 files changed, 204 insertions(+), 74 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 6a210a0..f8f73aa 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -27,4 +27,5 @@ pub use {
     substitution::*,
     sugar::*,
     unification::*,
+    morphism::*
 };
diff --git a/src/morphism.rs b/src/morphism.rs
index 34b463b..bed5b6a 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -8,20 +8,31 @@ use {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Clone, PartialEq, Eq, Debug)]
 pub struct MorphismType {
     pub src_type: TypeTerm,
     pub dst_type: TypeTerm,
 }
 
-pub struct MorphismBase<Morphism: Clone> {
-    morphisms: Vec< (MorphismType, Morphism) >,
-    list_typeid: TypeID
+pub trait Morphism : Sized {
+    fn get_type(&self) -> MorphismType;
+    fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >;
+
+    fn weight(&self) -> u64 {
+        1
+    }
+}
+
+#[derive(Clone)]
+pub struct MorphismBase<M: Morphism + Clone> {
+    morphisms: Vec< M >,
+    seq_types: Vec< TypeTerm >
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 impl MorphismType {
-    fn normalize(self) -> Self {
+    pub fn normalize(self) -> Self {
         MorphismType {
             src_type: self.src_type.normalize(),
             dst_type: self.dst_type.normalize()
@@ -31,16 +42,16 @@ impl MorphismType {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl<Morphism: Clone> MorphismBase<Morphism> {
-    pub fn new(list_typeid: TypeID) -> Self {
+impl<M: Morphism + Clone> MorphismBase<M> {
+    pub fn new(seq_types: Vec<TypeTerm>) -> Self {
         MorphismBase {
             morphisms: Vec::new(),
-            list_typeid
+            seq_types
         }
     }
 
-    pub fn add_morphism(&mut self, morph_type: MorphismType, morphism: Morphism) {
-        self.morphisms.push( (morph_type.normalize(), morphism) );
+    pub fn add_morphism(&mut self, m: M) {
+        self.morphisms.push( m );
     }
 
     pub fn enum_morphisms(&self, src_type: &TypeTerm)
@@ -49,15 +60,15 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
         let mut dst_types = Vec::new();
 
         // first enumerate all "direct" morphisms,
-        for (ty,m) in self.morphisms.iter() {
+        for m in self.morphisms.iter() {
             if let Ok(σ) = crate::unification::unify(
-                &ty.src_type,
+                &m.get_type().src_type,
                 &src_type.clone().normalize()
             ) {
                 let dst_type =
-                    ty.dst_type.clone()
-                        .apply_substitution(&σ)
-                        .clone();
+                    m.get_type().dst_type.clone()
+                    .apply_substitution( &σ )
+                    .clone();
 
                 dst_types.push( (σ, dst_type) );
             }
@@ -69,9 +80,10 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
         // TODO: function for generating fresh variables
         let item_variable = TypeID::Var(100);
 
+        for seq_type in self.seq_types.iter() {
         if let Ok(σ) = crate::unification::unify(
             &TypeTerm::App(vec![
-                TypeTerm::TypeID(self.list_typeid),
+                seq_type.clone(),
                 TypeTerm::TypeID(item_variable)
             ]),
             &src_type.clone().param_normalize(),
@@ -81,7 +93,7 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
             for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
                 let dst_type =
                         TypeTerm::App(vec![
-                            TypeTerm::TypeID(self.list_typeid),
+                            seq_type.clone(),
                             dst_item_type.clone()
                                 .apply_substitution(&γ).clone()
                         ]).normalize();
@@ -89,29 +101,32 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
                 dst_types.push( (γ.clone(), dst_type) );
             }
         }
+        }
 
         dst_types
     }
 
-    pub fn enum_morphisms_with_subtyping(&self, src_type: &TypeTerm)
-    -> Vec< (TypeTerm, TypeTerm) >
-    {
+    pub fn enum_morphisms_with_subtyping(
+        &self,
+        src_type: &TypeTerm,
+    ) -> Vec<(TypeTerm, TypeTerm, HashMap<TypeID, TypeTerm>)> {
         let mut src_lnf = src_type.clone().get_lnf_vec();
         let mut halo_lnf = vec![];
         let mut dst_types = Vec::new();
 
         while src_lnf.len() > 0 {
-            let src_type = TypeTerm::Ladder( src_lnf.clone() );
-            let halo_type = TypeTerm::Ladder( halo_lnf.clone() );
+            let src_type = TypeTerm::Ladder(src_lnf.clone());
+            let halo_type = TypeTerm::Ladder(halo_lnf.clone());
 
-            for (σ, t) in self.enum_morphisms( &src_type ) {
-                dst_types.push(
-                    (halo_type.clone()
-                        .apply_substitution(&σ).clone(),
-                    t.clone()
-                        .apply_substitution(&σ).clone()
-                    )
-                );
+            for (σ, t) in self.enum_morphisms(&src_type) {
+                dst_types.push((
+                    halo_type
+                        .clone()
+                        .apply_substitution(&σ)
+                        .clone(),
+                    t.clone().apply_substitution(&σ).clone(),
+                    σ,
+                ));
             }
 
             // continue with next supertype
@@ -121,41 +136,46 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
         dst_types
     }
 
-    /* performs DFS to find a morphism-path for a given type
-     * will return the first matching path, not the shortest
+    /* try to find shortest morphism-path for a given type
      */
     pub fn find_morphism_path(&self, ty: MorphismType)
     -> Option< Vec<TypeTerm> >
     {
         let ty = ty.normalize();
-        let mut visited = Vec::new();
+
         let mut queue = vec![
-            vec![ ty.src_type.clone().normalize() ]
+            (0, vec![ ty.src_type.clone().normalize() ])
         ];
 
-        while let Some(current_path) = queue.pop() {
-            let current_type = current_path.last().unwrap();
+        while ! queue.is_empty() {
+            queue.sort_by( |&(w1,_),&(w2,_)| w2.cmp(&w1));
 
-            if ! visited.contains( current_type ) {
-                visited.push( current_type.clone() );
+            if let Some((current_weight, current_path)) = queue.pop() {
+                let current_type = current_path.last().unwrap();
+                for (h, t, σp) in self.enum_morphisms_with_subtyping(&current_type) {
+                    let tt = TypeTerm::Ladder(vec![h, t]).normalize();
 
-                for (h, t) in self.enum_morphisms_with_subtyping(&current_type) {
-                    let tt = TypeTerm::Ladder( vec![ h, t ] ).normalize();
-
-                    if ! visited.contains( &tt ) {
+                    if !current_path.contains(&tt) {
                         let unification_result = crate::unification::unify(&tt, &ty.dst_type);
+                        let morphism_weight = 1;
+                        /*self.find_morphism( &tt ).unwrap().0.get_weight()*/
+
+                        let new_weight = current_weight + morphism_weight;
                         let mut new_path = current_path.clone();
 
-                        new_path.push( tt );
+                        new_path.push(tt);
+
+                        for n in new_path.iter_mut() {
+                            n.apply_substitution(&σp);
+                        }
 
                         if let Ok(σ) = unification_result {
-                            new_path = new_path.into_iter().map(
-                                |mut t: TypeTerm| t.apply_substitution(&σ).clone()
-                            ).collect::<Vec<TypeTerm>>();
-
+                            for n in new_path.iter_mut() {
+                                n.apply_substitution(&σ);
+                            }
                             return Some(new_path);
                         } else {
-                            queue.push( new_path );
+                            queue.push((new_weight, new_path));
                         }
                     }
                 }
@@ -165,26 +185,78 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
         None
     }
 
+
     pub fn find_morphism(&self, ty: &MorphismType)
-    -> Option< Morphism > {
+    -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
 
-        // TODO
+        // try to find primitive morphism
+        for m in self.morphisms.iter() {
+            let unification_problem = UnificationProblem::new_sub(
+                vec![
+                    ( ty.src_type.clone().normalize(), m.get_type().src_type.clone() ),
+                    ( ty.dst_type.clone().normalize(), m.get_type().dst_type.clone() )
+                ]
+            );
 
-        None
-    }
+            let unification_result = unification_problem.solve();
+            if let Ok((ψ,σ)) = unification_result {
+                return Some((m.clone(), σ));
+            }
+        }
 
-    pub fn find_list_map_morphism(&self, item_ty: &MorphismType)
-    -> Option< Morphism > {
+        // try list-map morphism
+        for seq_type in self.seq_types.iter() {
+            eprintln!("try seq type {:?}", seq_type);
 
-        // TODO
+            eprintln!("");
+
+            if let Ok((ψ,σ)) = UnificationProblem::new_sub(vec![
+                (ty.src_type.clone().param_normalize(),
+                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
+                (ty.dst_type.clone().param_normalize(),
+                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ])),
+            ]).solve() {
+                // TODO: use real fresh variable names
+                let item_morph_type = MorphismType {
+                    src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
+                    dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
+                }.normalize();
+
+                if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
+                    if let Some(list_morph) = m.map_morphism( seq_type.clone() ) {
+                        return Some( (list_morph, σ) );
+                    }
+                }
+            }
+        }
 
         None
     }
 
     pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
-    -> Option<( Morphism, TypeTerm, HashMap<TypeID, TypeTerm> )> {
+    -> Option<( M, TypeTerm, HashMap<TypeID, TypeTerm> )> {
+        let mut src_lnf = ty.src_type.clone().get_lnf_vec();
+        let mut dst_lnf = ty.dst_type.clone().get_lnf_vec();
+        let mut halo = vec![];
 
-        // TODO
+        while src_lnf.len() > 0 && dst_lnf.len() > 0 {
+            if let Some((m, σ)) = self.find_morphism(&MorphismType{
+                src_type: TypeTerm::Ladder(src_lnf.clone()),
+                dst_type: TypeTerm::Ladder(dst_lnf.clone())
+            }) {
+                halo.push(src_lnf.get(0).unwrap().clone());
+                return Some((m,
+                    TypeTerm::Ladder(halo).apply_substitution(&σ).clone(),
+                    σ));
+            } else {
+                if src_lnf[0] == dst_lnf[0] {
+                    src_lnf.remove(0);
+                    halo.push(dst_lnf.remove(0));
+                } else {
+                    return None;
+                }
+            }
+        }
 
         None
     }
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index 5c992eb..6e951e4 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -1,55 +1,72 @@
 use {
-    crate::{dict::*, morphism::*, parser::*}
+    crate::{dict::*, morphism::*, parser::*, TypeTerm}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Clone, Debug, PartialEq)]
+struct DummyMorphism(MorphismType);
+
+impl Morphism for DummyMorphism {
+    fn get_type(&self) -> MorphismType {
+        self.0.clone().normalize()
+    }
+
+    fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> {
+        Some(DummyMorphism(MorphismType {
+            src_type: TypeTerm::App(vec![
+                seq_type.clone(),
+                self.0.src_type.clone()
+            ]),
+
+            dst_type: TypeTerm::App(vec![
+                seq_type.clone(),
+                self.0.dst_type.clone()
+            ])
+        }))
+    }
+}
+
 #[test]
 fn test_morphism_path() {
     let mut dict = BimapTypeDict::new();
-    let mut base = MorphismBase::<u64>::new( dict.add_typename("Seq".into()) );
+    let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] );
 
     dict.add_varname("Radix".into());
     dict.add_varname("SrcRadix".into());
     dict.add_varname("DstRadix".into());
 
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
             dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
-        },
-        11
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
             dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
-        },
-        22
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
             dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
-        },
-        333
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
             dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
-        },
-        444
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
             dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
-        },
-        555
+        })
     );
 
-
     assert_eq!(
         base.find_morphism_path(MorphismType {
             src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
@@ -66,4 +83,44 @@ fn test_morphism_path() {
             ]
         )
     );
+
+    assert_eq!(
+        base.find_morphism_path(MorphismType {
+            src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+            dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+        }),
+        Some(
+            vec![
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
+            ]
+        )
+    );
+
+    assert_eq!(
+        base.find_morphism_with_subtyping(
+            &MorphismType {
+                src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+                dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+            }
+        ),
+
+        Some((
+                DummyMorphism(MorphismType{
+                    src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                    dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                }),
+
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian>").unwrap(),
+
+                vec![
+                    (dict.get_typeid(&"Radix".into()).unwrap(),
+                    dict.parse("10").unwrap())
+                ].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
+        ))
+    );
 }

From 59d182135f55b70509a3ec55ba06224c0ec6cb1a Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sat, 15 Feb 2025 17:17:43 +0100
Subject: [PATCH 30/33] rewrite enum_morphisms & find_morphism_path

- introduce `MorphismInstance`, which instantiates a morphism
  template using a type substitution and a halo type.
- find_morphism_path returns a list of `MorphismInstance`s.
- correctly apply substitutions
- add more path search tests
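
A MorphismInstance bundles the matched morphism template `m`, the halo
type ψ that the source type carries above the template's source type, and
the substitution σ for the template's type variables; get_type() reassembles
the concrete endpoint types from these three parts. A rough usage sketch in
the style of the new tests (DummyMorphism is the test-only wrapper defined
in src/test/morphism.rs, so this would live next to those tests):

    use crate::{dict::*, morphism::*, parser::*, TypeTerm};

    fn morphism_instance_example() {
        let mut dict = BimapTypeDict::new();
        dict.add_varname("Radix".into());

        let inst = MorphismInstance {
            m: DummyMorphism(MorphismType {
                src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
                dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
            }),
            // part of the source type that lies "above" the template
            halo: dict.parse("ℕ ~ <PosInt 10 BigEndian>").unwrap(),
            // substitution for the template's type variables
            σ: vec![
                (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10))
            ].into_iter().collect(),
        };

        // get_type() ladders the halo on top of the template types and applies σ:
        //   src:  ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>
        //   dst:  ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>
        let _ty = inst.get_type();
    }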
---
 src/morphism.rs      | 435 ++++++++++++++++++++++++++-----------------
 src/test/morphism.rs | 323 ++++++++++++++++++++++++++++++--
 src/unification.rs   |   2 +-
 3 files changed, 575 insertions(+), 185 deletions(-)

diff --git a/src/morphism.rs b/src/morphism.rs
index bed5b6a..7bcd9d4 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -1,9 +1,8 @@
 use {
     crate::{
-        TypeTerm, TypeID,
-        unification::UnificationProblem,
+        subtype_unify, sugar::SugaredTypeTerm, unification::UnificationProblem, unparser::*, TypeDict, TypeID, TypeTerm
     },
-    std::collections::HashMap
+    std::{collections::HashMap, u64}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -14,6 +13,17 @@ pub struct MorphismType {
     pub dst_type: TypeTerm,
 }
 
+impl MorphismType {
+    pub fn normalize(self) -> Self {
+        MorphismType {
+            src_type: self.src_type.normalize().param_normalize(),
+            dst_type: self.dst_type.normalize().param_normalize()
+        }
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
 pub trait Morphism : Sized {
     fn get_type(&self) -> MorphismType;
     fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >;
@@ -23,25 +33,50 @@ pub trait Morphism : Sized {
     }
 }
 
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct MorphismInstance<M: Morphism + Clone> {
+    pub halo: TypeTerm,
+    pub m: M,
+    pub σ: HashMap<TypeID, TypeTerm>
+}
+
+impl<M: Morphism + Clone> MorphismInstance<M> {
+    pub fn get_type(&self) -> MorphismType {
+        MorphismType {
+            src_type: TypeTerm::Ladder(vec![
+                self.halo.clone(),
+                self.m.get_type().src_type.clone()
+            ]).apply_substitution(&self.σ)
+            .clone(),
+
+            dst_type: TypeTerm::Ladder(vec![
+                self.halo.clone(),
+                self.m.get_type().dst_type.clone()
+            ]).apply_substitution(&self.σ)
+            .clone()
+        }.normalize()
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone)]
+pub struct MorphismPath<M: Morphism + Clone> {
+    pub weight: u64,
+    pub cur_type: TypeTerm,
+    pub morphisms: Vec< MorphismInstance<M> >
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
 #[derive(Clone)]
 pub struct MorphismBase<M: Morphism + Clone> {
     morphisms: Vec< M >,
     seq_types: Vec< TypeTerm >
 }
 
-//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-
-impl MorphismType {
-    pub fn normalize(self) -> Self {
-        MorphismType {
-            src_type: self.src_type.normalize(),
-            dst_type: self.dst_type.normalize()
-        }
-    }
-}
-
-//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-
 impl<M: Morphism + Clone> MorphismBase<M> {
     pub fn new(seq_types: Vec<TypeTerm>) -> Self {
         MorphismBase {
@@ -54,167 +89,237 @@ impl<M: Morphism + Clone> MorphismBase<M> {
         self.morphisms.push( m );
     }
 
-    pub fn enum_morphisms(&self, src_type: &TypeTerm)
-    -> Vec< (HashMap<TypeID, TypeTerm>, TypeTerm) >
+    pub fn enum_direct_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< MorphismInstance<M> >
     {
         let mut dst_types = Vec::new();
-
-        // first enumerate all "direct" morphisms,
         for m in self.morphisms.iter() {
-            if let Ok(σ) = crate::unification::unify(
-                &m.get_type().src_type,
-                &src_type.clone().normalize()
+            if let Ok((halo, σ)) = crate::unification::subtype_unify(
+                &src_type.clone().param_normalize(),
+                &m.get_type().src_type.param_normalize(),
             ) {
-                let dst_type =
-                    m.get_type().dst_type.clone()
-                    .apply_substitution( &σ )
-                    .clone();
-
-                dst_types.push( (σ, dst_type) );
+                dst_types.push(MorphismInstance{ halo, m: m.clone(), σ });
             }
         }
+        dst_types
+    }
+
+    pub fn enum_map_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< MorphismInstance<M> > {
+        let src_type = src_type.clone().param_normalize();
+        let mut dst_types = Vec::new();
 
-        // ..then all "list-map" morphisms.
         // Check if we have a List type, and if so, see what the Item type is
-
         // TODO: function for generating fresh variables
-        let item_variable = TypeID::Var(100);
+        let item_variable = TypeID::Var(800);
 
         for seq_type in self.seq_types.iter() {
-        if let Ok(σ) = crate::unification::unify(
-            &TypeTerm::App(vec![
-                seq_type.clone(),
-                TypeTerm::TypeID(item_variable)
-            ]),
-            &src_type.clone().param_normalize(),
-        ) {
-            let src_item_type = σ.get(&item_variable).unwrap().clone();
+            if let Ok((halo, σ)) = crate::unification::subtype_unify(
+                &src_type,
+                &TypeTerm::App(vec![
+                    seq_type.clone(),
+                    TypeTerm::TypeID(item_variable)
+                ])
+            ) {
+                let src_item_type = σ.get(&item_variable).expect("var not in unificator").clone();
+                for item_morph_inst in self.enum_morphisms( &src_item_type ) {
 
-            for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
-                let dst_type =
-                        TypeTerm::App(vec![
-                            seq_type.clone(),
-                            dst_item_type.clone()
-                                .apply_substitution(&γ).clone()
-                        ]).normalize();
+                    let mut dst_halo_ladder = vec![ halo.clone() ];
+                    if item_morph_inst.halo != TypeTerm::unit() {
+                        dst_halo_ladder.push(
+                            TypeTerm::App(vec![
+                                seq_type.clone().get_lnf_vec().first().unwrap().clone(),
+                                item_morph_inst.halo.clone()
+                            ]));
+                    }
 
-                dst_types.push( (γ.clone(), dst_type) );
-            }
-        }
-        }
-
-        dst_types
-    }
-
-    pub fn enum_morphisms_with_subtyping(
-        &self,
-        src_type: &TypeTerm,
-    ) -> Vec<(TypeTerm, TypeTerm, HashMap<TypeID, TypeTerm>)> {
-        let mut src_lnf = src_type.clone().get_lnf_vec();
-        let mut halo_lnf = vec![];
-        let mut dst_types = Vec::new();
-
-        while src_lnf.len() > 0 {
-            let src_type = TypeTerm::Ladder(src_lnf.clone());
-            let halo_type = TypeTerm::Ladder(halo_lnf.clone());
-
-            for (σ, t) in self.enum_morphisms(&src_type) {
-                dst_types.push((
-                    halo_type
-                        .clone()
-                        .apply_substitution(&σ)
-                        .clone(),
-                    t.clone().apply_substitution(&σ).clone(),
-                    σ,
-                ));
-            }
-
-            // continue with next supertype
-            halo_lnf.push(src_lnf.remove(0));
-        }
-
-        dst_types
-    }
-
-    /* try to find shortest morphism-path for a given type
-     */
-    pub fn find_morphism_path(&self, ty: MorphismType)
-    -> Option< Vec<TypeTerm> >
-    {
-        let ty = ty.normalize();
-
-        let mut queue = vec![
-            (0, vec![ ty.src_type.clone().normalize() ])
-        ];
-
-        while ! queue.is_empty() {
-            queue.sort_by( |&(w1,_),&(w2,_)| w2.cmp(&w1));
-
-            if let Some((current_weight, current_path)) = queue.pop() {
-                let current_type = current_path.last().unwrap();
-                for (h, t, σp) in self.enum_morphisms_with_subtyping(&current_type) {
-                    let tt = TypeTerm::Ladder(vec![h, t]).normalize();
-
-                    if !current_path.contains(&tt) {
-                        let unification_result = crate::unification::unify(&tt, &ty.dst_type);
-                        let morphism_weight = 1;
-                        /*self.find_morphism( &tt ).unwrap().0.get_weight()*/
-
-                        let new_weight = current_weight + morphism_weight;
-                        let mut new_path = current_path.clone();
-
-                        new_path.push(tt);
-
-                        for n in new_path.iter_mut() {
-                            n.apply_substitution(&σp);
-                        }
-
-                        if let Ok(σ) = unification_result {
-                            for n in new_path.iter_mut() {
-                                n.apply_substitution(&σ);
+                    if let Some( map_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
+                        dst_types.push(
+                            MorphismInstance {
+                                halo: TypeTerm::Ladder(dst_halo_ladder).normalize(),
+                                m: map_morph,
+                                σ: item_morph_inst.σ
                             }
-                            return Some(new_path);
-                        } else {
-                            queue.push((new_weight, new_path));
-                        }
+                        );
+                    } else {
+                        eprintln!("could not get map morphism");
                     }
                 }
             }
         }
 
+        dst_types
+    }
+
+    pub fn enum_morphisms(&self, src_type: &TypeTerm) -> Vec< MorphismInstance<M> > {
+        let mut dst_types = Vec::new();
+        dst_types.append(&mut self.enum_direct_morphisms(src_type));
+        dst_types.append(&mut self.enum_map_morphisms(src_type));
+        dst_types
+    }
+
+    /* try to find shortest morphism-path for a given type
+     */
+    pub fn find_morphism_path(&self, ty: MorphismType
+//        , type_dict: &mut impl TypeDict
+    )
+    -> Option< Vec<MorphismInstance<M>> >
+    {
+        let ty = ty.normalize();
+        let mut queue = vec![
+            MorphismPath::<M> { weight: 0, cur_type: ty.src_type.clone(), morphisms: vec![] }
+        ];
+
+        while ! queue.is_empty() {
+            queue.sort_by( |p1,p2| p2.weight.cmp(&p1.weight));
+
+            if let Some(mut cur_path) = queue.pop() {
+                if let Ok((halo, σ)) = crate::unification::subtype_unify( &cur_path.cur_type, &ty.dst_type ) {
+                    /* found path,
+                     * now apply substitution and trim to variables in terms of each step
+                     */
+                    for n in cur_path.morphisms.iter_mut() {
+                        let src_type = n.m.get_type().src_type;
+                        let dst_type = n.m.get_type().dst_type;
+
+                        let mut new_σ = HashMap::new();
+                        for (k,v) in σ.iter() {
+                            if let TypeID::Var(varid) = k {
+                                if src_type.contains_var(*varid)
+                                || dst_type.contains_var(*varid) {
+                                    new_σ.insert(
+                                        k.clone(),
+                                        v.clone().apply_substitution(&σ).clone().strip()
+                                    );
+                                }
+                            }
+                        }
+                        for (k,v) in n.σ.iter() {
+                            if let TypeID::Var(varid) = k {
+                                if src_type.contains_var(*varid)
+                                || dst_type.contains_var(*varid) {
+                                    new_σ.insert(
+                                        k.clone(),
+                                        v.clone().apply_substitution(&σ).clone().strip()
+                                    );
+                                }
+                            }
+                        }
+
+                        n.halo = n.halo.clone().apply_substitution(&σ).clone().strip();
+
+                        n.σ = new_σ;
+                    }
+
+                    return Some(cur_path.morphisms);
+                }
+
+                //eprintln!("cur path (w ={}) : @ {:?}", cur_path.weight, cur_path.cur_type);//.clone().sugar(type_dict).pretty(type_dict, 0) );
+                for mut next_morph_inst in self.enum_morphisms(&cur_path.cur_type) {
+                    let dst_type = next_morph_inst.get_type().dst_type;
+//                    eprintln!("try morph to {}", dst_type.clone().sugar(type_dict).pretty(type_dict, 0));
+
+                    let mut creates_loop = false;
+
+                    let mut new_path = cur_path.clone();
+                    for n in new_path.morphisms.iter_mut() {
+                        let mut new_σ = HashMap::new();
+
+                        for (k,v) in next_morph_inst.σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
+                            );
+                        }
+
+                        for (k,v) in n.σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
+                            );
+                        }
+
+                        n.halo = n.halo.clone().apply_substitution(&next_morph_inst.σ).clone().strip();
+
+                        n.σ = new_σ;
+                    }
+
+                    for m in new_path.morphisms.iter() {
+                        if m.get_type().src_type == dst_type {
+                            creates_loop = true;
+                            break;
+                        }
+                    }
+
+                    if ! creates_loop {
+                        new_path.weight += next_morph_inst.m.weight();
+                        new_path.cur_type = dst_type;
+
+                        new_path.morphisms.push(next_morph_inst);
+                        queue.push(new_path);
+                    }
+                }
+            }
+        }
         None
     }
 
-
-    pub fn find_morphism(&self, ty: &MorphismType)
-    -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
-
-        // try to find primitive morphism
+    pub fn find_direct_morphism(&self,
+        ty: &MorphismType,
+        dict: &mut impl TypeDict
+    ) -> Option< MorphismInstance<M> > {
+        eprintln!("find direct morph");
         for m in self.morphisms.iter() {
-            let unification_problem = UnificationProblem::new_sub(
-                vec![
-                    ( ty.src_type.clone().normalize(), m.get_type().src_type.clone() ),
-                    ( ty.dst_type.clone().normalize(), m.get_type().dst_type.clone() )
-                ]
-            );
+            let ty = ty.clone().normalize();
+            let morph_type = m.get_type().normalize();
 
-            let unification_result = unification_problem.solve();
-            if let Ok((ψ,σ)) = unification_result {
-                return Some((m.clone(), σ));
+            eprintln!("find direct morph:\n   {}  <=   {}",
+                            dict.unparse(&ty.src_type), dict.unparse(&morph_type.src_type),
+                        );
+
+            if let Ok((halo, σ)) = subtype_unify(&ty.src_type, &morph_type.src_type) {
+                eprintln!("halo: {}", dict.unparse(&halo));
+
+                let dst_type = TypeTerm::Ladder(vec![
+                    halo.clone(),
+                    morph_type.dst_type.clone()
+                ]);
+                eprintln!("----------->   {}  <=   {}",
+                    dict.unparse(&dst_type), dict.unparse(&ty.dst_type)
+                );
+
+                /*
+                let unification_problem = UnificationProblem::new(
+                                vec![
+                                    ( ty.src_type, morph_type.src_type ),
+                                    ( morph_type.dst_type, ty.dst_type )
+                                ]
+                            );*/
+
+                if let Ok((halo2, σ2)) = subtype_unify(&dst_type, &ty.dst_type) {
+                    eprintln!("match. halo2 = {}", dict.unparse(&halo2));
+                    return Some(MorphismInstance {
+                        m: m.clone(),
+                        halo: halo2,
+                        σ,
+                    });
+                }
             }
         }
+        None
+    }
 
-        // try list-map morphism
+    pub fn find_map_morphism(&self, ty: &MorphismType, dict: &mut impl TypeDict) -> Option< MorphismInstance<M> > {
         for seq_type in self.seq_types.iter() {
             eprintln!("try seq type {:?}", seq_type);
 
             eprintln!("");
 
-            if let Ok((ψ,σ)) = UnificationProblem::new_sub(vec![
+            if let Ok((halos, σ)) = UnificationProblem::new_sub(vec![
                 (ty.src_type.clone().param_normalize(),
                     TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
-                (ty.dst_type.clone().param_normalize(),
-                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ])),
+                (TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ]),
+                    ty.dst_type.clone().param_normalize()),
             ]).solve() {
                 // TODO: use real fresh variable names
                 let item_morph_type = MorphismType {
@@ -222,9 +327,14 @@ impl<M: Morphism + Clone> MorphismBase<M> {
                     dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
                 }.normalize();
 
-                if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
-                    if let Some(list_morph) = m.map_morphism( seq_type.clone() ) {
-                        return Some( (list_morph, σ) );
+                //eprintln!("Map Morph: try to find item-morph with type {:?}", item_morph_type);
+                if let Some(item_morph_inst) = self.find_morphism( &item_morph_type, dict ) {
+                    if let Some( list_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
+                        return Some( MorphismInstance {
+                            m: list_morph,
+                            σ,
+                            halo: halos[0].clone()
+                        } );
                     }
                 }
             }
@@ -233,31 +343,16 @@ impl<M: Morphism + Clone> MorphismBase<M> {
         None
     }
 
-    pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
-    -> Option<( M, TypeTerm, HashMap<TypeID, TypeTerm> )> {
-        let mut src_lnf = ty.src_type.clone().get_lnf_vec();
-        let mut dst_lnf = ty.dst_type.clone().get_lnf_vec();
-        let mut halo = vec![];
-
-        while src_lnf.len() > 0 && dst_lnf.len() > 0 {
-            if let Some((m, σ)) = self.find_morphism(&MorphismType{
-                src_type: TypeTerm::Ladder(src_lnf.clone()),
-                dst_type: TypeTerm::Ladder(dst_lnf.clone())
-            }) {
-                halo.push(src_lnf.get(0).unwrap().clone());
-                return Some((m,
-                    TypeTerm::Ladder(halo).apply_substitution(&σ).clone(),
-                    σ));
-            } else {
-                if src_lnf[0] == dst_lnf[0] {
-                    src_lnf.remove(0);
-                    halo.push(dst_lnf.remove(0));
-                } else {
-                    return None;
-                }
-            }
+    pub fn find_morphism(&self, ty: &MorphismType,
+        dict: &mut impl TypeDict
+    )
+    -> Option< MorphismInstance<M> > {
+        if let Some(m) = self.find_direct_morphism(ty, dict) {
+            return Some(m);
+        }
+        if let Some(m) = self.find_map_morphism(ty, dict) {
+            return Some(m);
         }
-
         None
     }
 }
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index 6e951e4..1a1b8e1 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -1,5 +1,5 @@
 use {
-    crate::{dict::*, morphism::*, parser::*, TypeTerm}
+    crate::{dict::*, morphism::*, parser::*, unparser::*, TypeTerm}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -27,8 +27,7 @@ impl Morphism for DummyMorphism {
     }
 }
 
-#[test]
-fn test_morphism_path() {
+fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
     let mut dict = BimapTypeDict::new();
     let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] );
 
@@ -67,23 +66,315 @@ fn test_morphism_path() {
         })
     );
 
+    (dict, base)
+}
+
+#[test]
+fn test_morphism_path1() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+    let path = base.find_morphism_path(MorphismType {
+        src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
+        dst_type: dict.parse("<Digit 10> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
+    });
+
     assert_eq!(
-        base.find_morphism_path(MorphismType {
-            src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
-            dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
-        }),
+        path,
         Some(
             vec![
-                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
-                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
-                dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
-                dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
-                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
-                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
+                        dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
+                    }),
+                }
+            ]
+    ));
+}
+
+
+#[test]
+fn test_morphism_path2() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+    let path = base.find_morphism_path(MorphismType {
+        src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+        dst_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+    });
+
+    assert_eq!(
+        path,
+        Some(
+            vec![
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt 10 BigEndian>").expect(""),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                }
+            ]
+    ));
+}
+
+
+#[test]
+fn test_morphism_path3() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+    let path = base.find_morphism_path(MorphismType {
+        src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+        dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+    });
+
+    fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
+        eprintln!("{{");
+
+        for (k,v) in m.iter() {
+            eprintln!("    {} --> {}",
+                dict.get_typename(k).unwrap(),
+                dict.unparse(v)
+            );
+        }
+
+        eprintln!("}}");
+    }
+
+    if let Some(path) = path.as_ref() {
+        for n in path.iter() {
+            eprintln!("
+ψ = {}
+morph {}
+  --> {}
+with
+            ",
+            n.halo.clone().sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().src_type.sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().dst_type.sugar(&mut dict).pretty(&mut dict, 0),
+            );
+            print_subst(&n.σ, &mut dict)
+        }
+    }
+
+    assert_eq!(
+        path,
+        Some(
+            vec![
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt 10 LittleEndian>").expect(""),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                }
+            ]
+    ));
+}
+
+
+
+#[test]
+fn test_morphism_path4() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+    let path = base.find_morphism_path(MorphismType {
+        src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+        dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+    });
+
+    fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
+        eprintln!("{{");
+
+        for (k,v) in m.iter() {
+            eprintln!("    {} --> {}",
+                dict.get_typename(k).unwrap(),
+                dict.unparse(v)
+            );
+        }
+
+        eprintln!("}}");
+    }
+
+    if let Some(path) = path.as_ref() {
+        for n in path.iter() {
+            eprintln!("
+ψ = {}
+morph {}
+  --> {}
+with
+            ",
+            n.halo.clone().sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().src_type.sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().dst_type.sugar(&mut dict).pretty(&mut dict, 0),
+            );
+            print_subst(&n.σ, &mut dict)
+        }
+    }
+
+    assert_eq!(
+        path,
+        Some(
+            vec![
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt 10 LittleEndian>").expect(""),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt 16 LittleEndian>").unwrap(),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap()
+                    }),
+                },
+
+            ]
+    ));
+}
+
+
+
+
+#[test]
+fn test_morphism_path_posint() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+    let path = base.find_morphism_path(MorphismType {
+        src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+        dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap(),
+    });
+
+    fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
+        eprintln!("{{");
+
+        for (k,v) in m.iter() {
+            eprintln!("    {} --> {}",
+                dict.get_typename(k).unwrap(),
+                dict.unparse(v)
+            );
+        }
+
+        eprintln!("}}");
+    }
+
+    if let Some(path) = path.as_ref() {
+        for n in path.iter() {
+            eprintln!("
+ψ = {}
+morph {}
+  --> {}
+with
+            ",
+            n.halo.clone().sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().src_type.sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().dst_type.sugar(&mut dict).pretty(&mut dict, 0),
+            );
+            print_subst(&n.σ, &mut dict)
+        }
+    }
+
+    assert_eq!(
+        path,
+        Some(
+            vec![
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt 10 BigEndian>").unwrap(),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16))
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt 16 BigEndian>").unwrap(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap()
+                    })
+                }
             ]
         )
     );
-
+/*
     assert_eq!(
         base.find_morphism_path(MorphismType {
             src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
@@ -100,7 +391,10 @@ fn test_morphism_path() {
             ]
         )
     );
+    */
 
+
+/*
     assert_eq!(
         base.find_morphism_with_subtyping(
             &MorphismType {
@@ -123,4 +417,5 @@ fn test_morphism_path() {
                 ].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
         ))
     );
+    */
 }
diff --git a/src/unification.rs b/src/unification.rs
index c1c0d0a..5dceaf6 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -1,5 +1,5 @@
 use {
-    crate::{dict::*, term::*}, std::{collections::HashMap, env::consts::ARCH}
+    crate::{dict::*, term::*}, std::{collections::HashMap}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

From d56d4089d1cd1ffb941e5a4e612ac32def550748 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 12 Mar 2025 15:10:44 +0100
Subject: [PATCH 31/33] add test for find_morphism_path

---
 src/test/morphism.rs | 206 +++++++++++++++++++++++++++----------------
 1 file changed, 128 insertions(+), 78 deletions(-)

diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index 1a1b8e1..a0f3b05 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -4,6 +4,37 @@ use {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
+    eprintln!("{{");
+
+    for (k,v) in m.iter() {
+        eprintln!("    {} --> {}",
+            dict.get_typename(k).unwrap(),
+            dict.unparse(v)
+        );
+    }
+
+    eprintln!("}}");
+}
+
+fn print_path(dict: &mut impl TypeDict, path: &Vec<MorphismInstance<DummyMorphism>>) {
+    for n in path.iter() {
+        eprintln!("
+ψ = {}
+morph {}
+--> {}
+with
+        ",
+        n.halo.clone().sugar(dict).pretty(dict, 0),
+        n.m.get_type().src_type.sugar(dict).pretty(dict, 0),
+        n.m.get_type().dst_type.sugar(dict).pretty(dict, 0),
+        );
+        print_subst(&n.σ, dict)
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
 #[derive(Clone, Debug, PartialEq)]
 struct DummyMorphism(MorphismType);
 
@@ -134,33 +165,8 @@ fn test_morphism_path3() {
         dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
     });
 
-    fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
-        eprintln!("{{");
-
-        for (k,v) in m.iter() {
-            eprintln!("    {} --> {}",
-                dict.get_typename(k).unwrap(),
-                dict.unparse(v)
-            );
-        }
-
-        eprintln!("}}");
-    }
-
     if let Some(path) = path.as_ref() {
-        for n in path.iter() {
-            eprintln!("
-ψ = {}
-morph {}
-  --> {}
-with
-            ",
-            n.halo.clone().sugar(&mut dict).pretty(&mut dict, 0),
-            n.m.get_type().src_type.sugar(&mut dict).pretty(&mut dict, 0),
-            n.m.get_type().dst_type.sugar(&mut dict).pretty(&mut dict, 0),
-            );
-            print_subst(&n.σ, &mut dict)
-        }
+        print_path(&mut dict, path);
     }
 
     assert_eq!(
@@ -204,33 +210,8 @@ fn test_morphism_path4() {
         dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
     });
 
-    fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
-        eprintln!("{{");
-
-        for (k,v) in m.iter() {
-            eprintln!("    {} --> {}",
-                dict.get_typename(k).unwrap(),
-                dict.unparse(v)
-            );
-        }
-
-        eprintln!("}}");
-    }
-
     if let Some(path) = path.as_ref() {
-        for n in path.iter() {
-            eprintln!("
-ψ = {}
-morph {}
-  --> {}
-with
-            ",
-            n.halo.clone().sugar(&mut dict).pretty(&mut dict, 0),
-            n.m.get_type().src_type.sugar(&mut dict).pretty(&mut dict, 0),
-            n.m.get_type().dst_type.sugar(&mut dict).pretty(&mut dict, 0),
-            );
-            print_subst(&n.σ, &mut dict)
-        }
+        print_path(&mut dict, path);
     }
 
     assert_eq!(
@@ -287,33 +268,8 @@ fn test_morphism_path_posint() {
         dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap(),
     });
 
-    fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
-        eprintln!("{{");
-
-        for (k,v) in m.iter() {
-            eprintln!("    {} --> {}",
-                dict.get_typename(k).unwrap(),
-                dict.unparse(v)
-            );
-        }
-
-        eprintln!("}}");
-    }
-
     if let Some(path) = path.as_ref() {
-        for n in path.iter() {
-            eprintln!("
-ψ = {}
-morph {}
-  --> {}
-with
-            ",
-            n.halo.clone().sugar(&mut dict).pretty(&mut dict, 0),
-            n.m.get_type().src_type.sugar(&mut dict).pretty(&mut dict, 0),
-            n.m.get_type().dst_type.sugar(&mut dict).pretty(&mut dict, 0),
-            );
-            print_subst(&n.σ, &mut dict)
-        }
+        print_path(&mut dict, path);
     }
 
     assert_eq!(
@@ -419,3 +375,97 @@ with
     );
     */
 }
+
+use std::collections::HashMap;
+
+#[test]
+fn test_morphism_path_listedit()
+{
+    let mut dict = BimapTypeDict::new();
+    let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("List").unwrap() ] );
+
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("Char").unwrap(),
+            dst_type: dict.parse("Char ~ EditTree").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("<List~Vec Char>").unwrap(),
+            dst_type: dict.parse("<List Char>").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("<List Char>").unwrap(),
+            dst_type: dict.parse("<List Char~ReprTree>").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("<List ReprTree>").unwrap(),
+            dst_type: dict.parse("<List~Vec ReprTree>").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
+            dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
+            dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
+        })
+    );
+
+
+    let path = base.find_morphism_path(MorphismType {
+        src_type: dict.parse("<Seq~List~Vec <Digit 10>~Char>").unwrap(),
+        dst_type: dict.parse("<Seq~List <Digit 10>~Char> ~ EditTree").unwrap(),
+    });
+
+    if let Some(path) = path.as_ref() {
+        print_path(&mut dict, path);
+    }
+
+    assert_eq!(
+        path,
+        Some(vec![
+            MorphismInstance {
+                m: DummyMorphism(MorphismType{
+                    src_type: dict.parse("<List~Vec Char>").unwrap(),
+                    dst_type: dict.parse("<List Char>").unwrap()
+                }),
+                halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
+                σ: HashMap::new()
+            },
+            MorphismInstance {
+                m: DummyMorphism(MorphismType{
+                    src_type: dict.parse("<List Char>").unwrap(),
+                    dst_type: dict.parse("<List Char~ReprTree>").unwrap()
+                }),
+                halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
+                σ: HashMap::new()
+            },
+            MorphismInstance {
+                m: DummyMorphism(MorphismType{
+                    src_type: dict.parse("<List ReprTree>").unwrap(),
+                    dst_type: dict.parse("<List~Vec ReprTree>").unwrap()
+                }),
+                halo: dict.parse("<Seq~List <Digit 10>~Char>").unwrap(),
+                σ: HashMap::new()
+            },
+            MorphismInstance {
+                m: DummyMorphism(MorphismType{
+                    src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
+                    dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
+                }),
+                halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
+                σ: HashMap::new()
+            },
+        ])
+    );
+}

From de86b3f153e8e39b91f3b2c4a20f601f630950f2 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Fri, 21 Mar 2025 16:13:54 +0100
Subject: [PATCH 32/33] split path search & morphism base into separate files

---
 src/lib.rs           |   2 +
 src/morphism.rs      | 297 -------------------------------------------
 src/morphism_base.rs | 183 ++++++++++++++++++++++++++
 src/morphism_path.rs | 136 ++++++++++++++++++++
 src/test/morphism.rs |  26 ++--
 src/unification.rs   |   2 +-
 6 files changed, 335 insertions(+), 311 deletions(-)
 create mode 100644 src/morphism_base.rs
 create mode 100644 src/morphism_path.rs

diff --git a/src/lib.rs b/src/lib.rs
index f8f73aa..98cb5a2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -14,6 +14,8 @@ pub mod pnf;
 pub mod subtype;
 pub mod unification;
 pub mod morphism;
+pub mod morphism_base;
+pub mod morphism_path;
 
 #[cfg(test)]
 mod test;
diff --git a/src/morphism.rs b/src/morphism.rs
index 7bcd9d4..06361dc 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -61,300 +61,3 @@ impl<M: Morphism + Clone> MorphismInstance<M> {
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-
-#[derive(Clone)]
-pub struct MorphismPath<M: Morphism + Clone> {
-    pub weight: u64,
-    pub cur_type: TypeTerm,
-    pub morphisms: Vec< MorphismInstance<M> >
-}
-
-//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-
-#[derive(Clone)]
-pub struct MorphismBase<M: Morphism + Clone> {
-    morphisms: Vec< M >,
-    seq_types: Vec< TypeTerm >
-}
-
-impl<M: Morphism + Clone> MorphismBase<M> {
-    pub fn new(seq_types: Vec<TypeTerm>) -> Self {
-        MorphismBase {
-            morphisms: Vec::new(),
-            seq_types
-        }
-    }
-
-    pub fn add_morphism(&mut self, m: M) {
-        self.morphisms.push( m );
-    }
-
-    pub fn enum_direct_morphisms(&self, src_type: &TypeTerm)
-    -> Vec< MorphismInstance<M> >
-    {
-        let mut dst_types = Vec::new();
-        for m in self.morphisms.iter() {
-            if let Ok((halo, σ)) = crate::unification::subtype_unify(
-                &src_type.clone().param_normalize(),
-                &m.get_type().src_type.param_normalize(),
-            ) {
-                dst_types.push(MorphismInstance{ halo, m: m.clone(), σ });
-            }
-        }
-        dst_types
-    }
-
-    pub fn enum_map_morphisms(&self, src_type: &TypeTerm)
-    -> Vec< MorphismInstance<M> > {
-        let src_type = src_type.clone().param_normalize();
-        let mut dst_types = Vec::new();
-
-        // Check if we have a List type, and if so, see what the Item type is
-        // TODO: function for generating fresh variables
-        let item_variable = TypeID::Var(800);
-
-        for seq_type in self.seq_types.iter() {
-            if let Ok((halo, σ)) = crate::unification::subtype_unify(
-                &src_type,
-                &TypeTerm::App(vec![
-                    seq_type.clone(),
-                    TypeTerm::TypeID(item_variable)
-                ])
-            ) {
-                let src_item_type = σ.get(&item_variable).expect("var not in unificator").clone();
-                for item_morph_inst in self.enum_morphisms( &src_item_type ) {
-
-                    let mut dst_halo_ladder = vec![ halo.clone() ];
-                    if item_morph_inst.halo != TypeTerm::unit() {
-                        dst_halo_ladder.push(
-                            TypeTerm::App(vec![
-                                seq_type.clone().get_lnf_vec().first().unwrap().clone(),
-                                item_morph_inst.halo.clone()
-                            ]));
-                    }
-
-                    if let Some( map_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
-                        dst_types.push(
-                            MorphismInstance {
-                                halo: TypeTerm::Ladder(dst_halo_ladder).normalize(),
-                                m: map_morph,
-                                σ: item_morph_inst.σ
-                            }
-                        );
-                    } else {
-                        eprintln!("could not get map morphism");
-                    }
-                }
-            }
-        }
-
-        dst_types
-    }
-
-    pub fn enum_morphisms(&self, src_type: &TypeTerm) -> Vec< MorphismInstance<M> > {
-        let mut dst_types = Vec::new();
-        dst_types.append(&mut self.enum_direct_morphisms(src_type));
-        dst_types.append(&mut self.enum_map_morphisms(src_type));
-        dst_types
-    }
-
-    /* try to find shortest morphism-path for a given type
-     */
-    pub fn find_morphism_path(&self, ty: MorphismType
-//        , type_dict: &mut impl TypeDict
-    )
-    -> Option< Vec<MorphismInstance<M>> >
-    {
-        let ty = ty.normalize();
-        let mut queue = vec![
-            MorphismPath::<M> { weight: 0, cur_type: ty.src_type.clone(), morphisms: vec![] }
-        ];
-
-        while ! queue.is_empty() {
-            queue.sort_by( |p1,p2| p2.weight.cmp(&p1.weight));
-
-            if let Some(mut cur_path) = queue.pop() {
-                if let Ok((halo, σ)) = crate::unification::subtype_unify( &cur_path.cur_type, &ty.dst_type ) {
-                    /* found path,
-                     * now apply substitution and trim to variables in terms of each step
-                     */
-                    for n in cur_path.morphisms.iter_mut() {
-                        let src_type = n.m.get_type().src_type;
-                        let dst_type = n.m.get_type().dst_type;
-
-                        let mut new_σ = HashMap::new();
-                        for (k,v) in σ.iter() {
-                            if let TypeID::Var(varid) = k {
-                                if src_type.contains_var(*varid)
-                                || dst_type.contains_var(*varid) {
-                                    new_σ.insert(
-                                        k.clone(),
-                                        v.clone().apply_substitution(&σ).clone().strip()
-                                    );
-                                }
-                            }
-                        }
-                        for (k,v) in n.σ.iter() {
-                            if let TypeID::Var(varid) = k {
-                                if src_type.contains_var(*varid)
-                                || dst_type.contains_var(*varid) {
-                                    new_σ.insert(
-                                        k.clone(),
-                                        v.clone().apply_substitution(&σ).clone().strip()
-                                    );
-                                }
-                            }
-                        }
-
-                        n.halo = n.halo.clone().apply_substitution(&σ).clone().strip();
-
-                        n.σ = new_σ;
-                    }
-
-                    return Some(cur_path.morphisms);
-                }
-
-                //eprintln!("cur path (w ={}) : @ {:?}", cur_path.weight, cur_path.cur_type);//.clone().sugar(type_dict).pretty(type_dict, 0) );
-                for mut next_morph_inst in self.enum_morphisms(&cur_path.cur_type) {
-                    let dst_type = next_morph_inst.get_type().dst_type;
-//                    eprintln!("try morph to {}", dst_type.clone().sugar(type_dict).pretty(type_dict, 0));
-
-                    let mut creates_loop = false;
-
-                    let mut new_path = cur_path.clone();
-                    for n in new_path.morphisms.iter_mut() {
-                        let mut new_σ = HashMap::new();
-
-                        for (k,v) in next_morph_inst.σ.iter() {
-                            new_σ.insert(
-                                k.clone(),
-                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
-                            );
-                        }
-
-                        for (k,v) in n.σ.iter() {
-                            new_σ.insert(
-                                k.clone(),
-                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
-                            );
-                        }
-
-                        n.halo = n.halo.clone().apply_substitution(&next_morph_inst.σ).clone().strip();
-
-                        n.σ = new_σ;
-                    }
-
-                    for m in new_path.morphisms.iter() {
-                        if m.get_type().src_type == dst_type {
-                            creates_loop = true;
-                            break;
-                        }
-                    }
-
-                    if ! creates_loop {
-                        new_path.weight += next_morph_inst.m.weight();
-                        new_path.cur_type = dst_type;
-
-                        new_path.morphisms.push(next_morph_inst);
-                        queue.push(new_path);
-                    }
-                }
-            }
-        }
-        None
-    }
-
-    pub fn find_direct_morphism(&self,
-        ty: &MorphismType,
-        dict: &mut impl TypeDict
-    ) -> Option< MorphismInstance<M> > {
-        eprintln!("find direct morph");
-        for m in self.morphisms.iter() {
-            let ty = ty.clone().normalize();
-            let morph_type = m.get_type().normalize();
-
-            eprintln!("find direct morph:\n   {}  <=   {}",
-                            dict.unparse(&ty.src_type), dict.unparse(&morph_type.src_type),
-                        );
-
-            if let Ok((halo, σ)) = subtype_unify(&ty.src_type, &morph_type.src_type) {
-                eprintln!("halo: {}", dict.unparse(&halo));
-
-                let dst_type = TypeTerm::Ladder(vec![
-                    halo.clone(),
-                    morph_type.dst_type.clone()
-                ]);
-                eprintln!("----------->   {}  <=   {}",
-                    dict.unparse(&dst_type), dict.unparse(&ty.dst_type)
-                );
-
-                /*
-                let unification_problem = UnificationProblem::new(
-                                vec![
-                                    ( ty.src_type, morph_type.src_type ),
-                                    ( morph_type.dst_type, ty.dst_type )
-                                ]
-                            );*/
-
-                if let Ok((halo2, σ2)) = subtype_unify(&dst_type, &ty.dst_type) {
-                    eprintln!("match. halo2 = {}", dict.unparse(&halo2));
-                    return Some(MorphismInstance {
-                        m: m.clone(),
-                        halo: halo2,
-                        σ,
-                    });
-                }
-            }
-        }
-        None
-    }
-
-    pub fn find_map_morphism(&self, ty: &MorphismType, dict: &mut impl TypeDict) -> Option< MorphismInstance<M> > {
-        for seq_type in self.seq_types.iter() {
-            eprintln!("try seq type {:?}", seq_type);
-
-            eprintln!("");
-
-            if let Ok((halos, σ)) = UnificationProblem::new_sub(vec![
-                (ty.src_type.clone().param_normalize(),
-                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
-                (TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ]),
-                    ty.dst_type.clone().param_normalize()),
-            ]).solve() {
-                // TODO: use real fresh variable names
-                let item_morph_type = MorphismType {
-                    src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
-                    dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
-                }.normalize();
-
-                //eprintln!("Map Morph: try to find item-morph with type {:?}", item_morph_type);
-                if let Some(item_morph_inst) = self.find_morphism( &item_morph_type, dict ) {
-                    if let Some( list_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
-                        return Some( MorphismInstance {
-                            m: list_morph,
-                            σ,
-                            halo: halos[0].clone()
-                        } );
-                    }
-                }
-            }
-        }
-
-        None
-    }
-
-    pub fn find_morphism(&self, ty: &MorphismType,
-        dict: &mut impl TypeDict
-    )
-    -> Option< MorphismInstance<M> > {
-        if let Some(m) = self.find_direct_morphism(ty, dict) {
-            return Some(m);
-        }
-        if let Some(m) = self.find_map_morphism(ty, dict) {
-            return Some(m);
-        }
-        None
-    }
-}
-
-//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
diff --git a/src/morphism_base.rs b/src/morphism_base.rs
new file mode 100644
index 0000000..91dcba0
--- /dev/null
+++ b/src/morphism_base.rs
@@ -0,0 +1,183 @@
+use {
+    crate::{
+        subtype_unify, sugar::SugaredTypeTerm, unification::UnificationProblem, unparser::*, TypeDict, TypeID, TypeTerm,
+        morphism::{MorphismType, Morphism, MorphismInstance}
+    },
+    std::{collections::HashMap, u64}
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone)]
+pub struct MorphismBase<M: Morphism + Clone> {
+    morphisms: Vec< M >,
+    seq_types: Vec< TypeTerm >
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+impl<M: Morphism + Clone> MorphismBase<M> {
+    pub fn new(seq_types: Vec<TypeTerm>) -> Self {
+        MorphismBase {
+            morphisms: Vec::new(),
+            seq_types
+        }
+    }
+
+    pub fn add_morphism(&mut self, m: M) {
+        self.morphisms.push( m );
+    }
+
+    pub fn enum_direct_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< MorphismInstance<M> >
+    {
+        let mut dst_types = Vec::new();
+        for m in self.morphisms.iter() {
+            if let Ok((halo, σ)) = crate::unification::subtype_unify(
+                &src_type.clone().param_normalize(),
+                &m.get_type().src_type.param_normalize(),
+            ) {
+                dst_types.push(MorphismInstance{ halo, m: m.clone(), σ });
+            }
+        }
+        dst_types
+    }
+
+    pub fn enum_map_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< MorphismInstance<M> > {
+        let src_type = src_type.clone().param_normalize();
+        let mut dst_types = Vec::new();
+
+        // Check if we have a List type, and if so, see what the Item type is
+        // TODO: function for generating fresh variables
+        let item_variable = TypeID::Var(800);
+
+        for seq_type in self.seq_types.iter() {
+            if let Ok((halo, σ)) = crate::unification::subtype_unify(
+                &src_type,
+                &TypeTerm::App(vec![
+                    seq_type.clone(),
+                    TypeTerm::TypeID(item_variable)
+                ])
+            ) {
+                let src_item_type = σ.get(&item_variable).expect("var not in unificator").clone();
+                for item_morph_inst in self.enum_morphisms( &src_item_type ) {
+
+                    let mut dst_halo_ladder = vec![ halo.clone() ];
+                    if item_morph_inst.halo != TypeTerm::unit() {
+                        dst_halo_ladder.push(
+                            TypeTerm::App(vec![
+                                seq_type.clone().get_lnf_vec().first().unwrap().clone(),
+                                item_morph_inst.halo.clone()
+                            ]));
+                    }
+
+                    if let Some( map_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
+                        dst_types.push(
+                            MorphismInstance {
+                                halo: TypeTerm::Ladder(dst_halo_ladder).strip().param_normalize(),
+                                m: map_morph,
+                                σ: item_morph_inst.σ
+                            }
+                        );
+                    } else {
+                        eprintln!("could not get map morphism");
+                    }
+                }
+            }
+        }
+
+        dst_types
+    }
+
+    pub fn enum_morphisms(&self, src_type: &TypeTerm) -> Vec< MorphismInstance<M> > {
+        let mut dst_types = Vec::new();
+        dst_types.append(&mut self.enum_direct_morphisms(src_type));
+        dst_types.append(&mut self.enum_map_morphisms(src_type));
+        dst_types
+    }
+
+    pub fn find_direct_morphism(&self,
+        ty: &MorphismType,
+        dict: &mut impl TypeDict
+    ) -> Option< MorphismInstance<M> > {
+        eprintln!("find direct morph");
+        for m in self.morphisms.iter() {
+            let ty = ty.clone().normalize();
+            let morph_type = m.get_type().normalize();
+
+            eprintln!("find direct morph:\n   {}  <=   {}",
+                            dict.unparse(&ty.src_type), dict.unparse(&morph_type.src_type),
+                        );
+
+            if let Ok((halo, σ)) = subtype_unify(&ty.src_type, &morph_type.src_type) {
+                eprintln!("halo: {}", dict.unparse(&halo));
+
+                let dst_type = TypeTerm::Ladder(vec![
+                    halo.clone(),
+                    morph_type.dst_type.clone()
+                ]).normalize().param_normalize();
+
+                eprintln!("----------->   {}  <=   {}",
+                    dict.unparse(&dst_type), dict.unparse(&ty.dst_type)
+                );
+
+                if let Ok((halo2, _σ2)) = subtype_unify(&dst_type, &ty.dst_type) {
+                    eprintln!("match. halo2 = {}", dict.unparse(&halo2));
+                    return Some(MorphismInstance {
+                        m: m.clone(),
+                        halo,
+                        σ,
+                    });
+                }
+            }
+        }
+        None
+    }
+
+    pub fn find_map_morphism(&self, ty: &MorphismType, dict: &mut impl TypeDict) -> Option< MorphismInstance<M> > {
+        for seq_type in self.seq_types.iter() {
+            if let Ok((halos, σ)) = UnificationProblem::new_sub(vec![
+                (ty.src_type.clone().param_normalize(),
+                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
+
+                (TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ]),
+                    ty.dst_type.clone().param_normalize()),
+            ]).solve() {
+                // TODO: use real fresh variable names
+                let item_morph_type = MorphismType {
+                    src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
+                    dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
+                }.normalize();
+
+                //eprintln!("Map Morph: try to find item-morph with type {:?}", item_morph_type);
+                if let Some(item_morph_inst) = self.find_morphism( &item_morph_type, dict ) {
+                    if let Some( list_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
+                        return Some( MorphismInstance {
+                            m: list_morph,
+                            σ,
+                            halo: halos[0].clone()
+                        } );
+                    }
+                }
+            }
+        }
+
+        None
+    }
+
+    pub fn find_morphism(&self, ty: &MorphismType,
+        dict: &mut impl TypeDict
+    )
+    -> Option< MorphismInstance<M> > {
+        if let Some(m) = self.find_direct_morphism(ty, dict) {
+            return Some(m);
+        }
+        if let Some(m) = self.find_map_morphism(ty, dict) {
+            return Some(m);
+        }
+        None
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
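The new morphism_base.rs keeps the registration and single-step lookup half of the old morphism.rs. A minimal usage sketch, modelled on the DummyMorphism test setup in src/test/morphism.rs (DummyMorphism, BimapTypeDict and the parsed type strings are assumptions taken from those tests, not from this file):

    let mut dict = BimapTypeDict::new();
    let mut base = MorphismBase::<DummyMorphism>::new(vec![ dict.parse("Seq").unwrap() ]);

    // Register one item-level morphism; "Radix" stays a free variable.
    base.add_morphism(DummyMorphism(MorphismType {
        src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
        dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
    }));

    // Direct lookup should instantiate Radix := 10 via subtype unification.
    let inst = base.find_morphism(
        &MorphismType {
            src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
            dst_type: dict.parse("<Digit 10> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
        },
        &mut dict
    );
    assert!(inst.is_some());
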
diff --git a/src/morphism_path.rs b/src/morphism_path.rs
new file mode 100644
index 0000000..60f5321
--- /dev/null
+++ b/src/morphism_path.rs
@@ -0,0 +1,136 @@
+use {
+    crate::{
+        morphism::{MorphismType, Morphism, MorphismInstance},
+        morphism_base::MorphismBase,
+        dict::*,
+        term::*
+    }
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone)]
+pub struct MorphismPath<M: Morphism + Clone> {
+    pub weight: u64,
+    pub cur_type: TypeTerm,
+    pub morphisms: Vec< MorphismInstance<M> >
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+pub struct ShortestPathProblem<'a, M: Morphism + Clone> {
+    pub morphism_base: &'a MorphismBase<M>,
+    pub goal: TypeTerm,
+    queue: Vec< MorphismPath<M> >
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+impl<'a, M:Morphism+Clone> ShortestPathProblem<'a, M> {
+    pub fn new(morphism_base: &'a MorphismBase<M>, ty: MorphismType) -> Self {
+        ShortestPathProblem {
+            morphism_base,
+            queue: vec![
+                MorphismPath::<M> { weight: 0, cur_type: ty.src_type, morphisms: vec![] }
+            ],
+            goal: ty.dst_type
+        }
+    }
+
+    pub fn solve(&mut self) -> Option< Vec<MorphismInstance<M>> > {
+        while ! self.queue.is_empty() {
+            self.queue.sort_by( |p1,p2| p2.weight.cmp(&p1.weight));
+
+            if let Some(mut cur_path) = self.queue.pop() {
+                if let Ok((halo, σ)) = crate::unification::subtype_unify( &cur_path.cur_type, &self.goal ) {
+                    /* found path,
+                     * now apply substitution and trim to variables in terms of each step
+                     */
+                    for n in cur_path.morphisms.iter_mut() {
+                        let src_type = n.m.get_type().src_type;
+                        let dst_type = n.m.get_type().dst_type;
+
+                        let mut new_σ = std::collections::HashMap::new();
+                        for (k,v) in σ.iter() {
+                            if let TypeID::Var(varid) = k {
+                                if src_type.contains_var(*varid)
+                                || dst_type.contains_var(*varid) {
+                                    new_σ.insert(
+                                        k.clone(),
+                                        v.clone().apply_substitution(&σ).clone().strip()
+                                    );
+                                }
+                            }
+                        }
+                        for (k,v) in n.σ.iter() {
+                            if let TypeID::Var(varid) = k {
+                                if src_type.contains_var(*varid)
+                                || dst_type.contains_var(*varid) {
+                                    new_σ.insert(
+                                        k.clone(),
+                                        v.clone().apply_substitution(&σ).clone().strip()
+                                    );
+                                }
+                            }
+                        }
+
+                        n.halo = n.halo.clone().apply_substitution(&σ).clone().strip().param_normalize();
+
+                        n.σ = new_σ;
+                    }
+
+                    return Some(cur_path.morphisms);
+                }
+
+                //eprintln!("cur path (w ={}) : @ {:?}", cur_path.weight, cur_path.cur_type);//.clone().sugar(type_dict).pretty(type_dict, 0) );
+                for mut next_morph_inst in self.morphism_base.enum_morphisms(&cur_path.cur_type) {
+                    let dst_type = next_morph_inst.get_type().dst_type;
+//                    eprintln!("try morph to {}", dst_type.clone().sugar(type_dict).pretty(type_dict, 0));
+
+                    let mut creates_loop = false;
+
+                    let mut new_path = cur_path.clone();
+                    for n in new_path.morphisms.iter_mut() {
+                        let mut new_σ = std::collections::HashMap::new();
+
+                        for (k,v) in next_morph_inst.σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
+                            );
+                        }
+
+                        for (k,v) in n.σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
+                            );
+                        }
+
+                        n.halo = n.halo.clone().apply_substitution(&next_morph_inst.σ).clone().strip().param_normalize();
+
+                        n.σ = new_σ;
+                    }
+
+                    for m in new_path.morphisms.iter() {
+                        if m.get_type().src_type == dst_type {
+                            creates_loop = true;
+                            break;
+                        }
+                    }
+
+                    if ! creates_loop {
+                        new_path.weight += next_morph_inst.m.weight();
+                        new_path.cur_type = dst_type;
+
+                        new_path.morphisms.push(next_morph_inst);
+                        self.queue.push(new_path);
+                    }
+                }
+            }
+        }
+        None
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
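morphism_path.rs takes over the search side: the previous base.find_morphism_path(ty) entry point becomes ShortestPathProblem::new(&base, ty).solve(), as the test updates further down in this patch show. A call-shape sketch, reusing the dict and base from the sketch above (both are assumptions drawn from the test module):

    let path: Option<Vec<MorphismInstance<DummyMorphism>>> =
        ShortestPathProblem::new(&base, MorphismType {
            src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
            dst_type: dict.parse("<Digit 10> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
        }).solve();

    if let Some(path) = path.as_ref() {
        print_path(&mut dict, path);   // debug helper defined in src/test/morphism.rs
    }
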
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index a0f3b05..99747f5 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -1,5 +1,5 @@
 use {
-    crate::{dict::*, morphism::*, parser::*, unparser::*, TypeTerm}
+    crate::{dict::*, morphism::*, parser::*, unparser::*, TypeTerm, morphism_base::*, morphism_path::*}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -104,10 +104,10 @@ fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
 fn test_morphism_path1() {
     let (mut dict, mut base) = morphism_test_setup();
 
-    let path = base.find_morphism_path(MorphismType {
+    let path = ShortestPathProblem::new(&base, MorphismType {
         src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
         dst_type: dict.parse("<Digit 10> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
-    });
+    }).solve();
 
     assert_eq!(
         path,
@@ -132,10 +132,10 @@ fn test_morphism_path1() {
 fn test_morphism_path2() {
     let (mut dict, mut base) = morphism_test_setup();
 
-    let path = base.find_morphism_path(MorphismType {
+    let path = ShortestPathProblem::new(&base, MorphismType {
         src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
         dst_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
-    });
+    }).solve();
 
     assert_eq!(
         path,
@@ -160,10 +160,10 @@ fn test_morphism_path2() {
 fn test_morphism_path3() {
     let (mut dict, mut base) = morphism_test_setup();
 
-    let path = base.find_morphism_path(MorphismType {
+    let path = ShortestPathProblem::new(&base, MorphismType {
         src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
         dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
-    });
+    }).solve();
 
     if let Some(path) = path.as_ref() {
         print_path(&mut dict, path);
@@ -205,10 +205,10 @@ fn test_morphism_path3() {
 fn test_morphism_path4() {
     let (mut dict, mut base) = morphism_test_setup();
 
-    let path = base.find_morphism_path(MorphismType {
+    let path = ShortestPathProblem::new(&base, MorphismType {
         src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
         dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
-    });
+    }).solve();
 
     if let Some(path) = path.as_ref() {
         print_path(&mut dict, path);
@@ -263,10 +263,10 @@ fn test_morphism_path4() {
 fn test_morphism_path_posint() {
     let (mut dict, mut base) = morphism_test_setup();
 
-    let path = base.find_morphism_path(MorphismType {
+    let path = ShortestPathProblem::new(&base, MorphismType {
         src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
         dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap(),
-    });
+    }).solve();
 
     if let Some(path) = path.as_ref() {
         print_path(&mut dict, path);
@@ -422,10 +422,10 @@ fn test_morphism_path_listedit()
     );
 
 
-    let path = base.find_morphism_path(MorphismType {
+    let path = ShortestPathProblem::new(&base, MorphismType {
         src_type: dict.parse("<Seq~List~Vec <Digit 10>~Char>").unwrap(),
         dst_type: dict.parse("<Seq~List <Digit 10>~Char> ~ EditTree").unwrap(),
-    });
+    }).solve();
 
     if let Some(path) = path.as_ref() {
         print_path(&mut dict, path);
diff --git a/src/unification.rs b/src/unification.rs
index 5dceaf6..ace94e7 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -224,7 +224,7 @@ impl UnificationProblem {
         // error
         UnificationError
     > {
-        eprintln!("eval_subtype {:?} <=? {:?}", unification_pair.lhs, unification_pair.rhs);
+        // eprintln!("eval_subtype {:?} <=? {:?}", unification_pair.lhs, unification_pair.rhs);
         match (unification_pair.lhs.clone(), unification_pair.rhs.clone()) {
 
             /*

From 5a2cdbf00900f516d92d48dace5b04956c5e4ea2 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 1 Jun 2025 20:25:39 +0200
Subject: [PATCH 33/33] github workflow: use '--features pretty' option

---
 .github/workflows/cargo-test.yml | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/cargo-test.yml b/.github/workflows/cargo-test.yml
index a69c7e3..8328dcf 100644
--- a/.github/workflows/cargo-test.yml
+++ b/.github/workflows/cargo-test.yml
@@ -2,21 +2,20 @@ name: Rust
 
 on:
   push:
-    branches: [ "dev" ]
+    branches: ["dev"]
   pull_request:
-    branches: [ "dev" ]
+    branches: ["dev"]
 
 env:
   CARGO_TERM_COLOR: always
 
 jobs:
   build:
-
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/checkout@v3
-    - name: Build
-      run: cargo build --verbose
-    - name: Run tests
-      run: cargo test --verbose
+      - uses: actions/checkout@v3
+      - name: Build
+        run: cargo build --verbose --features pretty
+      - name: Run tests
+        run: cargo test --verbose --features pretty