From c03db48fd27756880affd288afe031328d305d7a Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 4 Aug 2024 23:18:37 +0200
Subject: [PATCH 01/20] TypeID: add Copy trait

---
 src/dict.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/dict.rs b/src/dict.rs
index 83b63ee..93bb6ec 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -2,7 +2,7 @@ use crate::bimap::Bimap;
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-#[derive(Eq, PartialEq, Hash, Clone, Debug)]
+#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
 pub enum TypeID {
     Fun(u64),
     Var(u64)

From c6bad6046a3914a9978050786d35596c4d36c108 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 2 Oct 2024 22:37:06 +0200
Subject: [PATCH 02/20] add sugared terms & pretty printing

---
 Cargo.toml    |   5 ++
 src/dict.rs   |   4 ++
 src/lib.rs    |   8 ++-
 src/pretty.rs | 148 ++++++++++++++++++++++++++++++++++++++++++++++++++
 src/sugar.rs  |  95 ++++++++++++++++++++++++++++++++
 src/term.rs   |   2 +-
 6 files changed, 259 insertions(+), 3 deletions(-)
 create mode 100644 src/pretty.rs
 create mode 100644 src/sugar.rs

diff --git a/Cargo.toml b/Cargo.toml
index 0a57fd3..429541c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,3 +4,8 @@ edition = "2018"
 name = "laddertypes"
 version = "0.1.0"
 
+[dependencies]
+tiny-ansi = { version = "0.1.0", optional = true }
+
+[features]
+pretty = ["dep:tiny-ansi"]
diff --git a/src/dict.rs b/src/dict.rs
index 93bb6ec..419d599 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -54,6 +54,10 @@ impl TypeDict {
     pub fn get_typeid(&self, tn: &String) -> Option<TypeID> {
         self.typenames.mλ.get(tn).cloned()
     }
+
+    pub fn get_varname(&self, var_id: u64) -> Option<String> {
+        self.typenames.my.get(&TypeID::Var(var_id)).cloned()
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
diff --git a/src/lib.rs b/src/lib.rs
index 970c555..2e9a163 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -5,6 +5,7 @@ pub mod term;
 pub mod lexer;
 pub mod parser;
 pub mod unparser;
+pub mod sugar;
 pub mod curry;
 pub mod lnf;
 pub mod pnf;
@@ -14,9 +15,12 @@ pub mod unification;
 #[cfg(test)]
 mod test;
 
+#[cfg(feature = "pretty")]
+mod pretty;
+
 pub use {
     dict::*,
     term::*,
-    unification::*
+    sugar::*,
+    unification::*,
 };
-
diff --git a/src/pretty.rs b/src/pretty.rs
new file mode 100644
index 0000000..1a4aa60
--- /dev/null
+++ b/src/pretty.rs
@@ -0,0 +1,148 @@
+use {
+    crate::TypeDict,
+    crate::sugar::SugaredTypeTerm,
+    tiny_ansi::TinyAnsi
+};
+
+impl SugaredTypeTerm {
+    pub fn pretty(&self, dict: &TypeDict, indent: u64) -> String {
+        let indent_width = 4;
+        match self {
+            SugaredTypeTerm::TypeID(id) => {
+                format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_blue()
+            },
+
+            SugaredTypeTerm::Num(n) => {
+                format!("{}", n).bright_cyan()
+            }
+
+            SugaredTypeTerm::Char(c) => {
+                format!("'{}'", c)
+            }
+
+            SugaredTypeTerm::Univ(t) => {
+                format!("{} {} . {}",
+                    "∀".yellow().bold(),
+                    dict.get_varname(0).unwrap_or("??".into()).bright_blue(),
+                    t.pretty(dict,indent)
+                )
+            }
+
+            SugaredTypeTerm::Spec(args) => {
+                let mut s = String::new();
+                s.push_str(&"<".yellow().bold());
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    if i > 0 {
+                        s.push(' ');
+                    }
+                    s.push_str( &arg.pretty(dict,indent+1) );
+                }
+                s.push_str(&">".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Struct(args) => {
+                let mut s = String::new();
+                s.push_str(&"{".yellow().bold());
+                for arg in args {
+                    s.push('\n');
+                    for x in 0..(indent+1)*indent_width {
+                        s.push(' ');
+                    }
+                    s.push_str(&arg.pretty(dict, indent + 1));
+                    s.push_str(&";\n".bright_yellow());
+                }
+
+                s.push('\n');
+                for x in 0..indent*indent_width {
+                    s.push(' ');
+                }
+                s.push_str(&"}".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Enum(args) => {
+                let mut s = String::new();
+                s.push_str(&"(".yellow().bold());
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    s.push('\n');
+                    for x in 0..(indent+1)*indent_width {
+                        s.push(' ');
+                    }
+                    if i > 0 {
+                        s.push_str(&"| ".yellow().bold());
+                    }
+                    s.push_str(&arg.pretty(dict, indent + 1));
+                }
+
+                s.push('\n');
+                for x in 0..indent*indent_width {
+                    s.push(' ');
+                }
+                s.push_str(&")".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Seq(args) => {
+                let mut s = String::new();
+                s.push_str(&"[ ".yellow().bold());
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    if i > 0 {
+                        s.push(' ');
+                    }
+                    s.push_str(&arg.pretty(dict, indent+1));
+                }
+                s.push_str(&" ]".yellow().bold());
+                s
+            }
+
+            SugaredTypeTerm::Morph(args) => {
+                let mut s = String::new();
+                for arg in args {
+                    s.push_str(&"  ~~morph~~>  ".bright_yellow());
+                    s.push_str(&arg.pretty(dict, indent));
+                }
+                s
+            }
+
+            SugaredTypeTerm::Func(args) => {
+                let mut s = String::new();
+                for i in 0..args.len() {
+                    let arg = &args[i];
+                    if i > 0{
+                        s.push('\n');
+                        for x in 0..(indent*indent_width) {
+                            s.push(' ');
+                        }                
+                        s.push_str(&"-->  ".bright_yellow());
+                    } else {
+//                        s.push_str("   ");
+                    }
+                    s.push_str(&arg.pretty(dict, indent));
+                }
+                s
+            }
+
+            SugaredTypeTerm::Ladder(rungs) => {
+                let mut s = String::new();
+                for i in 0..rungs.len() {
+                    let rung = &rungs[i];
+                    if i > 0{
+                        s.push('\n');
+                        for x in 0..(indent*indent_width) {
+                            s.push(' ');
+                        }
+                        s.push_str(&"~ ".yellow());
+                    }
+                    s.push_str(&rung.pretty(dict, indent));
+                }
+                s
+            }
+        }
+    }
+}
+
+
diff --git a/src/sugar.rs b/src/sugar.rs
new file mode 100644
index 0000000..4d13f78
--- /dev/null
+++ b/src/sugar.rs
@@ -0,0 +1,95 @@
+use {
+    crate::{TypeTerm, TypeID}
+};
+
+pub enum SugaredTypeTerm {
+    TypeID(TypeID),
+    Num(i64),
+    Char(char),
+    Univ(Box< SugaredTypeTerm >),
+    Spec(Vec< SugaredTypeTerm >),
+    Func(Vec< SugaredTypeTerm >),
+    Morph(Vec< SugaredTypeTerm >),
+    Ladder(Vec< SugaredTypeTerm >),
+    Struct(Vec< SugaredTypeTerm >),
+    Enum(Vec< SugaredTypeTerm >),
+    Seq(Vec< SugaredTypeTerm >)
+}
+
+impl TypeTerm {
+    pub fn sugar(self: TypeTerm, dict: &mut crate::TypeDict) -> SugaredTypeTerm {
+        match self {
+            TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
+            TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
+            TypeTerm::Char(c) => SugaredTypeTerm::Char(c),
+            TypeTerm::App(args) => if let Some(first) = args.first() {
+                if first == &dict.parse("Func").unwrap() {
+                    SugaredTypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Morph").unwrap() {
+                    SugaredTypeTerm::Morph( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Struct").unwrap() {
+                    SugaredTypeTerm::Struct( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Enum").unwrap() {
+                    SugaredTypeTerm::Enum( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Seq").unwrap() {
+                    SugaredTypeTerm::Seq( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Spec").unwrap() {
+                    SugaredTypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
+                }
+                else if first == &dict.parse("Univ").unwrap() {
+                    SugaredTypeTerm::Univ(Box::new(
+                        SugaredTypeTerm::Spec(
+                            args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
+                        )
+                    ))
+                }
+                else {
+                    SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
+                }
+            } else {
+                SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
+            },
+            TypeTerm::Ladder(rungs) =>            
+               SugaredTypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
+        }
+    }
+}
+
+impl SugaredTypeTerm {
+    pub fn desugar(self, dict: &mut crate::TypeDict) -> TypeTerm {
+        match self {
+            SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
+            SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
+            SugaredTypeTerm::Char(c) => TypeTerm::Char(c),
+            SugaredTypeTerm::Univ(t) => t.desugar(dict),
+            SugaredTypeTerm::Spec(ts) => TypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
+            SugaredTypeTerm::Ladder(ts) => TypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
+            SugaredTypeTerm::Func(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Func").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Morph(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Morph").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Struct(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Struct").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Enum(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Enum").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+            SugaredTypeTerm::Seq(ts) => TypeTerm::App(
+                std::iter::once( dict.parse("Seq").unwrap() ).chain(
+                    ts.into_iter().map(|t| t.desugar(dict))
+                ).collect()),
+        }
+    }
+}
+
diff --git a/src/term.rs b/src/term.rs
index f5e8f5f..29c7d27 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -57,7 +57,7 @@ impl TypeTerm {
     pub fn repr_as(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
         match self {
             TypeTerm::Ladder(rungs) => {
-                rungs.push(t.into());                
+                rungs.push(t.into());
             }
 
             _ => {
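
To make the new sugar layer concrete: an application whose head parses to `Func`, `Morph`, `Struct`, `Enum`, `Seq`, `Spec` or `Univ` is lifted to the matching `SugaredTypeTerm` variant, and `pretty()` renders it with indentation and ANSI colors. A minimal usage sketch, assuming the crate is built with the `pretty` feature and that `parse()` registers unknown type names on the fly, as the tests in this series rely on:

    use laddertypes::TypeDict;

    fn main() {
        let mut dict = TypeDict::new();

        // `<Seq <Digit 10>~Char>` sugars to SugaredTypeTerm::Seq(..),
        // the parameter staying a SugaredTypeTerm::Ladder(..)
        let term = dict.parse("<Seq <Digit 10>~Char>").expect("parse error");
        let sugared = term.sugar(&mut dict);

        // prints the sequence as a colored "[ ... ]" block
        println!("{}", sugared.pretty(&dict, 0));
    }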

From 4a6a35a89752f4f6843ceb9f58aa9d5311b2544c Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 4 Feb 2025 15:15:09 +0100
Subject: [PATCH 03/20] pnf: add test for collapsing first application argument

---
 src/test/pnf.rs | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/test/pnf.rs b/src/test/pnf.rs
index 2303b3e..00daa3a 100644
--- a/src/test/pnf.rs
+++ b/src/test/pnf.rs
@@ -29,6 +29,11 @@ fn test_param_normalize() {
         dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
     );
 
+    assert_eq!(
+        dict.parse("<A~Y B>").expect("parse error"),
+        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
+    );
+
     assert_eq!(
         dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
         dict.parse("<A <B C> F H H>

From a9a35aed1b3fc69a6cda95d43f9ae27bbd0f9ccd Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Wed, 5 Feb 2025 11:06:49 +0100
Subject: [PATCH 04/20] rewrite param_normalize()

---
 src/pnf.rs      | 151 ++++++++++++++++++++++++++++--------------------
 src/test/pnf.rs |  23 ++++++--
 2 files changed, 106 insertions(+), 68 deletions(-)

diff --git a/src/pnf.rs b/src/pnf.rs
index 4576be5..d3a6b20 100644
--- a/src/pnf.rs
+++ b/src/pnf.rs
@@ -2,6 +2,20 @@ use crate::term::TypeTerm;
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm >  ) -> Vec< TypeTerm > {
+    for i in 0 .. upper.len() {
+        if upper[i] == lower[0] {
+            let mut result_ladder = Vec::<TypeTerm>::new();
+            result_ladder.append(&mut upper[0..i].iter().cloned().collect());
+            result_ladder.append(&mut lower);
+            return result_ladder;
+        }
+    }
+
+    upper.append(&mut lower);
+    upper
+}
+
 impl TypeTerm {
     /// transmute type into Parameter-Normal-Form (PNF)
     ///
@@ -10,88 +24,99 @@ impl TypeTerm {
     /// <Seq <Digit 10>>~<Seq Char>
     /// ⇒ <Seq <Digit 10>~Char>
     /// ```
-    pub fn param_normalize(self) -> Self {
+    pub fn param_normalize(mut self) -> Self {
         match self {
             TypeTerm::Ladder(mut rungs) => {
                 if rungs.len() > 0 {
-                    // normalize all rungs separately
-                    for r in rungs.iter_mut() {
-                        *r = r.clone().param_normalize();
-                    }
+                    let mut new_rungs = Vec::new();
+                    while let Some(bottom) = rungs.pop() {
+                        if let Some(last_but) = rungs.last_mut() {
+                            match (bottom, last_but) {
+                                (TypeTerm::App(bot_args), TypeTerm::App(last_args)) => {
+                                    if bot_args.len() == last_args.len() {
+                                        let mut new_rung_params = Vec::new();
+                                        let mut require_break = false;
 
-                    // take top-rung
-                    match rungs.remove(0) {
-                        TypeTerm::App(params_top) => {
-                            let mut params_ladders = Vec::new();
-                            let mut tail : Vec<TypeTerm> = Vec::new();
+                                        if bot_args.len() > 0 {
+                                            if let Ok(_idx) =  last_args[0].is_syntactic_subtype_of(&bot_args[0]) {
+                                                for i in 0 .. bot_args.len() {
 
-                            // append all other rungs to ladders inside
-                            // the application
-                            for p in params_top {
-                                params_ladders.push(vec![ p ]);
-                            }
+                                                    let spliced_type_ladder = splice_ladders(
+                                                        last_args[i].clone().get_lnf_vec(),
+                                                        bot_args[i].clone().get_lnf_vec()
+                                                    );
+                                                    let spliced_type =
+                                                        if spliced_type_ladder.len() == 1 {
+                                                            spliced_type_ladder[0].clone()
+                                                        } else if spliced_type_ladder.len() > 1 {
+                                                            TypeTerm::Ladder(spliced_type_ladder)
+                                                        } else {
+                                                            TypeTerm::unit()
+                                                        };
 
-                            for r in rungs {
-                                match r {
-                                    TypeTerm::App(mut params_rung) => {
-                                        if params_rung.len() > 0 {
-                                            let mut first_param = params_rung.remove(0); 
+                                                    new_rung_params.push( spliced_type.param_normalize() );
+                                                }
 
-                                            if first_param == params_ladders[0][0] {
-                                               for (l, p) in params_ladders.iter_mut().skip(1).zip(params_rung) {
-                                                   l.push(p.param_normalize());
-                                               }
                                             } else {
-                                               params_rung.insert(0, first_param);
-                                               tail.push(TypeTerm::App(params_rung));
+                                                new_rung_params.push(
+                                                    TypeTerm::Ladder(vec![
+                                                        last_args[0].clone(),
+                                                        bot_args[0].clone()
+                                                    ]).normalize()
+                                                );
+
+                                                for i in 1 .. bot_args.len() {
+                                                    if let Ok(_idx) = last_args[i].is_syntactic_subtype_of(&bot_args[i]) {
+                                                        let spliced_type_ladder = splice_ladders(
+                                                            last_args[i].clone().get_lnf_vec(),
+                                                            bot_args[i].clone().get_lnf_vec()
+                                                        );
+                                                        let spliced_type =
+                                                            if spliced_type_ladder.len() == 1 {
+                                                                spliced_type_ladder[0].clone()
+                                                            } else if spliced_type_ladder.len() > 1 {
+                                                                TypeTerm::Ladder(spliced_type_ladder)
+                                                            } else {
+                                                                TypeTerm::unit()
+                                                            };
+
+                                                        new_rung_params.push( spliced_type.param_normalize() );
+                                                    } else {
+                                                        new_rung_params.push( bot_args[i].clone() );
+                                                        require_break = true;
+                                                    }
+                                                }
                                             }
                                         }
-                                    }
 
-                                    TypeTerm::Ladder(mut rs) => {
-                                        for r in rs {
-                                            tail.push(r.param_normalize());
+                                        if require_break {
+                                            new_rungs.push( TypeTerm::App(new_rung_params) );
+                                        } else {
+                                            rungs.pop();
+                                            rungs.push(TypeTerm::App(new_rung_params));
                                         }
-                                    }
 
-                                    atomic => {
-                                        tail.push(atomic);
+                                    } else {
+                                        new_rungs.push( TypeTerm::App(bot_args) );
                                     }
                                 }
+                                (bottom, last_buf) => {
+                                    new_rungs.push( bottom );
+                                }
                             }
-
-                            let head = TypeTerm::App(
-                                params_ladders.into_iter()
-                                    .map(
-                                        |mut l| {
-                                            l.dedup();
-                                            match l.len() {
-                                                0 => TypeTerm::unit(),
-                                                1 => l.remove(0),
-                                                _ => TypeTerm::Ladder(l).param_normalize()
-                                            }
-                                        }
-                                    )
-                                    .collect()
-                            );
-
-                            if tail.len() > 0 {
-                                tail.insert(0, head);
-                                TypeTerm::Ladder(tail)
-                            } else {
-                                head
-                            }
+                        } else {
+                            new_rungs.push( bottom );
                         }
+                    }
 
-                        TypeTerm::Ladder(mut r) => {
-                            r.append(&mut rungs);
-                            TypeTerm::Ladder(r)
-                        }
+                    new_rungs.reverse();
 
-                        atomic => {
-                            rungs.insert(0, atomic);
-                            TypeTerm::Ladder(rungs)
-                        }
+                    if new_rungs.len() > 1 {
+                        TypeTerm::Ladder(new_rungs)
+                    } else if new_rungs.len() == 1 {
+                        new_rungs[0].clone()
+                    } else {
+                        TypeTerm::unit()
                     }
                 } else {
                     TypeTerm::unit()
diff --git a/src/test/pnf.rs b/src/test/pnf.rs
index 00daa3a..e668849 100644
--- a/src/test/pnf.rs
+++ b/src/test/pnf.rs
@@ -19,27 +19,40 @@ fn test_param_normalize() {
         dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
     );
 
+    assert_eq!(
+        dict.parse("<A~Y B>").expect("parse error"),
+        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
+    );
+
     assert_eq!(
         dict.parse("<A B~C D~E>").expect("parse error"),
         dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").param_normalize(),
     );
 
+    assert_eq!(
+        dict.parse("<A~X B~C D~E>").expect("parse error"),
+        dict.parse("<A B D>~<A B~C E>~<X C E>").expect("parse errror").param_normalize(),
+    );
+
     assert_eq!(
         dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
         dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
     );
 
     assert_eq!(
-        dict.parse("<A~Y B>").expect("parse error"),
-        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
+        dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~x86.UInt8>").expect("parse error").param_normalize(),
+        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
+    );
+    assert_eq!(
+        dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> x86.UInt8>").expect("parse error").param_normalize(),
+        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
     );
 
     assert_eq!(
-        dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
+        dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
         dict.parse("<A <B C> F H H>
                    ~<A <B D> F H H>
-                   ~<A <B E> F H H>
-                   ~<A <B E> G H H>").expect("parse errror")
+                   ~<A~Y <B E> F H H>").expect("parse error")
                .param_normalize(),
     );
 }
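
For reference, the splice_ladders() helper used by this rewrite joins two ladders at the first rung of the lower ladder that also occurs in the upper one: the rungs above the shared rung are kept from the upper ladder, and the lower ladder replaces everything from the shared rung downwards (if no rung is shared, the two ladders are simply concatenated). A small illustrative test, assuming splice_ladders stays public under laddertypes::pnf and that parse() registers unknown type names on the fly:

    use laddertypes::{TypeDict, pnf::splice_ladders};

    #[test]
    fn splice_ladders_at_shared_rung() {
        let mut dict = TypeDict::new();

        let upper = vec![ dict.parse("A").unwrap(), dict.parse("B").unwrap() ];
        let lower = vec![ dict.parse("B").unwrap(), dict.parse("C").unwrap() ];

        // `B` is the first shared rung: keep `A`, then splice in the lower ladder
        assert_eq!(
            splice_ladders(upper, lower),
            vec![ dict.parse("A").unwrap(),
                  dict.parse("B").unwrap(),
                  dict.parse("C").unwrap() ]
        );
    }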

From 967ae5f30e43d7cb8acde9c41e7f807ce59fb1df Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 4 Aug 2024 23:23:20 +0200
Subject: [PATCH 05/20] initial MorphismBase with DFS to find morphism paths

---
 src/lib.rs      |   1 +
 src/morphism.rs | 204 ++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 205 insertions(+)
 create mode 100644 src/morphism.rs

diff --git a/src/lib.rs b/src/lib.rs
index 2e9a163..6d64576 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,6 +11,7 @@ pub mod lnf;
 pub mod pnf;
 pub mod subtype;
 pub mod unification;
+pub mod morphism;
 
 #[cfg(test)]
 mod test;
diff --git a/src/morphism.rs b/src/morphism.rs
new file mode 100644
index 0000000..31a8e31
--- /dev/null
+++ b/src/morphism.rs
@@ -0,0 +1,204 @@
+use {
+    crate::{
+        TypeTerm, TypeID,
+        unification::UnificationProblem,
+    },
+    std::collections::HashMap
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+pub struct MorphismType {
+    pub src_type: TypeTerm,
+    pub dst_type: TypeTerm,
+}
+
+pub struct MorphismBase<Morphism: Clone> {
+    morphisms: Vec< (MorphismType, Morphism) >,
+    list_typeid: TypeID
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+impl MorphismType {
+    fn normalize(self) -> Self {
+        MorphismType {
+            src_type: self.src_type.normalize(),
+            dst_type: self.dst_type.normalize()
+        }
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+impl<Morphism: Clone> MorphismBase<Morphism> {
+    pub fn new() -> Self {
+        MorphismBase {
+            morphisms: Vec::new(),
+
+            // FIXME: magic number
+            list_typeid: TypeID::Fun(10)
+        }
+    }
+
+    pub fn add_morphism(&mut self, morph_type: MorphismType, morphism: Morphism) {
+        self.morphisms.push( (morph_type.normalize(), morphism) );
+    }
+
+    pub fn enum_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< (HashMap<TypeID, TypeTerm>, TypeTerm) >
+    {
+        let mut dst_types = Vec::new();
+
+        // first enumerate all "direct" morphisms,
+        for (ty,m) in self.morphisms.iter() {
+            if let Ok(σ) = crate::unification::unify(
+                &ty.src_type,
+                &src_type.clone().normalize()
+            ) {
+                let dst_type = 
+                    ty.dst_type.clone()
+                        .apply_substitution(
+                            &|x| σ.get(x).cloned()
+                        )
+                        .clone();
+
+                dst_types.push( (σ, dst_type) );
+            }
+        }
+
+        // ..then all "list-map" morphisms.
+        // Check if we have a List type, and if so, see what the Item type is
+
+        // TODO: function for generating fresh variables
+        let item_variable = TypeID::Var(100);
+
+        if let Ok(σ) = crate::unification::unify(
+            &TypeTerm::App(vec![
+                TypeTerm::TypeID(self.list_typeid),
+                TypeTerm::TypeID(item_variable)
+            ]),
+            &src_type.clone().param_normalize(),
+        ) {
+            let src_item_type = σ.get(&item_variable).unwrap().clone();
+
+            for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
+                let dst_type =
+                        TypeTerm::App(vec![
+                            TypeTerm::TypeID(self.list_typeid),
+                            dst_item_type.clone()
+                                .apply_substitution(
+                                    &|x| γ.get(x).cloned()
+                                ).clone()
+                        ]).normalize();
+
+                dst_types.push( (γ.clone(), dst_type) );
+            }
+        }
+
+        dst_types
+    }
+
+    pub fn enum_morphisms_with_subtyping(&self, src_type: &TypeTerm)
+    -> Vec< (TypeTerm, TypeTerm) >
+    {
+        let mut src_lnf = src_type.clone().get_lnf_vec();
+        let mut halo_lnf = vec![];
+        let mut dst_types = Vec::new();
+
+        while src_lnf.len() > 0 {
+            let src_type = TypeTerm::Ladder( src_lnf.clone() );
+            let halo_type = TypeTerm::Ladder( halo_lnf.clone() );
+
+            for (σ, t) in self.enum_morphisms( &src_type ) {
+                dst_types.push(
+                    (halo_type.clone()
+                        .apply_substitution(
+                            &|x| σ.get(x).cloned()
+                        ).clone(),
+                    t.clone()
+                        .apply_substitution(
+                            &|x| σ.get(x).cloned()
+                        ).clone()
+                    )
+                );
+            }
+
+            // continue with next supertype
+            halo_lnf.push(src_lnf.remove(0));
+        }
+
+        dst_types
+    }
+
+    /* performs DFS to find a morphism-path for a given type
+     * will return the first matching path, not the shortest
+     */
+    pub fn find_morphism_path(&self, ty: MorphismType)
+    -> Option< Vec<TypeTerm> >
+    {
+        let ty = ty.normalize();
+        let mut visited = Vec::new();
+        let mut queue = vec![
+            vec![ ty.src_type.clone().normalize() ]
+        ];
+
+        while let Some(current_path) = queue.pop() {       
+            let current_type = current_path.last().unwrap();
+
+            if ! visited.contains( current_type ) {
+                visited.push( current_type.clone() );
+
+                for (h, t) in self.enum_morphisms_with_subtyping(&current_type) {
+                    let tt = TypeTerm::Ladder( vec![ h, t ] ).normalize();
+
+                    if ! visited.contains( &tt ) {
+                        let unification_result = crate::unification::unify(&tt, &ty.dst_type);
+                        let mut new_path = current_path.clone();
+
+                        new_path.push( tt );
+
+                        if let Ok(σ) = unification_result {
+                            new_path = new_path.into_iter().map(
+                                |mut t: TypeTerm| t.apply_substitution(&|x| σ.get(x).cloned()).clone()
+                            ).collect::<Vec<TypeTerm>>();
+
+                            return Some(new_path);
+                        } else {
+                            queue.push( new_path );
+                        }
+                    }
+                }
+            }
+        }
+
+        None
+    }
+
+    pub fn find_morphism(&self, ty: &MorphismType)
+    -> Option< Morphism > {
+
+        // TODO
+
+        None
+    }
+
+    pub fn find_list_map_morphism(&self, item_ty: &MorphismType)
+    -> Option< Morphism > {
+
+        // TODO
+
+        None
+    }
+
+    pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
+    -> Option<( Morphism, TypeTerm, HashMap<TypeID, TypeTerm> )> {
+
+        // TODO
+
+        None
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+

From fd5936209c148103617424c02fec1ce3a6449fec Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 4 Aug 2024 23:47:59 +0200
Subject: [PATCH 06/20] add test for find_morphism_path()

---
 src/morphism.rs      |  6 ++--
 src/test/mod.rs      |  1 +
 src/test/morphism.rs | 70 ++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 73 insertions(+), 4 deletions(-)
 create mode 100644 src/test/morphism.rs

diff --git a/src/morphism.rs b/src/morphism.rs
index 31a8e31..6b921bf 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -32,12 +32,10 @@ impl MorphismType {
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 impl<Morphism: Clone> MorphismBase<Morphism> {
-    pub fn new() -> Self {
+    pub fn new(list_typeid: TypeID) -> Self {
         MorphismBase {
             morphisms: Vec::new(),
-
-            // FIXME: magic number
-            list_typeid: TypeID::Fun(10)
+            list_typeid
         }
     }
 
diff --git a/src/test/mod.rs b/src/test/mod.rs
index 29c14bc..41f5e71 100644
--- a/src/test/mod.rs
+++ b/src/test/mod.rs
@@ -7,4 +7,5 @@ pub mod pnf;
 pub mod subtype;
 pub mod substitution;
 pub mod unification;
+pub mod morphism;
 
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
new file mode 100644
index 0000000..d2328d7
--- /dev/null
+++ b/src/test/morphism.rs
@@ -0,0 +1,70 @@
+use {
+    crate::{dict::*, morphism::*}
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[test]
+fn test_morphism_path() {
+    let mut dict = TypeDict::new();
+    let mut base = MorphismBase::<u64>::new( dict.add_typename("Seq".into()) );
+
+    dict.add_varname("Radix".into());
+    dict.add_varname("SrcRadix".into());
+    dict.add_varname("DstRadix".into());
+
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
+            dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
+        },
+        11
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
+            dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
+        },
+        22
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
+        },
+        333
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
+        },
+        444
+    );
+    base.add_morphism(
+        MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
+        },
+        555
+    );
+
+
+    assert_eq!(
+        base.find_morphism_path(MorphismType {
+            src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+        }),
+        Some(
+            vec![
+                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
+            ]
+        )
+    );
+}
+

From 95fc28f80ecb3d7bd0216212654cfdeb53e10f64 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 5 Aug 2024 02:54:35 +0200
Subject: [PATCH 07/20] turn Morphism into trait and add find_morphism()
 function

---
 src/lib.rs           |  1 +
 src/morphism.rs      | 91 +++++++++++++++++++++++++++++++++-----------
 src/test/morphism.rs | 53 +++++++++++++++++---------
 3 files changed, 105 insertions(+), 40 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 6d64576..3361a6a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -24,4 +24,5 @@ pub use {
     term::*,
     sugar::*,
     unification::*,
+    morphism::*
 };
diff --git a/src/morphism.rs b/src/morphism.rs
index 6b921bf..3211651 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -8,20 +8,27 @@ use {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Clone, PartialEq, Eq, Debug)]
 pub struct MorphismType {
     pub src_type: TypeTerm,
     pub dst_type: TypeTerm,
 }
 
-pub struct MorphismBase<Morphism: Clone> {
-    morphisms: Vec< (MorphismType, Morphism) >,
+pub trait Morphism : Sized {
+    fn get_type(&self) -> MorphismType;
+    fn list_map_morphism(&self, list_typeid: TypeID) -> Option< Self >;
+}
+
+#[derive(Clone)]
+pub struct MorphismBase<M: Morphism + Clone> {
+    morphisms: Vec< M >,
     list_typeid: TypeID
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 impl MorphismType {
-    fn normalize(self) -> Self {
+    pub fn normalize(self) -> Self {
         MorphismType {
             src_type: self.src_type.normalize(),
             dst_type: self.dst_type.normalize()
@@ -31,7 +38,7 @@ impl MorphismType {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl<Morphism: Clone> MorphismBase<Morphism> {
+impl<M: Morphism + Clone> MorphismBase<M> {
     pub fn new(list_typeid: TypeID) -> Self {
         MorphismBase {
             morphisms: Vec::new(),
@@ -39,8 +46,8 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
         }
     }
 
-    pub fn add_morphism(&mut self, morph_type: MorphismType, morphism: Morphism) {
-        self.morphisms.push( (morph_type.normalize(), morphism) );
+    pub fn add_morphism(&mut self, m: M) {
+        self.morphisms.push( m );
     }
 
     pub fn enum_morphisms(&self, src_type: &TypeTerm)
@@ -49,17 +56,15 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
         let mut dst_types = Vec::new();
 
         // first enumerate all "direct" morphisms,
-        for (ty,m) in self.morphisms.iter() {
+        for m in self.morphisms.iter() {
             if let Ok(σ) = crate::unification::unify(
-                &ty.src_type,
+                &m.get_type().src_type,
                 &src_type.clone().normalize()
             ) {
                 let dst_type = 
-                    ty.dst_type.clone()
-                        .apply_substitution(
-                            &|x| σ.get(x).cloned()
-                        )
-                        .clone();
+                    m.get_type().dst_type.clone()
+                    .apply_substitution( &|x| σ.get(x).cloned() )
+                    .clone();
 
                 dst_types.push( (σ, dst_type) );
             }
@@ -173,26 +178,68 @@ impl<Morphism: Clone> MorphismBase<Morphism> {
         None
     }
 
+
     pub fn find_morphism(&self, ty: &MorphismType)
-    -> Option< Morphism > {
+    -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
 
-        // TODO
+        // try list-map morphism
+        if let Ok(σ) = UnificationProblem::new(vec![
+            (ty.src_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(100)) ])),
+            (ty.dst_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(101)) ])),
+        ]).solve() {
 
-        None
-    }
+            // TODO: use real fresh variable names
+            let item_morph_type = MorphismType {
+                src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
+                dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
+            }.normalize();
 
-    pub fn find_list_map_morphism(&self, item_ty: &MorphismType)
-    -> Option< Morphism > {
+            if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
+                if let Some(list_morph) = m.list_map_morphism( self.list_typeid ) {
+                    return Some( (list_morph, σ) );
+                }
+            }
+        }
 
-        // TODO
+        // otherwise
+        for m in self.morphisms.iter() {
+            let unification_problem = UnificationProblem::new(
+                vec![
+                    ( ty.src_type.clone().normalize(), m.get_type().src_type.clone() ),
+                    ( ty.dst_type.clone().normalize(), m.get_type().dst_type.clone() )
+                ]
+            );
+
+            let unification_result = unification_problem.solve();
+            if let Ok(σ) = unification_result {
+                return Some((m.clone(), σ));
+            }
+        }
 
         None
     }
 
     pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
-    -> Option<( Morphism, TypeTerm, HashMap<TypeID, TypeTerm> )> {
+    -> Option<( M, TypeTerm, HashMap<TypeID, TypeTerm> )> {
+        let mut src_lnf = ty.src_type.clone().get_lnf_vec();
+        let mut dst_lnf = ty.dst_type.clone().get_lnf_vec();
+        let mut halo = vec![];
 
-        // TODO
+        while src_lnf.len() > 0 && dst_lnf.len() > 0 {
+            if let Some((m, σ)) = self.find_morphism(&MorphismType{
+                src_type: TypeTerm::Ladder(src_lnf.clone()),
+                dst_type: TypeTerm::Ladder(dst_lnf.clone())
+            }) {
+                return Some((m, TypeTerm::Ladder(halo), σ));
+            } else {
+                if src_lnf[0] == dst_lnf[0] {
+                    src_lnf.remove(0);
+                    halo.push(dst_lnf.remove(0));
+                } else {
+                    return None;
+                }
+            }
+        }
 
         None
     }
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index d2328d7..fa4b492 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -1,55 +1,72 @@
 use {
-    crate::{dict::*, morphism::*}
+    crate::{dict::*, morphism::*, TypeTerm}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Clone)]
+struct DummyMorphism(MorphismType);
+
+impl Morphism for DummyMorphism {
+    fn get_type(&self) -> MorphismType {
+        self.0.clone().normalize()
+    }
+
+    fn list_map_morphism(&self, list_typeid: TypeID) -> Option<DummyMorphism> {
+        Some(DummyMorphism(MorphismType {
+            src_type: TypeTerm::App(vec![
+                TypeTerm::TypeID( list_typeid ),
+                self.0.src_type.clone()
+            ]),
+
+            dst_type: TypeTerm::App(vec![
+                TypeTerm::TypeID( list_typeid ),
+                self.0.dst_type.clone()
+            ])
+        }))
+    }
+}
+ 
 #[test]
 fn test_morphism_path() {
     let mut dict = TypeDict::new();
-    let mut base = MorphismBase::<u64>::new( dict.add_typename("Seq".into()) );
+    let mut base = MorphismBase::<DummyMorphism>::new( dict.add_typename("Seq".into()) );
 
     dict.add_varname("Radix".into());
     dict.add_varname("SrcRadix".into());
     dict.add_varname("DstRadix".into());
 
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
             dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
-        },
-        11
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
             dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
-        },
-        22
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
             dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
-        },
-        333
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
             dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
-        },
-        444
+        })
     );
     base.add_morphism(
-        MorphismType{
+        DummyMorphism(MorphismType{
             src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
             dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
-        },
-        555
+        })
     );
 
-
     assert_eq!(
         base.find_morphism_path(MorphismType {
             src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),

From c0c0184d97352eb6db29c5f85b08fc5cba24f026 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 6 Aug 2024 15:37:23 +0200
Subject: [PATCH 08/20] fix returned halo type in
 find_morphism_with_subtyping()

---
 src/morphism.rs      |  5 ++++-
 src/test/morphism.rs | 42 +++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 45 insertions(+), 2 deletions(-)

diff --git a/src/morphism.rs b/src/morphism.rs
index 3211651..0796c91 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -230,7 +230,10 @@ impl<M: Morphism + Clone> MorphismBase<M> {
                 src_type: TypeTerm::Ladder(src_lnf.clone()),
                 dst_type: TypeTerm::Ladder(dst_lnf.clone())
             }) {
-                return Some((m, TypeTerm::Ladder(halo), σ));
+                halo.push(src_lnf.get(0).unwrap().clone());
+                return Some((m,
+                    TypeTerm::Ladder(halo).apply_substitution(&|x| σ.get(x).cloned()).clone(),
+                    σ));
             } else {
                 if src_lnf[0] == dst_lnf[0] {
                     src_lnf.remove(0);
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index fa4b492..47bd100 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -4,7 +4,7 @@ use {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-#[derive(Clone)]
+#[derive(Clone, Debug, PartialEq)]
 struct DummyMorphism(MorphismType);
 
 impl Morphism for DummyMorphism {
@@ -83,5 +83,45 @@ fn test_morphism_path() {
             ]
         )
     );
+
+    assert_eq!(
+        base.find_morphism_path(MorphismType {
+            src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+            dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+        }),
+        Some(
+            vec![
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
+            ]
+        )
+    );
+
+    assert_eq!(
+        base.find_morphism_with_subtyping(
+            &MorphismType {
+                src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+                dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+            }
+        ),
+
+        Some((
+                DummyMorphism(MorphismType{
+                    src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                    dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()                
+                }),
+
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>>").unwrap(),
+
+                vec![
+                    (dict.get_typeid(&"Radix".into()).unwrap(),
+                    dict.parse("10").unwrap())
+                ].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
+        ))
+    );
 }
 

From adc8a43b691e991c79d5e4e24e0a832d6b815b81 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 11 Aug 2024 22:50:48 +0200
Subject: [PATCH 09/20] morphism base: find shortest path instead of just some
 path

---
 src/morphism.rs | 34 ++++++++++++++++++++++------------
 1 file changed, 22 insertions(+), 12 deletions(-)

diff --git a/src/morphism.rs b/src/morphism.rs
index 0796c91..33eafd5 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -17,6 +17,10 @@ pub struct MorphismType {
 pub trait Morphism : Sized {
     fn get_type(&self) -> MorphismType;
     fn list_map_morphism(&self, list_typeid: TypeID) -> Option< Self >;
+
+    fn weight(&self) -> u64 {
+        1
+    }
 }
 
 #[derive(Clone)]
@@ -134,41 +138,47 @@ impl<M: Morphism + Clone> MorphismBase<M> {
         dst_types
     }
 
-    /* performs DFS to find a morphism-path for a given type
-     * will return the first matching path, not the shortest
+    /* try to find the shortest morphism-path for a given type
      */
     pub fn find_morphism_path(&self, ty: MorphismType)
     -> Option< Vec<TypeTerm> >
     {
         let ty = ty.normalize();
-        let mut visited = Vec::new();
+
         let mut queue = vec![
-            vec![ ty.src_type.clone().normalize() ]
+            (0, vec![ ty.src_type.clone().normalize() ])
         ];
 
-        while let Some(current_path) = queue.pop() {       
-            let current_type = current_path.last().unwrap();
+        while ! queue.is_empty() {
+            queue.sort_by( |&(w1,_),&(w2,_)| w2.cmp(&w1));
 
-            if ! visited.contains( current_type ) {
-                visited.push( current_type.clone() );
+            if let Some((current_weight, current_path)) = queue.pop() {
+                let current_type = current_path.last().unwrap();
 
                 for (h, t) in self.enum_morphisms_with_subtyping(&current_type) {
                     let tt = TypeTerm::Ladder( vec![ h, t ] ).normalize();
 
-                    if ! visited.contains( &tt ) {
+                    if ! current_path.contains( &tt ) {
                         let unification_result = crate::unification::unify(&tt, &ty.dst_type);
-                        let mut new_path = current_path.clone();
 
+                        let morphism_weight = 1;
+                            /*
+                            {
+                                self.find_morphism( &tt ).unwrap().0.get_weight()
+                            };
+                            */
+
+                        let new_weight = current_weight + morphism_weight;
+                        let mut new_path = current_path.clone();
                         new_path.push( tt );
 
                         if let Ok(σ) = unification_result {
                             new_path = new_path.into_iter().map(
                                 |mut t: TypeTerm| t.apply_substitution(&|x| σ.get(x).cloned()).clone()
                             ).collect::<Vec<TypeTerm>>();
-
                             return Some(new_path);
                         } else {
-                            queue.push( new_path );
+                            queue.push( (new_weight, new_path) );
                         }
                     }
                 }
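
The sort-descending-then-pop() in the loop above makes the Vec behave like a min-priority queue, so the cheapest candidate path is always extended first, i.e. a uniform-cost search (and since every morphism is currently weighted 1, it effectively searches by path length). A self-contained sketch of the same strategy using a BinaryHeap frontier instead of re-sorting the Vec on every iteration; here the goal test is an arbitrary predicate, whereas find_morphism_path() unifies against dst_type:

    use std::{cmp::Reverse, collections::BinaryHeap};

    /// Cheapest-path-first search, generic over the node type `N`.
    /// `expand` yields `(edge weight, successor)` pairs.
    fn shortest_path_sketch<N: Clone + PartialEq>(
        start: N,
        is_goal: impl Fn(&N) -> bool,
        expand: impl Fn(&N) -> Vec<(u64, N)>,
    ) -> Option<Vec<N>> {
        let mut paths: Vec<Vec<N>> = vec![ vec![ start ] ];
        let mut queue: BinaryHeap<(Reverse<u64>, usize)> = BinaryHeap::new();
        queue.push(( Reverse(0), 0 ));

        while let Some(( Reverse(weight), idx )) = queue.pop() {
            let path = paths[idx].clone();
            let node = path.last().unwrap().clone();

            if is_goal(&node) {
                return Some(path);
            }

            for (w, next) in expand(&node) {
                // like the patch, only avoid cycles within the current path
                if !path.contains(&next) {
                    let mut new_path = path.clone();
                    new_path.push(next);
                    paths.push(new_path);
                    queue.push(( Reverse(weight + w), paths.len() - 1 ));
                }
            }
        }

        None
    }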

From ffb990620916fa0ae52588e9a4c523519b5e45ff Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 12 Aug 2024 21:18:17 +0200
Subject: [PATCH 10/20] initial implementation of solver for steiner trees

---
 src/lib.rs           |   1 +
 src/morphism.rs      |   1 -
 src/steiner_tree.rs  | 162 +++++++++++++++++++++++++++++++++++++++++++
 src/test/morphism.rs |  42 +++++++++--
 4 files changed, 201 insertions(+), 5 deletions(-)
 create mode 100644 src/steiner_tree.rs

diff --git a/src/lib.rs b/src/lib.rs
index 3361a6a..11a001f 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -12,6 +12,7 @@ pub mod pnf;
 pub mod subtype;
 pub mod unification;
 pub mod morphism;
+pub mod steiner_tree;
 
 #[cfg(test)]
 mod test;
diff --git a/src/morphism.rs b/src/morphism.rs
index 33eafd5..9160610 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -188,7 +188,6 @@ impl<M: Morphism + Clone> MorphismBase<M> {
         None
     }
 
-
     pub fn find_morphism(&self, ty: &MorphismType)
     -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
 
diff --git a/src/steiner_tree.rs b/src/steiner_tree.rs
new file mode 100644
index 0000000..f5338e9
--- /dev/null
+++ b/src/steiner_tree.rs
@@ -0,0 +1,162 @@
+use {
+    std::collections::HashMap,
+    crate::{
+        TypeID,
+        TypeTerm,
+        morphism::{
+            MorphismType,
+            Morphism,
+            MorphismBase
+        }
+    }
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone)]
+pub struct SteinerTree {
+    weight: u64,
+    goals: Vec< TypeTerm >,
+    pub edges: Vec< MorphismType >,
+}
+
+impl SteinerTree {
+    fn add_edge(&mut self, ty: MorphismType) {
+        self.weight += 1;
+
+        let ty = ty.normalize();
+
+        // check if by adding this new edge, we reach a goal
+        let mut new_goals = Vec::new();
+        let mut added = false;
+
+        for g in self.goals.clone() { 
+            if let Ok(σ) = crate::unify(&ty.dst_type, &g) {
+                if !added {
+                    self.edges.push(ty.clone());
+
+                    // goal reached.
+                    for e in self.edges.iter_mut() {
+                        e.src_type = e.src_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
+                        e.dst_type = e.dst_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
+                    }
+                    added = true;
+                } else {
+                    new_goals.push(g);
+                }
+            } else {
+                new_goals.push(g);
+            }
+        }
+
+        if !added {
+            self.edges.push(ty.clone());
+        }
+
+        self.goals = new_goals;
+    }
+
+    fn is_solved(&self) -> bool {
+        self.goals.len() == 0
+    }
+
+    fn contains(&self, t: &TypeTerm) -> Option< HashMap<TypeID, TypeTerm> > {
+        for e in self.edges.iter() {
+            if let Ok(σ) = crate::unify(&e.dst_type, t) {
+                return Some(σ)
+            }
+        }
+
+        None
+    }
+}
+
+/* given a representation tree with the available
+ * representations `src_types`, try to find
+ * a sequence of morphisms that spans all
+ * representations in `dst_types`.
+ */
+pub struct SteinerTreeProblem {
+    src_types: Vec< TypeTerm >,
+    queue: Vec< SteinerTree >
+}
+
+impl SteinerTreeProblem {
+    pub fn new(
+        src_types: Vec< TypeTerm >,
+        dst_types: Vec< TypeTerm >
+    ) -> Self {
+        SteinerTreeProblem {
+            src_types: src_types.into_iter().map(|t| t.normalize()).collect(),
+            queue: vec![
+                SteinerTree {
+                    weight: 0,
+                    goals: dst_types.into_iter().map(|t| t.normalize()).collect(),
+                    edges: Vec::new()
+                }
+            ]
+        }
+    }
+
+    pub fn next(&mut self) -> Option< SteinerTree > {
+        eprintln!("queue size = {}", self.queue.len());
+
+        /* FIXME: by giving the highest priority to
+         * the candidate tree with the fewest remaining goals,
+         * the optimality of the search algorithm
+         * is probably destroyed, but it dramatically helps
+         * to tame the combinatorial explosion in this algorithm.
+         */
+        self.queue.sort_by(|t1, t2|
+            if t1.goals.len() < t2.goals.len() {
+                std::cmp::Ordering::Greater
+            } else if t1.goals.len() == t2.goals.len() {
+                if t1.weight < t2.weight {
+                    std::cmp::Ordering::Greater
+                } else {
+                    std::cmp::Ordering::Less
+                }
+            } else {
+                std::cmp::Ordering::Less
+            }
+        );
+        self.queue.pop()
+    }
+
+    pub fn solve_bfs<M: Morphism + Clone>(&mut self, dict: &crate::dict::TypeDict, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
+
+        // take the currently smallest tree and extend it by one step
+        while let Some( mut current_tree ) = self.next() {
+
+            // check if current tree is a solution
+            if current_tree.goals.len() == 0 {
+                return Some(current_tree);
+            }
+
+            // get all vertices spanned by this tree
+            let mut current_nodes = self.src_types.clone();
+            for e in current_tree.edges.iter() {
+                current_nodes.push( e.dst_type.clone() );
+            }
+
+            // extend the tree by one edge and add it to the queue
+            for src_type in current_nodes.iter() {
+                for (dst_halo,dst_ty) in morphisms.enum_morphisms_with_subtyping( &src_type ) {
+                    let dst_type = TypeTerm::Ladder(vec![
+                        dst_halo, dst_ty
+                    ]).normalize();
+
+                    if !current_nodes.contains( &dst_type ) {
+                        let mut new_tree = current_tree.clone();
+                        let src_type = src_type.clone();
+                        new_tree.add_edge(MorphismType { src_type, dst_type }.normalize());
+                        self.queue.push( new_tree );
+                    }
+                }
+            }
+        }
+
+        None
+    }
+}
+
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index 47bd100..b908101 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -1,5 +1,5 @@
 use {
-    crate::{dict::*, morphism::*, TypeTerm}
+    crate::{dict::*, morphism::*, steiner_tree::*, TypeTerm}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -26,9 +26,8 @@ impl Morphism for DummyMorphism {
         }))
     }
 }
- 
-#[test]
-fn test_morphism_path() {
+
+fn morphism_test_setup() -> ( TypeDict, MorphismBase<DummyMorphism> ) {
     let mut dict = TypeDict::new();
     let mut base = MorphismBase::<DummyMorphism>::new( dict.add_typename("Seq".into()) );
 
@@ -67,6 +66,13 @@ fn test_morphism_path() {
         })
     );
 
+    (dict, base)
+}
+
+#[test]
+fn test_morphism_path() {
+    let (mut dict, mut base) = morphism_test_setup();
+
     assert_eq!(
         base.find_morphism_path(MorphismType {
             src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
@@ -125,3 +131,31 @@ fn test_morphism_path() {
     );
 }
 
+#[test]
+fn test_steiner_tree() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+
+    let mut steiner_tree_problem = SteinerTreeProblem::new(
+        // source reprs
+        vec![
+            dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+        ],
+
+        // destination reprs
+        vec![
+            dict.parse("ℕ ~ <PosInt 2 BigEndian> ~ <Seq <Digit 2> ~ Char>").unwrap(),
+            dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),            
+            dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+        ]
+    );
+
+    if let Some(solution) = steiner_tree_problem.solve_bfs( &dict, &base ) {
+        for e in solution.edges.iter() {
+            eprintln!(" :: {}\n--> {}", dict.unparse(&e.src_type), dict.unparse(&e.dst_type));
+        }
+    } else {
+        eprintln!("no solution");
+    }
+}
+

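A note on the ordering used by `next()` above: `sort_by` places the highest-priority candidate last, so `pop()` returns the tree with the fewest remaining goals first and, among equals, the one with the lowest weight. A minimal standalone sketch of that ordering, assuming plain `(goals_remaining, weight)` tuples stand in for the candidate trees (the comparator in the patch has the same effect, it merely never reports two trees as equal):

    // hypothetical illustration of the priority used by SteinerTreeProblem::next()
    fn main() {
        let mut queue: Vec<(usize, u64)> = vec![(2, 5), (0, 9), (1, 7), (1, 1)];

        // fewer remaining goals sorts towards the back, then lower weight;
        // pop() takes from the back of the Vec
        queue.sort_by(|a, b| b.0.cmp(&a.0).then(b.1.cmp(&a.1)));

        assert_eq!(queue.pop(), Some((0, 9))); // fewest goals first
        assert_eq!(queue.pop(), Some((1, 1))); // then lowest weight
    }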
From c6edd44eacf4de0e11782bc507d911c3d3d33af9 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Fri, 27 Sep 2024 12:15:40 +0200
Subject: [PATCH 11/20] add steiner tree solver based on shortest path

---
 src/steiner_tree.rs | 93 ++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 87 insertions(+), 6 deletions(-)

diff --git a/src/steiner_tree.rs b/src/steiner_tree.rs
index f5338e9..f854dd9 100644
--- a/src/steiner_tree.rs
+++ b/src/steiner_tree.rs
@@ -17,10 +17,14 @@ use {
 pub struct SteinerTree {
     weight: u64,
     goals: Vec< TypeTerm >,
-    pub edges: Vec< MorphismType >,
+    edges: Vec< MorphismType >,
 }
 
 impl SteinerTree {
+    pub fn into_edges(self) -> Vec< MorphismType > {
+        self.edges
+    }
+
     fn add_edge(&mut self, ty: MorphismType) {
         self.weight += 1;
 
@@ -71,6 +75,72 @@ impl SteinerTree {
     }
 }
 
+
+pub struct PathApproxSteinerTreeSolver {
+    root: TypeTerm,
+    leaves: Vec< TypeTerm >
+}
+
+impl PathApproxSteinerTreeSolver {
+    pub fn new(
+        root: TypeTerm,
+        leaves: Vec<TypeTerm>
+    ) -> Self {
+        PathApproxSteinerTreeSolver {
+            root, leaves
+        }
+    }
+
+    pub fn solve<M: Morphism + Clone>(self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
+        let mut tree = Vec::<MorphismType>::new();
+
+        for goal in self.leaves {
+            // try to find shortest path from root to current leaf
+            if let Some(new_path) = morphisms.find_morphism_path(
+                MorphismType {
+                    src_type: self.root.clone(),
+                    dst_type: goal.clone()
+                }
+            ) {
+                // reduce new path so that it does not collide with any existing path
+                let mut src_type = self.root.clone();
+                let mut new_path_iter = new_path.into_iter().peekable();
+
+                // check all existing nodes..
+                for mt in tree.iter() {
+//                    assert!( mt.src_type == &src_type );
+                    if let Some(t) = new_path_iter.peek() {
+                        if &mt.dst_type == t {
+                            // eliminate this node from new path
+                            src_type = new_path_iter.next().unwrap().clone();
+                        }
+                    } else {
+                        break;
+                    }
+                }
+
+                for dst_type in new_path_iter {
+                    tree.push(MorphismType {
+                        src_type: src_type.clone(),
+                        dst_type: dst_type.clone()
+                    });
+                    src_type = dst_type;
+                }
+            } else {
+                eprintln!("could not find path\nfrom {:?}\nto {:?}", &self.root, &goal);
+                return None;
+            }
+        }
+
+        Some(SteinerTree {
+            weight: 0,
+            goals: vec![],
+            edges: tree
+        })
+    }
+}
+
+
 /* given a representation tree with the available
  * representations `src_types`, try to find
  * a sequence of morphisms that spans all
@@ -122,8 +192,15 @@ impl SteinerTreeProblem {
         );
         self.queue.pop()
     }
+/*
+    pub fn solve_approx_path<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
+        if let Some(master) = self.src_types.first() {
 
-    pub fn solve_bfs<M: Morphism + Clone>(&mut self, dict: &crate::dict::TypeDict, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
+            
+        }
+    }
+*/
+    pub fn solve_bfs<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
 
         // take the currently smallest tree and extend it by one step
         while let Some( mut current_tree ) = self.next() {
@@ -140,16 +217,20 @@ impl SteinerTreeProblem {
             }
 
             // extend the tree by one edge and add it to the queue
-            for src_type in current_nodes.iter() {
+            for src_type in current_nodes {
                 for (dst_halo,dst_ty) in morphisms.enum_morphisms_with_subtyping( &src_type ) {
                     let dst_type = TypeTerm::Ladder(vec![
                         dst_halo, dst_ty
                     ]).normalize();
 
-                    if !current_nodes.contains( &dst_type ) {
+                    if current_tree.contains( &dst_type ).is_none() {
                         let mut new_tree = current_tree.clone();
-                        let src_type = src_type.clone();
-                        new_tree.add_edge(MorphismType { src_type, dst_type }.normalize());
+                        {
+                            let src_type = src_type.clone();
+                            let dst_type = dst_type.clone();
+                            new_tree.add_edge(MorphismType { src_type, dst_type }.normalize());
+                        }
+
                         self.queue.push( new_tree );
                     }
                 }

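The solver above approximates the Steiner tree by running `find_morphism_path` once per leaf and then skipping every edge whose destination is already produced by an earlier path, so shared prefixes are not duplicated. A rough usage sketch, assuming a `dict` and a `MorphismBase` populated as in `src/test/morphism.rs` (the concrete morphism type only has to implement `Morphism + Clone`):

    // hedged sketch, not a test from this series
    let solver = PathApproxSteinerTreeSolver::new(
        // root representation
        dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
        // leaves that should become reachable from the root
        vec![
            dict.parse("ℕ ~ <PosInt 2 BigEndian> ~ <Seq <Digit 2> ~ Char>").unwrap(),
            dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
        ]
    );

    if let Some(tree) = solver.solve(&base) {
        for edge in tree.into_edges() {
            eprintln!(" :: {}\n--> {}", dict.unparse(&edge.src_type), dict.unparse(&edge.dst_type));
        }
    }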
From b0b14fa04c8defb9dbbd9cb51917ecb41c55440d Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Thu, 3 Oct 2024 23:40:04 +0200
Subject: [PATCH 12/20] make TypeDict a trait & BimapTypeDict an impl

---
 src/dict.rs     | 76 ++++++++++++++++++++++++++++++++++++-------------
 src/parser.rs   | 17 +++++++++--
 src/sugar.rs    |  7 +++--
 src/unparser.rs |  8 ++++--
 4 files changed, 82 insertions(+), 26 deletions(-)

diff --git a/src/dict.rs b/src/dict.rs
index 419d599..4d5b35b 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -8,9 +8,27 @@ pub enum TypeID {
     Var(u64)
 }
 
+pub trait TypeDict {
+    fn insert(&mut self, name: String, id: TypeID);
+    fn add_varname(&mut self, vn: String) -> TypeID;
+    fn add_typename(&mut self, tn: String) -> TypeID;
+    fn get_typeid(&self, tn: &String) -> Option<TypeID>;
+    fn get_typename(&self, tid: &TypeID) -> Option<String>;
+
+    fn get_varname(&self, var_id: u64) -> Option<String> {
+        self.get_typename(&TypeID::Var(var_id))
+    }
+
+    fn add_synonym(&mut self, new: String, old: String) {
+        if let Some(tyid) = self.get_typeid(&old) {
+            self.insert(new, tyid);
+        }
+    }
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-pub struct TypeDict {
+pub struct BimapTypeDict {
     typenames: Bimap<String, TypeID>,
     type_lit_counter: u64,
     type_var_counter: u64,
@@ -18,46 +36,66 @@ pub struct TypeDict {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
+impl BimapTypeDict {
     pub fn new() -> Self {
-        TypeDict {
+        BimapTypeDict {
             typenames: Bimap::new(),
             type_lit_counter: 0,
             type_var_counter: 0,
         }
     }
+}
 
-    pub fn add_varname(&mut self, tn: String) -> TypeID {
+impl TypeDict for BimapTypeDict {
+    fn insert(&mut self, name: String, id: TypeID) {
+        self.typenames.insert(name, id);
+    }
+
+    fn add_varname(&mut self, tn: String) -> TypeID {
         let tyid = TypeID::Var(self.type_var_counter);
         self.type_var_counter += 1;
-        self.typenames.insert(tn, tyid.clone());
+        self.insert(tn, tyid.clone());
         tyid
     }
 
-    pub fn add_typename(&mut self, tn: String) -> TypeID {
+    fn add_typename(&mut self, tn: String) -> TypeID {
         let tyid = TypeID::Fun(self.type_lit_counter);
         self.type_lit_counter += 1;
-        self.typenames.insert(tn, tyid.clone());
+        self.insert(tn, tyid.clone());
         tyid
     }
 
-    pub fn add_synonym(&mut self, new: String, old: String) {
-        if let Some(tyid) = self.get_typeid(&old) {
-            self.typenames.insert(new, tyid);
-        }
-    }
-
-    pub fn get_typename(&self, tid: &TypeID) -> Option<String> {
+    fn get_typename(&self, tid: &TypeID) -> Option<String> {
         self.typenames.my.get(tid).cloned()
     }
 
-    pub fn get_typeid(&self, tn: &String) -> Option<TypeID> {
+    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
         self.typenames.mλ.get(tn).cloned()
     }
-
-    pub fn get_varname(&self, var_id: u64) -> Option<String> {
-        self.typenames.my.get(&TypeID::Var(var_id)).cloned()
-    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
+
+use std::sync::Arc;
+use std::ops::{Deref, DerefMut};
+use std::sync::RwLock;
+
+impl<T: TypeDict> TypeDict for Arc<RwLock<T>> {
+    fn insert(&mut self, name: String, id: TypeID) {
+        self.write().unwrap().insert(name, id);
+    }
+    fn add_varname(&mut self, vn: String) -> TypeID {
+        self.write().unwrap().add_varname(vn)
+    }
+    fn add_typename(&mut self, tn: String) -> TypeID {
+        self.write().unwrap().add_typename(tn)
+    }
+    fn get_typename(&self, tid: &TypeID)-> Option<String> {
+        self.read().unwrap().get_typename(tid)
+    }
+    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
+        self.read().unwrap().get_typeid(tn)
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
diff --git a/src/parser.rs b/src/parser.rs
index 85ff9b4..6160ca6 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -18,10 +18,23 @@ pub enum ParseError {
     UnexpectedToken
 }
 
+pub trait ParseLadderType {
+    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError>;
+    
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
-    pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
+impl<T: TypeDict> ParseLadderType for T {
+    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
         let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
 
         match self.parse_ladder(&mut tokens) {
diff --git a/src/sugar.rs b/src/sugar.rs
index 4d13f78..dea73ba 100644
--- a/src/sugar.rs
+++ b/src/sugar.rs
@@ -1,7 +1,8 @@
 use {
-    crate::{TypeTerm, TypeID}
+    crate::{TypeTerm, TypeID, parser::ParseLadderType}
 };
 
+#[derive(Clone)]
 pub enum SugaredTypeTerm {
     TypeID(TypeID),
     Num(i64),
@@ -17,7 +18,7 @@ pub enum SugaredTypeTerm {
 }
 
 impl TypeTerm {
-    pub fn sugar(self: TypeTerm, dict: &mut crate::TypeDict) -> SugaredTypeTerm {
+    pub fn sugar(self: TypeTerm, dict: &mut impl crate::TypeDict) -> SugaredTypeTerm {
         match self {
             TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
             TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
@@ -61,7 +62,7 @@ impl TypeTerm {
 }
 
 impl SugaredTypeTerm {
-    pub fn desugar(self, dict: &mut crate::TypeDict) -> TypeTerm {
+    pub fn desugar(self, dict: &mut impl crate::TypeDict) -> TypeTerm {
         match self {
             SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
             SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
diff --git a/src/unparser.rs b/src/unparser.rs
index ccf754d..b78494e 100644
--- a/src/unparser.rs
+++ b/src/unparser.rs
@@ -2,8 +2,12 @@ use crate::{dict::*, term::*};
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
-    pub fn unparse(&self, t: &TypeTerm) -> String {
+pub trait UnparseLadderType {
+    fn unparse(&self, t: &TypeTerm) -> String;
+}
+
+impl<T: TypeDict> UnparseLadderType for T {
+    fn unparse(&self, t: &TypeTerm) -> String {
         match t {
             TypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
             TypeTerm::Num(n) => format!("{}", n),

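Since `parse` and `unparse` are now blanket implementations over every `TypeDict`, the same calls work on a plain `BimapTypeDict` and on a shared `Arc<RwLock<...>>` handle. A minimal sketch in the style of the tests, assuming it runs inside the crate (the function name is only illustrative):

    use crate::{dict::*, parser::*, unparser::*};
    use std::sync::{Arc, RwLock};

    fn shared_dict_example() {
        // plain dictionary
        let mut dict = BimapTypeDict::new();
        let t = dict.parse("<Seq Char>").expect("parse error");
        eprintln!("{}", dict.unparse(&t));

        // shared dictionary: the blanket `impl<T: TypeDict> TypeDict for Arc<RwLock<T>>`
        // makes ParseLadderType / UnparseLadderType available on the handle as well
        let mut shared = Arc::new(RwLock::new(BimapTypeDict::new()));
        let u = shared.parse("<Seq Char>").expect("parse error");
        eprintln!("{}", shared.unparse(&u));
    }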
From 575caa7c44c664fc74ace7ae419b3975048203b7 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 6 Oct 2024 14:38:41 +0200
Subject: [PATCH 13/20] add Debug for Bimap & BimapTypeDict

---
 src/bimap.rs | 1 +
 src/dict.rs  | 1 +
 2 files changed, 2 insertions(+)

diff --git a/src/bimap.rs b/src/bimap.rs
index 9ea311a..9d0a96c 100644
--- a/src/bimap.rs
+++ b/src/bimap.rs
@@ -2,6 +2,7 @@ use std::{collections::HashMap, hash::Hash};
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Debug)]
 pub struct Bimap<V: Eq + Hash, Λ: Eq + Hash> {
     pub mλ: HashMap<V, Λ>,
     pub my: HashMap<Λ, V>,
diff --git a/src/dict.rs b/src/dict.rs
index 4d5b35b..67e22b3 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -28,6 +28,7 @@ pub trait TypeDict {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Debug)]
 pub struct BimapTypeDict {
     typenames: Bimap<String, TypeID>,
     type_lit_counter: u64,

From 9408703cd100300c0def3e7974789f5a6abe0442 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Sun, 6 Oct 2024 14:39:05 +0200
Subject: [PATCH 14/20] check if term is empty

---
 src/sugar.rs | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/src/sugar.rs b/src/sugar.rs
index dea73ba..4734b6f 100644
--- a/src/sugar.rs
+++ b/src/sugar.rs
@@ -2,7 +2,7 @@ use {
     crate::{TypeTerm, TypeID, parser::ParseLadderType}
 };
 
-#[derive(Clone)]
+#[derive(Clone, PartialEq)]
 pub enum SugaredTypeTerm {
     TypeID(TypeID),
     Num(i64),
@@ -92,5 +92,23 @@ impl SugaredTypeTerm {
                 ).collect()),
         }
     }
+
+    pub fn is_empty(&self) -> bool {
+        match self {
+            SugaredTypeTerm::TypeID(_) => false,
+            SugaredTypeTerm::Num(_) => false,
+            SugaredTypeTerm::Char(_) => false,
+            SugaredTypeTerm::Univ(t) => t.is_empty(),
+            SugaredTypeTerm::Spec(ts) |
+            SugaredTypeTerm::Ladder(ts) |
+            SugaredTypeTerm::Func(ts) |
+            SugaredTypeTerm::Morph(ts) |
+            SugaredTypeTerm::Struct(ts) |
+            SugaredTypeTerm::Enum(ts) |
+            SugaredTypeTerm::Seq(ts) => {
+                ts.iter().fold(true, |s,t|s&&t.is_empty())
+            }
+        }
+    }
 }
 

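Because the fold over the parameter list starts at `true`, a composite term with no parameters counts as empty, while every atomic term is non-empty. A small hedged illustration, assuming `SugaredTypeTerm` is in scope:

    assert!(  SugaredTypeTerm::Ladder(vec![]).is_empty() );
    assert!(  SugaredTypeTerm::Struct(vec![]).is_empty() );
    assert!( !SugaredTypeTerm::Num(0).is_empty() );
    assert!( !SugaredTypeTerm::Char('x').is_empty() );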
From f7b8a20299284408e7843c8da298845ed23299d7 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 28 Oct 2024 19:58:59 +0100
Subject: [PATCH 15/20] fix find_morphism_path

* also apply the substitution obtained from the src-type match
* obtain this substitution as an additional result from `enum_morphisms_with_subtyping`
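In other words, when matching the current node against a morphism's source type produces a substitution, that substitution is now applied to every node already on the path, not only to the freshly pushed destination type. A hedged sketch of the mechanism on a single node, analogous to `src/test/unification.rs` (assuming it runs inside the crate):

    use crate::{dict::*, parser::*};

    let mut dict = BimapTypeDict::new();
    dict.add_varname("T".into());

    let σ = crate::unify(
        &dict.parse("<Seq T>").unwrap(),
        &dict.parse("<Seq Char>").unwrap()
    ).expect("types should unify");

    // the substitution is applied in place to a node that is already on the path
    let mut node = dict.parse("<Seq T>").unwrap();
    node.apply_substitution(&|x| σ.get(x).cloned());
    assert_eq!( node, dict.parse("<Seq Char>").unwrap() );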
---
 src/morphism.rs     | 61 +++++++++++++++++++++------------------------
 src/steiner_tree.rs |  6 ++---
 2 files changed, 31 insertions(+), 36 deletions(-)

diff --git a/src/morphism.rs b/src/morphism.rs
index 9160610..9ba1ecb 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -106,29 +106,27 @@ impl<M: Morphism + Clone> MorphismBase<M> {
         dst_types
     }
 
-    pub fn enum_morphisms_with_subtyping(&self, src_type: &TypeTerm)
-    -> Vec< (TypeTerm, TypeTerm) >
-    {
+    pub fn enum_morphisms_with_subtyping(
+        &self,
+        src_type: &TypeTerm,
+    ) -> Vec<(TypeTerm, TypeTerm, HashMap<TypeID, TypeTerm>)> {
         let mut src_lnf = src_type.clone().get_lnf_vec();
         let mut halo_lnf = vec![];
         let mut dst_types = Vec::new();
 
         while src_lnf.len() > 0 {
-            let src_type = TypeTerm::Ladder( src_lnf.clone() );
-            let halo_type = TypeTerm::Ladder( halo_lnf.clone() );
+            let src_type = TypeTerm::Ladder(src_lnf.clone());
+            let halo_type = TypeTerm::Ladder(halo_lnf.clone());
 
-            for (σ, t) in self.enum_morphisms( &src_type ) {
-                dst_types.push(
-                    (halo_type.clone()
-                        .apply_substitution(
-                            &|x| σ.get(x).cloned()
-                        ).clone(),
-                    t.clone()
-                        .apply_substitution(
-                            &|x| σ.get(x).cloned()
-                        ).clone()
-                    )
-                );
+            for (σ, t) in self.enum_morphisms(&src_type) {
+                dst_types.push((
+                    halo_type
+                        .clone()
+                        .apply_substitution(&|x| σ.get(x).cloned())
+                        .clone(),
+                    t.clone().apply_substitution(&|x| σ.get(x).cloned()).clone(),
+                    σ,
+                ));
             }
 
             // continue with next supertype
@@ -154,31 +152,30 @@ impl<M: Morphism + Clone> MorphismBase<M> {
 
             if let Some((current_weight, current_path)) = queue.pop() {
                 let current_type = current_path.last().unwrap();
+                for (h, t, σp) in self.enum_morphisms_with_subtyping(&current_type) {
+                    let tt = TypeTerm::Ladder(vec![h, t]).normalize();
 
-                for (h, t) in self.enum_morphisms_with_subtyping(&current_type) {
-                    let tt = TypeTerm::Ladder( vec![ h, t ] ).normalize();
-
-                    if ! current_path.contains( &tt ) {
+                    if !current_path.contains(&tt) {
                         let unification_result = crate::unification::unify(&tt, &ty.dst_type);
-
                         let morphism_weight = 1;
-                            /*
-                            {
-                                self.find_morphism( &tt ).unwrap().0.get_weight()
-                            };
-                            */
+                        /*self.find_morphism( &tt ).unwrap().0.get_weight()*/
 
                         let new_weight = current_weight + morphism_weight;
                         let mut new_path = current_path.clone();
-                        new_path.push( tt );
+
+                        new_path.push(tt);
+
+                        for n in new_path.iter_mut() {
+                            n.apply_substitution(&|x| σp.get(x).cloned());
+                        }
 
                         if let Ok(σ) = unification_result {
-                            new_path = new_path.into_iter().map(
-                                |mut t: TypeTerm| t.apply_substitution(&|x| σ.get(x).cloned()).clone()
-                            ).collect::<Vec<TypeTerm>>();
+                            for n in new_path.iter_mut() {
+                                n.apply_substitution(&|x| σ.get(x).cloned());
+                            }
                             return Some(new_path);
                         } else {
-                            queue.push( (new_weight, new_path) );
+                            queue.push((new_weight, new_path));
                         }
                     }
                 }
diff --git a/src/steiner_tree.rs b/src/steiner_tree.rs
index f854dd9..d168812 100644
--- a/src/steiner_tree.rs
+++ b/src/steiner_tree.rs
@@ -218,10 +218,8 @@ impl SteinerTreeProblem {
 
             // extend the tree by one edge and add it to the queue
             for src_type in current_nodes {
-                for (dst_halo,dst_ty) in morphisms.enum_morphisms_with_subtyping( &src_type ) {
-                    let dst_type = TypeTerm::Ladder(vec![
-                        dst_halo, dst_ty
-                    ]).normalize();
+                for (dst_halo, dst_ty, σ) in morphisms.enum_morphisms_with_subtyping(&src_type) {
+                    let dst_type = TypeTerm::Ladder(vec![dst_halo, dst_ty]).normalize();
 
                     if current_tree.contains( &dst_type ).is_none() {
                         let mut new_tree = current_tree.clone();

From 8fb20e4e187e2cdec52de467a8fbdc8cf37a2384 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Mon, 28 Oct 2024 20:00:11 +0100
Subject: [PATCH 16/20] add Send+Sync trait bound to TypeDict

---
 src/dict.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/dict.rs b/src/dict.rs
index 67e22b3..333f8dd 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -8,7 +8,7 @@ pub enum TypeID {
     Var(u64)
 }
 
-pub trait TypeDict {
+pub trait TypeDict : Send + Sync {
     fn insert(&mut self, name: String, id: TypeID);
     fn add_varname(&mut self, vn: String) -> TypeID;
     fn add_typename(&mut self, tn: String) -> TypeID;

From f33ad0a7e2faf23ee5db6cb6bc4466696ef7ab88 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 24 Dec 2024 12:54:43 +0100
Subject: [PATCH 17/20] steiner tree: eliminate identity loops

---
 src/morphism.rs     |  4 ++--
 src/steiner_tree.rs | 12 ++++++++----
 src/term.rs         |  5 +----
 3 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/src/morphism.rs b/src/morphism.rs
index 9ba1ecb..a433bdc 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -65,7 +65,7 @@ impl<M: Morphism + Clone> MorphismBase<M> {
                 &m.get_type().src_type,
                 &src_type.clone().normalize()
             ) {
-                let dst_type = 
+                let dst_type =
                     m.get_type().dst_type.clone()
                     .apply_substitution( &|x| σ.get(x).cloned() )
                     .clone();
@@ -185,6 +185,7 @@ impl<M: Morphism + Clone> MorphismBase<M> {
         None
     }
 
+    /// find a morphism that matches the given morphism type
     pub fn find_morphism(&self, ty: &MorphismType)
     -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
 
@@ -255,4 +256,3 @@ impl<M: Morphism + Clone> MorphismBase<M> {
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
-
diff --git a/src/steiner_tree.rs b/src/steiner_tree.rs
index d168812..c8984dd 100644
--- a/src/steiner_tree.rs
+++ b/src/steiner_tree.rs
@@ -34,7 +34,7 @@ impl SteinerTree {
         let mut new_goals = Vec::new();
         let mut added = false;
 
-        for g in self.goals.clone() { 
+        for g in self.goals.clone() {
             if let Ok(σ) = crate::unify(&ty.dst_type, &g) {
                 if !added {
                     self.edges.push(ty.clone());
@@ -107,8 +107,13 @@ impl PathApproxSteinerTreeSolver {
                 let mut new_path_iter = new_path.into_iter().peekable();
 
                 // check all existing nodes..
+
+                if new_path_iter.peek().unwrap() == &src_type {
+                    new_path_iter.next();
+                }
+
                 for mt in tree.iter() {
-//                    assert!( mt.src_type == &src_type );
+                    //assert!( mt.src_type == &src_type );
                     if let Some(t) = new_path_iter.peek() {
                         if &mt.dst_type == t {
                             // eliminate this node from new path
@@ -196,7 +201,7 @@ impl SteinerTreeProblem {
     pub fn solve_approx_path<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
         if let Some(master) = self.src_types.first() {
 
-            
+
         }
     }
 */
@@ -238,4 +243,3 @@ impl SteinerTreeProblem {
         None
     }
 }
-
diff --git a/src/term.rs b/src/term.rs
index 29c7d27..2879ced 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -14,8 +14,6 @@ pub enum TypeTerm {
     Num(i64),
     Char(char),
 
-    
-
     /* Complex Terms */
 
     // Type Parameters
@@ -47,10 +45,9 @@ impl TypeTerm {
                 *self = TypeTerm::App(vec![
                     self.clone(),
                     t.into()
-                ])                
+                ])
             }
         }
-
         self
     }
 

From a534d33b7bdcda7205c7abd9dfae4727d0db5e1f Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 4 Feb 2025 14:26:37 +0100
Subject: [PATCH 18/20] pretty: output escape sequences for \0 and \n

---
 src/pretty.rs | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/pretty.rs b/src/pretty.rs
index 1a4aa60..c5edf3d 100644
--- a/src/pretty.rs
+++ b/src/pretty.rs
@@ -17,7 +17,11 @@ impl SugaredTypeTerm {
             }
 
             SugaredTypeTerm::Char(c) => {
-                format!("'{}'", c)
+                match c {
+                    '\0' => format!("'\\0'"),
+                    '\n' => format!("'\\n'"),
+                    _ => format!("'{}'", c)
+                }
             }
 
             SugaredTypeTerm::Univ(t) => {
@@ -116,7 +120,7 @@ impl SugaredTypeTerm {
                         s.push('\n');
                         for x in 0..(indent*indent_width) {
                             s.push(' ');
-                        }                
+                        }
                         s.push_str(&"-->  ".bright_yellow());
                     } else {
 //                        s.push_str("   ");
@@ -144,5 +148,3 @@ impl SugaredTypeTerm {
         }
     }
 }
-
-

From 4155310e1efe16b94505048a139ebe7fdb0fc157 Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 4 Feb 2025 14:34:55 +0100
Subject: [PATCH 19/20] fix tests

---
 src/steiner_tree.rs      |  2 +-
 src/test/curry.rs        |  8 ++++----
 src/test/lnf.rs          |  7 +++----
 src/test/morphism.rs     | 13 ++++++-------
 src/test/parser.rs       | 33 ++++++++++++++++-----------------
 src/test/pnf.rs          |  5 ++---
 src/test/substitution.rs |  5 ++---
 src/test/subtype.rs      | 21 ++++++++++-----------
 src/test/unification.rs  |  9 ++++-----
 9 files changed, 48 insertions(+), 55 deletions(-)

diff --git a/src/steiner_tree.rs b/src/steiner_tree.rs
index c8984dd..6e2443d 100644
--- a/src/steiner_tree.rs
+++ b/src/steiner_tree.rs
@@ -17,7 +17,7 @@ use {
 pub struct SteinerTree {
     weight: u64,
     goals: Vec< TypeTerm >,
-    edges: Vec< MorphismType >,
+    pub edges: Vec< MorphismType >,
 }
 
 impl SteinerTree {
diff --git a/src/test/curry.rs b/src/test/curry.rs
index c728a37..a814ab2 100644
--- a/src/test/curry.rs
+++ b/src/test/curry.rs
@@ -1,12 +1,12 @@
 use {
-    crate::{dict::*}
+    crate::{dict::*, parser::*}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 #[test]
 fn test_curry() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("<A B C>").unwrap().curry(),
@@ -33,7 +33,7 @@ fn test_curry() {
 
 #[test]
 fn test_decurry() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("<<A B> C>").unwrap().decurry(),
@@ -47,7 +47,7 @@ fn test_decurry() {
         dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
         dict.parse("<A B C D E F G H I J K>").unwrap()
     );
-    
+
     assert_eq!(
         dict.parse("<<A~X B> C>").unwrap().decurry(),
         dict.parse("<A~X B C>").unwrap()
diff --git a/src/test/lnf.rs b/src/test/lnf.rs
index 1c81a55..4b2a7c2 100644
--- a/src/test/lnf.rs
+++ b/src/test/lnf.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::{BimapTypeDict}, parser::*};
 
 #[test]
 fn test_flat() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert!( dict.parse("A").expect("parse error").is_flat() );
     assert!( dict.parse("10").expect("parse error").is_flat() );
@@ -17,7 +17,7 @@ fn test_flat() {
 
 #[test]
 fn test_normalize() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error").normalize(),
@@ -54,4 +54,3 @@ fn test_normalize() {
     );
 
 }
-
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index b908101..309d881 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -1,5 +1,5 @@
 use {
-    crate::{dict::*, morphism::*, steiner_tree::*, TypeTerm}
+    crate::{dict::*, parser::*, unparser::*, morphism::*, steiner_tree::*, TypeTerm}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -27,8 +27,8 @@ impl Morphism for DummyMorphism {
     }
 }
 
-fn morphism_test_setup() -> ( TypeDict, MorphismBase<DummyMorphism> ) {
-    let mut dict = TypeDict::new();
+fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
+    let mut dict = BimapTypeDict::new();
     let mut base = MorphismBase::<DummyMorphism>::new( dict.add_typename("Seq".into()) );
 
     dict.add_varname("Radix".into());
@@ -118,7 +118,7 @@ fn test_morphism_path() {
         Some((
                 DummyMorphism(MorphismType{
                     src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
-                    dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()                
+                    dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
                 }),
 
                 dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>>").unwrap(),
@@ -145,12 +145,12 @@ fn test_steiner_tree() {
         // destination reprs
         vec![
             dict.parse("ℕ ~ <PosInt 2 BigEndian> ~ <Seq <Digit 2> ~ Char>").unwrap(),
-            dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),            
+            dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
             dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
         ]
     );
 
-    if let Some(solution) = steiner_tree_problem.solve_bfs( &dict, &base ) {
+    if let Some(solution) = steiner_tree_problem.solve_bfs( &base ) {
         for e in solution.edges.iter() {
             eprintln!(" :: {}\n--> {}", dict.unparse(&e.src_type), dict.unparse(&e.dst_type));
         }
@@ -158,4 +158,3 @@ fn test_steiner_tree() {
         eprintln!("no solution");
     }
 }
-
diff --git a/src/test/parser.rs b/src/test/parser.rs
index 1166229..f650ae3 100644
--- a/src/test/parser.rs
+++ b/src/test/parser.rs
@@ -7,7 +7,7 @@ use {
 
 #[test]
 fn test_parser_id() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname("T".into());
 
@@ -26,7 +26,7 @@ fn test_parser_id() {
 fn test_parser_num() {
     assert_eq!(
         Ok(TypeTerm::Num(1234)),
-        TypeDict::new().parse("1234")
+        BimapTypeDict::new().parse("1234")
     );
 }
 
@@ -34,21 +34,21 @@ fn test_parser_num() {
 fn test_parser_char() {
     assert_eq!(
         Ok(TypeTerm::Char('x')),
-        TypeDict::new().parse("'x'")
+        BimapTypeDict::new().parse("'x'")
     );
 }
 
 #[test]
 fn test_parser_app() {
     assert_eq!(
-        TypeDict::new().parse("<A B>"),
+        BimapTypeDict::new().parse("<A B>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeDict::new().parse("<A B C>"),
+        BimapTypeDict::new().parse("<A B C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -60,7 +60,7 @@ fn test_parser_app() {
 #[test]
 fn test_parser_unexpected_close() {
     assert_eq!(
-        TypeDict::new().parse(">"),
+        BimapTypeDict::new().parse(">"),
         Err(ParseError::UnexpectedClose)
     );
 }
@@ -68,7 +68,7 @@ fn test_parser_unexpected_close() {
 #[test]
 fn test_parser_unexpected_token() {
     assert_eq!(
-        TypeDict::new().parse("A B"),
+        BimapTypeDict::new().parse("A B"),
         Err(ParseError::UnexpectedToken)
     );
 }
@@ -76,14 +76,14 @@ fn test_parser_unexpected_token() {
 #[test]
 fn test_parser_ladder() {
     assert_eq!(
-        TypeDict::new().parse("A~B"),
+        BimapTypeDict::new().parse("A~B"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeDict::new().parse("A~B~C"),
+        BimapTypeDict::new().parse("A~B~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -95,7 +95,7 @@ fn test_parser_ladder() {
 #[test]
 fn test_parser_ladder_outside() {
     assert_eq!(
-        TypeDict::new().parse("<A B>~C"),
+        BimapTypeDict::new().parse("<A B>~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::App(vec![
                 TypeTerm::TypeID(TypeID::Fun(0)),
@@ -103,13 +103,13 @@ fn test_parser_ladder_outside() {
             ]),
             TypeTerm::TypeID(TypeID::Fun(2)),
         ]))
-    );    
+    );
 }
 
 #[test]
 fn test_parser_ladder_inside() {
     assert_eq!(
-        TypeDict::new().parse("<A B~C>"),
+        BimapTypeDict::new().parse("<A B~C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -117,13 +117,13 @@ fn test_parser_ladder_inside() {
                 TypeTerm::TypeID(TypeID::Fun(2)),
             ])
         ]))
-    );    
+    );
 }
 
 #[test]
 fn test_parser_ladder_between() {
     assert_eq!(
-        TypeDict::new().parse("<A B~<C D>>"),
+        BimapTypeDict::new().parse("<A B~<C D>>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -134,14 +134,14 @@ fn test_parser_ladder_between() {
                 ])
             ])
         ]))
-    );    
+    );
 }
 
 
 #[test]
 fn test_parser_ladder_large() {
     assert_eq!(
-        TypeDict::new().parse(
+        BimapTypeDict::new().parse(
             "<Seq Date
                   ~<TimeSince UnixEpoch>
                   ~<Duration Seconds>
@@ -203,4 +203,3 @@ fn test_parser_ladder_large() {
         )
     );
 }
-
diff --git a/src/test/pnf.rs b/src/test/pnf.rs
index e668849..a1d5a33 100644
--- a/src/test/pnf.rs
+++ b/src/test/pnf.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::BimapTypeDict, parser::*};
 
 #[test]
 fn test_param_normalize() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error"),
@@ -56,4 +56,3 @@ fn test_param_normalize() {
                .param_normalize(),
     );
 }
-
diff --git a/src/test/substitution.rs b/src/test/substitution.rs
index 7959b08..e8906b9 100644
--- a/src/test/substitution.rs
+++ b/src/test/substitution.rs
@@ -1,6 +1,6 @@
 
 use {
-    crate::{dict::*, term::*},
+    crate::{dict::*, term::*, parser::*, unparser::*},
     std::iter::FromIterator
 };
 
@@ -8,7 +8,7 @@ use {
 
 #[test]
 fn test_subst() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     let mut σ = std::collections::HashMap::new();
 
@@ -29,4 +29,3 @@ fn test_subst() {
         dict.parse("<Seq ℕ~<Seq Char>>").unwrap()
     );
 }
-
diff --git a/src/test/subtype.rs b/src/test/subtype.rs
index 08cc5c7..c993063 100644
--- a/src/test/subtype.rs
+++ b/src/test/subtype.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::BimapTypeDict, parser::*, unparser::*};
 
 #[test]
 fn test_semantic_subtype() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
@@ -19,11 +19,11 @@ fn test_semantic_subtype() {
             ),
         Some((0, dict.parse("A~B1~C1").expect("parse errror")))
     );
-    
+
     assert_eq!(
         dict.parse("A~B~C1").expect("parse error")
             .is_semantic_subtype_of(
-                &dict.parse("B~C2").expect("parse errror")  
+                &dict.parse("B~C2").expect("parse errror")
             ),
         Some((1, dict.parse("B~C1").expect("parse errror")))
     );
@@ -31,12 +31,12 @@ fn test_semantic_subtype() {
 
 #[test]
 fn test_syntactic_subtype() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("A~B~C").expect("parse errror")  
+                &dict.parse("A~B~C").expect("parse errror")
             ),
         Ok(0)
     );
@@ -44,7 +44,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("B~C").expect("parse errror")  
+                &dict.parse("B~C").expect("parse errror")
             ),
         Ok(1)
     );
@@ -52,7 +52,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("C~D").expect("parse errror")  
+                &dict.parse("C~D").expect("parse errror")
             ),
         Ok(2)
     );
@@ -60,7 +60,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("C~G").expect("parse errror")  
+                &dict.parse("C~G").expect("parse errror")
             ),
         Err((2,3))
     );
@@ -68,7 +68,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("G~F~K").expect("parse errror")  
+                &dict.parse("G~F~K").expect("parse errror")
             ),
         Err((0,0))
     );
@@ -94,4 +94,3 @@ fn test_syntactic_subtype() {
         Ok(4)
     );
 }
-
diff --git a/src/test/unification.rs b/src/test/unification.rs
index 34d355d..e0b892b 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -1,13 +1,13 @@
 
 use {
-    crate::{dict::*, term::*, unification::*},
+    crate::{dict::*, parser::*, unparser::*, term::*, unification::*},
     std::iter::FromIterator
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
     dict.add_varname(String::from("V"));
@@ -33,7 +33,7 @@ fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
 
 #[test]
 fn test_unification_error() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
     dict.add_varname(String::from("T"));
 
     assert_eq!(
@@ -76,7 +76,7 @@ fn test_unification() {
         true
     );
 
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
@@ -115,4 +115,3 @@ fn test_unification() {
         )
     );
 }
-

From a52c01d9e96684e0635330de07ac12a950edafaa Mon Sep 17 00:00:00 2001
From: Michael Sippel <micha@fragmental.art>
Date: Tue, 4 Feb 2025 14:36:05 +0100
Subject: [PATCH 20/20] morphism base: store vec of seq-types

---
 src/morphism.rs      | 61 +++++++++++++++++++++++++-------------------
 src/test/morphism.rs |  8 +++---
 2 files changed, 39 insertions(+), 30 deletions(-)

diff --git a/src/morphism.rs b/src/morphism.rs
index a433bdc..ba7cc23 100644
--- a/src/morphism.rs
+++ b/src/morphism.rs
@@ -16,7 +16,7 @@ pub struct MorphismType {
 
 pub trait Morphism : Sized {
     fn get_type(&self) -> MorphismType;
-    fn list_map_morphism(&self, list_typeid: TypeID) -> Option< Self >;
+    fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >;
 
     fn weight(&self) -> u64 {
         1
@@ -26,7 +26,7 @@ pub trait Morphism : Sized {
 #[derive(Clone)]
 pub struct MorphismBase<M: Morphism + Clone> {
     morphisms: Vec< M >,
-    list_typeid: TypeID
+    seq_types: Vec< TypeTerm >
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@@ -43,10 +43,10 @@ impl MorphismType {
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 impl<M: Morphism + Clone> MorphismBase<M> {
-    pub fn new(list_typeid: TypeID) -> Self {
+    pub fn new(seq_types: Vec<TypeTerm>) -> Self {
         MorphismBase {
             morphisms: Vec::new(),
-            list_typeid
+            seq_types
         }
     }
 
@@ -80,9 +80,10 @@ impl<M: Morphism + Clone> MorphismBase<M> {
         // TODO: function for generating fresh variables
         let item_variable = TypeID::Var(100);
 
+        for seq_type in self.seq_types.iter() {
         if let Ok(σ) = crate::unification::unify(
             &TypeTerm::App(vec![
-                TypeTerm::TypeID(self.list_typeid),
+                seq_type.clone(),
                 TypeTerm::TypeID(item_variable)
             ]),
             &src_type.clone().param_normalize(),
@@ -92,7 +93,7 @@ impl<M: Morphism + Clone> MorphismBase<M> {
             for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
                 let dst_type =
                         TypeTerm::App(vec![
-                            TypeTerm::TypeID(self.list_typeid),
+                            seq_type.clone(),
                             dst_item_type.clone()
                                 .apply_substitution(
                                     &|x| γ.get(x).cloned()
@@ -102,6 +103,7 @@ impl<M: Morphism + Clone> MorphismBase<M> {
                 dst_types.push( (γ.clone(), dst_type) );
             }
         }
+        }
 
         dst_types
     }
@@ -189,26 +191,7 @@ impl<M: Morphism + Clone> MorphismBase<M> {
     pub fn find_morphism(&self, ty: &MorphismType)
     -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
 
-        // try list-map morphism
-        if let Ok(σ) = UnificationProblem::new(vec![
-            (ty.src_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(100)) ])),
-            (ty.dst_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(101)) ])),
-        ]).solve() {
-
-            // TODO: use real fresh variable names
-            let item_morph_type = MorphismType {
-                src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
-                dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
-            }.normalize();
-
-            if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
-                if let Some(list_morph) = m.list_map_morphism( self.list_typeid ) {
-                    return Some( (list_morph, σ) );
-                }
-            }
-        }
-
-        // otherwise
+        // try to find a primitive morphism
         for m in self.morphisms.iter() {
             let unification_problem = UnificationProblem::new(
                 vec![
@@ -223,6 +206,32 @@ impl<M: Morphism + Clone> MorphismBase<M> {
             }
         }
 
+        // try list-map morphism
+        for seq_type in self.seq_types.iter() {
+            eprintln!("try seq type {:?}", seq_type);
+
+            eprintln!("");
+
+            if let Ok(σ) = UnificationProblem::new(vec![
+                (ty.src_type.clone().param_normalize(),
+                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
+                (ty.dst_type.clone().param_normalize(),
+                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ])),
+            ]).solve() {
+                // TODO: use real fresh variable names
+                let item_morph_type = MorphismType {
+                    src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
+                    dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
+                }.normalize();
+
+                if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
+                    if let Some(list_morph) = m.map_morphism( seq_type.clone() ) {
+                        return Some( (list_morph, σ) );
+                    }
+                }
+            }
+        }
+
         None
     }
 
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
index 309d881..ae3775f 100644
--- a/src/test/morphism.rs
+++ b/src/test/morphism.rs
@@ -12,15 +12,15 @@ impl Morphism for DummyMorphism {
         self.0.clone().normalize()
     }
 
-    fn list_map_morphism(&self, list_typeid: TypeID) -> Option<DummyMorphism> {
+    fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> {
         Some(DummyMorphism(MorphismType {
             src_type: TypeTerm::App(vec![
-                TypeTerm::TypeID( list_typeid ),
+                seq_type.clone(),
                 self.0.src_type.clone()
             ]),
 
             dst_type: TypeTerm::App(vec![
-                TypeTerm::TypeID( list_typeid ),
+                seq_type.clone(),
                 self.0.dst_type.clone()
             ])
         }))
@@ -29,7 +29,7 @@ impl Morphism for DummyMorphism {
 
 fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
     let mut dict = BimapTypeDict::new();
-    let mut base = MorphismBase::<DummyMorphism>::new( dict.add_typename("Seq".into()) );
+    let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] );
 
     dict.add_varname("Radix".into());
     dict.add_varname("SrcRadix".into());