diff --git a/src/bimap.rs b/src/bimap.rs
index 9ea311a..9d0a96c 100644
--- a/src/bimap.rs
+++ b/src/bimap.rs
@@ -2,6 +2,7 @@ use std::{collections::HashMap, hash::Hash};
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
+#[derive(Debug)]
 pub struct Bimap<V: Eq + Hash, Λ: Eq + Hash> {
     pub mλ: HashMap<V, Λ>,
     pub my: HashMap<Λ, V>,
diff --git a/src/dict.rs b/src/dict.rs
index 419d599..333f8dd 100644
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -8,9 +8,28 @@ pub enum TypeID {
     Var(u64)
 }
 
+pub trait TypeDict : Send + Sync {
+    fn insert(&mut self, name: String, id: TypeID);
+    fn add_varname(&mut self, vn: String) -> TypeID;
+    fn add_typename(&mut self, tn: String) -> TypeID;
+    fn get_typeid(&self, tn: &String) -> Option<TypeID>;
+    fn get_typename(&self, tid: &TypeID) -> Option<String>;
+
+    fn get_varname(&self, var_id: u64) -> Option<String> {
+        self.get_typename(&TypeID::Var(var_id))
+    }
+
+    fn add_synonym(&mut self, new: String, old: String) {
+        if let Some(tyid) = self.get_typeid(&old) {
+            self.insert(new, tyid);
+        }
+    }
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-pub struct TypeDict {
+#[derive(Debug)]
+pub struct BimapTypeDict {
     typenames: Bimap<String, TypeID>,
     type_lit_counter: u64,
     type_var_counter: u64,
@@ -18,46 +37,66 @@ pub struct TypeDict {
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
+impl BimapTypeDict {
     pub fn new() -> Self {
-        TypeDict {
+        BimapTypeDict {
             typenames: Bimap::new(),
             type_lit_counter: 0,
             type_var_counter: 0,
         }
     }
+}
 
-    pub fn add_varname(&mut self, tn: String) -> TypeID {
+impl TypeDict for BimapTypeDict {
+    fn insert(&mut self, name: String, id: TypeID) {
+        self.typenames.insert(name, id);
+    }
+
+    fn add_varname(&mut self, tn: String) -> TypeID {
         let tyid = TypeID::Var(self.type_var_counter);
         self.type_var_counter += 1;
-        self.typenames.insert(tn, tyid.clone());
+        self.insert(tn, tyid.clone());
         tyid
     }
 
-    pub fn add_typename(&mut self, tn: String) -> TypeID {
+    fn add_typename(&mut self, tn: String) -> TypeID {
         let tyid = TypeID::Fun(self.type_lit_counter);
         self.type_lit_counter += 1;
-        self.typenames.insert(tn, tyid.clone());
+        self.insert(tn, tyid.clone());
         tyid
     }
 
-    pub fn add_synonym(&mut self, new: String, old: String) {
-        if let Some(tyid) = self.get_typeid(&old) {
-            self.typenames.insert(new, tyid);
-        }
-    }
-
-    pub fn get_typename(&self, tid: &TypeID) -> Option<String> {
+    fn get_typename(&self, tid: &TypeID) -> Option<String> {
         self.typenames.my.get(tid).cloned()
     }
 
-    pub fn get_typeid(&self, tn: &String) -> Option<TypeID> {
+    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
         self.typenames.mλ.get(tn).cloned()
     }
-
-    pub fn get_varname(&self, var_id: u64) -> Option<String> {
-        self.typenames.my.get(&TypeID::Var(var_id)).cloned()
-    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+use std::sync::Arc;
+use std::ops::{Deref, DerefMut};
+use std::sync::RwLock;
+
+impl<T: TypeDict> TypeDict for Arc<RwLock<T>> {
+    fn insert(&mut self, name: String, id: TypeID) {
+        self.write().unwrap().insert(name, id);
+    }
+    fn add_varname(&mut self, vn: String) -> TypeID {
+        self.write().unwrap().add_varname(vn)
+    }
+    fn add_typename(&mut self, tn: String) -> TypeID {
+        self.write().unwrap().add_typename(tn)
+    }
+    fn get_typename(&self, tid: &TypeID)-> Option<String> {
+        self.read().unwrap().get_typename(tid)
+    }
+    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
+        self.read().unwrap().get_typeid(tn)
+    }
 }
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
diff --git a/src/lib.rs b/src/lib.rs
index 2e9a163..11a001f 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,6 +11,8 @@ pub mod lnf;
 pub mod pnf;
 pub mod subtype;
 pub mod unification;
+pub mod morphism;
+pub mod steiner_tree;
 
 #[cfg(test)]
 mod test;
@@ -23,4 +25,5 @@ pub use {
     term::*,
     sugar::*,
     unification::*,
+    morphism::*
 };
diff --git a/src/morphism.rs b/src/morphism.rs
new file mode 100644
index 0000000..8cf6a02
--- /dev/null
+++ b/src/morphism.rs
@@ -0,0 +1,308 @@
+use {
+    crate::{
+        unification::UnificationProblem, TypeDict, TypeID, TypeTerm,
+        pretty::*,
+        sugar::SugaredTypeTerm,
+    },
+    std::collections::HashMap
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct MorphismType {
+    pub src_type: TypeTerm,
+    pub dst_type: TypeTerm,
+}
+
+impl MorphismType {
+    pub fn normalize(self) -> Self {
+        MorphismType {
+            src_type: self.src_type.normalize().param_normalize(),
+            dst_type: self.dst_type.normalize().param_normalize()
+        }
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+pub trait Morphism : Sized {
+    fn get_type(&self) -> MorphismType;
+    fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >;
+
+    fn weight(&self) -> u64 {
+        1
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct MorphismInstance<M: Morphism + Clone> {
+    pub halo: TypeTerm,
+    pub m: M,
+    pub σ: HashMap<TypeID, TypeTerm>
+}
+
+impl<M: Morphism + Clone> MorphismInstance<M> {
+    pub fn get_type(&self) -> MorphismType {
+        MorphismType {
+            src_type: TypeTerm::Ladder(vec![
+                self.halo.clone(),
+                self.m.get_type().src_type.clone()
+            ]).apply_substitution(&|k| self.σ.get(k).cloned())
+            .clone(),
+
+            dst_type: TypeTerm::Ladder(vec![
+                self.halo.clone(),
+                self.m.get_type().dst_type.clone()
+            ]).apply_substitution(&|k| self.σ.get(k).cloned())
+            .clone()
+        }.normalize()
+    }
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone)]
+pub struct MorphismPath<M: Morphism + Clone> {
+    pub weight: u64,
+    pub cur_type: TypeTerm,
+    pub morphisms: Vec< MorphismInstance<M> >
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone)]
+pub struct MorphismBase<M: Morphism + Clone> {
+    morphisms: Vec< M >,
+    seq_types: Vec< TypeTerm >
+}
+
+impl<M: Morphism + Clone> MorphismBase<M> {
+    pub fn new(seq_types: Vec<TypeTerm>) -> Self {
+        MorphismBase {
+            morphisms: Vec::new(),
+            seq_types
+        }
+    }
+
+    pub fn add_morphism(&mut self, m: M) {
+        self.morphisms.push( m );
+    }
+
+    pub fn enum_direct_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< MorphismInstance<M> >
+    {
+        let mut dst_types = Vec::new();
+        for m in self.morphisms.iter() {
+            if let Ok((halo, σ)) = crate::unification::subtype_unify(
+                &src_type.clone().param_normalize(),
+                &m.get_type().src_type.param_normalize(),
+            ) {
+                dst_types.push(MorphismInstance{ halo, m: m.clone(), σ });
+            }
+        }
+        dst_types
+    }
+
+    pub fn enum_map_morphisms(&self, src_type: &TypeTerm)
+    -> Vec< MorphismInstance<M> > {
+        let src_type = src_type.clone().param_normalize();
+        let mut dst_types = Vec::new();
+
+        // Check if we have a List type, and if so, see what the Item type is
+        // TODO: function for generating fresh variables
+        let item_variable = TypeID::Var(800);
+
+        for seq_type in self.seq_types.iter() {
+            if let Ok((halo, σ)) = crate::unification::subtype_unify(
+                &src_type,
+                &TypeTerm::App(vec![
+                    seq_type.clone(),
+                    TypeTerm::TypeID(item_variable)
+                ])
+            ) {
+                let src_item_type = σ.get(&item_variable).expect("var not in unificator").clone();
+                for item_morph_inst in self.enum_morphisms( &src_item_type ) {
+
+                    let mut dst_halo_ladder = vec![ halo.clone() ];
+                    if item_morph_inst.halo != TypeTerm::unit() {
+                        dst_halo_ladder.push(
+                            TypeTerm::App(vec![
+                                seq_type.clone().get_lnf_vec().first().unwrap().clone(),
+                                item_morph_inst.halo.clone()
+                            ]));
+                    }
+
+                    dst_types.push(
+                        MorphismInstance {
+                            halo: TypeTerm::Ladder(dst_halo_ladder).normalize(),
+                            m: item_morph_inst.m.map_morphism(seq_type.clone()).expect("couldnt get map morphism"),
+                            σ: item_morph_inst.σ
+                        }
+                    );
+                }
+            }
+        }
+
+        dst_types
+    }
+
+    pub fn enum_morphisms(&self, src_type: &TypeTerm) -> Vec< MorphismInstance<M> > {
+        let mut dst_types = Vec::new();
+        dst_types.append(&mut self.enum_direct_morphisms(src_type));
+        dst_types.append(&mut self.enum_map_morphisms(src_type));
+        dst_types
+    }
+
+    /* try to find shortest morphism-path for a given type
+     */
+    pub fn find_morphism_path(&self, ty: MorphismType
+        /*, type_dict: &mut impl TypeDict*/
+    )
+    -> Option< Vec<MorphismInstance<M>> >
+    {
+        let ty = ty.normalize();
+        let mut queue = vec![
+            MorphismPath { weight: 0, cur_type: ty.src_type.clone(), morphisms: vec![] }
+        ];
+
+        while ! queue.is_empty() {
+            queue.sort_by( |p1,p2| p2.weight.cmp(&p1.weight));
+
+            if let Some(mut cur_path) = queue.pop() {
+                if let Ok((halo, σ)) = crate::unification::subtype_unify( &cur_path.cur_type, &ty.dst_type ) {
+                    // found path
+                    for n in cur_path.morphisms.iter_mut() {
+                        let mut new_σ = HashMap::new();
+                        for (k,v) in σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&|k| σ.get(k).cloned()).clone()
+                            );
+                        }
+                        for (k,v) in n.σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&|k| σ.get(k).cloned()).clone()
+                            );
+                        }
+
+                        n.σ = new_σ;
+                    }
+
+                    return Some(cur_path.morphisms);
+                }
+
+                //eprintln!("cur path (w ={}) : @ {}", cur_path.weight, cur_path.cur_type.clone().sugar(type_dict).pretty(type_dict, 0) );
+                for next_morph_inst in self.enum_morphisms(&cur_path.cur_type) {
+                    let dst_type = next_morph_inst.get_type().dst_type;
+
+                    let mut creates_loop = false;
+
+                    let mut new_path = cur_path.clone();
+                    for n in new_path.morphisms.iter_mut() {
+                        let mut new_σ = HashMap::new();
+                        for (k,v) in n.σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&|k| next_morph_inst.σ.get(k).cloned()).clone()
+                            );
+                        }
+                        for (k,v) in next_morph_inst.σ.iter() {
+                            new_σ.insert(
+                                k.clone(),
+                                v.clone().apply_substitution(&|k| next_morph_inst.σ.get(k).cloned()).clone()
+                            );
+                        }
+                        n.σ = new_σ;
+                    }
+
+                    for m in new_path.morphisms.iter() {
+                        if m.get_type().src_type == dst_type {
+                            creates_loop = true;
+                            //eprintln!("creates loop..");
+                            break;
+                        }
+                    }
+
+                    if ! creates_loop {
+                        /*eprintln!("next morph ? \n    {}\n--> {} ",
+                            next_morph_inst.get_type().src_type.sugar(type_dict).pretty(type_dict, 0),
+                            next_morph_inst.get_type().dst_type.sugar(type_dict).pretty(type_dict, 0)
+                        );
+                        eprintln!("....take!\n  :: halo = {}\n  :: σ = {:?}", next_morph_inst.halo.clone().sugar(type_dict).pretty(type_dict, 0), next_morph_inst.σ);
+                        */
+                        new_path.weight += next_morph_inst.m.weight();
+                        new_path.cur_type = dst_type;
+                        new_path.morphisms.push(next_morph_inst);
+                        queue.push(new_path);
+                    }
+                }
+            }
+        }
+        None
+    }
+
+/*
+    pub fn find_direct_morphism(&self, ty: &MorphismType) -> Option< (M, HashMap<TypeID, TypeTerm>) > {
+        for m in self.morphisms.iter() {
+            let unification_problem = UnificationProblem::new(
+                vec![
+                    ( ty.src_type.clone(), m.get_type().src_type.clone() ),
+                    ( m.get_type().dst_type.clone(), ty.dst_type.clone() )
+                ]
+            );
+
+            let unification_result = unification_problem.solve_subtype();
+            if let Ok((halo, σ)) = unification_result {
+                return Some((m.clone(), σ));
+            }
+        }
+        None
+    }
+
+    pub fn find_map_morphism(&self, ty: &MorphismType) -> Option< (M, HashMap<TypeID, TypeTerm>) > {
+        for seq_type in self.seq_types.iter() {
+            eprintln!("try seq type {:?}", seq_type);
+
+            if let Ok((halo, σ)) = UnificationProblem::new(vec![
+                (ty.src_type.clone().param_normalize(),
+                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
+
+                (TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ]),
+                    ty.dst_type.clone().param_normalize()),
+            ]).solve_subtype() {
+                // TODO: use real fresh variable names
+                let item_morph_type = MorphismType {
+                    src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
+                    dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
+                }.normalize();
+
+                //eprintln!("Map Morph: try to find item-morph with type {:?}", item_morph_type);
+                if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
+                    if let Some(list_morph) = m.map_morphism( seq_type.clone() ) {
+                        return Some( (list_morph, σ) );
+                    }
+                }
+            }
+        }
+
+        None
+    }
+
+    pub fn find_morphism(&self, ty: &MorphismType)
+    -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
+        if let Some((m,σ)) = self.find_direct_morphism(ty) {
+            return Some((m,σ));
+        }
+        if let Some((m,σ)) = self.find_map_morphism(ty) {
+            return Some((m, σ));
+        }
+        None
+    }
+    */
+}
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
diff --git a/src/parser.rs b/src/parser.rs
index 85ff9b4..6160ca6 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -18,10 +18,23 @@ pub enum ParseError {
     UnexpectedToken
 }
 
+pub trait ParseLadderType {
+    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError>;
+
+    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+
+    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+
+    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
+    where It: Iterator<Item = char>;
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
-    pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
+impl<T: TypeDict> ParseLadderType for T {
+    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
         let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
 
         match self.parse_ladder(&mut tokens) {
diff --git a/src/pretty.rs b/src/pretty.rs
index 1a4aa60..40a7541 100644
--- a/src/pretty.rs
+++ b/src/pretty.rs
@@ -1,5 +1,5 @@
 use {
-    crate::TypeDict,
+    crate::{TypeDict, dict::TypeID},
     crate::sugar::SugaredTypeTerm,
     tiny_ansi::TinyAnsi
 };
@@ -9,15 +9,26 @@ impl SugaredTypeTerm {
         let indent_width = 4;
         match self {
             SugaredTypeTerm::TypeID(id) => {
-                format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_blue()
+                match id {
+                    TypeID::Var(_) => {
+                        format!("{}", dict.get_typename(id).unwrap_or_else(|| "??".bright_red())).bright_magenta()
+                    },
+                    TypeID::Fun(_) => {
+                        format!("{}", dict.get_typename(id).unwrap_or_else(|| "??".bright_red())).blue().bold()
+                    }
+                }
             },
 
             SugaredTypeTerm::Num(n) => {
-                format!("{}", n).bright_cyan()
+                format!("{}", n).green().bold()
             }
 
             SugaredTypeTerm::Char(c) => {
-                format!("'{}'", c)
+                match c {
+                    '\0' => format!("'\\0'"),
+                    '\n' => format!("'\\n'"),
+                    _ => format!("'{}'", c)
+                }
             }
 
             SugaredTypeTerm::Univ(t) => {
@@ -30,7 +41,7 @@ impl SugaredTypeTerm {
 
             SugaredTypeTerm::Spec(args) => {
                 let mut s = String::new();
-                s.push_str(&"<".yellow().bold());
+                s.push_str(&"<".yellow());
                 for i in 0..args.len() {
                     let arg = &args[i];
                     if i > 0 {
@@ -38,7 +49,7 @@ impl SugaredTypeTerm {
                     }
                     s.push_str( &arg.pretty(dict,indent+1) );
                 }
-                s.push_str(&">".yellow().bold());
+                s.push_str(&">".yellow());
                 s
             }
 
@@ -116,7 +127,7 @@ impl SugaredTypeTerm {
                         s.push('\n');
                         for x in 0..(indent*indent_width) {
                             s.push(' ');
-                        }                
+                        }
                         s.push_str(&"-->  ".bright_yellow());
                     } else {
 //                        s.push_str("   ");
@@ -144,5 +155,3 @@ impl SugaredTypeTerm {
         }
     }
 }
-
-
diff --git a/src/steiner_tree.rs b/src/steiner_tree.rs
new file mode 100644
index 0000000..091d764
--- /dev/null
+++ b/src/steiner_tree.rs
@@ -0,0 +1,242 @@
+use {
+    std::collections::HashMap,
+    crate::{
+        TypeID,
+        TypeTerm,
+        morphism::{
+            MorphismType,
+            Morphism,
+            MorphismBase
+        }
+    }
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone)]
+pub struct SteinerTree {
+    weight: u64,
+    goals: Vec< TypeTerm >,
+    pub edges: Vec< MorphismType >,
+}
+
+impl SteinerTree {
+    pub fn into_edges(self) -> Vec< MorphismType > {
+        self.edges
+    }
+
+    fn add_edge(&mut self, ty: MorphismType) {
+        self.weight += 1;
+
+        let ty = ty.normalize();
+
+        // check if by adding this new edge, we reach a goal
+        let mut new_goals = Vec::new();
+        let mut added = false;
+
+        for g in self.goals.clone() {
+            if let Ok(σ) = crate::unify(&ty.dst_type, &g) {
+                if !added {
+                    self.edges.push(ty.clone());
+
+                    // goal reached.
+                    for e in self.edges.iter_mut() {
+                        e.src_type = e.src_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
+                        e.dst_type = e.dst_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
+                    }
+                    added = true;
+                } else {
+                    new_goals.push(g);
+                }
+            } else {
+                new_goals.push(g);
+            }
+        }
+
+        if !added {
+            self.edges.push(ty.clone());
+        }
+
+        self.goals = new_goals;
+    }
+
+    fn is_solved(&self) -> bool {
+        self.goals.len() == 0
+    }
+
+    fn contains(&self, t: &TypeTerm) -> Option< HashMap<TypeID, TypeTerm> > {
+        for e in self.edges.iter() {
+            if let Ok(σ) = crate::unify(&e.dst_type, t) {
+                return Some(σ)
+            }
+        }
+
+        None
+    }
+}
+
+
+pub struct PathApproxSteinerTreeSolver {
+    root: TypeTerm,
+    leaves: Vec< TypeTerm >
+}
+
+impl PathApproxSteinerTreeSolver {
+    pub fn new(
+        root: TypeTerm,
+        leaves: Vec<TypeTerm>
+    ) -> Self {
+        PathApproxSteinerTreeSolver {
+            root, leaves
+        }
+    }
+
+    pub fn solve<M: Morphism + Clone>(self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
+        let mut tree = Vec::<MorphismType>::new();
+
+        for goal in self.leaves {
+            // try to find shortest path from root to current leaf
+            if let Some(new_path) = morphisms.find_morphism_path(
+                MorphismType {
+                    src_type: self.root.clone(),
+                    dst_type: goal.clone()
+                }
+            ) {
+                // reduce new path so that it does not collide with any existing path
+                let mut src_type = self.root.clone();
+                let mut new_path_iter = new_path.into_iter().peekable();
+
+                // check all existing nodes..
+
+                if new_path_iter.peek().map_or(false, |m| m.get_type().src_type == src_type) {
+                    new_path_iter.next();
+                }
+
+                for mt in tree.iter() {
+                    //assert!( mt.src_type == &src_type );
+                    if let Some(t) = new_path_iter.peek() {
+                        if &mt.dst_type == &t.get_type().src_type {
+                            // eliminate this node from new path
+                            src_type = new_path_iter.next().unwrap().get_type().src_type;
+                        }
+                    } else {
+                        break;
+                    }
+                }
+
+                for m in new_path_iter {
+                    tree.push(m.get_type());
+                }
+            } else {
+                eprintln!("could not find path\nfrom {:?}\nto {:?}", &self.root, &goal);
+                return None;
+            }
+        }
+
+        Some(SteinerTree {
+            weight: 0,
+            goals: vec![],
+            edges: tree
+        })
+    }
+}
+
+
+/* given a representation tree with the available
+ * represenatations `src_types`, try to find
+ * a sequence of morphisms that span up all
+ * representations in `dst_types`.
+ */
+pub struct SteinerTreeProblem {
+    src_types: Vec< TypeTerm >,
+    queue: Vec< SteinerTree >
+}
+
+impl SteinerTreeProblem {
+    pub fn new(
+        src_types: Vec< TypeTerm >,
+        dst_types: Vec< TypeTerm >
+    ) -> Self {
+        SteinerTreeProblem {
+            src_types: src_types.into_iter().map(|t| t.normalize()).collect(),
+            queue: vec![
+                SteinerTree {
+                    weight: 0,
+                    goals: dst_types.into_iter().map(|t| t.normalize()).collect(),
+                    edges: Vec::new()
+                }
+            ]
+        }
+    }
+
+    pub fn next(&mut self) -> Option< SteinerTree > {
+        eprintln!("queue size = {}", self.queue.len());
+
+        /* FIXME: by giving the highest priority to
+         * candidate tree with the least remaining goals,
+         * the optimality of the search algorithm
+         * is probably destroyed, but it dramatically helps
+         * to tame the combinatorical explosion in this algorithm.
+         */
+        /* NOTE(review): the previous comparator returned `Less` for both
+         * argument orders when goal-count and weight were equal, i.e. it
+         * was not a total order; `sort_by` can panic on inconsistent
+         * comparators in recent rustc versions.  Compare in reverse so
+         * that `pop()` yields the candidate with the fewest remaining
+         * goals first, ties broken by smallest weight — the same
+         * priority as before, but now a consistent total order.
+         */
+        self.queue.sort_by(|t1, t2|
+            t2.goals.len()
+                .cmp(&t1.goals.len())
+                .then_with(|| t2.weight.cmp(&t1.weight))
+        );
+        self.queue.pop()
+    }
+/*
+    pub fn solve_approx_path<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
+        if let Some(master) = self.src_types.first() {
+
+
+        }
+    }
+*/
+    pub fn solve_bfs<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
+
+        // take the currently smallest tree and extend it by one step
+        while let Some( mut current_tree ) = self.next() {
+
+            // check if current tree is a solution
+            if current_tree.goals.len() == 0 {
+                return Some(current_tree);
+            }
+
+            // get all vertices spanned by this tree
+            let mut current_nodes = self.src_types.clone();
+            for e in current_tree.edges.iter() {
+                current_nodes.push( e.dst_type.clone() );
+            }
+
+            // extend the tree by one edge and add it to the queue
+            for src_type in current_nodes {
+                for next_morph_inst in morphisms.enum_morphisms(&src_type) {
+                    //let dst_type = TypeTerm::Ladder(vec![dst_halo, dst_ty]).normalize();
+                    let dst_type = next_morph_inst.get_type().dst_type;
+
+                    if current_tree.contains( &dst_type ).is_none() {
+                        let mut new_tree = current_tree.clone();
+                        {
+                            let src_type = src_type.clone();
+                            let dst_type = dst_type.clone();
+                            new_tree.add_edge(MorphismType { src_type, dst_type }.normalize());
+                        }
+
+                        self.queue.push( new_tree );
+                    }
+                }
+            }
+        }
+
+        None
+    }
+}
diff --git a/src/sugar.rs b/src/sugar.rs
index 4d13f78..4734b6f 100644
--- a/src/sugar.rs
+++ b/src/sugar.rs
@@ -1,7 +1,8 @@
 use {
-    crate::{TypeTerm, TypeID}
+    crate::{TypeTerm, TypeID, parser::ParseLadderType}
 };
 
+#[derive(Clone, PartialEq)]
 pub enum SugaredTypeTerm {
     TypeID(TypeID),
     Num(i64),
@@ -17,7 +18,7 @@ pub enum SugaredTypeTerm {
 }
 
 impl TypeTerm {
-    pub fn sugar(self: TypeTerm, dict: &mut crate::TypeDict) -> SugaredTypeTerm {
+    pub fn sugar(self: TypeTerm, dict: &mut impl crate::TypeDict) -> SugaredTypeTerm {
         match self {
             TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
             TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
@@ -61,7 +62,7 @@ impl TypeTerm {
 }
 
 impl SugaredTypeTerm {
-    pub fn desugar(self, dict: &mut crate::TypeDict) -> TypeTerm {
+    pub fn desugar(self, dict: &mut impl crate::TypeDict) -> TypeTerm {
         match self {
             SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
             SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
@@ -91,5 +92,23 @@ impl SugaredTypeTerm {
                 ).collect()),
         }
     }
+
+    pub fn is_empty(&self) -> bool {
+        match self {
+            SugaredTypeTerm::TypeID(_) => false,
+            SugaredTypeTerm::Num(_) => false,
+            SugaredTypeTerm::Char(_) => false,
+            SugaredTypeTerm::Univ(t) => t.is_empty(),
+            SugaredTypeTerm::Spec(ts) |
+            SugaredTypeTerm::Ladder(ts) |
+            SugaredTypeTerm::Func(ts) |
+            SugaredTypeTerm::Morph(ts) |
+            SugaredTypeTerm::Struct(ts) |
+            SugaredTypeTerm::Enum(ts) |
+            SugaredTypeTerm::Seq(ts) => {
+                ts.iter().all(|t| t.is_empty())
+            }
+        }
+    }
 }
 
diff --git a/src/term.rs b/src/term.rs
index 29c7d27..c93160b 100644
--- a/src/term.rs
+++ b/src/term.rs
@@ -14,8 +14,6 @@ pub enum TypeTerm {
     Num(i64),
     Char(char),
 
-    
-
     /* Complex Terms */
 
     // Type Parameters
@@ -47,10 +45,9 @@ impl TypeTerm {
                 *self = TypeTerm::App(vec![
                     self.clone(),
                     t.into()
-                ])                
+                ])
             }
         }
-
         self
     }
 
@@ -79,6 +76,22 @@ impl TypeTerm {
         self.arg(TypeTerm::Char(c))
     }
 
+    pub fn contains_var(&self, var_id: u64) -> bool {
+        match self {
+            TypeTerm::TypeID(TypeID::Var(v)) => &var_id == v,
+            TypeTerm::App(args) |
+            TypeTerm::Ladder(args) => {
+                for a in args.iter() {
+                    if a.contains_var(var_id) {
+                        return true;
+                    }
+                }
+                false
+            }
+            _ => false
+        }
+    }
+
     /// recursively apply substitution to all subterms,
     /// which will replace all occurences of variables which map
     /// some type-term in `subst`
diff --git a/src/test/curry.rs b/src/test/curry.rs
index c728a37..a814ab2 100644
--- a/src/test/curry.rs
+++ b/src/test/curry.rs
@@ -1,12 +1,12 @@
 use {
-    crate::{dict::*}
+    crate::{dict::*, parser::*}
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 #[test]
 fn test_curry() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("<A B C>").unwrap().curry(),
@@ -33,7 +33,7 @@ fn test_curry() {
 
 #[test]
 fn test_decurry() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("<<A B> C>").unwrap().decurry(),
@@ -47,7 +47,7 @@ fn test_decurry() {
         dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
         dict.parse("<A B C D E F G H I J K>").unwrap()
     );
-    
+
     assert_eq!(
         dict.parse("<<A~X B> C>").unwrap().decurry(),
         dict.parse("<A~X B C>").unwrap()
diff --git a/src/test/lnf.rs b/src/test/lnf.rs
index 1c81a55..4b2a7c2 100644
--- a/src/test/lnf.rs
+++ b/src/test/lnf.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::{BimapTypeDict}, parser::*};
 
 #[test]
 fn test_flat() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert!( dict.parse("A").expect("parse error").is_flat() );
     assert!( dict.parse("10").expect("parse error").is_flat() );
@@ -17,7 +17,7 @@ fn test_flat() {
 
 #[test]
 fn test_normalize() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error").normalize(),
@@ -54,4 +54,3 @@ fn test_normalize() {
     );
 
 }
-
diff --git a/src/test/mod.rs b/src/test/mod.rs
index 29c14bc..41f5e71 100644
--- a/src/test/mod.rs
+++ b/src/test/mod.rs
@@ -7,4 +7,5 @@ pub mod pnf;
 pub mod subtype;
 pub mod substitution;
 pub mod unification;
+pub mod morphism;
 
diff --git a/src/test/morphism.rs b/src/test/morphism.rs
new file mode 100644
index 0000000..4d33df6
--- /dev/null
+++ b/src/test/morphism.rs
@@ -0,0 +1,247 @@
+use {
+    crate::{dict::*, parser::*, unparser::*, morphism::*, TypeTerm}
+};
+
+//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
+
+#[derive(Clone, Debug, PartialEq)]
+struct DummyMorphism(MorphismType);
+
+impl Morphism for DummyMorphism {
+    fn get_type(&self) -> MorphismType {
+        self.0.clone().normalize()
+    }
+
+    fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> {
+        Some(DummyMorphism(MorphismType {
+            src_type: TypeTerm::App(vec![
+                seq_type.clone(),
+                self.0.src_type.clone()
+            ]),
+
+            dst_type: TypeTerm::App(vec![
+                seq_type.clone(),
+                self.0.dst_type.clone()
+            ])
+        }))
+    }
+}
+
+fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
+    let mut dict = BimapTypeDict::new();
+    let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] );
+
+    dict.add_varname("Radix".into());
+    dict.add_varname("SrcRadix".into());
+    dict.add_varname("DstRadix".into());
+
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
+            dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
+            dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
+        })
+    );
+    base.add_morphism(
+        DummyMorphism(MorphismType{
+            src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
+            dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
+        })
+    );
+
+    (dict, base)
+}
+
+#[test]
+fn test_morphism_path() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+    let path = base.find_morphism_path(MorphismType {
+        src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+        dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap(),
+    });
+
+    fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
+        eprintln!("{{");
+
+        for (k,v) in m.iter() {
+            eprintln!("    {} --> {}",
+                dict.get_typename(k).unwrap(),
+                dict.unparse(v)
+            );
+        }
+
+        eprintln!("}}");
+    }
+
+    if let Some(path) = path.as_ref() {
+        for n in path.iter() {
+            eprintln!("
+ψ = {}
+morph {}
+  --> {}
+with
+            ",
+            n.halo.clone().sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().src_type.sugar(&mut dict).pretty(&mut dict, 0),
+            n.m.get_type().dst_type.sugar(&mut dict).pretty(&mut dict, 0),
+            );
+            print_subst(&n.σ, &mut dict)
+        }
+    }
+
+    assert_eq!(
+        path,
+        Some(
+            vec![
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16))
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt 10 BigEndian>").unwrap(),
+                    m: DummyMorphism(MorphismType {
+                        src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16))
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
+                        (dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
+                        (dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16))
+                    ].into_iter().collect(),
+                    halo: TypeTerm::unit(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                    }),
+                },
+                MorphismInstance {
+                    σ: vec![
+                        (dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16))
+                    ].into_iter().collect(),
+                    halo: dict.parse("ℕ ~ <PosInt Radix BigEndian>").unwrap(),
+                    m: DummyMorphism(MorphismType{
+                        src_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
+                        dst_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap()
+                    })
+                }
+            ]
+        )
+    );
+/*
+    assert_eq!(
+        base.find_morphism_path(MorphismType {
+            src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+            dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+        }),
+        Some(
+            vec![
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
+                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
+            ]
+        )
+    );
+    */
+
+
+/*
+    assert_eq!(
+        base.find_morphism_with_subtyping(
+            &MorphismType {
+                src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+                dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+            }
+        ),
+
+        Some((
+                DummyMorphism(MorphismType{
+                    src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
+                    dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
+                }),
+
+                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>>").unwrap(),
+
+                vec![
+                    (dict.get_typeid(&"Radix".into()).unwrap(),
+                    dict.parse("10").unwrap())
+                ].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
+        ))
+    );
+    */
+}
+/*
+#[test]
+fn test_steiner_tree() {
+    let (mut dict, mut base) = morphism_test_setup();
+
+
+    let mut steiner_tree_problem = SteinerTreeProblem::new(
+        // source reprs
+        vec![
+            dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+        ],
+
+        // destination reprs
+        vec![
+            dict.parse("ℕ ~ <PosInt 2 BigEndian> ~ <Seq <Digit 2> ~ Char>").unwrap(),
+            dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
+            dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
+        ]
+    );
+
+    if let Some(solution) = steiner_tree_problem.solve_bfs( &base ) {
+        for e in solution.edges.iter() {
+            eprintln!(" :: {}\n--> {}", dict.unparse(&e.src_type), dict.unparse(&e.dst_type));
+        }
+    } else {
+        eprintln!("no solution");
+    }
+}
+*/
diff --git a/src/test/parser.rs b/src/test/parser.rs
index 1166229..f650ae3 100644
--- a/src/test/parser.rs
+++ b/src/test/parser.rs
@@ -7,7 +7,7 @@ use {
 
 #[test]
 fn test_parser_id() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname("T".into());
 
@@ -26,7 +26,7 @@ fn test_parser_id() {
 fn test_parser_num() {
     assert_eq!(
         Ok(TypeTerm::Num(1234)),
-        TypeDict::new().parse("1234")
+        BimapTypeDict::new().parse("1234")
     );
 }
 
@@ -34,21 +34,21 @@ fn test_parser_num() {
 fn test_parser_char() {
     assert_eq!(
         Ok(TypeTerm::Char('x')),
-        TypeDict::new().parse("'x'")
+        BimapTypeDict::new().parse("'x'")
     );
 }
 
 #[test]
 fn test_parser_app() {
     assert_eq!(
-        TypeDict::new().parse("<A B>"),
+        BimapTypeDict::new().parse("<A B>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeDict::new().parse("<A B C>"),
+        BimapTypeDict::new().parse("<A B C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -60,7 +60,7 @@ fn test_parser_app() {
 #[test]
 fn test_parser_unexpected_close() {
     assert_eq!(
-        TypeDict::new().parse(">"),
+        BimapTypeDict::new().parse(">"),
         Err(ParseError::UnexpectedClose)
     );
 }
@@ -68,7 +68,7 @@ fn test_parser_unexpected_close() {
 #[test]
 fn test_parser_unexpected_token() {
     assert_eq!(
-        TypeDict::new().parse("A B"),
+        BimapTypeDict::new().parse("A B"),
         Err(ParseError::UnexpectedToken)
     );
 }
@@ -76,14 +76,14 @@ fn test_parser_unexpected_token() {
 #[test]
 fn test_parser_ladder() {
     assert_eq!(
-        TypeDict::new().parse("A~B"),
+        BimapTypeDict::new().parse("A~B"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
         ]))
     );
     assert_eq!(
-        TypeDict::new().parse("A~B~C"),
+        BimapTypeDict::new().parse("A~B~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::TypeID(TypeID::Fun(1)),
@@ -95,7 +95,7 @@ fn test_parser_ladder() {
 #[test]
 fn test_parser_ladder_outside() {
     assert_eq!(
-        TypeDict::new().parse("<A B>~C"),
+        BimapTypeDict::new().parse("<A B>~C"),
         Ok(TypeTerm::Ladder(vec![
             TypeTerm::App(vec![
                 TypeTerm::TypeID(TypeID::Fun(0)),
@@ -103,13 +103,13 @@ fn test_parser_ladder_outside() {
             ]),
             TypeTerm::TypeID(TypeID::Fun(2)),
         ]))
-    );    
+    );
 }
 
 #[test]
 fn test_parser_ladder_inside() {
     assert_eq!(
-        TypeDict::new().parse("<A B~C>"),
+        BimapTypeDict::new().parse("<A B~C>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -117,13 +117,13 @@ fn test_parser_ladder_inside() {
                 TypeTerm::TypeID(TypeID::Fun(2)),
             ])
         ]))
-    );    
+    );
 }
 
 #[test]
 fn test_parser_ladder_between() {
     assert_eq!(
-        TypeDict::new().parse("<A B~<C D>>"),
+        BimapTypeDict::new().parse("<A B~<C D>>"),
         Ok(TypeTerm::App(vec![
             TypeTerm::TypeID(TypeID::Fun(0)),
             TypeTerm::Ladder(vec![
@@ -134,14 +134,14 @@ fn test_parser_ladder_between() {
                 ])
             ])
         ]))
-    );    
+    );
 }
 
 
 #[test]
 fn test_parser_ladder_large() {
     assert_eq!(
-        TypeDict::new().parse(
+        BimapTypeDict::new().parse(
             "<Seq Date
                   ~<TimeSince UnixEpoch>
                   ~<Duration Seconds>
@@ -203,4 +203,3 @@ fn test_parser_ladder_large() {
         )
     );
 }
-
diff --git a/src/test/pnf.rs b/src/test/pnf.rs
index e668849..a1d5a33 100644
--- a/src/test/pnf.rs
+++ b/src/test/pnf.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::BimapTypeDict, parser::*};
 
 #[test]
 fn test_param_normalize() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error"),
@@ -56,4 +56,3 @@ fn test_param_normalize() {
                .param_normalize(),
     );
 }
-
diff --git a/src/test/substitution.rs b/src/test/substitution.rs
index 7959b08..e8906b9 100644
--- a/src/test/substitution.rs
+++ b/src/test/substitution.rs
@@ -1,6 +1,6 @@
 
 use {
-    crate::{dict::*, term::*},
+    crate::{dict::*, term::*, parser::*, unparser::*},
     std::iter::FromIterator
 };
 
@@ -8,7 +8,7 @@ use {
 
 #[test]
 fn test_subst() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     let mut σ = std::collections::HashMap::new();
 
@@ -29,4 +29,3 @@ fn test_subst() {
         dict.parse("<Seq ℕ~<Seq Char>>").unwrap()
     );
 }
-
diff --git a/src/test/subtype.rs b/src/test/subtype.rs
index 08cc5c7..c993063 100644
--- a/src/test/subtype.rs
+++ b/src/test/subtype.rs
@@ -1,8 +1,8 @@
-use crate::dict::TypeDict;
+use crate::{dict::BimapTypeDict, parser::*, unparser::*};
 
 #[test]
 fn test_semantic_subtype() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
@@ -19,11 +19,11 @@ fn test_semantic_subtype() {
             ),
         Some((0, dict.parse("A~B1~C1").expect("parse errror")))
     );
-    
+
     assert_eq!(
         dict.parse("A~B~C1").expect("parse error")
             .is_semantic_subtype_of(
-                &dict.parse("B~C2").expect("parse errror")  
+                &dict.parse("B~C2").expect("parse errror")
             ),
         Some((1, dict.parse("B~C1").expect("parse errror")))
     );
@@ -31,12 +31,12 @@ fn test_semantic_subtype() {
 
 #[test]
 fn test_syntactic_subtype() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("A~B~C").expect("parse errror")  
+                &dict.parse("A~B~C").expect("parse errror")
             ),
         Ok(0)
     );
@@ -44,7 +44,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("B~C").expect("parse errror")  
+                &dict.parse("B~C").expect("parse errror")
             ),
         Ok(1)
     );
@@ -52,7 +52,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("C~D").expect("parse errror")  
+                &dict.parse("C~D").expect("parse errror")
             ),
         Ok(2)
     );
@@ -60,7 +60,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("C~G").expect("parse errror")  
+                &dict.parse("C~G").expect("parse errror")
             ),
         Err((2,3))
     );
@@ -68,7 +68,7 @@ fn test_syntactic_subtype() {
     assert_eq!(
         dict.parse("A~B~C~D~E").expect("parse error")
             .is_syntactic_subtype_of(
-                &dict.parse("G~F~K").expect("parse errror")  
+                &dict.parse("G~F~K").expect("parse errror")
             ),
         Err((0,0))
     );
@@ -94,4 +94,3 @@ fn test_syntactic_subtype() {
         Ok(4)
     );
 }
-
diff --git a/src/test/unification.rs b/src/test/unification.rs
index 239b384..56e88e2 100644
--- a/src/test/unification.rs
+++ b/src/test/unification.rs
@@ -1,13 +1,13 @@
 
 use {
-    crate::{dict::*, term::*, unification::*},
+    crate::{dict::*, parser::*, unparser::*, term::*, unification::*},
     std::iter::FromIterator
 };
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
 fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
     dict.add_varname(String::from("V"));
@@ -33,7 +33,7 @@ fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
 
 #[test]
 fn test_unification_error() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
     dict.add_varname(String::from("T"));
 
     assert_eq!(
@@ -61,6 +61,19 @@ fn test_unification_error() {
             t2: dict.parse("B").unwrap()
         })
     );
+
+    assert_eq!(
+        crate::unify(
+            &dict.parse("T").unwrap(),
+            &dict.parse("<Seq T>").unwrap()
+        ),
+
+        Err(UnificationError {
+            addr: vec![],
+            t1: dict.parse("T").unwrap(),
+            t2: dict.parse("<Seq T>").unwrap()
+        })
+    );
 }
 
 #[test]
@@ -76,7 +89,7 @@ fn test_unification() {
         true
     );
 
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
@@ -116,10 +129,9 @@ fn test_unification() {
     );
 }
 
-
 #[test]
 fn test_subtype_unification() {
-    let mut dict = TypeDict::new();
+    let mut dict = BimapTypeDict::new();
 
     dict.add_varname(String::from("T"));
     dict.add_varname(String::from("U"));
@@ -131,12 +143,13 @@ fn test_subtype_unification() {
             (dict.parse("<Seq~T <Digit 10> ~ Char>").unwrap(),
                 dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap()),
         ]).solve_subtype(),
-        Ok(
+        Ok((
+            dict.parse("<Seq <Digit 10>>").unwrap(),
             vec![
                 // T
                 (TypeID::Var(0), dict.parse("<LengthPrefix x86.UInt64>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 
     assert_eq!(
@@ -144,7 +157,8 @@ fn test_subtype_unification() {
             (dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
             (dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
         ]).solve_subtype(),
-        Ok(
+        Ok((
+            TypeTerm::unit(),
             vec![
                 // T
                 (TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),
@@ -152,7 +166,7 @@ fn test_subtype_unification() {
                 // U
                 (TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 
     assert_eq!(
@@ -162,7 +176,10 @@ fn test_subtype_unification() {
             (dict.parse("<Seq ℕ~<PosInt 10 BigEndian>>").unwrap(),
                 dict.parse("<Seq~<LengthPrefix x86.UInt64> W>").unwrap()),
         ]).solve_subtype(),
-        Ok(
+        Ok((
+            dict.parse("
+                <Seq~<LengthPrefix x86.UInt64> ℕ~<PosInt 10 BigEndian>>
+            ").unwrap(),
             vec![
                 // W
                 (TypeID::Var(3), dict.parse("ℕ~<PosInt 10 BigEndian>").unwrap()),
@@ -170,6 +187,6 @@ fn test_subtype_unification() {
                 // T
                 (TypeID::Var(0), dict.parse("ℕ~<PosInt 10 BigEndian>~<Seq Char>").unwrap())
             ].into_iter().collect()
-        )
+        ))
     );
 }
diff --git a/src/unification.rs b/src/unification.rs
index 443a9a2..e605af4 100644
--- a/src/unification.rs
+++ b/src/unification.rs
@@ -25,64 +25,85 @@ impl UnificationProblem {
         }
     }
 
-    pub fn eval_subtype(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<(), UnificationError> {
+    pub fn reapply_subst(&mut self) {
+        // update all values in substitution
+        let mut new_σ = HashMap::new();
+        for (v, tt) in self.σ.iter() {
+            let mut tt = tt.clone().normalize();
+            tt.apply_substitution(&|v| self.σ.get(v).cloned());
+            tt = tt.normalize();
+            //eprintln!("update σ : {:?} --> {:?}", v, tt);
+            new_σ.insert(v.clone(), tt);
+        }
+        self.σ = new_σ;
+    }
+
+    pub fn eval_subtype(&mut self, lhs: TypeTerm, rhs: TypeTerm, addr: Vec<usize>) -> Result<Vec<TypeTerm>, UnificationError> {
         match (lhs.clone(), rhs.clone()) {
             (TypeTerm::TypeID(TypeID::Var(varid)), t) |
             (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-                self.σ.insert(TypeID::Var(varid), t.clone());
-
-                // update all values in substitution
-                let mut new_σ = HashMap::new();
-                for (v, tt) in self.σ.iter() {
-                    let mut tt = tt.clone().normalize();
-                    tt.apply_substitution(&|v| self.σ.get(v).cloned());
-                    eprintln!("update σ : {:?} --> {:?}", v, tt);
-                    new_σ.insert(v.clone(), tt);
+                if ! t.contains_var( varid ) {
+                    self.σ.insert(TypeID::Var(varid), t.clone());
+                    self.reapply_subst();
+                    Ok(vec![])
+                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
+                    Ok(vec![])
+                } else {
+                    Err(UnificationError{ addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
                 }
-                self.σ = new_σ;
-
-                Ok(())
             }
 
             (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
-                if a1 == a2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if a1 == a2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
             }
             (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
-                if n1 == n2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if n1 == n2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
             }
             (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
-                if c1 == c2 { Ok(()) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
+                if c1 == c2 { Ok(vec![]) } else { Err(UnificationError{ addr, t1: lhs, t2: rhs}) }
             }
 
             (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
-                eprintln!("unification: check two ladders");
+                let mut halo = Vec::new();
                 for i in 0..a1.len() {
-                    if let Some((_, _)) = a1[i].is_semantic_subtype_of( &a2[0] ) {
+                    if let Ok((r_halo, σ)) = subtype_unify( &a1[i], &a2[0] ) {
+                        //eprintln!("unified ladders at {}, r_halo = {:?}", i, r_halo);
+                        for (k,v) in σ.iter() {
+                            self.σ.insert(k.clone(),v.clone());
+                        }
+
                         for j in 0..a2.len() {
                             if i+j < a1.len() {
                                 let mut new_addr = addr.clone();
                                 new_addr.push(i+j);
-                                self.eqs.push((a1[i+j].clone(), a2[j].clone(), new_addr))
+                                self.eqs.push((a1[i+j].clone().apply_substitution(&|k| σ.get(k).cloned()).clone(),
+                                    a2[j].clone().apply_substitution(&|k| σ.get(k).cloned()).clone(),
+                                    new_addr));
                             }
                         }
-                        return Ok(())
+                        return Ok(halo)
+                    } else {
+                        halo.push(a1[i].clone());
+                        //eprintln!("could not unify ladders");
                     }
                 }
 
                 Err(UnificationError{ addr, t1: lhs, t2: rhs })
             },
 
-            (t, TypeTerm::Ladder(a1)) => {
-                if let Some((idx, τ)) = TypeTerm::Ladder(a1.clone()).is_semantic_subtype_of(&t) {
-                    Ok(())
+            (t, TypeTerm::Ladder(mut a1)) => {
+                if let Ok(mut halo) = self.eval_subtype( t.clone(), a1.first().unwrap().clone(), addr.clone() ) {
+                    a1.append(&mut halo);
+                    Ok(a1)
                 } else {
-                    Err(UnificationError{ addr, t1: TypeTerm::Ladder(a1), t2: t })
+                    Err(UnificationError{ addr, t1: t, t2: TypeTerm::Ladder(a1) })
                 }
             }
 
-            (TypeTerm::Ladder(a1), t) => {
-                if let Some((idx, τ)) = TypeTerm::Ladder(a1.clone()).is_semantic_subtype_of(&t) {
-                    Ok(())
+            (TypeTerm::Ladder(mut a1), t) => {
+                if let Ok(mut halo) = self.eval_subtype( a1.pop().unwrap(), t.clone(), addr.clone() ) {
+                    a1.append(&mut halo);
+                    Ok(a1)
                 } else {
                     Err(UnificationError{ addr, t1: TypeTerm::Ladder(a1), t2: t })
                 }
@@ -90,12 +111,31 @@ impl UnificationProblem {
 
             (TypeTerm::App(a1), TypeTerm::App(a2)) => {
                 if a1.len() == a2.len() {
+                    let mut halo_args = Vec::new();
+                    let mut halo_required = false;
+
                     for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
                         let mut new_addr = addr.clone();
                         new_addr.push(i);
-                        self.eqs.push((x, y, new_addr));
+                        //self.eqs.push((x, y, new_addr));
+
+                        if let Ok(halo) = self.eval_subtype( x.clone(), y.clone(), new_addr ) {
+                            if halo.len() == 0 {
+                                halo_args.push(y.get_lnf_vec().first().unwrap().clone());
+                            } else {
+                                halo_args.push(TypeTerm::Ladder(halo));
+                                halo_required = true;
+                            }
+                        } else {
+                            return Err(UnificationError{ addr, t1: x, t2: y })
+                        }
+                    }
+
+                    if halo_required {
+                        Ok(vec![ TypeTerm::App(halo_args) ])
+                    } else {
+                        Ok(vec![])
                     }
-                    Ok(())
                 } else {
                     Err(UnificationError{ addr, t1: lhs, t2: rhs })
                 }
@@ -109,18 +149,15 @@ impl UnificationProblem {
         match (lhs.clone(), rhs.clone()) {
             (TypeTerm::TypeID(TypeID::Var(varid)), t) |
             (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
-                self.σ.insert(TypeID::Var(varid), t.clone());
-
-                // update all values in substitution
-                let mut new_σ = HashMap::new();
-                for (v, tt) in self.σ.iter() {
-                    let mut tt = tt.clone();
-                    tt.apply_substitution(&|v| self.σ.get(v).cloned());
-                    new_σ.insert(v.clone(), tt);
+                if ! t.contains_var( varid ) {
+                    self.σ.insert(TypeID::Var(varid), t.clone());
+                    self.reapply_subst();
+                    Ok(())
+                } else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
+                    Ok(())
+                } else {
+                    Err(UnificationError{ addr, t1: TypeTerm::TypeID(TypeID::Var(varid)), t2: t })
                 }
-                self.σ = new_σ;
-
-                Ok(())
             }
 
             (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
@@ -136,7 +173,7 @@ impl UnificationProblem {
             (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) |
             (TypeTerm::App(a1), TypeTerm::App(a2)) => {
                 if a1.len() == a2.len() {
-                    for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
+                    for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
                         let mut new_addr = addr.clone();
                         new_addr.push(i);
                         self.eqs.push((x, y, new_addr));
@@ -160,15 +197,63 @@ impl UnificationProblem {
         Ok(self.σ)
     }
 
+    pub fn solve_subtype(mut self) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
 
-    pub fn solve_subtype(mut self) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
-        while let Some( (mut lhs,mut rhs,addr) ) = self.eqs.pop() {
+        pub fn insert_halo_at(
+            t: &mut TypeTerm,
+            mut addr: Vec<usize>,
+            halo_type: TypeTerm
+        ) {
+            match t {
+                TypeTerm::Ladder(rungs) => {
+                    if let Some(idx) = addr.pop() {
+                        for i in rungs.len()..idx+1 {
+                            rungs.push(TypeTerm::unit());
+                        }
+                        insert_halo_at( &mut rungs[idx], addr, halo_type );
+                    } else {
+                        rungs.push(halo_type);
+                    }
+                },
+
+                TypeTerm::App(args) => {
+                    if let Some(idx) = addr.pop() {
+                        insert_halo_at( &mut args[idx], addr, halo_type );
+                    } else {
+                        *t = TypeTerm::Ladder(vec![
+                            halo_type,
+                            t.clone()
+                        ]);
+                    }
+                }
+
+                atomic => {
+
+                }
+            }
+        }
+
+        //let mut halo_type = TypeTerm::unit();
+        let mut halo_rungs = Vec::new();
+
+        while let Some( (mut lhs, mut rhs, mut addr) ) = self.eqs.pop() {
             lhs.apply_substitution(&|v| self.σ.get(v).cloned());
             rhs.apply_substitution(&|v| self.σ.get(v).cloned());
-            eprintln!("eval subtype LHS={:?} || RHS={:?}", lhs, rhs);
-            self.eval_subtype(lhs, rhs, addr)?;
+            //eprintln!("eval subtype\n\tLHS={:?}\n\tRHS={:?}\n", lhs, rhs);
+            let mut new_halo = self.eval_subtype(lhs, rhs, addr.clone())?;
+            if new_halo.len() > 0 {
+                //insert_halo_at( &mut halo_type, addr, TypeTerm::Ladder(new_halo) );
+                if addr.len() == 0 {
+                    halo_rungs.push(TypeTerm::Ladder(new_halo))
+                }
+            }
         }
-        Ok(self.σ)
+
+        let mut halo_type = TypeTerm::Ladder(halo_rungs);
+        halo_type = halo_type.normalize();
+        halo_type = halo_type.apply_substitution(&|k| self.σ.get(k).cloned()).clone();
+
+        Ok((halo_type.param_normalize(), self.σ))
     }
 }
 
@@ -180,4 +265,12 @@ pub fn unify(
     unification.solve()
 }
 
+pub fn subtype_unify(
+    t1: &TypeTerm,
+    t2: &TypeTerm
+) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
+    let mut unification = UnificationProblem::new(vec![ (t1.clone(), t2.clone()) ]);
+    unification.solve_subtype()
+}
+
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
diff --git a/src/unparser.rs b/src/unparser.rs
index ccf754d..b78494e 100644
--- a/src/unparser.rs
+++ b/src/unparser.rs
@@ -2,8 +2,12 @@ use crate::{dict::*, term::*};
 
 //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
 
-impl TypeDict {
-    pub fn unparse(&self, t: &TypeTerm) -> String {
+pub trait UnparseLadderType {
+    fn unparse(&self, t: &TypeTerm) -> String;
+}
+
+impl<T: TypeDict> UnparseLadderType for T {
+    fn unparse(&self, t: &TypeTerm) -> String {
         match t {
             TypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
             TypeTerm::Num(n) => format!("{}", n),