diff --git a/src/morphism.rs b/src/morphism.rs index a433bdc..ba7cc23 100644 --- a/src/morphism.rs +++ b/src/morphism.rs @@ -16,7 +16,7 @@ pub struct MorphismType { pub trait Morphism : Sized { fn get_type(&self) -> MorphismType; - fn list_map_morphism(&self, list_typeid: TypeID) -> Option< Self >; + fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >; fn weight(&self) -> u64 { 1 @@ -26,7 +26,7 @@ pub trait Morphism : Sized { #[derive(Clone)] pub struct MorphismBase<M: Morphism + Clone> { morphisms: Vec< M >, - list_typeid: TypeID + seq_types: Vec< TypeTerm > } //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\ @@ -43,10 +43,10 @@ impl MorphismType { //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\ impl<M: Morphism + Clone> MorphismBase<M> { - pub fn new(list_typeid: TypeID) -> Self { + pub fn new(seq_types: Vec<TypeTerm>) -> Self { MorphismBase { morphisms: Vec::new(), - list_typeid + seq_types } } @@ -80,9 +80,10 @@ impl<M: Morphism + Clone> MorphismBase<M> { // TODO: function for generating fresh variables let item_variable = TypeID::Var(100); + for seq_type in self.seq_types.iter() { if let Ok(σ) = crate::unification::unify( &TypeTerm::App(vec![ - TypeTerm::TypeID(self.list_typeid), + seq_type.clone(), TypeTerm::TypeID(item_variable) ]), &src_type.clone().param_normalize(), @@ -92,7 +93,7 @@ impl<M: Morphism + Clone> MorphismBase<M> { for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) { let dst_type = TypeTerm::App(vec![ - TypeTerm::TypeID(self.list_typeid), + seq_type.clone(), dst_item_type.clone() .apply_substitution( &|x| γ.get(x).cloned() @@ -102,6 +103,7 @@ impl<M: Morphism + Clone> MorphismBase<M> { dst_types.push( (γ.clone(), dst_type) ); } } + } dst_types } @@ -189,26 +191,7 @@ impl<M: Morphism + Clone> MorphismBase<M> { pub fn find_morphism(&self, ty: &MorphismType) -> Option< ( M, HashMap<TypeID, TypeTerm> ) > { - // try list-map morphism - if let Ok(σ) = UnificationProblem::new(vec![ - (ty.src_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(100)) ])), - (ty.dst_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(101)) ])), - ]).solve() { - - // TODO: use real fresh variable names - let item_morph_type = MorphismType { - src_type: σ.get(&TypeID::Var(100)).unwrap().clone(), - dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(), - }.normalize(); - - if let Some((m, σ)) = self.find_morphism( &item_morph_type ) { - if let Some(list_morph) = m.list_map_morphism( self.list_typeid ) { - return Some( (list_morph, σ) ); - } - } - } - - // otherwise + // try to find primitive morphism for m in self.morphisms.iter() { let unification_problem = UnificationProblem::new( vec![ @@ -223,6 +206,32 @@ impl<M: Morphism + Clone> MorphismBase<M> { } } + // try list-map morphism + for seq_type in self.seq_types.iter() { + eprintln!("try seq type {:?}", seq_type); + + eprintln!(""); + + if let Ok(σ) = UnificationProblem::new(vec![ + (ty.src_type.clone().param_normalize(), + TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])), + (ty.dst_type.clone().param_normalize(), + TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ])), + ]).solve() { + // TODO: use real fresh variable names + let item_morph_type = MorphismType { + src_type: σ.get(&TypeID::Var(100)).unwrap().clone(), + dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(), + }.normalize(); + + if let Some((m, σ)) = self.find_morphism( &item_morph_type 
) { + if let Some(list_morph) = m.map_morphism( seq_type.clone() ) { + return Some( (list_morph, σ) ); + } + } + } + } + None } diff --git a/src/pretty.rs b/src/pretty.rs index 1a4aa60..c5edf3d 100644 --- a/src/pretty.rs +++ b/src/pretty.rs @@ -17,7 +17,11 @@ impl SugaredTypeTerm { } SugaredTypeTerm::Char(c) => { - format!("'{}'", c) + match c { + '\0' => format!("'\\0'"), + '\n' => format!("'\\n'"), + _ => format!("'{}'", c) + } } SugaredTypeTerm::Univ(t) => { @@ -116,7 +120,7 @@ impl SugaredTypeTerm { s.push('\n'); for x in 0..(indent*indent_width) { s.push(' '); - } + } s.push_str(&"--> ".bright_yellow()); } else { // s.push_str(" "); @@ -144,5 +148,3 @@ impl SugaredTypeTerm { } } } - - diff --git a/src/steiner_tree.rs b/src/steiner_tree.rs index c8984dd..6e2443d 100644 --- a/src/steiner_tree.rs +++ b/src/steiner_tree.rs @@ -17,7 +17,7 @@ use { pub struct SteinerTree { weight: u64, goals: Vec< TypeTerm >, - edges: Vec< MorphismType >, + pub edges: Vec< MorphismType >, } impl SteinerTree { diff --git a/src/test/curry.rs b/src/test/curry.rs index c728a37..a814ab2 100644 --- a/src/test/curry.rs +++ b/src/test/curry.rs @@ -1,12 +1,12 @@ use { - crate::{dict::*} + crate::{dict::*, parser::*} }; //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\ #[test] fn test_curry() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); assert_eq!( dict.parse("<A B C>").unwrap().curry(), @@ -33,7 +33,7 @@ fn test_curry() { #[test] fn test_decurry() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); assert_eq!( dict.parse("<<A B> C>").unwrap().decurry(), @@ -47,7 +47,7 @@ fn test_decurry() { dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(), dict.parse("<A B C D E F G H I J K>").unwrap() ); - + assert_eq!( dict.parse("<<A~X B> C>").unwrap().decurry(), dict.parse("<A~X B C>").unwrap() diff --git a/src/test/lnf.rs b/src/test/lnf.rs index 1c81a55..4b2a7c2 100644 --- a/src/test/lnf.rs +++ b/src/test/lnf.rs @@ -1,8 +1,8 @@ -use crate::dict::TypeDict; +use crate::{dict::{BimapTypeDict}, parser::*}; #[test] fn test_flat() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); assert!( dict.parse("A").expect("parse error").is_flat() ); assert!( dict.parse("10").expect("parse error").is_flat() ); @@ -17,7 +17,7 @@ fn test_flat() { #[test] fn test_normalize() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); assert_eq!( dict.parse("A~B~C").expect("parse error").normalize(), @@ -54,4 +54,3 @@ fn test_normalize() { ); } - diff --git a/src/test/morphism.rs b/src/test/morphism.rs index b908101..ae3775f 100644 --- a/src/test/morphism.rs +++ b/src/test/morphism.rs @@ -1,5 +1,5 @@ use { - crate::{dict::*, morphism::*, steiner_tree::*, TypeTerm} + crate::{dict::*, parser::*, unparser::*, morphism::*, steiner_tree::*, TypeTerm} }; //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\ @@ -12,24 +12,24 @@ impl Morphism for DummyMorphism { self.0.clone().normalize() } - fn list_map_morphism(&self, list_typeid: TypeID) -> Option<DummyMorphism> { + fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> { Some(DummyMorphism(MorphismType { src_type: TypeTerm::App(vec![ - TypeTerm::TypeID( list_typeid ), + seq_type.clone(), self.0.src_type.clone() ]), dst_type: TypeTerm::App(vec![ - TypeTerm::TypeID( list_typeid ), + seq_type.clone(), self.0.dst_type.clone() ]) })) } } -fn morphism_test_setup() -> ( TypeDict, MorphismBase<DummyMorphism> ) { - let mut dict = TypeDict::new(); - let mut base = 
MorphismBase::<DummyMorphism>::new( dict.add_typename("Seq".into()) ); +fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) { + let mut dict = BimapTypeDict::new(); + let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] ); dict.add_varname("Radix".into()); dict.add_varname("SrcRadix".into()); @@ -118,7 +118,7 @@ fn test_morphism_path() { Some(( DummyMorphism(MorphismType{ src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(), - dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap() + dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap() }), dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>>").unwrap(), @@ -145,12 +145,12 @@ fn test_steiner_tree() { // destination reprs vec![ dict.parse("ℕ ~ <PosInt 2 BigEndian> ~ <Seq <Digit 2> ~ Char>").unwrap(), - dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(), + dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(), dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap() ] ); - if let Some(solution) = steiner_tree_problem.solve_bfs( &dict, &base ) { + if let Some(solution) = steiner_tree_problem.solve_bfs( &base ) { for e in solution.edges.iter() { eprintln!(" :: {}\n--> {}", dict.unparse(&e.src_type), dict.unparse(&e.dst_type)); } @@ -158,4 +158,3 @@ fn test_steiner_tree() { eprintln!("no solution"); } } - diff --git a/src/test/parser.rs b/src/test/parser.rs index 1166229..f650ae3 100644 --- a/src/test/parser.rs +++ b/src/test/parser.rs @@ -7,7 +7,7 @@ use { #[test] fn test_parser_id() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); dict.add_varname("T".into()); @@ -26,7 +26,7 @@ fn test_parser_id() { fn test_parser_num() { assert_eq!( Ok(TypeTerm::Num(1234)), - TypeDict::new().parse("1234") + BimapTypeDict::new().parse("1234") ); } @@ -34,21 +34,21 @@ fn test_parser_num() { fn test_parser_char() { assert_eq!( Ok(TypeTerm::Char('x')), - TypeDict::new().parse("'x'") + BimapTypeDict::new().parse("'x'") ); } #[test] fn test_parser_app() { assert_eq!( - TypeDict::new().parse("<A B>"), + BimapTypeDict::new().parse("<A B>"), Ok(TypeTerm::App(vec![ TypeTerm::TypeID(TypeID::Fun(0)), TypeTerm::TypeID(TypeID::Fun(1)), ])) ); assert_eq!( - TypeDict::new().parse("<A B C>"), + BimapTypeDict::new().parse("<A B C>"), Ok(TypeTerm::App(vec![ TypeTerm::TypeID(TypeID::Fun(0)), TypeTerm::TypeID(TypeID::Fun(1)), @@ -60,7 +60,7 @@ fn test_parser_app() { #[test] fn test_parser_unexpected_close() { assert_eq!( - TypeDict::new().parse(">"), + BimapTypeDict::new().parse(">"), Err(ParseError::UnexpectedClose) ); } @@ -68,7 +68,7 @@ fn test_parser_unexpected_close() { #[test] fn test_parser_unexpected_token() { assert_eq!( - TypeDict::new().parse("A B"), + BimapTypeDict::new().parse("A B"), Err(ParseError::UnexpectedToken) ); } @@ -76,14 +76,14 @@ fn test_parser_unexpected_token() { #[test] fn test_parser_ladder() { assert_eq!( - TypeDict::new().parse("A~B"), + BimapTypeDict::new().parse("A~B"), Ok(TypeTerm::Ladder(vec![ TypeTerm::TypeID(TypeID::Fun(0)), TypeTerm::TypeID(TypeID::Fun(1)), ])) ); assert_eq!( - TypeDict::new().parse("A~B~C"), + BimapTypeDict::new().parse("A~B~C"), Ok(TypeTerm::Ladder(vec![ TypeTerm::TypeID(TypeID::Fun(0)), TypeTerm::TypeID(TypeID::Fun(1)), @@ -95,7 +95,7 @@ fn test_parser_ladder() { #[test] fn test_parser_ladder_outside() { assert_eq!( - TypeDict::new().parse("<A B>~C"), + BimapTypeDict::new().parse("<A B>~C"), 
Ok(TypeTerm::Ladder(vec![ TypeTerm::App(vec![ TypeTerm::TypeID(TypeID::Fun(0)), @@ -103,13 +103,13 @@ fn test_parser_ladder_outside() { ]), TypeTerm::TypeID(TypeID::Fun(2)), ])) - ); + ); } #[test] fn test_parser_ladder_inside() { assert_eq!( - TypeDict::new().parse("<A B~C>"), + BimapTypeDict::new().parse("<A B~C>"), Ok(TypeTerm::App(vec![ TypeTerm::TypeID(TypeID::Fun(0)), TypeTerm::Ladder(vec![ @@ -117,13 +117,13 @@ fn test_parser_ladder_inside() { TypeTerm::TypeID(TypeID::Fun(2)), ]) ])) - ); + ); } #[test] fn test_parser_ladder_between() { assert_eq!( - TypeDict::new().parse("<A B~<C D>>"), + BimapTypeDict::new().parse("<A B~<C D>>"), Ok(TypeTerm::App(vec![ TypeTerm::TypeID(TypeID::Fun(0)), TypeTerm::Ladder(vec![ @@ -134,14 +134,14 @@ fn test_parser_ladder_between() { ]) ]) ])) - ); + ); } #[test] fn test_parser_ladder_large() { assert_eq!( - TypeDict::new().parse( + BimapTypeDict::new().parse( "<Seq Date ~<TimeSince UnixEpoch> ~<Duration Seconds> @@ -203,4 +203,3 @@ fn test_parser_ladder_large() { ) ); } - diff --git a/src/test/pnf.rs b/src/test/pnf.rs index 2303b3e..730793d 100644 --- a/src/test/pnf.rs +++ b/src/test/pnf.rs @@ -1,8 +1,8 @@ -use crate::dict::TypeDict; +use crate::{dict::BimapTypeDict, parser::*}; #[test] fn test_param_normalize() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); assert_eq!( dict.parse("A~B~C").expect("parse error"), @@ -38,4 +38,3 @@ fn test_param_normalize() { .param_normalize(), ); } - diff --git a/src/test/substitution.rs b/src/test/substitution.rs index 7959b08..e8906b9 100644 --- a/src/test/substitution.rs +++ b/src/test/substitution.rs @@ -1,6 +1,6 @@ use { - crate::{dict::*, term::*}, + crate::{dict::*, term::*, parser::*, unparser::*}, std::iter::FromIterator }; @@ -8,7 +8,7 @@ use { #[test] fn test_subst() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); let mut σ = std::collections::HashMap::new(); @@ -29,4 +29,3 @@ fn test_subst() { dict.parse("<Seq ℕ~<Seq Char>>").unwrap() ); } - diff --git a/src/test/subtype.rs b/src/test/subtype.rs index 08cc5c7..c993063 100644 --- a/src/test/subtype.rs +++ b/src/test/subtype.rs @@ -1,8 +1,8 @@ -use crate::dict::TypeDict; +use crate::{dict::BimapTypeDict, parser::*, unparser::*}; #[test] fn test_semantic_subtype() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); assert_eq!( dict.parse("A~B~C").expect("parse error") @@ -19,11 +19,11 @@ fn test_semantic_subtype() { ), Some((0, dict.parse("A~B1~C1").expect("parse errror"))) ); - + assert_eq!( dict.parse("A~B~C1").expect("parse error") .is_semantic_subtype_of( - &dict.parse("B~C2").expect("parse errror") + &dict.parse("B~C2").expect("parse errror") ), Some((1, dict.parse("B~C1").expect("parse errror"))) ); @@ -31,12 +31,12 @@ fn test_semantic_subtype() { #[test] fn test_syntactic_subtype() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); assert_eq!( dict.parse("A~B~C").expect("parse error") .is_syntactic_subtype_of( - &dict.parse("A~B~C").expect("parse errror") + &dict.parse("A~B~C").expect("parse errror") ), Ok(0) ); @@ -44,7 +44,7 @@ fn test_syntactic_subtype() { assert_eq!( dict.parse("A~B~C").expect("parse error") .is_syntactic_subtype_of( - &dict.parse("B~C").expect("parse errror") + &dict.parse("B~C").expect("parse errror") ), Ok(1) ); @@ -52,7 +52,7 @@ fn test_syntactic_subtype() { assert_eq!( dict.parse("A~B~C~D~E").expect("parse error") .is_syntactic_subtype_of( - &dict.parse("C~D").expect("parse errror") + &dict.parse("C~D").expect("parse errror") 
), Ok(2) ); @@ -60,7 +60,7 @@ fn test_syntactic_subtype() { assert_eq!( dict.parse("A~B~C~D~E").expect("parse error") .is_syntactic_subtype_of( - &dict.parse("C~G").expect("parse errror") + &dict.parse("C~G").expect("parse errror") ), Err((2,3)) ); @@ -68,7 +68,7 @@ fn test_syntactic_subtype() { assert_eq!( dict.parse("A~B~C~D~E").expect("parse error") .is_syntactic_subtype_of( - &dict.parse("G~F~K").expect("parse errror") + &dict.parse("G~F~K").expect("parse errror") ), Err((0,0)) ); @@ -94,4 +94,3 @@ fn test_syntactic_subtype() { Ok(4) ); } - diff --git a/src/test/unification.rs b/src/test/unification.rs index 34d355d..e0b892b 100644 --- a/src/test/unification.rs +++ b/src/test/unification.rs @@ -1,13 +1,13 @@ use { - crate::{dict::*, term::*, unification::*}, + crate::{dict::*, parser::*, unparser::*, term::*, unification::*}, std::iter::FromIterator }; //<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\ fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); dict.add_varname(String::from("T")); dict.add_varname(String::from("U")); dict.add_varname(String::from("V")); @@ -33,7 +33,7 @@ fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) { #[test] fn test_unification_error() { - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); dict.add_varname(String::from("T")); assert_eq!( @@ -76,7 +76,7 @@ fn test_unification() { true ); - let mut dict = TypeDict::new(); + let mut dict = BimapTypeDict::new(); dict.add_varname(String::from("T")); dict.add_varname(String::from("U")); @@ -115,4 +115,3 @@ fn test_unification() { ) ); } -
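
For illustration only (not part of the patch itself): a minimal sketch of how the reworked sequence-type API fits together, modelled directly on the updated DummyMorphism setup in src/test/morphism.rs. It only uses the signatures introduced above (map_morphism taking a TypeTerm, MorphismBase::new taking Vec<TypeTerm>, BimapTypeDict plus the parser trait) and assumes it lives inside the crate like the tests do.

// Sketch mirroring src/test/morphism.rs after this change.
use crate::{dict::*, parser::*, morphism::*, TypeTerm};

#[derive(Clone)]
struct DummyMorphism(MorphismType);

impl Morphism for DummyMorphism {
    fn get_type(&self) -> MorphismType {
        self.0.clone().normalize()
    }

    // The lifted morphism now wraps source and destination in the concrete
    // sequence type term that was handed in, instead of a hard-coded `Seq` TypeID.
    fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> {
        Some(DummyMorphism(MorphismType {
            src_type: TypeTerm::App(vec![ seq_type.clone(), self.0.src_type.clone() ]),
            dst_type: TypeTerm::App(vec![ seq_type.clone(), self.0.dst_type.clone() ]),
        }))
    }
}

fn setup() -> (BimapTypeDict, MorphismBase<DummyMorphism>) {
    let mut dict = BimapTypeDict::new();
    // The base is now constructed from a list of sequence type constructors;
    // more than one can be registered if needed.
    let base = MorphismBase::<DummyMorphism>::new(vec![
        dict.parse("Seq").expect("parse error"),
    ]);
    (dict, base)
}

With this setup, find_morphism() first looks for a primitive morphism and only then falls back to lifting an item morphism over each registered sequence type via map_morphism(), which is the ordering introduced in the src/morphism.rs hunk above.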