Compare commits
22 commits: c26d16444c ... a52c01d9e9
| SHA1 |
|---|
| a52c01d9e9 |
| 4155310e1e |
| a534d33b7b |
| f33ad0a7e2 |
| 8fb20e4e18 |
| f7b8a20299 |
| 9408703cd1 |
| 575caa7c44 |
| b0b14fa04c |
| c6edd44eac |
| ffb9906209 |
| adc8a43b69 |
| c0c0184d97 |
| 95fc28f80e |
| fd5936209c |
| 967ae5f30e |
| 3c5d7111bc |
| a9a35aed1b |
| 4a6a35a897 |
| 4aa62d4813 |
| c6bad6046a |
| c03db48fd2 |
21 changed files with 1184 additions and 140 deletions
Cargo.toml

@@ -4,3 +4,8 @@ edition = "2018"
name = "laddertypes"
version = "0.1.0"

[dependencies]
tiny-ansi = { version = "0.1.0", optional = true }

[features]
pretty = ["dep:tiny-ansi"]
src/bimap.rs

@@ -2,6 +2,7 @@ use std::{collections::HashMap, hash::Hash};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Debug)]
pub struct Bimap<V: Eq + Hash, Λ: Eq + Hash> {
    pub mλ: HashMap<V, Λ>,
    pub my: HashMap<Λ, V>,
75 src/dict.rs

@@ -2,15 +2,34 @@ use crate::bimap::Bimap;

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Eq, PartialEq, Hash, Clone, Debug)]
#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
pub enum TypeID {
    Fun(u64),
    Var(u64)
}

pub trait TypeDict : Send + Sync {
    fn insert(&mut self, name: String, id: TypeID);
    fn add_varname(&mut self, vn: String) -> TypeID;
    fn add_typename(&mut self, tn: String) -> TypeID;
    fn get_typeid(&self, tn: &String) -> Option<TypeID>;
    fn get_typename(&self, tid: &TypeID) -> Option<String>;

    fn get_varname(&self, var_id: u64) -> Option<String> {
        self.get_typename(&TypeID::Var(var_id))
    }

    fn add_synonym(&mut self, new: String, old: String) {
        if let Some(tyid) = self.get_typeid(&old) {
            self.insert(new, tyid);
        }
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

pub struct TypeDict {
#[derive(Debug)]
pub struct BimapTypeDict {
    typenames: Bimap<String, TypeID>,
    type_lit_counter: u64,
    type_var_counter: u64,

@@ -18,42 +37,66 @@ pub struct TypeDict {

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

impl TypeDict {
impl BimapTypeDict {
    pub fn new() -> Self {
        TypeDict {
        BimapTypeDict {
            typenames: Bimap::new(),
            type_lit_counter: 0,
            type_var_counter: 0,
        }
    }
}

    pub fn add_varname(&mut self, tn: String) -> TypeID {
impl TypeDict for BimapTypeDict {
    fn insert(&mut self, name: String, id: TypeID) {
        self.typenames.insert(name, id);
    }

    fn add_varname(&mut self, tn: String) -> TypeID {
        let tyid = TypeID::Var(self.type_var_counter);
        self.type_var_counter += 1;
        self.typenames.insert(tn, tyid.clone());
        self.insert(tn, tyid.clone());
        tyid
    }

    pub fn add_typename(&mut self, tn: String) -> TypeID {
    fn add_typename(&mut self, tn: String) -> TypeID {
        let tyid = TypeID::Fun(self.type_lit_counter);
        self.type_lit_counter += 1;
        self.typenames.insert(tn, tyid.clone());
        self.insert(tn, tyid.clone());
        tyid
    }

    pub fn add_synonym(&mut self, new: String, old: String) {
        if let Some(tyid) = self.get_typeid(&old) {
            self.typenames.insert(new, tyid);
        }
    }

    pub fn get_typename(&self, tid: &TypeID) -> Option<String> {
    fn get_typename(&self, tid: &TypeID) -> Option<String> {
        self.typenames.my.get(tid).cloned()
    }

    pub fn get_typeid(&self, tn: &String) -> Option<TypeID> {
    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
        self.typenames.mλ.get(tn).cloned()
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>

use std::sync::Arc;
use std::ops::{Deref, DerefMut};
use std::sync::RwLock;

impl<T: TypeDict> TypeDict for Arc<RwLock<T>> {
    fn insert(&mut self, name: String, id: TypeID) {
        self.write().unwrap().insert(name, id);
    }
    fn add_varname(&mut self, vn: String) -> TypeID {
        self.write().unwrap().add_varname(vn)
    }
    fn add_typename(&mut self, tn: String) -> TypeID {
        self.write().unwrap().add_typename(tn)
    }
    fn get_typename(&self, tid: &TypeID)-> Option<String> {
        self.read().unwrap().get_typename(tid)
    }
    fn get_typeid(&self, tn: &String) -> Option<TypeID> {
        self.read().unwrap().get_typeid(tn)
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
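For orientation, a minimal usage sketch of the new trait-based dictionary (an illustrative main, not part of the diff; it assumes the crate is consumed as the laddertypes library):

use std::sync::{Arc, RwLock};
use laddertypes::{TypeDict, BimapTypeDict};

fn main() {
    // plain owned dictionary
    let mut dict = BimapTypeDict::new();
    let seq = dict.add_typename("Seq".into());
    assert_eq!(dict.get_typeid(&"Seq".into()), Some(seq));

    // the same calls work on a shared dictionary, because
    // Arc<RwLock<T>> also implements TypeDict (see impl above)
    let mut shared = Arc::new(RwLock::new(BimapTypeDict::new()));
    shared.add_typename("Char".into());
    assert!(shared.get_typeid(&"Char".into()).is_some());
}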
11 src/lib.rs

@@ -5,18 +5,25 @@ pub mod term;
pub mod lexer;
pub mod parser;
pub mod unparser;
pub mod sugar;
pub mod curry;
pub mod lnf;
pub mod pnf;
pub mod subtype;
pub mod unification;
pub mod morphism;
pub mod steiner_tree;

#[cfg(test)]
mod test;

#[cfg(feature = "pretty")]
mod pretty;

pub use {
    dict::*,
    term::*,
    unification::*
    sugar::*,
    unification::*,
    morphism::*
};
267 src/morphism.rs (new file)

@@ -0,0 +1,267 @@
use {
    crate::{
        TypeTerm, TypeID,
        unification::UnificationProblem,
    },
    std::collections::HashMap
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MorphismType {
    pub src_type: TypeTerm,
    pub dst_type: TypeTerm,
}

pub trait Morphism : Sized {
    fn get_type(&self) -> MorphismType;
    fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >;

    fn weight(&self) -> u64 {
        1
    }
}

#[derive(Clone)]
pub struct MorphismBase<M: Morphism + Clone> {
    morphisms: Vec< M >,
    seq_types: Vec< TypeTerm >
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

impl MorphismType {
    pub fn normalize(self) -> Self {
        MorphismType {
            src_type: self.src_type.normalize(),
            dst_type: self.dst_type.normalize()
        }
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

impl<M: Morphism + Clone> MorphismBase<M> {
    pub fn new(seq_types: Vec<TypeTerm>) -> Self {
        MorphismBase {
            morphisms: Vec::new(),
            seq_types
        }
    }

    pub fn add_morphism(&mut self, m: M) {
        self.morphisms.push( m );
    }

    pub fn enum_morphisms(&self, src_type: &TypeTerm)
        -> Vec< (HashMap<TypeID, TypeTerm>, TypeTerm) >
    {
        let mut dst_types = Vec::new();

        // first enumerate all "direct" morphisms,
        for m in self.morphisms.iter() {
            if let Ok(σ) = crate::unification::unify(
                &m.get_type().src_type,
                &src_type.clone().normalize()
            ) {
                let dst_type =
                    m.get_type().dst_type.clone()
                        .apply_substitution( &|x| σ.get(x).cloned() )
                        .clone();

                dst_types.push( (σ, dst_type) );
            }
        }

        // ..then all "list-map" morphisms.
        // Check if we have a List type, and if so, see what the Item type is

        // TODO: function for generating fresh variables
        let item_variable = TypeID::Var(100);

        for seq_type in self.seq_types.iter() {
            if let Ok(σ) = crate::unification::unify(
                &TypeTerm::App(vec![
                    seq_type.clone(),
                    TypeTerm::TypeID(item_variable)
                ]),
                &src_type.clone().param_normalize(),
            ) {
                let src_item_type = σ.get(&item_variable).unwrap().clone();

                for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
                    let dst_type =
                        TypeTerm::App(vec![
                            seq_type.clone(),
                            dst_item_type.clone()
                                .apply_substitution(
                                    &|x| γ.get(x).cloned()
                                ).clone()
                        ]).normalize();

                    dst_types.push( (γ.clone(), dst_type) );
                }
            }
        }

        dst_types
    }

    pub fn enum_morphisms_with_subtyping(
        &self,
        src_type: &TypeTerm,
    ) -> Vec<(TypeTerm, TypeTerm, HashMap<TypeID, TypeTerm>)> {
        let mut src_lnf = src_type.clone().get_lnf_vec();
        let mut halo_lnf = vec![];
        let mut dst_types = Vec::new();

        while src_lnf.len() > 0 {
            let src_type = TypeTerm::Ladder(src_lnf.clone());
            let halo_type = TypeTerm::Ladder(halo_lnf.clone());

            for (σ, t) in self.enum_morphisms(&src_type) {
                dst_types.push((
                    halo_type
                        .clone()
                        .apply_substitution(&|x| σ.get(x).cloned())
                        .clone(),
                    t.clone().apply_substitution(&|x| σ.get(x).cloned()).clone(),
                    σ,
                ));
            }

            // continue with next supertype
            halo_lnf.push(src_lnf.remove(0));
        }

        dst_types
    }

    /* try to find shortest morphism-path for a given type
     */
    pub fn find_morphism_path(&self, ty: MorphismType)
        -> Option< Vec<TypeTerm> >
    {
        let ty = ty.normalize();

        let mut queue = vec![
            (0, vec![ ty.src_type.clone().normalize() ])
        ];

        while ! queue.is_empty() {
            queue.sort_by( |&(w1,_),&(w2,_)| w2.cmp(&w1));

            if let Some((current_weight, current_path)) = queue.pop() {
                let current_type = current_path.last().unwrap();
                for (h, t, σp) in self.enum_morphisms_with_subtyping(&current_type) {
                    let tt = TypeTerm::Ladder(vec![h, t]).normalize();

                    if !current_path.contains(&tt) {
                        let unification_result = crate::unification::unify(&tt, &ty.dst_type);
                        let morphism_weight = 1;
                        /*self.find_morphism( &tt ).unwrap().0.get_weight()*/

                        let new_weight = current_weight + morphism_weight;
                        let mut new_path = current_path.clone();

                        new_path.push(tt);

                        for n in new_path.iter_mut() {
                            n.apply_substitution(&|x| σp.get(x).cloned());
                        }

                        if let Ok(σ) = unification_result {
                            for n in new_path.iter_mut() {
                                n.apply_substitution(&|x| σ.get(x).cloned());
                            }
                            return Some(new_path);
                        } else {
                            queue.push((new_weight, new_path));
                        }
                    }
                }
            }
        }

        None
    }

    /// find a morphism that matches the given morphism type
    pub fn find_morphism(&self, ty: &MorphismType)
        -> Option< ( M, HashMap<TypeID, TypeTerm> ) > {

        // try to find primitive morphism
        for m in self.morphisms.iter() {
            let unification_problem = UnificationProblem::new(
                vec![
                    ( ty.src_type.clone().normalize(), m.get_type().src_type.clone() ),
                    ( ty.dst_type.clone().normalize(), m.get_type().dst_type.clone() )
                ]
            );

            let unification_result = unification_problem.solve();
            if let Ok(σ) = unification_result {
                return Some((m.clone(), σ));
            }
        }

        // try list-map morphism
        for seq_type in self.seq_types.iter() {
            eprintln!("try seq type {:?}", seq_type);

            eprintln!("");

            if let Ok(σ) = UnificationProblem::new(vec![
                (ty.src_type.clone().param_normalize(),
                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
                (ty.dst_type.clone().param_normalize(),
                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ])),
            ]).solve() {
                // TODO: use real fresh variable names
                let item_morph_type = MorphismType {
                    src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
                    dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
                }.normalize();

                if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
                    if let Some(list_morph) = m.map_morphism( seq_type.clone() ) {
                        return Some( (list_morph, σ) );
                    }
                }
            }
        }

        None
    }

    pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
        -> Option<( M, TypeTerm, HashMap<TypeID, TypeTerm> )> {
        let mut src_lnf = ty.src_type.clone().get_lnf_vec();
        let mut dst_lnf = ty.dst_type.clone().get_lnf_vec();
        let mut halo = vec![];

        while src_lnf.len() > 0 && dst_lnf.len() > 0 {
            if let Some((m, σ)) = self.find_morphism(&MorphismType{
                src_type: TypeTerm::Ladder(src_lnf.clone()),
                dst_type: TypeTerm::Ladder(dst_lnf.clone())
            }) {
                halo.push(src_lnf.get(0).unwrap().clone());
                return Some((m,
                    TypeTerm::Ladder(halo).apply_substitution(&|x| σ.get(x).cloned()).clone(),
                    σ));
            } else {
                if src_lnf[0] == dst_lnf[0] {
                    src_lnf.remove(0);
                    halo.push(dst_lnf.remove(0));
                } else {
                    return None;
                }
            }
        }

        None
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
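A compact sketch of the intended call pattern (the real exercise of this API is in src/test/morphism.rs below; Conv is an illustrative wrapper, analogous to the DummyMorphism used there):

use laddertypes::{BimapTypeDict, Morphism, MorphismBase, MorphismType, TypeTerm};
use laddertypes::parser::ParseLadderType;

#[derive(Clone)]
struct Conv(MorphismType);

impl Morphism for Conv {
    fn get_type(&self) -> MorphismType { self.0.clone().normalize() }
    fn map_morphism(&self, _seq_type: TypeTerm) -> Option<Self> { None }
}

fn main() {
    let mut dict = BimapTypeDict::new();
    let mut base = MorphismBase::<Conv>::new(vec![ dict.parse("Seq").unwrap() ]);

    base.add_morphism(Conv(MorphismType {
        src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
        dst_type: dict.parse("<Digit 10> ~ machine.UInt64").unwrap(),
    }));

    // weight-ordered search through all reachable representations
    let path = base.find_morphism_path(MorphismType {
        src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
        dst_type: dict.parse("<Digit 10> ~ machine.UInt64").unwrap(),
    });
    assert!(path.is_some());
}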
src/parser.rs

@@ -18,10 +18,23 @@ pub enum ParseError {
    UnexpectedToken
}

pub trait ParseLadderType {
    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError>;

    fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = char>;

    fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = char>;

    fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
    where It: Iterator<Item = char>;
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

impl TypeDict {
    pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
impl<T: TypeDict> ParseLadderType for T {
    fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
        let mut tokens = LadderTypeLexer::from(s.chars()).peekable();

        match self.parse_ladder(&mut tokens) {
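Since parse() now comes from a blanket impl over every TypeDict, the same call works on an owned dictionary or on a shared one; a small sketch (illustrative, not from the diff):

use std::sync::{Arc, RwLock};
use laddertypes::BimapTypeDict;
use laddertypes::parser::ParseLadderType;

fn main() {
    let mut shared = Arc::new(RwLock::new(BimapTypeDict::new()));

    // the blanket impl<T: TypeDict> ParseLadderType for T
    // makes parse() available on Arc<RwLock<..>> as well
    let ty = shared.parse("<Seq Char>").expect("parse error");
    println!("{:?}", ty);
}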
151 src/pnf.rs

@@ -2,6 +2,20 @@ use crate::term::TypeTerm;

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm > ) -> Vec< TypeTerm > {
    for i in 0 .. upper.len() {
        if upper[i] == lower[0] {
            let mut result_ladder = Vec::<TypeTerm>::new();
            result_ladder.append(&mut upper[0..i].iter().cloned().collect());
            result_ladder.append(&mut lower);
            return result_ladder;
        }
    }

    upper.append(&mut lower);
    upper
}

impl TypeTerm {
    /// transmute type into Parameter-Normal-Form (PNF)
    ///
@@ -10,88 +24,99 @@ impl TypeTerm {
    /// <Seq <Digit 10>>~<Seq Char>
    /// ⇒ <Seq <Digit 10>~Char>
    /// ```
    pub fn param_normalize(self) -> Self {
    pub fn param_normalize(mut self) -> Self {
        match self {
            TypeTerm::Ladder(mut rungs) => {
                if rungs.len() > 0 {
                    // normalize all rungs separately
                    for r in rungs.iter_mut() {
                        *r = r.clone().param_normalize();
                    }
                    let mut new_rungs = Vec::new();
                    while let Some(bottom) = rungs.pop() {
                        if let Some(last_but) = rungs.last_mut() {
                            match (bottom, last_but) {
                                (TypeTerm::App(bot_args), TypeTerm::App(last_args)) => {
                                    if bot_args.len() == last_args.len() {
                                        let mut new_rung_params = Vec::new();
                                        let mut require_break = false;

                    // take top-rung
                    match rungs.remove(0) {
                        TypeTerm::App(params_top) => {
                            let mut params_ladders = Vec::new();
                            let mut tail : Vec<TypeTerm> = Vec::new();
                                        if bot_args.len() > 0 {
                                            if let Ok(_idx) = last_args[0].is_syntactic_subtype_of(&bot_args[0]) {
                                                for i in 0 .. bot_args.len() {

                            // append all other rungs to ladders inside
                            // the application
                            for p in params_top {
                                params_ladders.push(vec![ p ]);
                            }
                                                    let spliced_type_ladder = splice_ladders(
                                                        last_args[i].clone().get_lnf_vec(),
                                                        bot_args[i].clone().get_lnf_vec()
                                                    );
                                                    let spliced_type =
                                                        if spliced_type_ladder.len() == 1 {
                                                            spliced_type_ladder[0].clone()
                                                        } else if spliced_type_ladder.len() > 1 {
                                                            TypeTerm::Ladder(spliced_type_ladder)
                                                        } else {
                                                            TypeTerm::unit()
                                                        };

                            for r in rungs {
                                match r {
                                    TypeTerm::App(mut params_rung) => {
                                        if params_rung.len() > 0 {
                                            let mut first_param = params_rung.remove(0);
                                                    new_rung_params.push( spliced_type.param_normalize() );
                                                }

                                            if first_param == params_ladders[0][0] {
                                                for (l, p) in params_ladders.iter_mut().skip(1).zip(params_rung) {
                                                    l.push(p.param_normalize());
                                                }
                                            } else {
                                                params_rung.insert(0, first_param);
                                                tail.push(TypeTerm::App(params_rung));
                                            new_rung_params.push(
                                                TypeTerm::Ladder(vec![
                                                    last_args[0].clone(),
                                                    bot_args[0].clone()
                                                ]).normalize()
                                            );

                                            for i in 1 .. bot_args.len() {
                                                if let Ok(_idx) = last_args[i].is_syntactic_subtype_of(&bot_args[i]) {
                                                    let spliced_type_ladder = splice_ladders(
                                                        last_args[i].clone().get_lnf_vec(),
                                                        bot_args[i].clone().get_lnf_vec()
                                                    );
                                                    let spliced_type =
                                                        if spliced_type_ladder.len() == 1 {
                                                            spliced_type_ladder[0].clone()
                                                        } else if spliced_type_ladder.len() > 1 {
                                                            TypeTerm::Ladder(spliced_type_ladder)
                                                        } else {
                                                            TypeTerm::unit()
                                                        };

                                                    new_rung_params.push( spliced_type.param_normalize() );
                                                } else {
                                                    new_rung_params.push( bot_args[i].clone() );
                                                    require_break = true;
                                                }
                                            }
                                        }
                                    }

                                    TypeTerm::Ladder(mut rs) => {
                                        for r in rs {
                                            tail.push(r.param_normalize());
                                        if require_break {
                                            new_rungs.push( TypeTerm::App(new_rung_params) );
                                        } else {
                                            rungs.pop();
                                            rungs.push(TypeTerm::App(new_rung_params));
                                        }
                                    }

                                    atomic => {
                                        tail.push(atomic);
                                    } else {
                                        new_rungs.push( TypeTerm::App(bot_args) );
                                    }
                                }
                                (bottom, last_buf) => {
                                    new_rungs.push( bottom );
                                }
                            }

                            let head = TypeTerm::App(
                                params_ladders.into_iter()
                                    .map(
                                        |mut l| {
                                            l.dedup();
                                            match l.len() {
                                                0 => TypeTerm::unit(),
                                                1 => l.remove(0),
                                                _ => TypeTerm::Ladder(l).param_normalize()
                                            }
                                        }
                                    )
                                    .collect()
                            );

                            if tail.len() > 0 {
                                tail.insert(0, head);
                                TypeTerm::Ladder(tail)
                            } else {
                                head
                            }
                        } else {
                            new_rungs.push( bottom );
                        }
                    }

                    TypeTerm::Ladder(mut r) => {
                        r.append(&mut rungs);
                        TypeTerm::Ladder(r)
                    }
                    new_rungs.reverse();

                    atomic => {
                        rungs.insert(0, atomic);
                        TypeTerm::Ladder(rungs)
                    }
                    if new_rungs.len() > 1 {
                        TypeTerm::Ladder(new_rungs)
                    } else if new_rungs.len() == 1 {
                        new_rungs[0].clone()
                    } else {
                        TypeTerm::unit()
                    }
                } else {
                    TypeTerm::unit()
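The effect of param_normalize() is easiest to see on the example from the doc comment above; a minimal check (mirroring the assertions in src/test/pnf.rs further down):

use laddertypes::BimapTypeDict;
use laddertypes::parser::ParseLadderType;

fn main() {
    let mut dict = BimapTypeDict::new();

    // parameters of matching applications are merged into one ladder
    assert_eq!(
        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse error").param_normalize(),
        dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
    );
}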
150 src/pretty.rs (new file)

@@ -0,0 +1,150 @@
use {
    crate::TypeDict,
    crate::sugar::SugaredTypeTerm,
    tiny_ansi::TinyAnsi
};

impl SugaredTypeTerm {
    pub fn pretty(&self, dict: &TypeDict, indent: u64) -> String {
        let indent_width = 4;
        match self {
            SugaredTypeTerm::TypeID(id) => {
                format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_blue()
            },

            SugaredTypeTerm::Num(n) => {
                format!("{}", n).bright_cyan()
            }

            SugaredTypeTerm::Char(c) => {
                match c {
                    '\0' => format!("'\\0'"),
                    '\n' => format!("'\\n'"),
                    _ => format!("'{}'", c)
                }
            }

            SugaredTypeTerm::Univ(t) => {
                format!("{} {} . {}",
                    "∀".yellow().bold(),
                    dict.get_varname(0).unwrap_or("??".into()).bright_blue(),
                    t.pretty(dict,indent)
                )
            }

            SugaredTypeTerm::Spec(args) => {
                let mut s = String::new();
                s.push_str(&"<".yellow().bold());
                for i in 0..args.len() {
                    let arg = &args[i];
                    if i > 0 {
                        s.push(' ');
                    }
                    s.push_str( &arg.pretty(dict,indent+1) );
                }
                s.push_str(&">".yellow().bold());
                s
            }

            SugaredTypeTerm::Struct(args) => {
                let mut s = String::new();
                s.push_str(&"{".yellow().bold());
                for arg in args {
                    s.push('\n');
                    for x in 0..(indent+1)*indent_width {
                        s.push(' ');
                    }
                    s.push_str(&arg.pretty(dict, indent + 1));
                    s.push_str(&";\n".bright_yellow());
                }

                s.push('\n');
                for x in 0..indent*indent_width {
                    s.push(' ');
                }
                s.push_str(&"}".yellow().bold());
                s
            }

            SugaredTypeTerm::Enum(args) => {
                let mut s = String::new();
                s.push_str(&"(".yellow().bold());
                for i in 0..args.len() {
                    let arg = &args[i];
                    s.push('\n');
                    for x in 0..(indent+1)*indent_width {
                        s.push(' ');
                    }
                    if i > 0 {
                        s.push_str(&"| ".yellow().bold());
                    }
                    s.push_str(&arg.pretty(dict, indent + 1));
                }

                s.push('\n');
                for x in 0..indent*indent_width {
                    s.push(' ');
                }
                s.push_str(&")".yellow().bold());
                s
            }

            SugaredTypeTerm::Seq(args) => {
                let mut s = String::new();
                s.push_str(&"[ ".yellow().bold());
                for i in 0..args.len() {
                    let arg = &args[i];
                    if i > 0 {
                        s.push(' ');
                    }
                    s.push_str(&arg.pretty(dict, indent+1));
                }
                s.push_str(&" ]".yellow().bold());
                s
            }

            SugaredTypeTerm::Morph(args) => {
                let mut s = String::new();
                for arg in args {
                    s.push_str(&" ~~morph~~> ".bright_yellow());
                    s.push_str(&arg.pretty(dict, indent));
                }
                s
            }

            SugaredTypeTerm::Func(args) => {
                let mut s = String::new();
                for i in 0..args.len() {
                    let arg = &args[i];
                    if i > 0 {
                        s.push('\n');
                        for x in 0..(indent*indent_width) {
                            s.push(' ');
                        }
                        s.push_str(&"--> ".bright_yellow());
                    } else {
                        // s.push_str("   ");
                    }
                    s.push_str(&arg.pretty(dict, indent));
                }
                s
            }

            SugaredTypeTerm::Ladder(rungs) => {
                let mut s = String::new();
                for i in 0..rungs.len() {
                    let rung = &rungs[i];
                    if i > 0 {
                        s.push('\n');
                        for x in 0..(indent*indent_width) {
                            s.push(' ');
                        }
                        s.push_str(&"~ ".yellow());
                    }
                    s.push_str(&rung.pretty(dict, indent));
                }
                s
            }
        }
    }
}
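A sketch of how the pretty-printer is reached from a plain term (requires building with the new cargo feature, e.g. cargo run --features pretty; illustrative, not part of the diff):

use laddertypes::BimapTypeDict;
use laddertypes::parser::ParseLadderType;

fn main() {
    let mut dict = BimapTypeDict::new();
    let term = dict.parse("<Seq <Digit 10>~Char>").expect("parse error");

    // sugar() folds the raw application tree into a SugaredTypeTerm,
    // pretty() renders it with ANSI colors via tiny-ansi
    println!("{}", term.sugar(&mut dict).pretty(&dict, 0));
}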
245 src/steiner_tree.rs (new file)

@@ -0,0 +1,245 @@
use {
    std::collections::HashMap,
    crate::{
        TypeID,
        TypeTerm,
        morphism::{
            MorphismType,
            Morphism,
            MorphismBase
        }
    }
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Clone)]
pub struct SteinerTree {
    weight: u64,
    goals: Vec< TypeTerm >,
    pub edges: Vec< MorphismType >,
}

impl SteinerTree {
    pub fn into_edges(self) -> Vec< MorphismType > {
        self.edges
    }

    fn add_edge(&mut self, ty: MorphismType) {
        self.weight += 1;

        let ty = ty.normalize();

        // check if by adding this new edge, we reach a goal
        let mut new_goals = Vec::new();
        let mut added = false;

        for g in self.goals.clone() {
            if let Ok(σ) = crate::unify(&ty.dst_type, &g) {
                if !added {
                    self.edges.push(ty.clone());

                    // goal reached.
                    for e in self.edges.iter_mut() {
                        e.src_type = e.src_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
                        e.dst_type = e.dst_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
                    }
                    added = true;
                } else {
                    new_goals.push(g);
                }
            } else {
                new_goals.push(g);
            }
        }

        if !added {
            self.edges.push(ty.clone());
        }

        self.goals = new_goals;
    }

    fn is_solved(&self) -> bool {
        self.goals.len() == 0
    }

    fn contains(&self, t: &TypeTerm) -> Option< HashMap<TypeID, TypeTerm> > {
        for e in self.edges.iter() {
            if let Ok(σ) = crate::unify(&e.dst_type, t) {
                return Some(σ)
            }
        }

        None
    }
}


pub struct PathApproxSteinerTreeSolver {
    root: TypeTerm,
    leaves: Vec< TypeTerm >
}

impl PathApproxSteinerTreeSolver {
    pub fn new(
        root: TypeTerm,
        leaves: Vec<TypeTerm>
    ) -> Self {
        PathApproxSteinerTreeSolver {
            root, leaves
        }
    }

    pub fn solve<M: Morphism + Clone>(self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
        let mut tree = Vec::<MorphismType>::new();

        for goal in self.leaves {
            // try to find shortest path from root to current leaf
            if let Some(new_path) = morphisms.find_morphism_path(
                MorphismType {
                    src_type: self.root.clone(),
                    dst_type: goal.clone()
                }
            ) {
                // reduce new path so that it does not collide with any existing path
                let mut src_type = self.root.clone();
                let mut new_path_iter = new_path.into_iter().peekable();

                // check all existing nodes..

                if new_path_iter.peek().unwrap() == &src_type {
                    new_path_iter.next();
                }

                for mt in tree.iter() {
                    //assert!( mt.src_type == &src_type );
                    if let Some(t) = new_path_iter.peek() {
                        if &mt.dst_type == t {
                            // eliminate this node from new path
                            src_type = new_path_iter.next().unwrap().clone();
                        }
                    } else {
                        break;
                    }
                }

                for dst_type in new_path_iter {
                    tree.push(MorphismType {
                        src_type: src_type.clone(),
                        dst_type: dst_type.clone()
                    });
                    src_type = dst_type;
                }
            } else {
                eprintln!("could not find path\nfrom {:?}\nto {:?}", &self.root, &goal);
                return None;
            }
        }

        Some(SteinerTree {
            weight: 0,
            goals: vec![],
            edges: tree
        })
    }
}


/* given a representation tree with the available
 * representations `src_types`, try to find
 * a sequence of morphisms that span up all
 * representations in `dst_types`.
 */
pub struct SteinerTreeProblem {
    src_types: Vec< TypeTerm >,
    queue: Vec< SteinerTree >
}

impl SteinerTreeProblem {
    pub fn new(
        src_types: Vec< TypeTerm >,
        dst_types: Vec< TypeTerm >
    ) -> Self {
        SteinerTreeProblem {
            src_types: src_types.into_iter().map(|t| t.normalize()).collect(),
            queue: vec![
                SteinerTree {
                    weight: 0,
                    goals: dst_types.into_iter().map(|t| t.normalize()).collect(),
                    edges: Vec::new()
                }
            ]
        }
    }

    pub fn next(&mut self) -> Option< SteinerTree > {
        eprintln!("queue size = {}", self.queue.len());

        /* FIXME: by giving the highest priority to
         * the candidate tree with the least remaining goals,
         * the optimality of the search algorithm
         * is probably destroyed, but it dramatically helps
         * to tame the combinatorial explosion in this algorithm.
         */
        self.queue.sort_by(|t1, t2|
            if t1.goals.len() < t2.goals.len() {
                std::cmp::Ordering::Greater
            } else if t1.goals.len() == t2.goals.len() {
                if t1.weight < t2.weight {
                    std::cmp::Ordering::Greater
                } else {
                    std::cmp::Ordering::Less
                }
            } else {
                std::cmp::Ordering::Less
            }
        );
        self.queue.pop()
    }
    /*
    pub fn solve_approx_path<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
        if let Some(master) = self.src_types.first() {

        }
    }
    */
    pub fn solve_bfs<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {

        // take the currently smallest tree and extend it by one step
        while let Some( mut current_tree ) = self.next() {

            // check if current tree is a solution
            if current_tree.goals.len() == 0 {
                return Some(current_tree);
            }

            // get all vertices spanned by this tree
            let mut current_nodes = self.src_types.clone();
            for e in current_tree.edges.iter() {
                current_nodes.push( e.dst_type.clone() );
            }

            // extend the tree by one edge and add it to the queue
            for src_type in current_nodes {
                for (dst_halo, dst_ty, σ) in morphisms.enum_morphisms_with_subtyping(&src_type) {
                    let dst_type = TypeTerm::Ladder(vec![dst_halo, dst_ty]).normalize();

                    if current_tree.contains( &dst_type ).is_none() {
                        let mut new_tree = current_tree.clone();
                        {
                            let src_type = src_type.clone();
                            let dst_type = dst_type.clone();
                            new_tree.add_edge(MorphismType { src_type, dst_type }.normalize());
                        }

                        self.queue.push( new_tree );
                    }
                }
            }
        }

        None
    }
}
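PathApproxSteinerTreeSolver is not exercised by the tests below, so here is a hedged sketch of how it composes with MorphismBase (Conv and the A/B/C type names are illustrative only, not from the diff):

use laddertypes::{BimapTypeDict, Morphism, MorphismBase, MorphismType, TypeTerm};
use laddertypes::parser::ParseLadderType;
use laddertypes::steiner_tree::PathApproxSteinerTreeSolver;

#[derive(Clone)]
struct Conv(MorphismType);

impl Morphism for Conv {
    fn get_type(&self) -> MorphismType { self.0.clone().normalize() }
    fn map_morphism(&self, _seq_type: TypeTerm) -> Option<Self> { None }
}

fn main() {
    let mut dict = BimapTypeDict::new();
    let mut base = MorphismBase::<Conv>::new(vec![ dict.parse("Seq").unwrap() ]);
    base.add_morphism(Conv(MorphismType {
        src_type: dict.parse("A").unwrap(),
        dst_type: dict.parse("B").unwrap(),
    }));
    base.add_morphism(Conv(MorphismType {
        src_type: dict.parse("B").unwrap(),
        dst_type: dict.parse("C").unwrap(),
    }));

    // approximate the Steiner tree by routing a shortest
    // morphism path from the root to every leaf in turn
    let solver = PathApproxSteinerTreeSolver::new(
        dict.parse("A").unwrap(),
        vec![ dict.parse("B").unwrap(), dict.parse("C").unwrap() ],
    );

    if let Some(tree) = solver.solve(&base) {
        // expected edges here: A --> B and B --> C
        for edge in tree.into_edges() {
            let _ = (edge.src_type, edge.dst_type);
        }
    }
}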
114 src/sugar.rs (new file)

@@ -0,0 +1,114 @@
use {
    crate::{TypeTerm, TypeID, parser::ParseLadderType}
};

#[derive(Clone, PartialEq)]
pub enum SugaredTypeTerm {
    TypeID(TypeID),
    Num(i64),
    Char(char),
    Univ(Box< SugaredTypeTerm >),
    Spec(Vec< SugaredTypeTerm >),
    Func(Vec< SugaredTypeTerm >),
    Morph(Vec< SugaredTypeTerm >),
    Ladder(Vec< SugaredTypeTerm >),
    Struct(Vec< SugaredTypeTerm >),
    Enum(Vec< SugaredTypeTerm >),
    Seq(Vec< SugaredTypeTerm >)
}

impl TypeTerm {
    pub fn sugar(self: TypeTerm, dict: &mut impl crate::TypeDict) -> SugaredTypeTerm {
        match self {
            TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
            TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
            TypeTerm::Char(c) => SugaredTypeTerm::Char(c),
            TypeTerm::App(args) => if let Some(first) = args.first() {
                if first == &dict.parse("Func").unwrap() {
                    SugaredTypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
                }
                else if first == &dict.parse("Morph").unwrap() {
                    SugaredTypeTerm::Morph( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
                }
                else if first == &dict.parse("Struct").unwrap() {
                    SugaredTypeTerm::Struct( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
                }
                else if first == &dict.parse("Enum").unwrap() {
                    SugaredTypeTerm::Enum( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
                }
                else if first == &dict.parse("Seq").unwrap() {
                    SugaredTypeTerm::Seq( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
                }
                else if first == &dict.parse("Spec").unwrap() {
                    SugaredTypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
                }
                else if first == &dict.parse("Univ").unwrap() {
                    SugaredTypeTerm::Univ(Box::new(
                        SugaredTypeTerm::Spec(
                            args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
                        )
                    ))
                }
                else {
                    SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
                }
            } else {
                SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
            },
            TypeTerm::Ladder(rungs) =>
                SugaredTypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
        }
    }
}

impl SugaredTypeTerm {
    pub fn desugar(self, dict: &mut impl crate::TypeDict) -> TypeTerm {
        match self {
            SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
            SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
            SugaredTypeTerm::Char(c) => TypeTerm::Char(c),
            SugaredTypeTerm::Univ(t) => t.desugar(dict),
            SugaredTypeTerm::Spec(ts) => TypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
            SugaredTypeTerm::Ladder(ts) => TypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
            SugaredTypeTerm::Func(ts) => TypeTerm::App(
                std::iter::once( dict.parse("Func").unwrap() ).chain(
                    ts.into_iter().map(|t| t.desugar(dict))
                ).collect()),
            SugaredTypeTerm::Morph(ts) => TypeTerm::App(
                std::iter::once( dict.parse("Morph").unwrap() ).chain(
                    ts.into_iter().map(|t| t.desugar(dict))
                ).collect()),
            SugaredTypeTerm::Struct(ts) => TypeTerm::App(
                std::iter::once( dict.parse("Struct").unwrap() ).chain(
                    ts.into_iter().map(|t| t.desugar(dict))
                ).collect()),
            SugaredTypeTerm::Enum(ts) => TypeTerm::App(
                std::iter::once( dict.parse("Enum").unwrap() ).chain(
                    ts.into_iter().map(|t| t.desugar(dict))
                ).collect()),
            SugaredTypeTerm::Seq(ts) => TypeTerm::App(
                std::iter::once( dict.parse("Seq").unwrap() ).chain(
                    ts.into_iter().map(|t| t.desugar(dict))
                ).collect()),
        }
    }

    pub fn is_empty(&self) -> bool {
        match self {
            SugaredTypeTerm::TypeID(_) => false,
            SugaredTypeTerm::Num(_) => false,
            SugaredTypeTerm::Char(_) => false,
            SugaredTypeTerm::Univ(t) => t.is_empty(),
            SugaredTypeTerm::Spec(ts) |
            SugaredTypeTerm::Ladder(ts) |
            SugaredTypeTerm::Func(ts) |
            SugaredTypeTerm::Morph(ts) |
            SugaredTypeTerm::Struct(ts) |
            SugaredTypeTerm::Enum(ts) |
            SugaredTypeTerm::Seq(ts) => {
                ts.iter().fold(true, |s,t| s && t.is_empty())
            }
        }
    }
}
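A round-trip sketch for the new sugared representation (illustrative; names as in the diff):

use laddertypes::{BimapTypeDict, SugaredTypeTerm};
use laddertypes::parser::ParseLadderType;

fn main() {
    let mut dict = BimapTypeDict::new();

    // "<Seq Char>" is recognized via its head symbol and sugared
    // into SugaredTypeTerm::Seq; desugar() rebuilds the raw term
    let term = dict.parse("<Seq Char>").expect("parse error");
    let sugared = term.clone().sugar(&mut dict);
    assert!(matches!(&sugared, SugaredTypeTerm::Seq(_)));
    assert_eq!(sugared.desugar(&mut dict), term);
}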
src/term.rs

@@ -14,8 +14,6 @@ pub enum TypeTerm {
    Num(i64),
    Char(char),

    /* Complex Terms */

    // Type Parameters

@@ -47,17 +45,16 @@ impl TypeTerm {
        *self = TypeTerm::App(vec![
            self.clone(),
            t.into()
        ])
    }
}

        self
    }

    pub fn repr_as(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
        match self {
            TypeTerm::Ladder(rungs) => {
                rungs.push(t.into());
            }

            _ => {
src/test/curry.rs

@@ -1,12 +1,12 @@
use {
    crate::{dict::*}
    crate::{dict::*, parser::*}
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[test]
fn test_curry() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("<A B C>").unwrap().curry(),

@@ -33,7 +33,7 @@ fn test_curry() {

#[test]
fn test_decurry() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("<<A B> C>").unwrap().decurry(),

@@ -47,7 +47,7 @@ fn test_decurry() {
        dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
        dict.parse("<A B C D E F G H I J K>").unwrap()
    );

    assert_eq!(
        dict.parse("<<A~X B> C>").unwrap().decurry(),
        dict.parse("<A~X B C>").unwrap()
src/test/lnf.rs

@@ -1,8 +1,8 @@
use crate::dict::TypeDict;
use crate::{dict::{BimapTypeDict}, parser::*};

#[test]
fn test_flat() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    assert!( dict.parse("A").expect("parse error").is_flat() );
    assert!( dict.parse("10").expect("parse error").is_flat() );

@@ -17,7 +17,7 @@ fn test_flat() {

#[test]
fn test_normalize() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error").normalize(),

@@ -54,4 +54,3 @@ fn test_normalize() {
    );

}
src/test/mod.rs

@@ -7,4 +7,5 @@ pub mod pnf;
pub mod subtype;
pub mod substitution;
pub mod unification;
pub mod morphism;
160 src/test/morphism.rs (new file)

@@ -0,0 +1,160 @@
use {
    crate::{dict::*, parser::*, unparser::*, morphism::*, steiner_tree::*, TypeTerm}
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Clone, Debug, PartialEq)]
struct DummyMorphism(MorphismType);

impl Morphism for DummyMorphism {
    fn get_type(&self) -> MorphismType {
        self.0.clone().normalize()
    }

    fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> {
        Some(DummyMorphism(MorphismType {
            src_type: TypeTerm::App(vec![
                seq_type.clone(),
                self.0.src_type.clone()
            ]),

            dst_type: TypeTerm::App(vec![
                seq_type.clone(),
                self.0.dst_type.clone()
            ])
        }))
    }
}

fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
    let mut dict = BimapTypeDict::new();
    let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] );

    dict.add_varname("Radix".into());
    dict.add_varname("SrcRadix".into());
    dict.add_varname("DstRadix".into());

    base.add_morphism(
        DummyMorphism(MorphismType{
            src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
            dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
        })
    );
    base.add_morphism(
        DummyMorphism(MorphismType{
            src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
            dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
        })
    );
    base.add_morphism(
        DummyMorphism(MorphismType{
            src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
            dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
        })
    );
    base.add_morphism(
        DummyMorphism(MorphismType{
            src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
            dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
        })
    );
    base.add_morphism(
        DummyMorphism(MorphismType{
            src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
            dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
        })
    );

    (dict, base)
}

#[test]
fn test_morphism_path() {
    let (mut dict, mut base) = morphism_test_setup();

    assert_eq!(
        base.find_morphism_path(MorphismType {
            src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
            dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
        }),
        Some(
            vec![
                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
                dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
            ]
        )
    );

    assert_eq!(
        base.find_morphism_path(MorphismType {
            src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
            dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
        }),
        Some(
            vec![
                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
                dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("Symbol ~ ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("Symbol ~ ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
                dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
            ]
        )
    );

    assert_eq!(
        base.find_morphism_with_subtyping(
            &MorphismType {
                src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
                dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
            }
        ),

        Some((
            DummyMorphism(MorphismType{
                src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
                dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
            }),

            dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>>").unwrap(),

            vec![
                (dict.get_typeid(&"Radix".into()).unwrap(),
                    dict.parse("10").unwrap())
            ].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
        ))
    );
}

#[test]
fn test_steiner_tree() {
    let (mut dict, mut base) = morphism_test_setup();


    let mut steiner_tree_problem = SteinerTreeProblem::new(
        // source reprs
        vec![
            dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
        ],

        // destination reprs
        vec![
            dict.parse("ℕ ~ <PosInt 2 BigEndian> ~ <Seq <Digit 2> ~ Char>").unwrap(),
            dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
            dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
        ]
    );

    if let Some(solution) = steiner_tree_problem.solve_bfs( &base ) {
        for e in solution.edges.iter() {
            eprintln!(" :: {}\n--> {}", dict.unparse(&e.src_type), dict.unparse(&e.dst_type));
        }
    } else {
        eprintln!("no solution");
    }
}
src/test/parser.rs

@@ -7,7 +7,7 @@ use {

#[test]
fn test_parser_id() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    dict.add_varname("T".into());

@@ -26,7 +26,7 @@ fn test_parser_id() {
fn test_parser_num() {
    assert_eq!(
        Ok(TypeTerm::Num(1234)),
        TypeDict::new().parse("1234")
        BimapTypeDict::new().parse("1234")
    );
}

@@ -34,21 +34,21 @@ fn test_parser_num() {
fn test_parser_char() {
    assert_eq!(
        Ok(TypeTerm::Char('x')),
        TypeDict::new().parse("'x'")
        BimapTypeDict::new().parse("'x'")
    );
}

#[test]
fn test_parser_app() {
    assert_eq!(
        TypeDict::new().parse("<A B>"),
        BimapTypeDict::new().parse("<A B>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),
        ]))
    );
    assert_eq!(
        TypeDict::new().parse("<A B C>"),
        BimapTypeDict::new().parse("<A B C>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),

@@ -60,7 +60,7 @@ fn test_parser_app() {
#[test]
fn test_parser_unexpected_close() {
    assert_eq!(
        TypeDict::new().parse(">"),
        BimapTypeDict::new().parse(">"),
        Err(ParseError::UnexpectedClose)
    );
}

@@ -68,7 +68,7 @@ fn test_parser_unexpected_close() {
#[test]
fn test_parser_unexpected_token() {
    assert_eq!(
        TypeDict::new().parse("A B"),
        BimapTypeDict::new().parse("A B"),
        Err(ParseError::UnexpectedToken)
    );
}

@@ -76,14 +76,14 @@ fn test_parser_unexpected_token() {
#[test]
fn test_parser_ladder() {
    assert_eq!(
        TypeDict::new().parse("A~B"),
        BimapTypeDict::new().parse("A~B"),
        Ok(TypeTerm::Ladder(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),
        ]))
    );
    assert_eq!(
        TypeDict::new().parse("A~B~C"),
        BimapTypeDict::new().parse("A~B~C"),
        Ok(TypeTerm::Ladder(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),

@@ -95,7 +95,7 @@ fn test_parser_ladder() {
#[test]
fn test_parser_ladder_outside() {
    assert_eq!(
        TypeDict::new().parse("<A B>~C"),
        BimapTypeDict::new().parse("<A B>~C"),
        Ok(TypeTerm::Ladder(vec![
            TypeTerm::App(vec![
                TypeTerm::TypeID(TypeID::Fun(0)),

@@ -103,13 +103,13 @@ fn test_parser_ladder_outside() {
            ]),
            TypeTerm::TypeID(TypeID::Fun(2)),
        ]))
    );
}

#[test]
fn test_parser_ladder_inside() {
    assert_eq!(
        TypeDict::new().parse("<A B~C>"),
        BimapTypeDict::new().parse("<A B~C>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::Ladder(vec![

@@ -117,13 +117,13 @@ fn test_parser_ladder_inside() {
            TypeTerm::TypeID(TypeID::Fun(2)),
            ])
        ]))
    );
}

#[test]
fn test_parser_ladder_between() {
    assert_eq!(
        TypeDict::new().parse("<A B~<C D>>"),
        BimapTypeDict::new().parse("<A B~<C D>>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::Ladder(vec![

@@ -134,14 +134,14 @@ fn test_parser_ladder_between() {
            ])
            ])
        ]))
    );
}


#[test]
fn test_parser_ladder_large() {
    assert_eq!(
        TypeDict::new().parse(
        BimapTypeDict::new().parse(
            "<Seq Date
                ~<TimeSince UnixEpoch>
                ~<Duration Seconds>

@@ -203,4 +203,3 @@ fn test_parser_ladder_large() {
        )
    );
}
src/test/pnf.rs

@@ -1,8 +1,8 @@
use crate::dict::TypeDict;
use crate::{dict::BimapTypeDict, parser::*};

#[test]
fn test_param_normalize() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error"),

@@ -19,23 +19,40 @@ fn test_param_normalize() {
        dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A~Y B>").expect("parse error"),
        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A B~C D~E>").expect("parse error"),
        dict.parse("<A B D>~<A C D>~<A C E>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A~X B~C D~E>").expect("parse error"),
        dict.parse("<A B D>~<A B~C E>~<X C E>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse error").param_normalize(),
    );

    assert_eq!(
        dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
        dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~x86.UInt8>").expect("parse error").param_normalize(),
        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
    );
    assert_eq!(
        dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> x86.UInt8>").expect("parse error").param_normalize(),
        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
    );

    assert_eq!(
        dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
        dict.parse("<A <B C> F H H>
            ~<A <B D> F H H>
            ~<A <B E> F H H>
            ~<A <B E> G H H>").expect("parse error")
            ~<A~Y <B E> F H H>").expect("parse error")
            .param_normalize(),
    );
}
src/test/substitution.rs

@@ -1,6 +1,6 @@
use {
    crate::{dict::*, term::*},
    crate::{dict::*, term::*, parser::*, unparser::*},
    std::iter::FromIterator
};

@@ -8,7 +8,7 @@ use {

#[test]
fn test_subst() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    let mut σ = std::collections::HashMap::new();

@@ -29,4 +29,3 @@ fn test_subst() {
        dict.parse("<Seq ℕ~<Seq Char>>").unwrap()
    );
}
src/test/subtype.rs

@@ -1,8 +1,8 @@
use crate::dict::TypeDict;
use crate::{dict::BimapTypeDict, parser::*, unparser::*};

#[test]
fn test_semantic_subtype() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error")

@@ -19,11 +19,11 @@ fn test_semantic_subtype() {
        ),
        Some((0, dict.parse("A~B1~C1").expect("parse error")))
    );

    assert_eq!(
        dict.parse("A~B~C1").expect("parse error")
            .is_semantic_subtype_of(
                &dict.parse("B~C2").expect("parse error")
            ),
        Some((1, dict.parse("B~C1").expect("parse error")))
    );

@@ -31,12 +31,12 @@ fn test_semantic_subtype() {

#[test]
fn test_syntactic_subtype() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("A~B~C").expect("parse error")
            ),
        Ok(0)
    );

@@ -44,7 +44,7 @@ fn test_syntactic_subtype() {
    assert_eq!(
        dict.parse("A~B~C").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("B~C").expect("parse error")
            ),
        Ok(1)
    );

@@ -52,7 +52,7 @@ fn test_syntactic_subtype() {
    assert_eq!(
        dict.parse("A~B~C~D~E").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("C~D").expect("parse error")
            ),
        Ok(2)
    );

@@ -60,7 +60,7 @@ fn test_syntactic_subtype() {
    assert_eq!(
        dict.parse("A~B~C~D~E").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("C~G").expect("parse error")
            ),
        Err((2,3))
    );

@@ -68,7 +68,7 @@ fn test_syntactic_subtype() {
    assert_eq!(
        dict.parse("A~B~C~D~E").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("G~F~K").expect("parse error")
            ),
        Err((0,0))
    );

@@ -94,4 +94,3 @@ fn test_syntactic_subtype() {
        Ok(4)
    );
}
src/test/unification.rs

@@ -1,13 +1,13 @@
use {
    crate::{dict::*, term::*, unification::*},
    crate::{dict::*, parser::*, unparser::*, term::*, unification::*},
    std::iter::FromIterator
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();
    dict.add_varname(String::from("T"));
    dict.add_varname(String::from("U"));
    dict.add_varname(String::from("V"));

@@ -33,7 +33,7 @@ fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {

#[test]
fn test_unification_error() {
    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();
    dict.add_varname(String::from("T"));

    assert_eq!(

@@ -76,7 +76,7 @@ fn test_unification() {
        true
    );

    let mut dict = TypeDict::new();
    let mut dict = BimapTypeDict::new();

    dict.add_varname(String::from("T"));
    dict.add_varname(String::from("U"));

@@ -115,4 +115,3 @@ fn test_unification() {
    )
    );
}
src/unparser.rs

@@ -2,8 +2,12 @@ use crate::{dict::*, term::*};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

impl TypeDict {
    pub fn unparse(&self, t: &TypeTerm) -> String {
pub trait UnparseLadderType {
    fn unparse(&self, t: &TypeTerm) -> String;
}

impl<T: TypeDict> UnparseLadderType for T {
    fn unparse(&self, t: &TypeTerm) -> String {
        match t {
            TypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
            TypeTerm::Num(n) => format!("{}", n),
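With unparse() now defined for every TypeDict, parse and unparse compose into a round trip; a small sketch (illustrative):

use laddertypes::BimapTypeDict;
use laddertypes::parser::ParseLadderType;
use laddertypes::unparser::UnparseLadderType;

fn main() {
    let mut dict = BimapTypeDict::new();
    let t = dict.parse("<Seq <Digit 10>~Char>").expect("parse error");

    // unparse() renders the term back to the ladder-type syntax,
    // and parsing that string again yields the same term
    let s = dict.unparse(&t);
    assert_eq!(dict.parse(&s).expect("parse error"), t);
}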