Compare commits


14 commits

13 changed files with 1019 additions and 22 deletions

Cargo.toml

@@ -4,3 +4,8 @@ edition = "2018"
name = "laddertypes"
version = "0.1.0"
[dependencies]
tiny-ansi = { version = "0.1.0", optional = true }
[features]
pretty = ["dep:tiny-ansi"]

src/bimap.rs

@@ -2,6 +2,7 @@ use std::{collections::HashMap, hash::Hash};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Debug)]
pub struct Bimap<V: Eq + Hash, Λ: Eq + Hash> {
pub mλ: HashMap<V, Λ>,
pub my: HashMap<Λ, V>,
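Aside: a minimal usage sketch of the bidirectional map (hypothetical; the field names are as reconstructed above, and an insert() that fills both directions is assumed, as BimapTypeDict below relies on):

fn bimap_example() {
    let mut map: Bimap<String, u64> = Bimap::new();
    map.insert("Char".into(), 7);
    assert_eq!(map.mλ.get("Char"), Some(&7));               // forward lookup
    assert_eq!(map.my.get(&7), Some(&"Char".to_string()));  // reverse lookup
}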

src/dict.rs

@@ -2,15 +2,34 @@ use crate::bimap::Bimap;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Eq, PartialEq, Hash, Clone, Debug)]
#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
pub enum TypeID {
Fun(u64),
Var(u64)
}
pub trait TypeDict : Send + Sync {
fn insert(&mut self, name: String, id: TypeID);
fn add_varname(&mut self, vn: String) -> TypeID;
fn add_typename(&mut self, tn: String) -> TypeID;
fn get_typeid(&self, tn: &String) -> Option<TypeID>;
fn get_typename(&self, tid: &TypeID) -> Option<String>;
fn get_varname(&self, var_id: u64) -> Option<String> {
self.get_typename(&TypeID::Var(var_id))
}
fn add_synonym(&mut self, new: String, old: String) {
if let Some(tyid) = self.get_typeid(&old) {
self.insert(new, tyid);
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
pub struct TypeDict {
#[derive(Debug)]
pub struct BimapTypeDict {
typenames: Bimap<String, TypeID>,
type_lit_counter: u64,
type_var_counter: u64,
@@ -18,42 +37,66 @@ pub struct TypeDict {
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl TypeDict {
impl BimapTypeDict {
pub fn new() -> Self {
TypeDict {
BimapTypeDict {
typenames: Bimap::new(),
type_lit_counter: 0,
type_var_counter: 0,
}
}
}
pub fn add_varname(&mut self, tn: String) -> TypeID {
impl TypeDict for BimapTypeDict {
fn insert(&mut self, name: String, id: TypeID) {
self.typenames.insert(name, id);
}
fn add_varname(&mut self, tn: String) -> TypeID {
let tyid = TypeID::Var(self.type_var_counter);
self.type_var_counter += 1;
self.typenames.insert(tn, tyid.clone());
self.insert(tn, tyid.clone());
tyid
}
pub fn add_typename(&mut self, tn: String) -> TypeID {
fn add_typename(&mut self, tn: String) -> TypeID {
let tyid = TypeID::Fun(self.type_lit_counter);
self.type_lit_counter += 1;
self.typenames.insert(tn, tyid.clone());
self.insert(tn, tyid.clone());
tyid
}
pub fn add_synonym(&mut self, new: String, old: String) {
if let Some(tyid) = self.get_typeid(&old) {
self.typenames.insert(new, tyid);
}
}
pub fn get_typename(&self, tid: &TypeID) -> Option<String> {
fn get_typename(&self, tid: &TypeID) -> Option<String> {
self.typenames.my.get(tid).cloned()
}
pub fn get_typeid(&self, tn: &String) -> Option<TypeID> {
fn get_typeid(&self, tn: &String) -> Option<TypeID> {
self.typenames.mλ.get(tn).cloned()
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
use std::sync::Arc;
use std::ops::{Deref, DerefMut};
use std::sync::RwLock;
impl<T: TypeDict> TypeDict for Arc<RwLock<T>> {
fn insert(&mut self, name: String, id: TypeID) {
self.write().unwrap().insert(name, id);
}
fn add_varname(&mut self, vn: String) -> TypeID {
self.write().unwrap().add_varname(vn)
}
fn add_typename(&mut self, tn: String) -> TypeID {
self.write().unwrap().add_typename(tn)
}
fn get_typename(&self, tid: &TypeID)-> Option<String> {
self.read().unwrap().get_typename(tid)
}
fn get_typeid(&self, tn: &String) -> Option<TypeID> {
self.read().unwrap().get_typeid(tn)
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
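Aside: a short sketch of how the refactor is meant to be used. BimapTypeDict provides the concrete storage, while the blanket Arc<RwLock<T>> impl lets one dictionary be shared; the example itself is hypothetical, using only the signatures shown above:

use std::sync::{Arc, RwLock};

fn shared_dict_example() {
    // concrete dictionary behind the TypeDict trait
    let mut dict: Arc<RwLock<BimapTypeDict>> = Arc::new(RwLock::new(BimapTypeDict::new()));

    let seq = dict.add_typename("Seq".into());       // goes through the Arc<RwLock<_>> impl
    let radix = dict.add_varname("Radix".into());
    dict.add_synonym("List".into(), "Seq".into());   // default method from the trait

    assert_eq!(dict.get_typeid(&"List".into()), Some(seq));
    assert_eq!(dict.get_typename(&radix), Some("Radix".into()));
}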

src/lib.rs

@@ -5,18 +5,26 @@ pub mod term;
pub mod lexer;
pub mod parser;
pub mod unparser;
pub mod sugar;
pub mod curry;
pub mod lnf;
pub mod pnf;
pub mod subtype;
pub mod unification;
pub mod morphism;
pub mod steiner_tree;
#[cfg(test)]
mod test;
#[cfg(feature = "pretty")]
mod pretty;
pub use {
dict::*,
term::*,
unification::*
sugar::*,
unification::*,
morphism::*
};

src/morphism.rs (new file, 258 lines)

@@ -0,0 +1,258 @@
use {
crate::{
TypeTerm, TypeID,
unification::UnificationProblem,
},
std::collections::HashMap
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MorphismType {
pub src_type: TypeTerm,
pub dst_type: TypeTerm,
}
pub trait Morphism : Sized {
fn get_type(&self) -> MorphismType;
fn list_map_morphism(&self, list_typeid: TypeID) -> Option< Self >;
fn weight(&self) -> u64 {
1
}
}
#[derive(Clone)]
pub struct MorphismBase<M: Morphism + Clone> {
morphisms: Vec< M >,
list_typeid: TypeID
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl MorphismType {
pub fn normalize(self) -> Self {
MorphismType {
src_type: self.src_type.normalize(),
dst_type: self.dst_type.normalize()
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl<M: Morphism + Clone> MorphismBase<M> {
pub fn new(list_typeid: TypeID) -> Self {
MorphismBase {
morphisms: Vec::new(),
list_typeid
}
}
pub fn add_morphism(&mut self, m: M) {
self.morphisms.push( m );
}
pub fn enum_morphisms(&self, src_type: &TypeTerm)
-> Vec< (HashMap<TypeID, TypeTerm>, TypeTerm) >
{
let mut dst_types = Vec::new();
// first enumerate all "direct" morphisms,
for m in self.morphisms.iter() {
if let Ok(σ) = crate::unification::unify(
&m.get_type().src_type,
&src_type.clone().normalize()
) {
let dst_type =
m.get_type().dst_type.clone()
.apply_substitution( &|x| σ.get(x).cloned() )
.clone();
dst_types.push( (σ, dst_type) );
}
}
// ..then all "list-map" morphisms.
// Check if we have a List type, and if so, see what the Item type is
// TODO: function for generating fresh variables
let item_variable = TypeID::Var(100);
if let Ok(σ) = crate::unification::unify(
&TypeTerm::App(vec![
TypeTerm::TypeID(self.list_typeid),
TypeTerm::TypeID(item_variable)
]),
&src_type.clone().param_normalize(),
) {
let src_item_type = σ.get(&item_variable).unwrap().clone();
for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
let dst_type =
TypeTerm::App(vec![
TypeTerm::TypeID(self.list_typeid),
dst_item_type.clone()
.apply_substitution(
&|x| γ.get(x).cloned()
).clone()
]).normalize();
dst_types.push( (γ.clone(), dst_type) );
}
}
dst_types
}
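Aside: enum_morphisms collects, for every registered morphism whose source type unifies with the given (normalized) type, the substitution σ together with the substituted destination type, and additionally lifts item-level morphisms over the list type, so a digit conversion also becomes a sequence-of-digits conversion. A rough sketch of a call (setup as in src/test/morphism.rs further down; TypeID::Var(100) above is a placeholder for a fresh variable, as the TODO notes):

fn enum_morphisms_sketch() {
    let (mut dict, base) = morphism_test_setup();   // helper from src/test/morphism.rs
    let src = dict.parse("<Seq <Digit 10> ~ Char>").unwrap();
    for (σ, dst) in base.enum_morphisms(&src) {
        // among the results: the digit morphism lifted over Seq, e.g.
        // <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>
        eprintln!("{:?} |- {}", σ, dict.unparse(&dst));
    }
}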
pub fn enum_morphisms_with_subtyping(
&self,
src_type: &TypeTerm,
) -> Vec<(TypeTerm, TypeTerm, HashMap<TypeID, TypeTerm>)> {
let mut src_lnf = src_type.clone().get_lnf_vec();
let mut halo_lnf = vec![];
let mut dst_types = Vec::new();
while src_lnf.len() > 0 {
let src_type = TypeTerm::Ladder(src_lnf.clone());
let halo_type = TypeTerm::Ladder(halo_lnf.clone());
for (σ, t) in self.enum_morphisms(&src_type) {
dst_types.push((
halo_type
.clone()
.apply_substitution(&|x| σ.get(x).cloned())
.clone(),
t.clone().apply_substitution(&|x| σ.get(x).cloned()).clone(),
σ,
));
}
// continue with next supertype
halo_lnf.push(src_lnf.remove(0));
}
dst_types
}
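Aside: enum_morphisms_with_subtyping peels the source ladder rung by rung; the rungs peeled off so far form the "halo" that a caller prepends to the morphism's destination to recover the full type. A hypothetical sketch, reusing the test setup:

fn halo_sketch() {
    let (mut dict, base) = morphism_test_setup();
    let src = dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap();
    for (halo, dst, _σ) in base.enum_morphisms_with_subtyping(&src) {
        // the peeled-off rungs ("Symbol ~ ℕ ~ ...") are re-attached here
        let full = TypeTerm::Ladder(vec![ halo, dst ]).normalize();
        eprintln!("{}", dict.unparse(&full));
    }
}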
/* try to find shortest morphism-path for a given type
*/
pub fn find_morphism_path(&self, ty: MorphismType)
-> Option< Vec<TypeTerm> >
{
let ty = ty.normalize();
let mut queue = vec![
(0, vec![ ty.src_type.clone().normalize() ])
];
while ! queue.is_empty() {
queue.sort_by( |&(w1,_),&(w2,_)| w2.cmp(&w1));
if let Some((current_weight, current_path)) = queue.pop() {
let current_type = current_path.last().unwrap();
for (h, t, σp) in self.enum_morphisms_with_subtyping(&current_type) {
let tt = TypeTerm::Ladder(vec![h, t]).normalize();
if !current_path.contains(&tt) {
let unification_result = crate::unification::unify(&tt, &ty.dst_type);
let morphism_weight = 1;
/*self.find_morphism( &tt ).unwrap().0.get_weight()*/
let new_weight = current_weight + morphism_weight;
let mut new_path = current_path.clone();
new_path.push(tt);
for n in new_path.iter_mut() {
n.apply_substitution(&|x| σp.get(x).cloned());
}
if let Ok(σ) = unification_result {
for n in new_path.iter_mut() {
n.apply_substitution(&|x| σ.get(x).cloned());
}
return Some(new_path);
} else {
queue.push((new_weight, new_path));
}
}
}
}
}
None
}
pub fn find_morphism(&self, ty: &MorphismType)
-> Option< ( M, HashMap<TypeID, TypeTerm> ) > {
// try list-map morphism
if let Ok(σ) = UnificationProblem::new(vec![
(ty.src_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(100)) ])),
(ty.dst_type.clone().param_normalize(), TypeTerm::App(vec![ TypeTerm::TypeID(self.list_typeid), TypeTerm::TypeID(TypeID::Var(101)) ])),
]).solve() {
// TODO: use real fresh variable names
let item_morph_type = MorphismType {
src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
}.normalize();
if let Some((m, σ)) = self.find_morphism( &item_morph_type ) {
if let Some(list_morph) = m.list_map_morphism( self.list_typeid ) {
return Some( (list_morph, σ) );
}
}
}
// otherwise
for m in self.morphisms.iter() {
let unification_problem = UnificationProblem::new(
vec![
( ty.src_type.clone().normalize(), m.get_type().src_type.clone() ),
( ty.dst_type.clone().normalize(), m.get_type().dst_type.clone() )
]
);
let unification_result = unification_problem.solve();
if let Ok(σ) = unification_result {
return Some((m.clone(), σ));
}
}
None
}
pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
-> Option<( M, TypeTerm, HashMap<TypeID, TypeTerm> )> {
let mut src_lnf = ty.src_type.clone().get_lnf_vec();
let mut dst_lnf = ty.dst_type.clone().get_lnf_vec();
let mut halo = vec![];
while src_lnf.len() > 0 && dst_lnf.len() > 0 {
if let Some((m, σ)) = self.find_morphism(&MorphismType{
src_type: TypeTerm::Ladder(src_lnf.clone()),
dst_type: TypeTerm::Ladder(dst_lnf.clone())
}) {
halo.push(src_lnf.get(0).unwrap().clone());
return Some((m,
TypeTerm::Ladder(halo).apply_substitution(&|x| σ.get(x).cloned()).clone(),
σ));
} else {
if src_lnf[0] == dst_lnf[0] {
src_lnf.remove(0);
halo.push(dst_lnf.remove(0));
} else {
return None;
}
}
}
None
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
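Aside: find_morphism_path runs a weight-ordered search over these enumerations and returns every intermediate representation from source to destination. A minimal sketch mirroring the test below (names taken from src/test/morphism.rs):

fn path_search_sketch() {
    let (mut dict, base) = morphism_test_setup();
    if let Some(path) = base.find_morphism_path(MorphismType {
        src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
        dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
    }) {
        for step in path.iter() {
            eprintln!("~~> {}", dict.unparse(step));
        }
    }
}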

src/parser.rs

@@ -18,10 +18,23 @@ pub enum ParseError {
UnexpectedToken
}
pub trait ParseLadderType {
fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError>;
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
where It: Iterator<Item = char>;
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
where It: Iterator<Item = char>;
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
where It: Iterator<Item = char>;
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl TypeDict {
pub fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
impl<T: TypeDict> ParseLadderType for T {
fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
match self.parse_ladder(&mut tokens) {
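Aside: with ParseLadderType blanket-implemented for every TypeDict, parsing no longer needs the concrete dictionary type. A hypothetical sketch, assuming (as the tests below do) that unknown names are added to the dictionary while parsing, and that the trait paths are as in this diff:

use laddertypes::{BimapTypeDict, parser::ParseLadderType, unparser::UnparseLadderType};

fn parse_example() {
    let mut dict = BimapTypeDict::new();
    let ty = dict.parse("<Seq <Digit 10> ~ Char>").unwrap();
    println!("{}", dict.unparse(&ty));   // prints the term back in ladder notation
}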

src/pretty.rs (new file, 148 lines)

@@ -0,0 +1,148 @@
use {
crate::TypeDict,
crate::sugar::SugaredTypeTerm,
tiny_ansi::TinyAnsi
};
impl SugaredTypeTerm {
pub fn pretty(&self, dict: &TypeDict, indent: u64) -> String {
let indent_width = 4;
match self {
SugaredTypeTerm::TypeID(id) => {
format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_blue()
},
SugaredTypeTerm::Num(n) => {
format!("{}", n).bright_cyan()
}
SugaredTypeTerm::Char(c) => {
format!("'{}'", c)
}
SugaredTypeTerm::Univ(t) => {
format!("{} {} . {}",
"".yellow().bold(),
dict.get_varname(0).unwrap_or("??".into()).bright_blue(),
t.pretty(dict,indent)
)
}
SugaredTypeTerm::Spec(args) => {
let mut s = String::new();
s.push_str(&"<".yellow().bold());
for i in 0..args.len() {
let arg = &args[i];
if i > 0 {
s.push(' ');
}
s.push_str( &arg.pretty(dict,indent+1) );
}
s.push_str(&">".yellow().bold());
s
}
SugaredTypeTerm::Struct(args) => {
let mut s = String::new();
s.push_str(&"{".yellow().bold());
for arg in args {
s.push('\n');
for x in 0..(indent+1)*indent_width {
s.push(' ');
}
s.push_str(&arg.pretty(dict, indent + 1));
s.push_str(&";\n".bright_yellow());
}
s.push('\n');
for x in 0..indent*indent_width {
s.push(' ');
}
s.push_str(&"}".yellow().bold());
s
}
SugaredTypeTerm::Enum(args) => {
let mut s = String::new();
s.push_str(&"(".yellow().bold());
for i in 0..args.len() {
let arg = &args[i];
s.push('\n');
for x in 0..(indent+1)*indent_width {
s.push(' ');
}
if i > 0 {
s.push_str(&"| ".yellow().bold());
}
s.push_str(&arg.pretty(dict, indent + 1));
}
s.push('\n');
for x in 0..indent*indent_width {
s.push(' ');
}
s.push_str(&")".yellow().bold());
s
}
SugaredTypeTerm::Seq(args) => {
let mut s = String::new();
s.push_str(&"[ ".yellow().bold());
for i in 0..args.len() {
let arg = &args[i];
if i > 0 {
s.push(' ');
}
s.push_str(&arg.pretty(dict, indent+1));
}
s.push_str(&" ]".yellow().bold());
s
}
SugaredTypeTerm::Morph(args) => {
let mut s = String::new();
for arg in args {
s.push_str(&" ~~morph~~> ".bright_yellow());
s.push_str(&arg.pretty(dict, indent));
}
s
}
SugaredTypeTerm::Func(args) => {
let mut s = String::new();
for i in 0..args.len() {
let arg = &args[i];
if i > 0{
s.push('\n');
for x in 0..(indent*indent_width) {
s.push(' ');
}
s.push_str(&"--> ".bright_yellow());
} else {
// s.push_str(" ");
}
s.push_str(&arg.pretty(dict, indent));
}
s
}
SugaredTypeTerm::Ladder(rungs) => {
let mut s = String::new();
for i in 0..rungs.len() {
let rung = &rungs[i];
if i > 0{
s.push('\n');
for x in 0..(indent*indent_width) {
s.push(' ');
}
s.push_str(&"~ ".yellow());
}
s.push_str(&rung.pretty(dict, indent));
}
s
}
}
}
}
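Aside: a sketch of how the pretty printer is reached, behind the new "pretty" cargo feature; the sugaring step comes from src/sugar.rs below, and the example itself is hypothetical:

#[cfg(feature = "pretty")]
fn pretty_example() {
    let mut dict = BimapTypeDict::new();
    let term = dict.parse("<Struct <Seq Char> UInt64>").unwrap();
    let sugared = term.sugar(&mut dict);           // TypeTerm -> SugaredTypeTerm
    println!("{}", sugared.pretty(&dict, 0));      // ANSI-colored, indented rendering
}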

src/steiner_tree.rs (new file, 241 lines)

@@ -0,0 +1,241 @@
use {
std::collections::HashMap,
crate::{
TypeID,
TypeTerm,
morphism::{
MorphismType,
Morphism,
MorphismBase
}
}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone)]
pub struct SteinerTree {
weight: u64,
goals: Vec< TypeTerm >,
edges: Vec< MorphismType >,
}
impl SteinerTree {
pub fn into_edges(self) -> Vec< MorphismType > {
self.edges
}
fn add_edge(&mut self, ty: MorphismType) {
self.weight += 1;
let ty = ty.normalize();
// check if by adding this new edge, we reach a goal
let mut new_goals = Vec::new();
let mut added = false;
for g in self.goals.clone() {
if let Ok(σ) = crate::unify(&ty.dst_type, &g) {
if !added {
self.edges.push(ty.clone());
// goal reached.
for e in self.edges.iter_mut() {
e.src_type = e.src_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
e.dst_type = e.dst_type.apply_substitution(&|x| σ.get(x).cloned()).clone();
}
added = true;
} else {
new_goals.push(g);
}
} else {
new_goals.push(g);
}
}
if !added {
self.edges.push(ty.clone());
}
self.goals = new_goals;
}
fn is_solved(&self) -> bool {
self.goals.len() == 0
}
fn contains(&self, t: &TypeTerm) -> Option< HashMap<TypeID, TypeTerm> > {
for e in self.edges.iter() {
if let Ok(σ) = crate::unify(&e.dst_type, t) {
return Some(σ)
}
}
None
}
}
pub struct PathApproxSteinerTreeSolver {
root: TypeTerm,
leaves: Vec< TypeTerm >
}
impl PathApproxSteinerTreeSolver {
pub fn new(
root: TypeTerm,
leaves: Vec<TypeTerm>
) -> Self {
PathApproxSteinerTreeSolver {
root, leaves
}
}
pub fn solve<M: Morphism + Clone>(self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
let mut tree = Vec::<MorphismType>::new();
for goal in self.leaves {
// try to find shortest path from root to current leaf
if let Some(new_path) = morphisms.find_morphism_path(
MorphismType {
src_type: self.root.clone(),
dst_type: goal.clone()
}
) {
// reduce new path so that it does not collide with any existing path
let mut src_type = self.root.clone();
let mut new_path_iter = new_path.into_iter().peekable();
// check all existing nodes..
for mt in tree.iter() {
// assert!( mt.src_type == &src_type );
if let Some(t) = new_path_iter.peek() {
if &mt.dst_type == t {
// eliminate this node from new path
src_type = new_path_iter.next().unwrap().clone();
}
} else {
break;
}
}
for dst_type in new_path_iter {
tree.push(MorphismType {
src_type: src_type.clone(),
dst_type: dst_type.clone()
});
src_type = dst_type;
}
} else {
eprintln!("could not find path\nfrom {:?}\nto {:?}", &self.root, &goal);
return None;
}
}
Some(SteinerTree {
weight: 0,
goals: vec![],
edges: tree
})
}
}
/* given a representation tree with the available
* representations `src_types`, try to find
* a sequence of morphisms that spans all
* representations in `dst_types`.
*/
pub struct SteinerTreeProblem {
src_types: Vec< TypeTerm >,
queue: Vec< SteinerTree >
}
impl SteinerTreeProblem {
pub fn new(
src_types: Vec< TypeTerm >,
dst_types: Vec< TypeTerm >
) -> Self {
SteinerTreeProblem {
src_types: src_types.into_iter().map(|t| t.normalize()).collect(),
queue: vec![
SteinerTree {
weight: 0,
goals: dst_types.into_iter().map(|t| t.normalize()).collect(),
edges: Vec::new()
}
]
}
}
pub fn next(&mut self) -> Option< SteinerTree > {
eprintln!("queue size = {}", self.queue.len());
/* FIXME: by giving the highest priority to
* candidate tree with the least remaining goals,
* the optimality of the search algorithm
* is probably destroyed, but it dramatically helps
* to tame the combinatorical explosion in this algorithm.
*/
self.queue.sort_by(|t1, t2|
if t1.goals.len() < t2.goals.len() {
std::cmp::Ordering::Greater
} else if t1.goals.len() == t2.goals.len() {
if t1.weight < t2.weight {
std::cmp::Ordering::Greater
} else {
std::cmp::Ordering::Less
}
} else {
std::cmp::Ordering::Less
}
);
self.queue.pop()
}
/*
pub fn solve_approx_path<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
if let Some(master) = self.src_types.first() {
}
}
*/
pub fn solve_bfs<M: Morphism + Clone>(&mut self, morphisms: &MorphismBase<M>) -> Option< SteinerTree > {
// take the currently smallest tree and extend it by one step
while let Some( mut current_tree ) = self.next() {
// check if current tree is a solution
if current_tree.goals.len() == 0 {
return Some(current_tree);
}
// get all vertices spanned by this tree
let mut current_nodes = self.src_types.clone();
for e in current_tree.edges.iter() {
current_nodes.push( e.dst_type.clone() );
}
// extend the tree by one edge and add it to the queue
for src_type in current_nodes {
for (dst_halo, dst_ty, σ) in morphisms.enum_morphisms_with_subtyping(&src_type) {
let dst_type = TypeTerm::Ladder(vec![dst_halo, dst_ty]).normalize();
if current_tree.contains( &dst_type ).is_none() {
let mut new_tree = current_tree.clone();
{
let src_type = src_type.clone();
let dst_type = dst_type.clone();
new_tree.add_edge(MorphismType { src_type, dst_type }.normalize());
}
self.queue.push( new_tree );
}
}
}
}
None
}
}
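Aside: a hypothetical sketch of the path-approximation solver, spanning several target representations from one root; the dictionary and morphism base are the ones built in src/test/morphism.rs below:

fn steiner_sketch() {
    let (mut dict, base) = morphism_test_setup();
    let solver = PathApproxSteinerTreeSolver::new(
        dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
        vec![
            dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap(),
            dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
        ]
    );
    if let Some(tree) = solver.solve(&base) {
        for edge in tree.into_edges() {
            eprintln!("{} --> {}", dict.unparse(&edge.src_type), dict.unparse(&edge.dst_type));
        }
    }
}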

src/sugar.rs (new file, 114 lines)

@@ -0,0 +1,114 @@
use {
crate::{TypeTerm, TypeID, parser::ParseLadderType}
};
#[derive(Clone, PartialEq)]
pub enum SugaredTypeTerm {
TypeID(TypeID),
Num(i64),
Char(char),
Univ(Box< SugaredTypeTerm >),
Spec(Vec< SugaredTypeTerm >),
Func(Vec< SugaredTypeTerm >),
Morph(Vec< SugaredTypeTerm >),
Ladder(Vec< SugaredTypeTerm >),
Struct(Vec< SugaredTypeTerm >),
Enum(Vec< SugaredTypeTerm >),
Seq(Vec< SugaredTypeTerm >)
}
impl TypeTerm {
pub fn sugar(self: TypeTerm, dict: &mut impl crate::TypeDict) -> SugaredTypeTerm {
match self {
TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
TypeTerm::Char(c) => SugaredTypeTerm::Char(c),
TypeTerm::App(args) => if let Some(first) = args.first() {
if first == &dict.parse("Func").unwrap() {
SugaredTypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Morph").unwrap() {
SugaredTypeTerm::Morph( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Struct").unwrap() {
SugaredTypeTerm::Struct( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Enum").unwrap() {
SugaredTypeTerm::Enum( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Seq").unwrap() {
SugaredTypeTerm::Seq( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Spec").unwrap() {
SugaredTypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Univ").unwrap() {
SugaredTypeTerm::Univ(Box::new(
SugaredTypeTerm::Spec(
args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
)
))
}
else {
SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
}
} else {
SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
},
TypeTerm::Ladder(rungs) =>
SugaredTypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
}
}
}
impl SugaredTypeTerm {
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> TypeTerm {
match self {
SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
SugaredTypeTerm::Char(c) => TypeTerm::Char(c),
SugaredTypeTerm::Univ(t) => t.desugar(dict),
SugaredTypeTerm::Spec(ts) => TypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
SugaredTypeTerm::Ladder(ts) => TypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
SugaredTypeTerm::Func(ts) => TypeTerm::App(
std::iter::once( dict.parse("Func").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Morph(ts) => TypeTerm::App(
std::iter::once( dict.parse("Morph").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Struct(ts) => TypeTerm::App(
std::iter::once( dict.parse("Struct").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Enum(ts) => TypeTerm::App(
std::iter::once( dict.parse("Enum").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Seq(ts) => TypeTerm::App(
std::iter::once( dict.parse("Seq").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
}
}
pub fn is_empty(&self) -> bool {
match self {
SugaredTypeTerm::TypeID(_) => false,
SugaredTypeTerm::Num(_) => false,
SugaredTypeTerm::Char(_) => false,
SugaredTypeTerm::Univ(t) => t.is_empty(),
SugaredTypeTerm::Spec(ts) |
SugaredTypeTerm::Ladder(ts) |
SugaredTypeTerm::Func(ts) |
SugaredTypeTerm::Morph(ts) |
SugaredTypeTerm::Struct(ts) |
SugaredTypeTerm::Enum(ts) |
SugaredTypeTerm::Seq(ts) => {
ts.iter().fold(true, |s,t|s&&t.is_empty())
}
}
}
}
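Aside: a sketch of the sugaring round trip, using only the constructors special-cased above (hypothetical example; parse/sugar/desugar signatures as shown in this diff):

fn sugar_roundtrip() {
    let mut dict = BimapTypeDict::new();
    let t = dict.parse("<Func <Seq Char> UInt64>").unwrap();
    let s = t.clone().sugar(&mut dict);    // -> SugaredTypeTerm::Func([...])
    let back = s.desugar(&mut dict);       // re-attaches the "Func" head
    assert_eq!(t, back);
}

Note that Univ is the exception: desugar drops the quantifier wrapper, so that constructor does not round-trip exactly.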

src/term.rs

@@ -57,7 +57,7 @@ impl TypeTerm {
pub fn repr_as(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
match self {
TypeTerm::Ladder(rungs) => {
rungs.push(t.into());
rungs.push(t.into());
}
_ => {

src/test/mod.rs

@@ -7,4 +7,5 @@ pub mod pnf;
pub mod subtype;
pub mod substitution;
pub mod unification;
pub mod morphism;

src/test/morphism.rs (new file, 161 lines)

@@ -0,0 +1,161 @@
use {
crate::{dict::*, morphism::*, steiner_tree::*, TypeTerm}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, Debug, PartialEq)]
struct DummyMorphism(MorphismType);
impl Morphism for DummyMorphism {
fn get_type(&self) -> MorphismType {
self.0.clone().normalize()
}
fn list_map_morphism(&self, list_typeid: TypeID) -> Option<DummyMorphism> {
Some(DummyMorphism(MorphismType {
src_type: TypeTerm::App(vec![
TypeTerm::TypeID( list_typeid ),
self.0.src_type.clone()
]),
dst_type: TypeTerm::App(vec![
TypeTerm::TypeID( list_typeid ),
self.0.dst_type.clone()
])
}))
}
}
fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
let mut dict = BimapTypeDict::new();
let mut base = MorphismBase::<DummyMorphism>::new( dict.add_typename("Seq".into()) );
dict.add_varname("Radix".into());
dict.add_varname("SrcRadix".into());
dict.add_varname("DstRadix".into());
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
dst_type: dict.parse("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap()
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap(),
dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap()
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap()
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~_2^64~machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~_2^64~machine.UInt64>").unwrap()
})
);
(dict, base)
}
#[test]
fn test_morphism_path() {
let (mut dict, mut base) = morphism_test_setup();
assert_eq!(
base.find_morphism_path(MorphismType {
src_type: dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dst_type: dict.parse(" ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
}),
Some(
vec![
dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse(" ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse(" ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse(" ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse(" ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
]
)
);
assert_eq!(
base.find_morphism_path(MorphismType {
src_type: dict.parse("Symbol ~ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dst_type: dict.parse("Symbol ~ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
}),
Some(
vec![
dict.parse("Symbol ~ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
dict.parse("Symbol ~ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse("Symbol ~ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse("Symbol ~ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse("Symbol ~ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ _2^64 ~ machine.UInt64>").unwrap().normalize(),
dict.parse("Symbol ~ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
]
)
);
assert_eq!(
base.find_morphism_with_subtyping(
&MorphismType {
src_type: dict.parse("Symbol ~ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dst_type: dict.parse("Symbol ~ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>").unwrap()
}
),
Some((
DummyMorphism(MorphismType{
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
dst_type: dict.parse("<Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap()
}),
dict.parse("Symbol ~ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>>").unwrap(),
vec![
(dict.get_typeid(&"Radix".into()).unwrap(),
dict.parse("10").unwrap())
].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
))
);
}
#[test]
fn test_steiner_tree() {
let (mut dict, mut base) = morphism_test_setup();
let mut steiner_tree_problem = SteinerTreeProblem::new(
// source reprs
vec![
dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
],
// destination reprs
vec![
dict.parse(" ~ <PosInt 2 BigEndian> ~ <Seq <Digit 2> ~ Char>").unwrap(),
dict.parse(" ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dict.parse(" ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
]
);
if let Some(solution) = steiner_tree_problem.solve_bfs( &base ) {
for e in solution.edges.iter() {
eprintln!(" :: {}\n--> {}", dict.unparse(&e.src_type), dict.unparse(&e.dst_type));
}
} else {
eprintln!("no solution");
}
}

src/unparser.rs

@@ -2,8 +2,12 @@ use crate::{dict::*, term::*};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl TypeDict {
pub fn unparse(&self, t: &TypeTerm) -> String {
pub trait UnparseLadderType {
fn unparse(&self, t: &TypeTerm) -> String;
}
impl<T: TypeDict> UnparseLadderType for T {
fn unparse(&self, t: &TypeTerm) -> String {
match t {
TypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
TypeTerm::Num(n) => format!("{}", n),