Compare commits
5 commits
a52f38dadf
...
6a5c27cfba
Author | SHA1 | Date | |
---|---|---|---|
6a5c27cfba | |||
64b9d98a1f | |||
fcd58baec6 | |||
6c574d620f | |||
02d8815acd |
8 changed files with 477 additions and 2 deletions
|
@ -2,7 +2,7 @@ use crate::bimap::Bimap;
|
|||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
/// Identifier of a type-level symbol.
///
/// `Fun` names a type constructor / function symbol, `Var` names a
/// unification variable; both carry a `u64` key (presumably handed out
/// by the type dictionary — see `dict::TypeDict` usage in the tests).
#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
pub enum TypeID {
    Fun(u64),
    Var(u64)
}
|
@ -7,8 +7,10 @@ pub mod parser;
|
|||
pub mod unparser;
|
||||
pub mod curry;
|
||||
pub mod lnf;
|
||||
pub mod pnf;
|
||||
pub mod subtype;
|
||||
pub mod unification;
|
||||
pub mod morphism;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
|
202
src/morphism.rs
Normal file
202
src/morphism.rs
Normal file
|
@ -0,0 +1,202 @@
|
|||
use {
|
||||
crate::{
|
||||
TypeTerm, TypeID,
|
||||
unification::UnificationProblem,
|
||||
},
|
||||
std::collections::HashMap
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub struct MorphismType {
|
||||
pub src_type: TypeTerm,
|
||||
pub dst_type: TypeTerm,
|
||||
}
|
||||
|
||||
pub struct MorphismBase<Morphism: Clone> {
|
||||
morphisms: Vec< (MorphismType, Morphism) >,
|
||||
list_typeid: TypeID
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl MorphismType {
|
||||
fn normalize(self) -> Self {
|
||||
MorphismType {
|
||||
src_type: self.src_type.normalize(),
|
||||
dst_type: self.dst_type.normalize()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl<Morphism: Clone> MorphismBase<Morphism> {
|
||||
pub fn new(list_typeid: TypeID) -> Self {
|
||||
MorphismBase {
|
||||
morphisms: Vec::new(),
|
||||
list_typeid
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_morphism(&mut self, morph_type: MorphismType, morphism: Morphism) {
|
||||
self.morphisms.push( (morph_type.normalize(), morphism) );
|
||||
}
|
||||
|
||||
pub fn enum_morphisms(&self, src_type: &TypeTerm)
|
||||
-> Vec< (HashMap<TypeID, TypeTerm>, TypeTerm) >
|
||||
{
|
||||
let mut dst_types = Vec::new();
|
||||
|
||||
// first enumerate all "direct" morphisms,
|
||||
for (ty,m) in self.morphisms.iter() {
|
||||
if let Ok(σ) = crate::unification::unify(
|
||||
&ty.src_type,
|
||||
&src_type.clone().normalize()
|
||||
) {
|
||||
let dst_type =
|
||||
ty.dst_type.clone()
|
||||
.apply_substitution(
|
||||
&|x| σ.get(x).cloned()
|
||||
)
|
||||
.clone();
|
||||
|
||||
dst_types.push( (σ, dst_type) );
|
||||
}
|
||||
}
|
||||
|
||||
// ..then all "list-map" morphisms.
|
||||
// Check if we have a List type, and if so, see what the Item type is
|
||||
|
||||
// TODO: function for generating fresh variables
|
||||
let item_variable = TypeID::Var(100);
|
||||
|
||||
if let Ok(σ) = crate::unification::unify(
|
||||
&TypeTerm::App(vec![
|
||||
TypeTerm::TypeID(self.list_typeid),
|
||||
TypeTerm::TypeID(item_variable)
|
||||
]),
|
||||
&src_type.clone().param_normalize(),
|
||||
) {
|
||||
let src_item_type = σ.get(&item_variable).unwrap().clone();
|
||||
|
||||
for (γ, dst_item_type) in self.enum_morphisms( &src_item_type ) {
|
||||
let dst_type =
|
||||
TypeTerm::App(vec![
|
||||
TypeTerm::TypeID(self.list_typeid),
|
||||
dst_item_type.clone()
|
||||
.apply_substitution(
|
||||
&|x| γ.get(x).cloned()
|
||||
).clone()
|
||||
]).normalize();
|
||||
|
||||
dst_types.push( (γ.clone(), dst_type) );
|
||||
}
|
||||
}
|
||||
|
||||
dst_types
|
||||
}
|
||||
|
||||
pub fn enum_morphisms_with_subtyping(&self, src_type: &TypeTerm)
|
||||
-> Vec< (TypeTerm, TypeTerm) >
|
||||
{
|
||||
let mut src_lnf = src_type.clone().get_lnf_vec();
|
||||
let mut halo_lnf = vec![];
|
||||
let mut dst_types = Vec::new();
|
||||
|
||||
while src_lnf.len() > 0 {
|
||||
let src_type = TypeTerm::Ladder( src_lnf.clone() );
|
||||
let halo_type = TypeTerm::Ladder( halo_lnf.clone() );
|
||||
|
||||
for (σ, t) in self.enum_morphisms( &src_type ) {
|
||||
dst_types.push(
|
||||
(halo_type.clone()
|
||||
.apply_substitution(
|
||||
&|x| σ.get(x).cloned()
|
||||
).clone(),
|
||||
t.clone()
|
||||
.apply_substitution(
|
||||
&|x| σ.get(x).cloned()
|
||||
).clone()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// continue with next supertype
|
||||
halo_lnf.push(src_lnf.remove(0));
|
||||
}
|
||||
|
||||
dst_types
|
||||
}
|
||||
|
||||
/* performs DFS to find a morphism-path for a given type
|
||||
* will return the first matching path, not the shortest
|
||||
*/
|
||||
pub fn find_morphism_path(&self, ty: MorphismType)
|
||||
-> Option< Vec<TypeTerm> >
|
||||
{
|
||||
let ty = ty.normalize();
|
||||
let mut visited = Vec::new();
|
||||
let mut queue = vec![
|
||||
vec![ ty.src_type.clone().normalize() ]
|
||||
];
|
||||
|
||||
while let Some(current_path) = queue.pop() {
|
||||
let current_type = current_path.last().unwrap();
|
||||
|
||||
if ! visited.contains( current_type ) {
|
||||
visited.push( current_type.clone() );
|
||||
|
||||
for (h, t) in self.enum_morphisms_with_subtyping(¤t_type) {
|
||||
let tt = TypeTerm::Ladder( vec![ h, t ] ).normalize();
|
||||
|
||||
if ! visited.contains( &tt ) {
|
||||
let unification_result = crate::unification::unify(&tt, &ty.dst_type);
|
||||
let mut new_path = current_path.clone();
|
||||
|
||||
new_path.push( tt );
|
||||
|
||||
if let Ok(σ) = unification_result {
|
||||
new_path = new_path.into_iter().map(
|
||||
|mut t: TypeTerm| t.apply_substitution(&|x| σ.get(x).cloned()).clone()
|
||||
).collect::<Vec<TypeTerm>>();
|
||||
|
||||
return Some(new_path);
|
||||
} else {
|
||||
queue.push( new_path );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn find_morphism(&self, ty: &MorphismType)
|
||||
-> Option< Morphism > {
|
||||
|
||||
// TODO
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn find_list_map_morphism(&self, item_ty: &MorphismType)
|
||||
-> Option< Morphism > {
|
||||
|
||||
// TODO
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn find_morphism_with_subtyping(&self, ty: &MorphismType)
|
||||
-> Option<( Morphism, TypeTerm, HashMap<TypeID, TypeTerm> )> {
|
||||
|
||||
// TODO
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
113
src/pnf.rs
Normal file
113
src/pnf.rs
Normal file
|
@ -0,0 +1,113 @@
|
|||
use crate::term::TypeTerm;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeTerm {
|
||||
/// transmute type into Parameter-Normal-Form (PNF)
|
||||
///
|
||||
/// Example:
|
||||
/// ```ignore
|
||||
/// <Seq <Digit 10>>~<Seq Char>
|
||||
/// ⇒ <Seq <Digit 10>~Char>
|
||||
/// ```
|
||||
pub fn param_normalize(self) -> Self {
|
||||
match self {
|
||||
TypeTerm::Ladder(mut rungs) => {
|
||||
if rungs.len() > 0 {
|
||||
// normalize all rungs separately
|
||||
for r in rungs.iter_mut() {
|
||||
*r = r.clone().param_normalize();
|
||||
}
|
||||
|
||||
// take top-rung
|
||||
match rungs.remove(0) {
|
||||
TypeTerm::App(params_top) => {
|
||||
let mut params_ladders = Vec::new();
|
||||
let mut tail : Vec<TypeTerm> = Vec::new();
|
||||
|
||||
// append all other rungs to ladders inside
|
||||
// the application
|
||||
for p in params_top {
|
||||
params_ladders.push(vec![ p ]);
|
||||
}
|
||||
|
||||
for r in rungs {
|
||||
match r {
|
||||
TypeTerm::App(mut params_rung) => {
|
||||
if params_rung.len() > 0 {
|
||||
let mut first_param = params_rung.remove(0);
|
||||
|
||||
if first_param == params_ladders[0][0] {
|
||||
for (l, p) in params_ladders.iter_mut().skip(1).zip(params_rung) {
|
||||
l.push(p.param_normalize());
|
||||
}
|
||||
} else {
|
||||
params_rung.insert(0, first_param);
|
||||
tail.push(TypeTerm::App(params_rung));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Ladder(mut rs) => {
|
||||
for r in rs {
|
||||
tail.push(r.param_normalize());
|
||||
}
|
||||
}
|
||||
|
||||
atomic => {
|
||||
tail.push(atomic);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let head = TypeTerm::App(
|
||||
params_ladders.into_iter()
|
||||
.map(
|
||||
|mut l| {
|
||||
l.dedup();
|
||||
match l.len() {
|
||||
0 => TypeTerm::unit(),
|
||||
1 => l.remove(0),
|
||||
_ => TypeTerm::Ladder(l).param_normalize()
|
||||
}
|
||||
}
|
||||
)
|
||||
.collect()
|
||||
);
|
||||
|
||||
if tail.len() > 0 {
|
||||
tail.insert(0, head);
|
||||
TypeTerm::Ladder(tail)
|
||||
} else {
|
||||
head
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Ladder(mut r) => {
|
||||
r.append(&mut rungs);
|
||||
TypeTerm::Ladder(r)
|
||||
}
|
||||
|
||||
atomic => {
|
||||
rungs.insert(0, atomic);
|
||||
TypeTerm::Ladder(rungs)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::App(params) => {
|
||||
TypeTerm::App(
|
||||
params.into_iter()
|
||||
.map(|p| p.param_normalize())
|
||||
.collect())
|
||||
}
|
||||
|
||||
atomic => atomic
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
|
@ -3,6 +3,48 @@ use crate::term::TypeTerm;
|
|||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeTerm {
|
||||
    /// Match `self` (the provided type) against `expected_type` and split
    /// both ladder-normal-forms into `(syntactic_subladder,
    /// provided_reprladder, expected_reprladder)`: the shared syntactic
    /// prefix, and the point from which each side descends into its own
    /// representation. Returns `None` when the first expected rung never
    /// occurs in the provided ladder.
    pub fn find_semantic_subtype_matches(&self, expected_type: &TypeTerm)
    -> Option<(TypeTerm, TypeTerm, TypeTerm)>
    {
        // ladder-normal-form of both sides
        let provided_lnf = self.clone().get_lnf_vec();
        let expected_lnf = expected_type.clone().get_lnf_vec();

        for i in 0..provided_lnf.len() {
            // does the expected ladder start somewhere inside the provided one?
            if provided_lnf[i] == expected_lnf[0] {
                // found first match.
                // now find first mismatch.
                for j in i..usize::min(provided_lnf.len(), i+expected_lnf.len()) {
                    if provided_lnf[j] != expected_lnf[ j-i ] {

                        // NOTE(review): debug output left in from development
                        eprintln!("found match at {}, mismatch at {}", i, j);
                        // rungs above the mismatch are shared syntax; below it
                        // the two sides diverge into their representations
                        let syntactic_subladder = TypeTerm::Ladder( provided_lnf[ 0 .. j ].into_iter().cloned().collect() );
                        let provided_reprladder = TypeTerm::Ladder( provided_lnf[ j .. ].into_iter().cloned().collect() );
                        let expected_reprladder = TypeTerm::Ladder( expected_lnf[ j-i .. ].into_iter().cloned().collect() );
                        return Some((syntactic_subladder, provided_reprladder, expected_reprladder));
                    }
                }

                // NOTE(review): debug output left in from development
                eprintln!("only syntactic subtype");

                // syntactic subtype
                // (no mismatch inside the overlapping range)
                let n = {
                    if provided_lnf.len() + i < expected_lnf.len() {
                        1
                    } else {
                        2
                    }
                };

                // NOTE(review): the index arithmetic below
                // (`provided_lnf.len()-n`, `provided_lnf.len()-n-i`) can
                // underflow for short ladders — TODO confirm intended ranges
                let syntactic_subladder = TypeTerm::Ladder( provided_lnf[ 0 .. provided_lnf.len()-1 ].into_iter().cloned().collect() );
                let provided_reprladder = TypeTerm::Ladder( provided_lnf[ provided_lnf.len()-n .. ].into_iter().cloned().collect() );
                let expected_reprladder = TypeTerm::Ladder( expected_lnf[ provided_lnf.len()-n-i .. ].into_iter().cloned().collect() );
                return Some((syntactic_subladder, provided_reprladder, expected_reprladder));
            }
        }

        None
    }
|
||||
|
||||
// returns ladder-step of first match and provided representation-type
|
||||
pub fn is_semantic_subtype_of(&self, expected_type: &TypeTerm) -> Option<(usize, TypeTerm)> {
|
||||
let provided_lnf = self.clone().get_lnf_vec();
|
||||
|
|
|
@ -3,7 +3,9 @@ pub mod lexer;
|
|||
pub mod parser;
|
||||
pub mod curry;
|
||||
pub mod lnf;
|
||||
pub mod pnf;
|
||||
pub mod subtype;
|
||||
pub mod substitution;
|
||||
pub mod unification;
|
||||
pub mod morphism;
|
||||
|
||||
|
|
73
src/test/morphism.rs
Normal file
73
src/test/morphism.rs
Normal file
|
@ -0,0 +1,73 @@
|
|||
use {
|
||||
crate::{dict::*, morphism::*}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[test]
fn test_morphism_path() {
    let mut dict = TypeDict::new();
    let mut base = MorphismBase::<u64>::new( dict.add_typename("Seq".into()) );

    // declare the type variables used by the morphism types below
    for var in ["Radix", "SrcRadix", "DstRadix"].iter() {
        dict.add_varname((*var).into());
    }

    // (source type, destination type, morphism value) triples
    let entries = [
        ("<Digit Radix> ~ Char",
         "<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64",
         11),
        ("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64",
         "<Digit Radix> ~ Char",
         22),
        ("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>",
         "ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>",
         333),
        ("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>",
         "ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>",
         444),
        ("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>",
         "ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>",
         555),
    ];
    for (src, dst, morph) in entries.iter() {
        base.add_morphism(
            MorphismType {
                src_type: dict.parse(src).unwrap(),
                dst_type: dict.parse(dst).unwrap()
            },
            *morph
        );
    }

    // DFS from decimal/BigEndian/Char to hexadecimal/BigEndian/Char
    let path = base.find_morphism_path(MorphismType {
        src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
        dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
    });

    // expected: chars→ints, reverse, convert radix, reverse, ints→chars
    let expected_path: Vec<_> = [
        "ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>",
        "ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>",
        "ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>",
        "ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>",
        "ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>",
        "ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>",
    ].iter()
     .map(|s| dict.parse(s).unwrap().normalize())
     .collect();

    assert_eq!( path, Some(expected_path) );
}
|
||||
|
41
src/test/pnf.rs
Normal file
41
src/test/pnf.rs
Normal file
|
@ -0,0 +1,41 @@
|
|||
use crate::dict::TypeDict;
|
||||
|
||||
#[test]
fn test_param_normalize() {
    let mut dict = TypeDict::new();

    // (already-normalized form, input to normalize) pairs.
    // Fix relative to the original: the misspelled expect message
    // "parse errror" is corrected to "parse error".
    let cases = [
        ("A~B~C", "A~B~C"),
        ("<A B>~C", "<A B>~C"),
        ("<A B~C>", "<A B>~<A C>"),
        ("<A B~C D~E>", "<A B D>~<A C D>~<A C E>"),
        ("<Seq <Digit 10>~Char>", "<Seq <Digit 10>>~<Seq Char>"),
        ("<A <B C~D~E> F~G H H>",
         "<A <B C> F H H>
          ~<A <B D> F H H>
          ~<A <B E> F H H>
          ~<A <B E> G H H>"),
    ];

    for (expected, input) in cases.iter() {
        assert_eq!(
            dict.parse(expected).expect("parse error"),
            dict.parse(input).expect("parse error").param_normalize(),
        );
    }
}
|
||||
|
Loading…
Reference in a new issue