Compare commits

...

15 commits

Author SHA1 Message Date
cb9b62b00c
add test with mixed constraints 2025-06-04 23:03:45 +02:00
0cd80c16d5
initial trait type satisfaction 2025-06-03 00:00:32 +02:00
e8615aa42b
initially add bounds to MorphismType
.. but this will require scoped variables via debruijn indices first
2025-06-02 22:22:04 +02:00
bf5b2447be
terms: change Morph type to have src/dst instead of Vec 2025-06-02 14:18:16 +02:00
8cb3b1b8ec
move add_upper/lower_bound into subtype module 2025-06-02 00:27:09 +02:00
936ea9a649
split up constraint system unit tests 2025-06-02 00:26:40 +02:00
17ba81c817
split constraint system into files 2025-06-01 18:05:56 +02:00
a1dd6a5121
fix halo type in subtype-constraint 2025-06-01 18:04:38 +02:00
c8b2ffae95
add test to break halo type 2025-06-01 18:04:38 +02:00
10947ce791
rename term&morphism structs to make Sugared* variant the default
- SugaredUnification -> ConstraintSystem
- SugaredTypeTerm -> TypeTerm
- SugaredMorphismType -> MorphismType
- etc.
2025-06-01 18:04:38 +02:00
7b25b4472d
MorphismType: add strip_common_rungs() 2025-06-01 18:04:38 +02:00
c75cf43900
terms: rewrite of normalize (PNF, sugared variant)
- fixes error in previously added test
2025-06-01 18:04:38 +02:00
c8b3e4f4a2
add failing test (failed to find morphism instance) 2025-06-01 18:04:38 +02:00
646dfcfc2a
rewrite path search & unification for Sugared Terms
- deprecates the old term struct
- MorphismType: add apply_subst()
- morphism instance: fix apply subst
- SugaredMorphismType::Struct add src/dst repr
- find_morphism_path(): always advance with direct morph, even if complex decomposition from goal exists
2025-06-01 18:04:38 +02:00
fd1515c0fb
add SugaredStructMember & SugaredVariantEnum 2025-06-01 18:04:38 +02:00
38 changed files with 3762 additions and 1837 deletions

@@ -1,6 +1,6 @@
# lib-laddertypes
Rust Implementation of Ladder-Types (parsing, unification, rewriting, etc)
Rust Implementation of Ladder-Types (parsing, unification, rewriting, etc)
<hr/>
## Ladder Types
@@ -121,16 +121,15 @@ fn main() {
- [x] (Un-)Parsing
- [x] (De-)Currying
- [x] Unification
- [x] Ladder-Normal-Form
- [x] Parameter-Normal-Form
- [ ] (De)-Sugaring
- [ ] Seq
- [ ] Enum
- [ ] Struct
- [ ] References
- [ ] Function
- [x] Normal-Form
- [x] Pretty Debug
- [ ] Sugared Parser
- [ ] Universal Types, Function Types
- [x] Constraint Solving (Unification, Subtype Satisfaction)
- [x] Morphism Graph
- [x] Complex Morphisms
- [x] Find Shortest Path
- [x] Approximate Steiner Tree
## License
[GPLv3](COPYING)

@@ -0,0 +1,145 @@
use {
crate::{
dict::*, term::TypeTerm, EnumVariant, StructMember,
ConstraintSystem, ConstraintPair, ConstraintError
}
};
impl ConstraintSystem {
pub fn eval_equation(&mut self, unification_pair: ConstraintPair) -> Result<(), ConstraintError> {
match (&unification_pair.lhs, &unification_pair.rhs) {
(TypeTerm::TypeID(TypeID::Var(varid)), t) |
(t, TypeTerm::TypeID(TypeID::Var(varid))) => {
if ! t.contains_var( *varid ) {
self.σ.insert(TypeID::Var(*varid), t.clone());
self.reapply_subst();
Ok(())
} else if t == &TypeTerm::TypeID(TypeID::Var(*varid)) {
Ok(())
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(*varid)), t2: t.clone() })
}
}
(TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
if a1 == a2 { Ok(()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
if n1 == n2 { Ok(()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
if c1 == c2 { Ok(()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) |
(TypeTerm::Spec(a1), TypeTerm::Spec(a2)) => {
if a1.len() == a2.len() {
for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
self.equal_pairs.push(
ConstraintPair {
lhs: x,
rhs: y,
addr: new_addr
});
}
Ok(())
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
(TypeTerm::Seq{ seq_repr: lhs_seq_repr, items: lhs_items },
TypeTerm::Seq { seq_repr: rhs_seq_repr, items: rhs_items })
=> {
let mut new_addr = unification_pair.addr.clone();
new_addr.push(0);
if let Some(rhs_seq_repr) = rhs_seq_repr.as_ref() {
if let Some(lhs_seq_repr) = lhs_seq_repr.as_ref() {
let _seq_repr_ψ = self.eval_equation(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_seq_repr.clone(), rhs: *rhs_seq_repr.clone() })?;
} else {
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
}
}
if lhs_items.len() == rhs_items.len() {
for (i, (lhs_ty, rhs_ty)) in lhs_items.into_iter().zip(rhs_items.into_iter()).enumerate()
{
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
self.equal_pairs.push( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } );
}
Ok(())
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
(TypeTerm::Struct{ struct_repr: lhs_struct_repr, members: lhs_members },
TypeTerm::Struct{ struct_repr: rhs_struct_repr, members: rhs_members })
=> {
let new_addr = unification_pair.addr.clone();
if let Some(rhs_struct_repr) = rhs_struct_repr.as_ref() {
if let Some(lhs_struct_repr) = lhs_struct_repr.as_ref() {
let _struct_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_struct_repr.clone(), rhs: *rhs_struct_repr.clone() })?;
} else {
return Err(ConstraintError{ addr: new_addr.clone(), t1: unification_pair.lhs, t2: unification_pair.rhs });
}
}
if lhs_members.len() == rhs_members.len() {
for (i,
(StructMember{ symbol: lhs_symbol, ty: lhs_ty},
StructMember{ symbol: rhs_symbol, ty: rhs_ty })
) in
lhs_members.into_iter().zip(rhs_members.into_iter()).enumerate()
{
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
self.equal_pairs.push( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } );
}
Ok(())
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
(TypeTerm::Enum{ enum_repr: lhs_enum_repr, variants: lhs_variants },
TypeTerm::Enum{ enum_repr: rhs_enum_repr, variants: rhs_variants })
=> {
let mut new_addr = unification_pair.addr.clone();
if let Some(rhs_enum_repr) = rhs_enum_repr.as_ref() {
if let Some(lhs_enum_repr) = lhs_enum_repr.as_ref() {
let _enum_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_enum_repr.clone(), rhs: *rhs_enum_repr.clone() })?;
} else {
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
}
}
if lhs_variants.len() == rhs_variants.len() {
for (i,
(EnumVariant{ symbol: lhs_symbol, ty: lhs_ty },
EnumVariant{ symbol: rhs_symbol, ty: rhs_ty })
) in
lhs_variants.into_iter().zip(rhs_variants.into_iter()).enumerate()
{
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
self.equal_pairs.push( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } );
}
Ok(())
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
_ => Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
}
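The variable case above binds a variable only when the occurs check passes; everything else is decomposed structurally into further equal-pairs. A minimal sketch of that behaviour through the unify() helper added later in this changeset, assuming the crate is imported as laddertypes and using raw TypeID values in place of dictionary lookups:

use laddertypes::{unify, TypeID, TypeTerm};

#[test]
fn eval_equation_sketch() {
    // X  vs.  <Seq Char>
    // Fun(1) is a hypothetical stand-in for a `Seq` dictionary id
    let x = TypeTerm::TypeID(TypeID::Var(0));
    let rhs = TypeTerm::Spec(vec![
        TypeTerm::TypeID(TypeID::Fun(1)),
        TypeTerm::Char('c'),
    ]);

    // the occurs check passes, so X gets bound in the resulting substitution
    let subst = unify(&x, &rhs).expect("X and <Seq Char> should unify");
    assert!( subst.get(&TypeID::Var(0)).is_some() );

    // X = <Seq X> fails the occurs check and is rejected
    let cyclic = TypeTerm::Spec(vec![ TypeTerm::TypeID(TypeID::Fun(1)), x.clone() ]);
    assert!( unify(&x, &cyclic).is_err() );
}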

@@ -0,0 +1,514 @@
use {
crate::{
dict::*, term::TypeTerm, EnumVariant, StructMember,
ConstraintSystem, ConstraintPair, ConstraintError
}
};
impl ConstraintSystem {
pub fn add_lower_subtype_bound(&mut self, v: u64, new_lower_bound: TypeTerm) -> Result<(),()> {
if new_lower_bound == TypeTerm::TypeID(TypeID::Var(v)) {
return Ok(());
}
if new_lower_bound.contains_var(v) {
// loop
return Err(());
}
if let Some(lower_bound) = self.lower_bounds.get(&v).cloned() {
//eprintln!("var already exists. check max. type");
if let Ok(halo) = self.eval_subtype(
ConstraintPair {
lhs: lower_bound.clone(),
rhs: new_lower_bound.clone(),
addr: vec![]
}
) {
//eprintln!("found more general lower bound");
//eprintln!("set var {}'s lowerbound to {:?}", v, new_lower_bound.clone());
// generalize variable type to supertype
self.lower_bounds.insert(v, new_lower_bound);
Ok(())
} else if let Ok(halo) = self.eval_subtype(
ConstraintPair{
lhs: new_lower_bound,
rhs: lower_bound,
addr: vec![]
}
) {
//eprintln!("OK, is already larger type");
Ok(())
} else {
//eprintln!("violated subtype restriction");
Err(())
}
} else {
//eprintln!("set var {}'s lowerbound to {:?}", v, new_lower_bound.clone());
self.lower_bounds.insert(v, new_lower_bound);
Ok(())
}
}
pub fn add_upper_subtype_bound(&mut self, v: u64, new_upper_bound: TypeTerm) -> Result<(),()> {
if new_upper_bound == TypeTerm::TypeID(TypeID::Var(v)) {
return Ok(());
}
if new_upper_bound.contains_var(v) {
// loop
return Err(());
}
if let Some(upper_bound) = self.upper_bounds.get(&v).cloned() {
if let Ok(_halo) = self.eval_subtype(
ConstraintPair {
lhs: new_upper_bound.clone(),
rhs: upper_bound,
addr: vec![]
}
) {
//println!("found a lower upper bound: {} <= {:?}", v, new_upper_bound);
// found a lower upper bound
self.upper_bounds.insert(v, new_upper_bound);
Ok(())
} else {
//println!("new upper bound violates subtype restriction");
Err(())
}
} else {
//eprintln!("set upper bound: {} <= {:?}", v, new_upper_bound);
self.upper_bounds.insert(v, new_upper_bound);
Ok(())
}
}
pub fn eval_subtype(&mut self, unification_pair: ConstraintPair) -> Result<
// ok: halo type
TypeTerm,
// error
ConstraintError
> {
match (unification_pair.lhs.clone().strip(), unification_pair.rhs.clone().strip()) {
/*
Variables
*/
(TypeTerm::TypeID(TypeID::Var(v)), t) => {
//eprintln!("variable <= t");
if self.add_upper_subtype_bound(v, t.clone()).is_ok() {
Ok(TypeTerm::unit())
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
}
}
(t, TypeTerm::TypeID(TypeID::Var(v))) => {
//eprintln!("t <= variable");
if self.add_lower_subtype_bound(v, t.clone()).is_ok() {
Ok(TypeTerm::unit())
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
}
}
/*
Atoms
*/
(TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
if a1 == a2 { Ok(TypeTerm::unit()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs}) }
}
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
if n1 == n2 { Ok(TypeTerm::unit()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
if c1 == c2 { Ok(TypeTerm::unit()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
/*
Complex Types
*/
(TypeTerm::Seq{ seq_repr: lhs_seq_repr, items: lhs_items },
TypeTerm::Seq { seq_repr: rhs_seq_repr, items: rhs_items })
=> {
let mut new_addr = unification_pair.addr.clone();
new_addr.push(0);
if let Some(rhs_seq_repr) = rhs_seq_repr.as_ref() {
//eprintln!("subtype unify: rhs has seq-repr: {:?}", rhs_seq_repr);
if let Some(lhs_seq_repr) = lhs_seq_repr.as_ref() {
//eprintln!("check if it maches lhs seq-repr: {:?}", lhs_seq_repr);
let _seq_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_seq_repr.clone(), rhs: *rhs_seq_repr.clone() })?;
//eprintln!("..yes!");
} else {
//eprintln!("...but lhs has none.");
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
}
}
let mut new_addr = unification_pair.addr.clone();
new_addr.push(1);
if lhs_items.len() == rhs_items.len() && lhs_items.len() > 0 {
match self.eval_subtype( ConstraintPair { addr: new_addr.clone(), lhs: lhs_items[0].clone(), rhs: rhs_items[0].clone() } ) {
Ok(ψ) => Ok(TypeTerm::Seq {
seq_repr: None, // <<- todo
items: vec![ψ]
}.strip()),
Err(e) => Err(ConstraintError{
addr: new_addr,
t1: e.t1,
t2: e.t2,
})
}
} else {
Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
(TypeTerm::Struct{ struct_repr: lhs_struct_repr, members: lhs_members },
TypeTerm::Struct{ struct_repr: rhs_struct_repr, members: rhs_members })
=> {
let new_addr = unification_pair.addr.clone();
if let Some(rhs_struct_repr) = rhs_struct_repr.as_ref() {
if let Some(lhs_struct_repr) = lhs_struct_repr.as_ref() {
let _struct_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_struct_repr.clone(), rhs: *rhs_struct_repr.clone() })?;
} else {
return Err(ConstraintError{ addr: new_addr.clone(), t1: unification_pair.lhs, t2: unification_pair.rhs });
}
}
if lhs_members.len() == rhs_members.len() {
let mut halo_members = Vec::new();
for (i,
(StructMember{ symbol: lhs_symbol, ty: lhs_ty},
StructMember{ symbol: rhs_symbol, ty: rhs_ty })
) in
lhs_members.into_iter().zip(rhs_members.into_iter()).enumerate()
{
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
let ψ = self.eval_subtype( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } )?;
halo_members.push(StructMember { symbol: lhs_symbol, ty: ψ });
}
Ok(TypeTerm::Struct {
struct_repr: None,
members: halo_members
})
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
(TypeTerm::Enum{ enum_repr: lhs_enum_repr, variants: lhs_variants },
TypeTerm::Enum{ enum_repr: rhs_enum_repr, variants: rhs_variants })
=> {
let mut new_addr = unification_pair.addr.clone();
if let Some(rhs_enum_repr) = rhs_enum_repr.as_ref() {
if let Some(lhs_enum_repr) = lhs_enum_repr.as_ref() {
let _enum_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_enum_repr.clone(), rhs: *rhs_enum_repr.clone() })?;
} else {
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
}
}
if lhs_variants.len() == rhs_variants.len() {
let mut halo_variants = Vec::new();
for (i,
(EnumVariant{ symbol: lhs_symbol, ty: lhs_ty },
EnumVariant{ symbol: rhs_symbol, ty: rhs_ty })
) in
lhs_variants.into_iter().zip(rhs_variants.into_iter()).enumerate()
{
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
let ψ = self.eval_subtype( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } )?;
halo_variants.push(EnumVariant { symbol: lhs_symbol, ty: ψ });
}
Ok(TypeTerm::Enum {
enum_repr: None,
variants: halo_variants
})
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
/*
Ladders
*/
(TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
let mut l1_iter = a1.into_iter().enumerate().rev();
let mut l2_iter = a2.into_iter().rev();
let mut halo_ladder = Vec::new();
while let Some(rhs) = l2_iter.next() {
//eprintln!("take rhs = {:?}", rhs);
if let Some((i, lhs)) = l1_iter.next() {
//eprintln!("take lhs ({}) = {:?}", i, lhs);
let mut addr = unification_pair.addr.clone();
addr.push(i);
//eprintln!("addr = {:?}", addr);
match (lhs.clone(), rhs.clone()) {
(t, TypeTerm::TypeID(TypeID::Var(v))) => {
if self.add_upper_subtype_bound(v,t.clone()).is_ok() {
let mut new_upper_bound_ladder = vec![ t ];
if let Some(next_rhs) = l2_iter.next() {
} else {
// ladder of rhs is empty
// take everything
while let Some((i,t)) = l1_iter.next() {
new_upper_bound_ladder.push(t);
}
}
new_upper_bound_ladder.reverse();
if self.add_upper_subtype_bound(v, TypeTerm::Ladder(new_upper_bound_ladder)).is_ok() {
// ok
} else {
return Err(ConstraintError {
addr,
t1: lhs,
t2: rhs
});
}
} else {
return Err(ConstraintError {
addr,
t1: lhs,
t2: rhs
});
}
}
(lhs, rhs) => {
if let Ok(ψ) = self.eval_subtype(
ConstraintPair {
lhs: lhs.clone(),
rhs: rhs.clone(),
addr:addr.clone(),
}
) {
// ok.
//eprintln!("rungs are subtypes. continue");
halo_ladder.push(ψ);
} else {
return Err(ConstraintError {
addr,
t1: lhs,
t2: rhs
});
}
}
}
} else {
// not a subtype,
return Err(ConstraintError {
addr: vec![],
t1: unification_pair.lhs,
t2: unification_pair.rhs
});
}
}
//eprintln!("left ladder fully consumed");
for (i,t) in l1_iter {
//!("push {} to halo ladder", t.pretty(self.dict,0));
halo_ladder.push(t);
}
halo_ladder.reverse();
Ok(TypeTerm::Ladder(halo_ladder).strip())//.param_normalize())
},
(TypeTerm::Seq { seq_repr, items }, TypeTerm::Spec(mut args)) => {
let mut new_addr = unification_pair.addr.clone();
if args.len() > 1 {
if let Some(seq_repr) = seq_repr {
let repr_rhs = args.remove(0);
let reprψinterface = repr_rhs.get_interface_type();
let mut reprψ = self.eval_subtype(ConstraintPair{
addr: new_addr.clone(),
lhs: seq_repr.as_ref().clone(),
rhs: repr_rhs
})?;
let mut itemsψ = Vec::new();
let mut n_halos_required = 0;
let mut next_arg_with_common_rung = 0;
for (i,(item, arg)) in items.iter().zip(args.iter()).enumerate() {
let mut new_addr = new_addr.clone();
new_addr.push(i);
let ψ = self.eval_subtype(ConstraintPair {
addr: new_addr,
lhs: item.clone(),
rhs: arg.clone()
})?;
if ψ.is_empty() {
itemsψ.push(item.get_interface_type());
} else {
while next_arg_with_common_rung < i {
let x = &mut itemsψ[next_arg_with_common_rung];
*x = TypeTerm::Ladder(vec![
x.clone(),
args[next_arg_with_common_rung].get_interface_type()
]).normalize();
x.apply_subst(&self.σ);
next_arg_with_common_rung += 1;
}
n_halos_required += 1;
itemsψ.push(ψ);
}
}
eprintln!("itemsψ = {:?}", itemsψ);
if n_halos_required > 0 {
reprψ = TypeTerm::Ladder(vec![
reprψ,
reprψinterface
]);
}
Ok(
TypeTerm::Seq {
seq_repr: if reprψ.is_empty() { None }
else { Some(Box::new(reprψ)) },
items: itemsψ
}
)
} else {
Err(ConstraintError {
addr: new_addr,
t1: unification_pair.lhs,
t2: unification_pair.rhs
})
}
} else {
Err(ConstraintError {
addr: unification_pair.addr,
t1: unification_pair.lhs,
t2: unification_pair.rhs
})
}
}
(t, TypeTerm::Ladder(a1)) => {
Err(ConstraintError{ addr: unification_pair.addr, t1: t, t2: TypeTerm::Ladder(a1) })
}
(TypeTerm::Ladder(mut a1), t) => {
if a1.len() > 0 {
let mut new_addr = unification_pair.addr.clone();
new_addr.push( a1.len() - 1 );
if let Ok(halo) = self.eval_subtype(
ConstraintPair {
lhs: a1.pop().unwrap(),
rhs: t.clone(),
addr: new_addr
}
) {
a1.push(halo);
if a1.len() == 1 {
Ok(a1.pop().unwrap())
} else {
Ok(TypeTerm::Ladder(a1).normalize())
}
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::Ladder(a1), t2: t })
}
} else if t == TypeTerm::unit() {
Ok(TypeTerm::unit())
} else {
Err(ConstraintError { addr: unification_pair.addr, t1: TypeTerm::unit(), t2: t })
}
}
/*
Application
*/
(TypeTerm::Spec(a1), TypeTerm::Spec(a2)) => {
if a1.len() == a2.len() {
let mut halo_args = Vec::new();
let mut n_halos_required = 0;
let mut next_arg_with_common_rung = 0;
for (i, (mut x, mut y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
x = x.strip();
// eprintln!("before strip: {:?}", y);
y = y.strip();
// eprintln!("after strip: {:?}", y);
// eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);
match self.eval_subtype(
ConstraintPair {
lhs: x.clone(),
rhs: y.clone(),
addr: new_addr,
}
) {
Ok(halo) => {
if halo.is_empty() {
let mut y = y.clone();
y.apply_subst(&self.σ);
y = y.strip();
let top = y.get_interface_type();
halo_args.push(top);
} else {
//println!("add halo {}", halo.pretty(self.dict, 0));
while next_arg_with_common_rung < i {
let x = &mut halo_args[next_arg_with_common_rung];
*x = TypeTerm::Ladder(vec![
x.clone(),
a2[next_arg_with_common_rung].get_interface_type()
]).normalize();
x.apply_subst(&self.σ);
next_arg_with_common_rung += 1;
}
halo_args.push(halo);
n_halos_required += 1;
}
},
Err(err) => { return Err(err); }
}
}
if n_halos_required > 0 {
Ok(TypeTerm::Spec(halo_args))
} else {
Ok(TypeTerm::unit())
}
} else {
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
_ => Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
}
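In the ladder case above, rungs of the right-hand side are matched from the bottom up and the unmatched upper rungs of the left-hand side come back as the halo type. A hedged sketch via subtype_unify() (Char atoms stand in for named types, the crate is assumed to be imported as laddertypes, and the expected result relies on strip() collapsing singleton ladders as the desugared variant does):

use laddertypes::{subtype_unify, TypeTerm};

#[test]
fn subtype_halo_sketch() {
    // lhs = 'a' ~ 'b' ~ 'c'    rhs = 'b' ~ 'c'
    let lhs = TypeTerm::Ladder(vec![
        TypeTerm::Char('a'), TypeTerm::Char('b'), TypeTerm::Char('c'),
    ]);
    let rhs = TypeTerm::Ladder(vec![
        TypeTerm::Char('b'), TypeTerm::Char('c'),
    ]);

    // 'b' ~ 'c' is matched rung by rung; the unmatched top rung 'a' is the halo
    let (halo, _subst) = subtype_unify(&lhs, &rhs).expect("lhs should be a subtype of rhs");
    assert_eq!( halo, TypeTerm::Char('a') );
}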

@@ -0,0 +1,179 @@
use {
crate::{
dict::*, term::TypeTerm, ConstraintError, ConstraintPair, ConstraintSystem, EnumVariant, StructMember
}, std::ops::Deref
};
impl ConstraintSystem {
/* check if lhs has trait given by rhs
*/
pub fn eval_trait(
&mut self,
pair: ConstraintPair
) -> Result<(), ConstraintError> {
match (pair.lhs.clone().strip(), pair.rhs.clone().strip()) {
// check if at least some rung of the ladder has trait τ
(TypeTerm::Ladder(r1), τ) => {
for (i, rung) in r1.iter().enumerate() {
let mut addr = pair.addr.clone();
addr.push(i);
if self.eval_trait(ConstraintPair{
addr,
lhs: rung.clone(),
rhs: τ.clone()
}).is_ok() {
return Ok(());
}
}
Err(ConstraintError { addr: pair.addr, t1: pair.lhs, t2: pair.rhs })
}
// otherwise check for equality
(TypeTerm::TypeID(TypeID::Var(varid)), t) |
(t, TypeTerm::TypeID(TypeID::Var(varid))) => {
if ! t.contains_var( varid ) {
self.σ.insert(TypeID::Var(varid), t.clone());
self.reapply_subst();
Ok(())
} else if t == TypeTerm::TypeID(TypeID::Var(varid)) {
Ok(())
} else {
Err(ConstraintError{
addr: pair.addr,
t1: TypeTerm::TypeID(TypeID::Var(varid)),
t2: t.clone()
})
}
}
(TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
if a1 == a2 { Ok(()) } else { Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs }) }
}
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
if n1 == n2 { Ok(()) } else { Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs }) }
}
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
if c1 == c2 { Ok(()) } else { Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs }) }
}
(TypeTerm::Spec(a1), TypeTerm::Spec(a2)) => {
if a1.len() == a2.len() {
for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
let mut new_addr = pair.addr.clone();
new_addr.push(i);
self.trait_pairs.push(
ConstraintPair {
lhs: x,
rhs: y,
addr: new_addr
});
}
Ok(())
} else {
Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs })
}
}
(TypeTerm::Seq { seq_repr: lhs_sr, items: lhs_it },
TypeTerm::Seq { seq_repr: rhs_sr, items: rhs_it })
=> {
{
let mut addr = pair.addr.clone();
addr.push(0);
if let Some(rhs_sr) = rhs_sr {
if let Some(lhs_sr) = lhs_sr {
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_sr.deref().clone(), rhs: rhs_sr.deref().clone() });
} else {
return Err(ConstraintError{ addr, t1: TypeTerm::unit(), t2: rhs_sr.deref().clone() });
}
}
}
for (i, (lhs_member, rhs_member)) in lhs_it.into_iter().zip(rhs_it.into_iter()).enumerate() {
let mut addr = pair.addr.clone();
addr.push(0);
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_member, rhs: rhs_member });
}
Ok(())
}
(TypeTerm::Struct { struct_repr: lhs_sr, members: lhs_it },
TypeTerm::Struct { struct_repr: rhs_sr, members: rhs_it })
=> {
{
let mut addr = pair.addr.clone();
addr.push(0);
if let Some(rhs_sr) = rhs_sr {
if let Some(lhs_sr) = lhs_sr {
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_sr.deref().clone(), rhs: rhs_sr.deref().clone() });
} else {
return Err(ConstraintError{ addr, t1: TypeTerm::unit(), t2: rhs_sr.deref().clone() });
}
}
}
for (i, rhs_member) in rhs_it.into_iter().enumerate() {
let mut found = false;
for lhs_member in lhs_it.iter() {
if lhs_member.symbol == rhs_member.symbol {
let mut addr = pair.addr.clone();
addr.push(0);
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_member.ty.clone(), rhs: rhs_member.ty.clone() });
found = true;
break;
}
}
if ! found {
let mut addr = pair.addr.clone();
addr.push(i);
return Err(ConstraintError { addr, t1: TypeTerm::unit(), t2: rhs_member.ty })
}
}
Ok(())
}
(TypeTerm::Enum { enum_repr: lhs_sr, variants: lhs_it },
TypeTerm::Enum { enum_repr: rhs_sr, variants: rhs_it })
=> {
{
let mut addr = pair.addr.clone();
addr.push(0);
if let Some(rhs_sr) = rhs_sr {
if let Some(lhs_sr) = lhs_sr {
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_sr.deref().clone(), rhs: rhs_sr.deref().clone() });
} else {
return Err(ConstraintError{ addr, t1: TypeTerm::unit(), t2: rhs_sr.deref().clone() });
}
}
}
for (i, rhs_member) in rhs_it.into_iter().enumerate() {
let mut found = false;
for lhs_member in lhs_it.iter() {
if lhs_member.symbol == rhs_member.symbol {
let mut addr = pair.addr.clone();
addr.push(0);
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_member.ty.clone(), rhs: rhs_member.ty.clone() });
found = true;
break;
}
}
if ! found {
let mut addr = pair.addr.clone();
addr.push(i);
return Err(ConstraintError { addr, t1: TypeTerm::unit(), t2: rhs_member.ty })
}
}
Ok(())
}
_ => Err(ConstraintError { addr: pair.addr, t1: pair.lhs, t2: pair.rhs })
}
}
}
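The ladder rule above accepts the constraint as soon as any rung of the left-hand ladder satisfies the right-hand side. A small sketch using the trait-pair constructor from this changeset (Char atoms as stand-in types, crate name assumed):

use laddertypes::{ConstraintPair, ConstraintSystem, TypeTerm};

#[test]
fn trait_rung_sketch() {
    // lhs = 'a' ~ 'b' ; the constraint "has trait 'b'" holds because the lower rung matches
    let lhs = TypeTerm::Ladder(vec![ TypeTerm::Char('a'), TypeTerm::Char('b') ]);
    let cs = ConstraintSystem::new_trait(vec![
        ConstraintPair::new( lhs, TypeTerm::Char('b') )
    ]);
    assert!( cs.solve().is_ok() );
}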

@@ -0,0 +1,169 @@
use {
crate::{dict::*, term::TypeTerm, desugared_term::*, EnumVariant, StructMember, Substitution}, std::collections::HashMap
};
pub mod eval_eq;
pub mod eval_sub;
pub mod eval_trait;
pub mod eval_parallel;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct ConstraintError {
pub addr: Vec<usize>,
pub t1: TypeTerm,
pub t2: TypeTerm
}
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct ConstraintPair {
pub addr: Vec<usize>,
pub lhs: TypeTerm,
pub rhs: TypeTerm,
}
impl ConstraintPair {
pub fn new(lhs: TypeTerm, rhs: TypeTerm) -> Self {
ConstraintPair {
lhs,rhs, addr:vec![]
}
}
}
pub struct ConstraintSystem {
σ: HashMap<TypeID, TypeTerm>,
upper_bounds: HashMap< u64, TypeTerm >,
lower_bounds: HashMap< u64, TypeTerm >,
equal_pairs: Vec<ConstraintPair>,
subtype_pairs: Vec<ConstraintPair>,
trait_pairs: Vec<ConstraintPair>,
parallel_pairs: Vec<ConstraintPair>
}
impl ConstraintSystem {
pub fn new(
equal_pairs: Vec<ConstraintPair>,
subtype_pairs: Vec<ConstraintPair>,
trait_pairs: Vec<ConstraintPair>,
parallel_pairs: Vec<ConstraintPair>
) -> Self {
ConstraintSystem {
σ: HashMap::new(),
equal_pairs,
subtype_pairs,
trait_pairs,
parallel_pairs,
upper_bounds: HashMap::new(),
lower_bounds: HashMap::new(),
}
}
pub fn new_eq(eqs: Vec<ConstraintPair>) -> Self {
ConstraintSystem::new( eqs, Vec::new(), Vec::new(), Vec::new() )
}
pub fn new_sub( subs: Vec<ConstraintPair>) -> Self {
ConstraintSystem::new( Vec::new(), subs, Vec::new(), Vec::new() )
}
pub fn new_trait(traits: Vec<ConstraintPair>) -> Self {
ConstraintSystem::new( Vec::new(), Vec::new(), traits, Vec::new() )
}
pub fn new_parallel( parallels: Vec<ConstraintPair>) -> Self {
ConstraintSystem::new(Vec::new(), Vec::new(), Vec::new(), parallels )
}
/// update all values in substitution
pub fn reapply_subst(&mut self) {
let mut new_σ = HashMap::new();
for (v, tt) in self.σ.iter() {
let mut tt = tt.clone();
tt.apply_subst(&self.σ);
//eprintln!("update σ : {:?} --> {:?}", v, tt);
new_σ.insert(v.clone(), tt.normalize());
}
self.σ = new_σ;
}
pub fn solve(mut self) -> Result<(Vec<TypeTerm>, HashMap<TypeID, TypeTerm>), ConstraintError> {
// solve equations
while let Some( mut equal_pair ) = self.equal_pairs.pop() {
equal_pair.lhs.apply_subst(&self.σ);
equal_pair.rhs.apply_subst(&self.σ);
self.eval_equation(equal_pair)?;
}
// solve subtypes
//eprintln!("------ SOLVE SUBTYPES ---- ");
for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
subtype_pair.lhs.apply_subst(&self.σ);
subtype_pair.rhs.apply_subst(&self.σ);
let _halo = self.eval_subtype( subtype_pair.clone() )?.strip();
}
// add variables from subtype bounds
for (var_id, t) in self.upper_bounds.iter() {
// eprintln!("VAR {} upper bound {:?}", var_id, t);
self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
}
for (var_id, t) in self.lower_bounds.iter() {
// eprintln!("VAR {} lower bound {:?}", var_id, t);
self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
}
self.reapply_subst();
//eprintln!("------ MAKE HALOS -----");
let mut halo_types = Vec::new();
for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
subtype_pair.lhs = subtype_pair.lhs.apply_subst(&self.σ).clone();
subtype_pair.rhs = subtype_pair.rhs.apply_subst(&self.σ).clone();
let halo = self.eval_subtype( subtype_pair.clone() )?.strip();
halo_types.push(halo);
}
// solve traits
while let Some( mut trait_pair ) = self.trait_pairs.pop() {
trait_pair.lhs.apply_subst(&self.σ);
trait_pair.rhs.apply_subst(&self.σ);
self.eval_trait(trait_pair)?;
}
Ok((halo_types, self.σ))
}
}
pub fn unify(
t1: &TypeTerm,
t2: &TypeTerm
) -> Result<HashMap<TypeID, TypeTerm>, ConstraintError> {
let unification = ConstraintSystem::new_eq(vec![ ConstraintPair{ lhs: t1.clone(), rhs: t2.clone(), addr:vec![] } ]);
Ok(unification.solve()?.1)
}
pub fn subtype_unify(
t1: &TypeTerm,
t2: &TypeTerm
) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), ConstraintError> {
let unification = ConstraintSystem::new_sub(vec![ ConstraintPair{ lhs: t1.clone(), rhs: t2.clone(), addr:vec![] } ]);
unification.solve().map( |(halos,σ)| ( halos.first().cloned().unwrap_or(TypeTerm::unit()), σ) )
}
pub fn parallel_unify(
t1: &TypeTerm,
t2: &TypeTerm
) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), ConstraintError> {
let unification = ConstraintSystem::new_parallel(vec![ ConstraintPair{ lhs: t1.clone(), rhs: t2.clone(), addr:vec![] } ]);
unification.solve().map( |(halos,σ)| ( halos.first().cloned().unwrap_or(TypeTerm::unit()), σ) )
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
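solve() works through the equal-pairs first, then the subtype-pairs (recording upper/lower bounds for variables), exports those bounds into the substitution, recomputes the halos, and finally checks the trait-pairs. A sketch of how a lower bound surfaces through subtype_unify() (crate name assumed):

use laddertypes::{subtype_unify, TypeID, TypeTerm};

#[test]
fn lower_bound_sketch() {
    // 'a' <= X : eval_subtype records 'a' as a lower bound of X,
    // and solve() exports that bound into the substitution
    let (halo, subst) = subtype_unify(
        &TypeTerm::Char('a'),
        &TypeTerm::TypeID(TypeID::Var(0))
    ).expect("constraint should be solvable");

    assert_eq!( subst.get(&TypeID::Var(0)), Some(&TypeTerm::Char('a')) );
    assert_eq!( halo, TypeTerm::unit() );
}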

@@ -1,12 +1,12 @@
use crate::term::*;
use crate::desugared_term::*;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl TypeTerm {
impl DesugaredTypeTerm {
/// transform term to have at max 2 entries in Application list
pub fn curry(self) -> TypeTerm {
pub fn curry(self) -> DesugaredTypeTerm {
match self {
TypeTerm::App(args) => {
DesugaredTypeTerm::App(args) => {
if args.len() >= 2 {
let mut old_args = args.into_iter();
let mut new_args = vec![
@@ -16,19 +16,19 @@ impl TypeTerm {
for x in old_args {
new_args = vec![
TypeTerm::App(new_args),
DesugaredTypeTerm::App(new_args),
x
];
}
TypeTerm::App(new_args)
DesugaredTypeTerm::App(new_args)
} else {
TypeTerm::App(args)
DesugaredTypeTerm::App(args)
}
}
TypeTerm::Ladder(rungs) => {
TypeTerm::Ladder(rungs.into_iter().map(|r| r.curry()).collect())
DesugaredTypeTerm::Ladder(rungs) => {
DesugaredTypeTerm::Ladder(rungs.into_iter().map(|r| r.curry()).collect())
}
_ => self
@@ -38,11 +38,11 @@ impl TypeTerm {
/// summarize all curried applications into one vec
pub fn decurry(self) -> Self {
match self {
TypeTerm::App(mut args) => {
DesugaredTypeTerm::App(mut args) => {
if args.len() > 0 {
let a0 = args.remove(0).decurry();
match a0 {
TypeTerm::App(sub_args) => {
DesugaredTypeTerm::App(sub_args) => {
for (i,x) in sub_args.into_iter().enumerate() {
args.insert(i, x);
}
@@ -50,10 +50,10 @@ impl TypeTerm {
other => { args.insert(0, other); }
}
}
TypeTerm::App(args)
DesugaredTypeTerm::App(args)
}
TypeTerm::Ladder(args) => {
TypeTerm::Ladder(args.into_iter().map(|a| a.decurry()).collect())
DesugaredTypeTerm::Ladder(args) => {
DesugaredTypeTerm::Ladder(args.into_iter().map(|a| a.decurry()).collect())
}
_ => self
}
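A sketch of the round trip on desugared terms: curry() nests an n-ary application into binary ones, and decurry() flattens it back (Fun/Char values are stand-ins for dictionary ids, crate name assumed):

use laddertypes::{DesugaredTypeTerm, TypeID};

#[test]
fn curry_roundtrip_sketch() {
    // <F 'a' 'b'> as a flat three-element application
    let flat = DesugaredTypeTerm::App(vec![
        DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
        DesugaredTypeTerm::Char('a'),
        DesugaredTypeTerm::Char('b'),
    ]);

    // after curry() every application list has at most two entries;
    // decurry() restores the flat form
    let curried = flat.clone().curry();
    assert_eq!( curried.decurry(), flat );
}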

155 src/desugared_term.rs Normal file
@@ -0,0 +1,155 @@
use crate::TypeID;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum DesugaredTypeTerm {
/* Atomic Terms */
// Base types from dictionary
TypeID(TypeID),
// Literals
Num(i64),
Char(char),
/* Complex Terms */
// Type Parameters
// avoid currying to save space & indirection
App(Vec< DesugaredTypeTerm >),
// Type Ladders
Ladder(Vec< DesugaredTypeTerm >),
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl DesugaredTypeTerm {
pub fn unit() -> Self {
DesugaredTypeTerm::Ladder(vec![])
}
pub fn new(id: TypeID) -> Self {
DesugaredTypeTerm::TypeID(id)
}
pub fn arg(&mut self, t: impl Into<DesugaredTypeTerm>) -> &mut Self {
match self {
DesugaredTypeTerm::App(args) => {
args.push(t.into());
}
_ => {
*self = DesugaredTypeTerm::App(vec![
self.clone(),
t.into()
])
}
}
self
}
pub fn repr_as(&mut self, t: impl Into<DesugaredTypeTerm>) -> &mut Self {
match self {
DesugaredTypeTerm::Ladder(rungs) => {
rungs.push(t.into());
}
_ => {
*self = DesugaredTypeTerm::Ladder(vec![
self.clone(),
t.into()
])
}
}
self
}
pub fn num_arg(&mut self, v: i64) -> &mut Self {
self.arg(DesugaredTypeTerm::Num(v))
}
pub fn char_arg(&mut self, c: char) -> &mut Self {
self.arg(DesugaredTypeTerm::Char(c))
}
pub fn contains_var(&self, var_id: u64) -> bool {
match self {
DesugaredTypeTerm::TypeID(TypeID::Var(v)) => (&var_id == v),
DesugaredTypeTerm::App(args) |
DesugaredTypeTerm::Ladder(args) => {
for a in args.iter() {
if a.contains_var(var_id) {
return true;
}
}
false
}
_ => false
}
}
/* strip away empty ladders
* & unwrap singletons
*/
pub fn strip(self) -> Self {
match self {
DesugaredTypeTerm::Ladder(rungs) => {
let mut rungs :Vec<_> = rungs.into_iter()
.filter_map(|mut r| {
r = r.strip();
if r != DesugaredTypeTerm::unit() {
Some(match r {
DesugaredTypeTerm::Ladder(r) => r,
a => vec![ a ]
})
}
else { None }
})
.flatten()
.collect();
if rungs.len() == 1 {
rungs.pop().unwrap()
} else {
DesugaredTypeTerm::Ladder(rungs)
}
},
DesugaredTypeTerm::App(args) => {
let mut args :Vec<_> = args.into_iter().map(|arg| arg.strip()).collect();
if args.len() == 0 {
DesugaredTypeTerm::unit()
} else if args.len() == 1 {
args.pop().unwrap()
} else {
DesugaredTypeTerm::App(args)
}
}
atom => atom
}
}
pub fn get_interface_type(&self) -> DesugaredTypeTerm {
match self {
DesugaredTypeTerm::Ladder(rungs) => {
if let Some(top) = rungs.first() {
top.get_interface_type()
} else {
DesugaredTypeTerm::unit()
}
}
DesugaredTypeTerm::App(args) => {
DesugaredTypeTerm::App(args.iter().map(|a| a.get_interface_type()).collect())
}
atom => atom.clone()
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
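A sketch of strip() and get_interface_type() on a desugared ladder (crate name assumed):

use laddertypes::{DesugaredTypeTerm, TypeID};

#[test]
fn strip_and_interface_sketch() {
    // F ~ () ~ 'c' : a ladder containing an empty rung
    let t = DesugaredTypeTerm::Ladder(vec![
        DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
        DesugaredTypeTerm::unit(),
        DesugaredTypeTerm::Char('c'),
    ]);

    // strip() removes the empty rung (and would unwrap a singleton ladder)
    assert_eq!( t.clone().strip(), DesugaredTypeTerm::Ladder(vec![
        DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
        DesugaredTypeTerm::Char('c'),
    ]));

    // the interface type of a ladder is its topmost rung
    assert_eq!( t.get_interface_type(), DesugaredTypeTerm::TypeID(TypeID::Fun(0)) );
}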

@@ -25,11 +25,11 @@ pub trait TypeDict : Send + Sync {
}
}
fn get_typeid_creat(&mut self, tn: &String) -> TypeID {
if let Some(id) = self.get_typeid(tn) {
fn get_typeid_creat(&mut self, tn: &str) -> TypeID {
if let Some(id) = self.get_typeid(&tn.to_string()) {
id
} else {
self.add_typename(tn.clone())
self.add_typename(tn.to_string())
}
}
}

@@ -1,18 +1,18 @@
#![allow(mixed_script_confusables)]
pub mod bimap;
pub mod dict;
pub mod term;
pub mod substitution;
pub mod lexer;
pub mod parser;
pub mod parser; // todo sugared variant
pub mod curry; // todo: sugared variant
//pub mod subtype; // deprecated
pub mod unparser;
pub mod sugar;
pub mod curry;
pub mod lnf;
pub mod desugared_term; // deprecated
pub mod term;
pub mod pnf;
pub mod subtype;
pub mod unification;
pub mod substitution;
pub mod constraint_system;
pub mod morphism;
pub mod morphism_base;
pub mod morphism_path;
@@ -25,9 +25,11 @@ mod pretty;
pub use {
dict::*,
term::*,
desugared_term::*,
substitution::*,
sugar::*,
unification::*,
morphism::*
term::*,
constraint_system::*,
morphism::*,
morphism_base::*,
morphism_path::*,
};

@@ -1,119 +0,0 @@
use crate::term::TypeTerm;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl TypeTerm {
/// does the type contain ladders (false) or is it 'flat' (true) ?
///
/// Example:
/// ```<Seq <Digit 10>>``` is flat, but
/// ```<Digit 10>~Char``` is not
pub fn is_flat(&self) -> bool {
match self {
TypeTerm::TypeID(_) => true,
TypeTerm::Num(_) => true,
TypeTerm::Char(_) => true,
TypeTerm::App(args) => args.iter().fold(true, |s,x| s && x.is_flat()),
TypeTerm::Ladder(_) => false
}
}
/// transmute type into Ladder-Normal-Form (LNF)
///
/// Example:
/// ```ignore
/// <Seq <Digit 10>~Char>
/// ⇒ <Seq <Digit 10>>~<Seq Char>
/// ```
pub fn normalize(self) -> Self {
let mut new_ladder = Vec::<TypeTerm>::new();
match self {
TypeTerm::Ladder(args) => {
for x in args.into_iter() {
match x.normalize() {
TypeTerm::Ladder(gs) => {
for g in gs {
new_ladder.push(g);
}
}
g => {
new_ladder.push(g);
}
}
}
}
TypeTerm::App(args) => {
let args_iter = args.into_iter();
new_ladder.push( TypeTerm::App(vec![]) );
for arg in args_iter {
match arg.normalize() {
TypeTerm::Ladder(rungs) => {
// duplicate last element for each rung
let l = new_ladder.len();
for _ in 1..rungs.len() {
new_ladder.push( new_ladder.last().unwrap().clone() );
}
for (i,r) in new_ladder.iter_mut().enumerate() {
match r {
TypeTerm::App(al) => {
if i < l {
al.push(rungs[0].clone());
} else {
al.push(rungs[i-l+1].clone());
}
}
_ => unreachable!()
}
}
}
mut other => {
other = other.normalize();
for rung in new_ladder.iter_mut() {
match rung {
TypeTerm::App(al) => {
al.push(other.clone());
}
_ => unreachable!()
}
}
}
}
}
}
atom => {
new_ladder.push(atom);
}
}
match new_ladder.len() {
0 => TypeTerm::unit(),
1 => new_ladder.into_iter().next().unwrap(),
_ => TypeTerm::Ladder( new_ladder )
}
}
/// transmute type into a `Vec` containing
/// all rungs of the type in LNF
///
/// Example:
/// ```<Seq <Digit 10>~Char>``` gives
/// ```ignore
/// vec![ <Seq <Digit 10>>, <Seq Char> ]
/// ```
pub fn get_lnf_vec(self) -> Vec<TypeTerm> {
match self.normalize() {
TypeTerm::Ladder( v ) => {
v
},
flat => vec![ flat ]
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

@@ -1,63 +1,422 @@
use {
crate::{
subtype_unify, sugar::SugaredTypeTerm, unification::UnificationProblem, unparser::*, TypeDict, TypeID, TypeTerm
constraint_system::ConstraintSystem, substitution::Substitution, term::{StructMember, TypeTerm}, unparser::*, EnumVariant, TypeDict, TypeID, VariableConstraint
},
std::{collections::HashMap, u64}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MorphismType {
pub bounds: Vec< VariableConstraint >,
pub src_type: TypeTerm,
pub dst_type: TypeTerm,
pub dst_type: TypeTerm
}
impl MorphismType {
pub fn normalize(self) -> Self {
pub fn strip_common_rungs(&self) -> MorphismType {
match (&self.src_type.clone().strip(), &self.dst_type.clone().strip()) {
(TypeTerm::Ladder(rungs_lhs), TypeTerm::Ladder(rungs_rhs)) => {
let mut lhs_iter = rungs_lhs.iter();
let mut rhs_iter = rungs_rhs.iter();
let mut last = MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::unit(),
dst_type: TypeTerm::unit()
};
while let (Some(lhs_top), Some(rhs_top)) = (lhs_iter.next(), rhs_iter.next()) {
last.src_type = lhs_top.clone();
last.dst_type = rhs_top.clone();
if lhs_top != rhs_top {
let x = MorphismType {
bounds: Vec::new(),
src_type: lhs_top.clone(),
dst_type: rhs_top.clone()
}.strip_common_rungs();
let mut rl : Vec<_> = lhs_iter.cloned().collect();
rl.insert(0, x.src_type);
let mut rr : Vec<_> = rhs_iter.cloned().collect();
rr.insert(0, x.dst_type);
return MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::Ladder(rl),
dst_type: TypeTerm::Ladder(rr)
};
}
}
last
}
(TypeTerm::Spec(args_lhs), TypeTerm::Spec(args_rhs)) => {
let (rl, rr) = args_lhs.iter().zip(args_rhs.iter()).map(
|(al,ar)| MorphismType{
bounds: Vec::new(),
src_type: al.clone(),
dst_type: ar.clone()
}.strip_common_rungs()
)
.fold((vec![], vec![]), |(mut rl, mut rr), x| {
rl.push(x.src_type);
rr.push(x.dst_type);
(rl,rr)
});
MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::Spec(rl),
dst_type: TypeTerm::Spec(rr)
}
}
(TypeTerm::Seq { seq_repr:seq_repr_lhs, items:items_lhs },
TypeTerm::Seq { seq_repr: seq_repr_rhs, items:items_rhs })
=> {
let (rl, rr) = items_lhs.iter().zip(items_rhs.iter()).map(
|(al,ar)| MorphismType{
bounds: Vec::new(),
src_type: al.clone(),
dst_type: ar.clone()
}.strip_common_rungs()
)
.fold((vec![], vec![]), |(mut rl, mut rr), x| {
rl.push(x.src_type);
rr.push(x.dst_type);
(rl,rr)
});
MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::Seq{ seq_repr: seq_repr_lhs.clone(), items: rl },
dst_type: TypeTerm::Seq { seq_repr: seq_repr_rhs.clone(), items: rr }
}
}
(TypeTerm::Struct { struct_repr:struct_repr_lhs, members:members_lhs },
TypeTerm::Struct { struct_repr: struct_repr_rhs, members:members_rhs })
=> {
let mut rl = Vec::new();
let mut rr = Vec::new();
for ar in members_rhs.iter() {
let mut found = false;
for al in members_lhs.iter() {
if al.symbol == ar.symbol {
let x = MorphismType{
bounds: Vec::new(),
src_type: al.ty.clone(),
dst_type: ar.ty.clone()
}.strip_common_rungs();
rl.push( StructMember{
symbol: al.symbol.clone(),
ty: x.src_type
});
rr.push( StructMember{
symbol: ar.symbol.clone(),
ty: x.dst_type
});
found = true;
break;
}
}
if !found {
return MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::Struct { struct_repr: struct_repr_lhs.clone(), members:members_lhs.clone() },
dst_type: TypeTerm::Struct { struct_repr: struct_repr_rhs.clone(), members:members_rhs.clone() }
};
}
}
MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::Struct{ struct_repr: struct_repr_lhs.clone(), members: rl },
dst_type: TypeTerm::Struct{ struct_repr: struct_repr_rhs.clone(), members: rr }
}
}
(TypeTerm::Enum { enum_repr:enum_repr_lhs, variants:variants_lhs },
TypeTerm::Enum { enum_repr: enum_repr_rhs, variants:variants_rhs })
=> {
let mut rl = Vec::new();
let mut rr = Vec::new();
for ar in variants_rhs.iter() {
let mut found = false;
for al in variants_lhs.iter() {
if al.symbol == ar.symbol {
let x = MorphismType {
bounds: self.bounds.clone(),
src_type: al.ty.clone(),
dst_type: ar.ty.clone()
}.strip_common_rungs();
rl.push( EnumVariant{
symbol: al.symbol.clone(),
ty: x.src_type
});
rr.push( EnumVariant{
symbol: ar.symbol.clone(),
ty: x.dst_type
});
found = true;
break;
}
}
if !found {
return MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::Enum { enum_repr: enum_repr_lhs.clone(), variants:variants_lhs.clone() },
dst_type: TypeTerm::Enum { enum_repr: enum_repr_rhs.clone(), variants:variants_rhs.clone() }
};
}
}
MorphismType {
bounds: self.bounds.clone(),
src_type: TypeTerm::Enum{ enum_repr: enum_repr_lhs.clone(), variants: rl },
dst_type: TypeTerm::Enum { enum_repr: enum_repr_rhs.clone(), variants: rr }
}
}
(x,y) => MorphismType { bounds: self.bounds.clone(), src_type: x.clone(), dst_type: y.clone() }
}
}
pub fn apply_subst(&self, σ: &impl Substitution) -> MorphismType {
MorphismType {
src_type: self.src_type.normalize().param_normalize(),
dst_type: self.dst_type.normalize().param_normalize()
bounds: self.bounds.iter().map(|b| b.clone().apply_subst(σ).clone()).collect(),
src_type: self.src_type.clone().apply_subst(σ).clone(),
dst_type: self.dst_type.clone().apply_subst(σ).clone()
}
}
pub fn normalize(&self) -> MorphismType {
MorphismType {
bounds: self.bounds.iter().map(|bound| bound.normalize()).collect(),
src_type: self.src_type.clone().normalize(),
dst_type: self.dst_type.clone().normalize(),
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
pub trait Morphism : Sized {
fn get_type(&self) -> MorphismType;
fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >;
fn weight(&self) -> u64 {
1
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, Debug, PartialEq)]
pub struct MorphismInstance<M: Morphism + Clone> {
pub halo: TypeTerm,
pub m: M,
pub σ: HashMap<TypeID, TypeTerm>
#[derive(Clone, PartialEq, Debug)]
pub enum MorphismInstance<M: Morphism + Clone> {
Primitive{
ψ: TypeTerm,
σ: HashMap<TypeID, TypeTerm>,
morph: M,
},
Chain{
path: Vec<MorphismInstance<M>>
},
MapSeq{
ψ: TypeTerm,
seq_repr: Option<Box<TypeTerm>>,
item_morph: Box<MorphismInstance<M>>,
},
MapStruct{
ψ: TypeTerm,
src_struct_repr: Option<Box<TypeTerm>>,
dst_struct_repr: Option<Box<TypeTerm>>,
member_morph: Vec< (String, MorphismInstance<M>) >
},
MapEnum{
ψ: TypeTerm,
enum_repr: Option<Box<TypeTerm>>,
variant_morph: Vec< (String, MorphismInstance<M>) >
}
}
impl<M: Morphism + Clone> MorphismInstance<M> {
pub fn get_type(&self) -> MorphismType {
MorphismType {
src_type: TypeTerm::Ladder(vec![
self.halo.clone(),
self.m.get_type().src_type.clone()
]).apply_substitution(&self.σ)
.clone(),
dst_type: TypeTerm::Ladder(vec![
self.halo.clone(),
self.m.get_type().dst_type.clone()
]).apply_substitution(&self.σ)
.clone()
impl<M: Morphism + Clone> MorphismInstance<M> {
pub fn get_action_type(&self) -> MorphismType {
self.get_type().strip_common_rungs()
}
pub fn get_type(&self) -> MorphismType {
match self {
MorphismInstance::Primitive { ψ, σ, morph } => {
MorphismType {
bounds: morph.get_type().bounds,
src_type:
TypeTerm::Ladder(vec![
ψ.clone(),
morph.get_type().src_type
.apply_subst(σ).clone()
]).strip(),
dst_type: TypeTerm::Ladder(vec![
ψ.clone(),
morph.get_type().dst_type
.apply_subst(σ).clone()
]).strip(),
}
}
MorphismInstance::Chain { path } => {
if path.len() > 0 {
let s = self.get_subst();
MorphismType {
//bounds: path.iter().map(|m| m.get_type().bounds.iter()).flatten().collect(),
// here we would need to "move up" the remaining variables
bounds: Vec::new(), // <-- fixme: but first implement variable scopes
src_type: path.first().unwrap().get_type().src_type.clone(),
dst_type: path.last().unwrap().get_type().dst_type.clone()
}.apply_subst(&s)
} else {
MorphismType {
bounds: Vec::new(),
src_type: TypeTerm::TypeID(TypeID::Fun(45454)),
dst_type: TypeTerm::TypeID(TypeID::Fun(45454))
}
}
}
MorphismInstance::MapSeq { ψ, seq_repr, item_morph } => {
MorphismType {
bounds: item_morph.get_type().bounds,
src_type: TypeTerm::Ladder(vec![
ψ.clone(),
TypeTerm::Seq{ seq_repr: seq_repr.clone(),
items: vec![ item_morph.get_type().src_type ]}
]),
dst_type: TypeTerm::Ladder(vec![
ψ.clone(),
TypeTerm::Seq{ seq_repr: seq_repr.clone(),
items: vec![ item_morph.get_type().dst_type ]}
])
}
}
MorphismInstance::MapStruct { ψ, src_struct_repr, dst_struct_repr, member_morph } => {
MorphismType {
bounds: Vec::new(), // <-- fixme: same as with chain
src_type: TypeTerm::Ladder(vec![ ψ.clone(),
TypeTerm::Struct{
struct_repr: src_struct_repr.clone(),
members:
member_morph.iter().map(|(symbol, morph)| {
StructMember{ symbol:symbol.clone(), ty: morph.get_type().src_type }
}).collect()
}
]),
dst_type: TypeTerm::Ladder(vec![ ψ.clone(),
TypeTerm::Struct{
struct_repr: dst_struct_repr.clone(),
members: member_morph.iter().map(|(symbol, morph)| {
StructMember { symbol: symbol.clone(), ty: morph.get_type().dst_type}
}).collect()
}
])
}
}
MorphismInstance::MapEnum { ψ, enum_repr, variant_morph } => {
MorphismType {
bounds: Vec::new(), // <-- fixme: same as with chain
src_type: TypeTerm::Ladder(vec![ ψ.clone(),
TypeTerm::Struct{
struct_repr: enum_repr.clone(),
members:
variant_morph.iter().map(|(symbol, morph)| {
StructMember{ symbol:symbol.clone(), ty: morph.get_type().src_type }
}).collect()
}
]),
dst_type: TypeTerm::Ladder(vec![ ψ.clone(),
TypeTerm::Struct{
struct_repr: enum_repr.clone(),
members: variant_morph.iter().map(|(symbol, morph)| {
StructMember { symbol: symbol.clone(), ty: morph.get_type().dst_type}
}).collect()
}
])
}
}
}.normalize()
}
pub fn get_subst(&self) -> std::collections::HashMap< TypeID, TypeTerm > {
match self {
MorphismInstance::Primitive { ψ, σ, morph } => σ.clone(),
MorphismInstance::Chain { path } => {
path.iter().fold(
std::collections::HashMap::new(),
|mut σ, m| {
σ = σ.append(&m.get_subst());
σ
}
)
},
MorphismInstance::MapSeq { ψ, seq_repr, item_morph } => {
item_morph.get_subst()
},
MorphismInstance::MapStruct { ψ, src_struct_repr, dst_struct_repr, member_morph } => {
let mut σ = HashMap::new();
for (symbol, m) in member_morph.iter() {
σ = σ.append(&mut m.get_subst());
}
σ
},
MorphismInstance::MapEnum { ψ, enum_repr, variant_morph } => {
todo!();
HashMap::new()
},
}
}
pub fn apply_subst(&mut self, γ: &std::collections::HashMap< TypeID, TypeTerm >) {
let ty = self.get_type();
match self {
MorphismInstance::Primitive { ψ, σ, morph } => {
ψ.apply_subst(γ);
for (n,t) in σ.iter_mut() {
t.apply_subst(γ);
}
for (n,t) in γ.iter() {
if let TypeID::Var(varid) = n {
if morph.get_type().src_type.apply_subst(σ).contains_var(*varid)
|| morph.get_type().dst_type.apply_subst(σ).contains_var(*varid) {
σ.insert(n.clone(), t.clone());
}
}
}
},
MorphismInstance::Chain { path } => {
for n in path.iter_mut() {
n.apply_subst(γ);
}
}
MorphismInstance::MapSeq { ψ, seq_repr, item_morph } => {
ψ.apply_subst(γ);
item_morph.apply_subst(γ);
}
MorphismInstance::MapStruct { ψ, src_struct_repr, dst_struct_repr, member_morph } => {
for (_,ty) in member_morph {
ty.apply_subst(γ);
}
},
MorphismInstance::MapEnum { ψ, enum_repr, variant_morph } => {
for (_,ty) in variant_morph {
ty.apply_subst(γ);
}
}
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
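A sketch of strip_common_rungs(), which get_action_type() above uses to reduce a morphism type to the rungs that actually change (Char atoms as stand-in types, crate name assumed):

use laddertypes::{MorphismType, TypeTerm};

#[test]
fn strip_common_rungs_sketch() {
    let ty = MorphismType {
        bounds: Vec::new(),
        src_type: TypeTerm::Ladder(vec![ TypeTerm::Char('a'), TypeTerm::Char('b') ]),
        dst_type: TypeTerm::Ladder(vec![ TypeTerm::Char('a'), TypeTerm::Char('c') ]),
    };

    // the shared top rung 'a' is dropped; only the differing tail remains
    let action = ty.strip_common_rungs();
    assert_eq!( action.src_type, TypeTerm::Ladder(vec![ TypeTerm::Char('b') ]) );
    assert_eq!( action.dst_type, TypeTerm::Ladder(vec![ TypeTerm::Char('c') ]) );
}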

@@ -1,26 +1,24 @@
use {
crate::{
subtype_unify, sugar::SugaredTypeTerm, unification::UnificationProblem, unparser::*, TypeDict, TypeID, TypeTerm,
morphism::{MorphismType, Morphism, MorphismInstance}
},
std::{collections::HashMap, u64}
morphism_path::{MorphismPath, ShortestPathProblem},
morphism::{MorphismInstance, Morphism, MorphismType},
TypeTerm, StructMember, TypeDict, TypeID
}, std::io::{Read, Write}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone)]
pub struct MorphismBase<M: Morphism + Clone> {
morphisms: Vec< M >,
seq_types: Vec< TypeTerm >
morphisms: Vec< M >
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl<M: Morphism + Clone> MorphismBase<M> {
pub fn new(seq_types: Vec<TypeTerm>) -> Self {
pub fn new() -> Self {
MorphismBase {
morphisms: Vec::new(),
seq_types
morphisms: Vec::new()
}
}
@@ -28,156 +26,173 @@ impl<M: Morphism + Clone> MorphismBase<M> {
self.morphisms.push( m );
}
pub fn enum_direct_morphisms(&self, src_type: &TypeTerm)
-> Vec< MorphismInstance<M> >
{
let mut dst_types = Vec::new();
for m in self.morphisms.iter() {
if let Ok((halo, σ)) = crate::unification::subtype_unify(
&src_type.clone().param_normalize(),
&m.get_type().src_type.param_normalize(),
) {
dst_types.push(MorphismInstance{ halo, m: m.clone(), σ });
pub fn get_morphism_instance(&self, ty: &MorphismType) -> Option<MorphismInstance<M>> {
if let Some(path) = ShortestPathProblem::new(self, ty.clone()).solve() {
if path.len() == 1 {
Some(path[0].clone())
} else {
Some(MorphismInstance::Chain { path })
}
} else {
None
}
dst_types
}
pub fn enum_map_morphisms(&self, src_type: &TypeTerm)
-> Vec< MorphismInstance<M> > {
let src_type = src_type.clone().param_normalize();
let mut dst_types = Vec::new();
pub fn complex_morphism_decomposition(&self, src_type: &TypeTerm, dst_type: &TypeTerm) -> Option< MorphismInstance<M> > {
let (src_ψ, src_floor) = src_type.get_floor_type();
let (dst_ψ, dst_floor) = dst_type.get_floor_type();
// Check if we have a List type, and if so, see what the Item type is
// TODO: function for generating fresh variables
let item_variable = TypeID::Var(800);
if !dst_ψ.is_empty() {
if !crate::constraint_system::subtype_unify(&src_ψ, &dst_ψ).is_ok() {
return None;
}
}
for seq_type in self.seq_types.iter() {
if let Ok((halo, σ)) = crate::unification::subtype_unify(
&src_type,
&TypeTerm::App(vec![
seq_type.clone(),
TypeTerm::TypeID(item_variable)
])
) {
let src_item_type = σ.get(&item_variable).expect("var not in unificator").clone();
for item_morph_inst in self.enum_morphisms( &src_item_type ) {
match (src_floor, dst_floor) {
(TypeTerm::Struct{ struct_repr: struct_repr_lhs, members: members_lhs},
TypeTerm::Struct { struct_repr: struct_repr_rhs, members: members_rhs })
=> {
// todo: optimization: check if struct repr match
let mut dst_halo_ladder = vec![ halo.clone() ];
if item_morph_inst.halo != TypeTerm::unit() {
dst_halo_ladder.push(
TypeTerm::App(vec![
seq_type.clone().get_lnf_vec().first().unwrap().clone(),
item_morph_inst.halo.clone()
]));
}
let mut member_morph = Vec::new();
let mut failed = false;
let mut necessary = false;
if let Some( map_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
dst_types.push(
MorphismInstance {
halo: TypeTerm::Ladder(dst_halo_ladder).strip().param_normalize(),
m: map_morph,
σ: item_morph_inst.σ
for StructMember{ symbol: symbol_rhs, ty: ty_rhs } in members_rhs.iter() {
let mut found_src_member = false;
for StructMember{ symbol: symbol_lhs, ty: ty_lhs } in members_lhs.iter() {
if symbol_rhs == symbol_lhs {
found_src_member = true;
if let Some(mm) = self.get_morphism_instance(&MorphismType {
bounds: Vec::new(),
src_type: ty_lhs.clone(),
dst_type: ty_rhs.clone()
}) {
if ty_lhs != ty_rhs {
necessary = true;
}
member_morph.push((symbol_lhs.clone(), mm))
} else {
failed = true;
}
);
} else {
eprintln!("could not get map morphism");
break;
}
}
// member of rhs not found in lhs
if ! found_src_member {
failed = true;
break;
}
}
if ! failed && necessary {
Some(MorphismInstance::MapStruct {
ψ: src_ψ,
src_struct_repr: struct_repr_lhs.clone(),
dst_struct_repr: struct_repr_rhs.clone(),
member_morph
})
} else {
None
}
}
(TypeTerm::Seq{ seq_repr: seq_repr_lhs, items: items_lhs },
TypeTerm::Seq{ seq_repr: _seq_rerpr_rhs, items: items_rhs })
=> {
//let mut item_morphs = Vec::new();
for (ty_lhs, ty_rhs) in items_lhs.iter().zip(items_rhs.iter()) {
if let Some(item_morph) = self.get_morphism_instance(&MorphismType{
bounds: Vec::new(),
src_type: ty_lhs.clone(),
dst_type: ty_rhs.clone()
}) {
return Some(MorphismInstance::MapSeq { ψ: src_ψ, seq_repr: seq_repr_lhs.clone(), item_morph: Box::new(item_morph) });
}
break;
}
None
}
_ => {
None
}
}
dst_types
}
pub fn enum_morphisms(&self, src_type: &TypeTerm) -> Vec< MorphismInstance<M> > {
let mut dst_types = Vec::new();
dst_types.append(&mut self.enum_direct_morphisms(src_type));
dst_types.append(&mut self.enum_map_morphisms(src_type));
dst_types
}
pub fn enum_morphisms_from(&self, src_type: &TypeTerm) -> Vec< MorphismInstance<M> > {
let mut morphs = Vec::new();
pub fn find_direct_morphism(&self,
ty: &MorphismType,
dict: &mut impl TypeDict
) -> Option< MorphismInstance<M> > {
eprintln!("find direct morph");
//eprintln!("enum morphisms from {:?}", src_type);
for m in self.morphisms.iter() {
        let ty = ty.clone().normalize();
        let morph_type = m.get_type().normalize();

        eprintln!("find direct morph:\n {} <= {}",
            dict.unparse(&ty.src_type), dict.unparse(&morph_type.src_type),
        );
        if let Ok((halo, σ)) = subtype_unify(&ty.src_type, &morph_type.src_type) {
            eprintln!("halo: {}", dict.unparse(&halo));

            let dst_type = TypeTerm::Ladder(vec![
                halo.clone(),
                morph_type.dst_type.clone()
            ]).normalize().param_normalize();

            eprintln!("-----------> {} <= {}",
                dict.unparse(&dst_type), dict.unparse(&ty.dst_type)
            );

            if let Ok((halo2, σ2)) = subtype_unify(&dst_type, &ty.dst_type) {
                eprintln!("match. halo2 = {}", dict.unparse(&halo2));
                return Some(MorphismInstance {
                    m: m.clone(),
                    halo,
                    σ,
                });
            }
        }
        None
    }

    pub fn find_map_morphism(&self, ty: &MorphismType, dict: &mut impl TypeDict) -> Option< MorphismInstance<M> > {
        for seq_type in self.seq_types.iter() {
            if let Ok((halos, σ)) = UnificationProblem::new_sub(vec![
                (ty.src_type.clone().param_normalize(),
                    TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
                (TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ]),
                    ty.dst_type.clone().param_normalize()),
            ]).solve() {
                // TODO: use real fresh variable names
                let item_morph_type = MorphismType {
                    src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
                    dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
                }.normalize();

                //eprintln!("Map Morph: try to find item-morph with type {:?}", item_morph_type);
                if let Some(item_morph_inst) = self.find_morphism( &item_morph_type, dict ) {
                    if let Some( list_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
                        return Some( MorphismInstance {
                            m: list_morph,
                            σ,
                            halo: halos[0].clone()
                        } );
                    }
                }
            }
        }
        None
    }

    pub fn find_morphism(&self, ty: &MorphismType,
        dict: &mut impl TypeDict
    )
    -> Option< MorphismInstance<M> > {
        if let Some(m) = self.find_direct_morphism(ty, dict) {
            return Some(m);
        }
        if let Some(m) = self.find_map_morphism(ty, dict) {
            return Some(m);
        }
        None
    }

            let m_src_type = m.get_type().src_type;
            let m_dst_type = m.get_type().dst_type;

            /* 1. primitive morphisms */

            // check if the given start type is compatible with the
            // morphisms source type,
            // i.e. check if `src_type` is a subtype of `m_src_type`
            if let Ok((ψ, σ)) = crate::constraint_system::subtype_unify(src_type, &m_src_type) {
                let morph_inst = MorphismInstance::Primitive { ψ, σ, morph: m.clone() };
                //eprintln!("..found direct morph to {:?}", morph_inst.get_type().dst_type);
                morphs.push(morph_inst);
            }

            /* 2. check complex types */
            else if let Some(complex_morph) = self.complex_morphism_decomposition(src_type, &m_src_type) {
                //eprintln!("found complex morph to {:?}", complex_morph.get_type().dst_type);
                morphs.push(complex_morph);
            }
        }
        morphs
    }

    pub fn to_dot(&self, dict: &mut impl TypeDict) -> String {
        let mut dot_source = String::new();
        dot_source.push_str("digraph MorphismGraph {");

        pub fn ty_to_dot_label(dict: &mut impl TypeDict, ty: &TypeTerm) -> String {
            let pretty_str = ty.pretty(dict, 0);
            let mut child = std::process::Command::new("aha").arg("--no-header")
                .stdin( std::process::Stdio::piped() )
                .stdout(std::process::Stdio::piped())
                .spawn().expect("spawn child");
            let mut stdin = child.stdin.take().expect("cant get stdin");
            std::thread::spawn(move ||{ stdin.write_all(pretty_str.as_bytes()).expect("failed to write")});
            let out = child.wait_with_output().expect("");
            let html_str = String::from_utf8_lossy(&out.stdout).replace("\n", "<BR/>").replace("span", "B");
            html_str
        }

        // add vertices
        for (i,m) in self.morphisms.iter().enumerate() {
            dot_source.push_str(&format!("
                SRC{} [label=<{}>]
                DST{} [label=<{}>]
                SRC{} -> DST{} [label=\"{}\"]
            ", i, ty_to_dot_label(dict, &m.get_type().src_type),
               i, ty_to_dot_label(dict, &m.get_type().dst_type),
               i,i,i
            ));
        }

        // add edges
        dot_source.push_str("}");
        dot_source
    }
}
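The primitive-morphism check above hinges on `subtype_unify`, which splits the given start type into the part that matches the morphism's source type and a remaining "halo" ψ that is carried along unchanged. A sketch of that behaviour in the style of the unit tests further down (test name and the placeholder types `A`, `B` are made up; it assumes `subtype_unify` returns the single halo term and the substitution for one constraint pair):

```rust
#[test]
fn sketch_subtype_halo() {
    use crate::{dict::*, parser::*, constraint_system::subtype_unify};

    let mut dict = BimapTypeDict::new();

    // "A ~ B" is a subtype of "B"; the rung "A" that is not consumed
    // by the match is returned as the halo ψ.
    let (ψ, σ) = subtype_unify(
        &dict.parse("A ~ B").unwrap(),
        &dict.parse("B").unwrap()
    ).expect("A~B should be a subtype of B");

    assert_eq!(ψ, dict.parse("A").unwrap());
    assert!(σ.is_empty());
}
```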
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

View file

@ -1,9 +1,10 @@
use {
crate::{
morphism::{MorphismType, Morphism, MorphismInstance},
morphism_base::MorphismBase,
dict::*,
term::*
morphism::{Morphism, MorphismType, MorphismInstance},
morphism_base::MorphismBase,
substitution::Substitution,
term::*, desugared_term::*,
}
};
@ -16,6 +17,15 @@ pub struct MorphismPath<M: Morphism + Clone> {
pub morphisms: Vec< MorphismInstance<M> >
}
impl<M: Morphism+Clone> MorphismPath<M> {
fn apply_subst(&mut self, σ: &std::collections::HashMap<TypeID, TypeTerm>) {
for m in self.morphisms.iter_mut() {
m.apply_subst(σ);
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
pub struct ShortestPathProblem<'a, M: Morphism + Clone> {
@ -37,95 +47,55 @@ impl<'a, M:Morphism+Clone> ShortestPathProblem<'a, M> {
}
}
pub fn advance(&mut self, prev_path: &MorphismPath<M>, morph_inst: MorphismInstance<M>) {
let dst_type = morph_inst.get_type().dst_type;
//eprintln!("try morph to {:?}", dst_type.clone());//.sugar(type_dict).pretty(type_dict, 0));
let mut creates_loop = false;
let mut new_path = prev_path.clone();
new_path.apply_subst(&morph_inst.get_subst());
for m in new_path.morphisms.iter() {
if m.get_type().src_type == dst_type {
creates_loop = true;
break;
}
}
if ! creates_loop {
new_path.weight += 1;//next_morph_inst.get_weight();
new_path.cur_type = dst_type;
new_path.morphisms.push(morph_inst);
self.queue.push(new_path);
}
}
    pub fn solve(&mut self) -> Option< Vec<MorphismInstance<M>> > {
        while ! self.queue.is_empty() {
            /* take the shortest partial path and try to advance it by one step */
            self.queue.sort_by( |p1,p2| p2.weight.cmp(&p1.weight));

            if let Some(mut cur_path) = self.queue.pop() {
                if let Ok((halo, σ)) = crate::unification::subtype_unify( &cur_path.cur_type, &self.goal ) {
                    /* found path,
                     * now apply substitution and trim to variables in terms of each step
                     */
                    for n in cur_path.morphisms.iter_mut() {
                        let src_type = n.m.get_type().src_type;
                        let dst_type = n.m.get_type().dst_type;

                        let mut new_σ = std::collections::HashMap::new();
                        for (k,v) in σ.iter() {
                            if let TypeID::Var(varid) = k {
                                if src_type.contains_var(*varid)
                                || dst_type.contains_var(*varid) {
                                    new_σ.insert(
                                        k.clone(),
                                        v.clone().apply_substitution(&σ).clone().strip()
                                    );
                                }
                            }
                        }
                        for (k,v) in n.σ.iter() {
                            if let TypeID::Var(varid) = k {
                                if src_type.contains_var(*varid)
                                || dst_type.contains_var(*varid) {
                                    new_σ.insert(
                                        k.clone(),
                                        v.clone().apply_substitution(&σ).clone().strip()
                                    );
                                }
                            }
                        }

                        n.halo = n.halo.clone().apply_substitution(&σ).clone().strip().param_normalize();
                        n.σ = new_σ;
                    }

                    return Some(cur_path.morphisms);
                }

                //eprintln!("cur path (w ={}) : @ {:?}", cur_path.weight, cur_path.cur_type);//.clone().sugar(type_dict).pretty(type_dict, 0) );
                for mut next_morph_inst in self.morphism_base.enum_morphisms(&cur_path.cur_type) {
                    let dst_type = next_morph_inst.get_type().dst_type;
                    // eprintln!("try morph to {}", dst_type.clone().sugar(type_dict).pretty(type_dict, 0));

                    let mut creates_loop = false;

                    let mut new_path = cur_path.clone();
                    for n in new_path.morphisms.iter_mut() {
                        let mut new_σ = std::collections::HashMap::new();
                        for (k,v) in next_morph_inst.σ.iter() {
                            new_σ.insert(
                                k.clone(),
                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
                            );
                        }
                        for (k,v) in n.σ.iter() {
                            new_σ.insert(
                                k.clone(),
                                v.clone().apply_substitution(&next_morph_inst.σ).clone()
                            );
                        }

                        n.halo = n.halo.clone().apply_substitution(&next_morph_inst.σ).clone().strip().param_normalize();
                        n.σ = new_σ;
                    }

                    for m in new_path.morphisms.iter() {
                        if m.get_type().src_type == dst_type {
                            creates_loop = true;
                            break;
                        }
                    }

                    if ! creates_loop {
                        new_path.weight += next_morph_inst.m.weight();
                        new_path.cur_type = dst_type;

                        new_path.morphisms.push(next_morph_inst);
                        self.queue.push(new_path);
                    }
                }
            }
        }

    pub fn solve(&mut self) -> Option< Vec<MorphismInstance<M>> > {
        while ! self.queue.is_empty() {
            /* take the shortest partial path and try to advance it by one step */
            self.queue.sort_by( |p1,p2| p2.weight.cmp(&p1.weight));

            if let Some(mut cur_path) = self.queue.pop() {
                /* 1. Check if goal is already reached by the current path */
                if let Ok((ψ, σ)) = crate::constraint_system::subtype_unify( &cur_path.cur_type, &self.goal ) {
                    /* found path,
                     * now apply substitution and trim to variables in terms of each step
                     */
                    cur_path.apply_subst(&σ);
                    return Some(cur_path.morphisms);
                }

                /* 2. Try to advance current path */
                else if let Some(complex_morph) =
                    self.morphism_base.complex_morphism_decomposition( &cur_path.cur_type, &self.goal )
                {
                    self.advance(&cur_path, complex_morph);
                }

                for next_morph_inst in self.morphism_base.enum_morphisms_from(&cur_path.cur_type) {
                    self.advance(&cur_path, next_morph_inst);
                }
            }
        }
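The queue handling in `solve()` is a small Dijkstra-style trick: open paths are sorted by descending weight and the lightest one is popped from the back. A self-contained illustration of just that ordering (the `P` struct is a stand-in for `MorphismPath`; test name made up):

```rust
#[test]
fn sketch_queue_ordering() {
    struct P { weight: u64 }

    let mut queue = vec![ P { weight: 3 }, P { weight: 1 }, P { weight: 2 } ];

    // sort descending by weight, exactly as in solve() ...
    queue.sort_by(|p1, p2| p2.weight.cmp(&p1.weight));

    // ... so pop() always yields the currently cheapest partial path
    assert_eq!(queue.pop().unwrap().weight, 1);
}
```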

View file

@ -1,10 +1,8 @@
use {
std::iter::Peekable,
crate::{
dict::*,
term::*,
lexer::*
}
dict::*, lexer::*, desugared_term::*, TypeTerm, term::*
}, std::iter::Peekable
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@ -19,22 +17,28 @@ pub enum ParseError {
}
pub trait ParseLadderType {
fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError>;
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
fn parse(&mut self, s:&str) -> Result<TypeTerm, ParseError>;
fn parse_desugared(&mut self, s: &str) -> Result<DesugaredTypeTerm, ParseError>;
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
where It: Iterator<Item = char>;
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
where It: Iterator<Item = char>;
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
where It: Iterator<Item = char>;
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl<T: TypeDict> ParseLadderType for T {
fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
fn parse(&mut self, s:&str) -> Result<TypeTerm, ParseError> {
Ok(self.parse_desugared(s)?.sugar(self))
}
fn parse_desugared(&mut self, s: &str) -> Result<DesugaredTypeTerm, ParseError> {
let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
match self.parse_ladder(&mut tokens) {
@ -49,7 +53,7 @@ impl<T: TypeDict> ParseLadderType for T {
}
}
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
where It: Iterator<Item = char>
{
let mut args = Vec::new();
@ -57,7 +61,7 @@ impl<T: TypeDict> ParseLadderType for T {
match tok {
Ok(LadderTypeToken::Close) => {
tokens.next();
return Ok(TypeTerm::App(args));
return Ok(DesugaredTypeTerm::App(args));
}
_ => {
match self.parse_ladder(tokens) {
@ -70,7 +74,7 @@ impl<T: TypeDict> ParseLadderType for T {
Err(ParseError::UnexpectedEnd)
}
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
where It: Iterator<Item = char>
{
match tokens.next() {
@ -78,21 +82,21 @@ impl<T: TypeDict> ParseLadderType for T {
Some(Ok(LadderTypeToken::Close)) => Err(ParseError::UnexpectedClose),
Some(Ok(LadderTypeToken::Ladder)) => Err(ParseError::UnexpectedLadder),
Some(Ok(LadderTypeToken::Symbol(s))) =>
Ok(TypeTerm::TypeID(
Ok(DesugaredTypeTerm::TypeID(
if let Some(tyid) = self.get_typeid(&s) {
tyid
} else {
self.add_typename(s)
}
)),
Some(Ok(LadderTypeToken::Char(c))) => Ok(TypeTerm::Char(c)),
Some(Ok(LadderTypeToken::Num(n))) => Ok(TypeTerm::Num(n)),
Some(Ok(LadderTypeToken::Char(c))) => Ok(DesugaredTypeTerm::Char(c)),
Some(Ok(LadderTypeToken::Num(n))) => Ok(DesugaredTypeTerm::Num(n)),
Some(Err(err)) => Err(ParseError::LexError(err)),
None => Err(ParseError::UnexpectedEnd)
}
}
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
where It: Iterator<Item = char>
{
let mut rungs = Vec::new();
@ -101,7 +105,7 @@ impl<T: TypeDict> ParseLadderType for T {
Ok(t) => { rungs.push(t); }
Err(err) => { return Err(err); }
}
while let Some(tok) = tokens.peek() {
match tok {
Ok(LadderTypeToken::Ladder) => {
@ -113,7 +117,7 @@ impl<T: TypeDict> ParseLadderType for T {
Err(err) => { return Err(err); }
}
} else {
return Err(ParseError::UnexpectedLadder);
return Err(ParseError::UnexpectedLadder);
}
}
Err(lexerr) => {
@ -128,7 +132,7 @@ impl<T: TypeDict> ParseLadderType for T {
match rungs.len() {
0 => Err(ParseError::UnexpectedEnd),
1 => Ok(rungs[0].clone()),
_ => Ok(TypeTerm::Ladder(rungs)),
_ => Ok(DesugaredTypeTerm::Ladder(rungs)),
}
}
}

View file

@ -1,8 +1,10 @@
use crate::term::TypeTerm;
use crate::{term::TypeTerm, constraint_system, EnumVariant, StructMember};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm > ) -> Vec< TypeTerm > {
eprintln!("splice ladders {:?} <<<====>>> {:?} ", upper, lower);
// check for overlap
for i in 0 .. upper.len() {
if upper[i] == lower[0] {
let mut result_ladder = Vec::<TypeTerm>::new();
@ -12,6 +14,7 @@ pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm > )
}
}
// no overlap found, just concatenate ladders
upper.append(&mut lower);
upper
}
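`splice_ladders` merges two ladders at the first rung of the lower ladder that also occurs in the upper one, and falls back to plain concatenation if there is no overlap. A sketch of the intended behaviour, assuming the usual splice semantics for the loop body that is cut off by this hunk (keep the upper rungs before the overlap, then append the lower ladder); type names are placeholders and the test is written as if it lived in the same module:

```rust
#[test]
fn sketch_splice_ladders() {
    use crate::{dict::*, parser::*, term::TypeTerm};

    let mut dict = BimapTypeDict::new();

    // helper: view a parsed term as a vector of rungs
    let ladder = |t: TypeTerm| match t {
        TypeTerm::Ladder(rungs) => rungs,
        atomic => vec![ atomic ]
    };

    let upper = ladder(dict.parse("A ~ B ~ C").unwrap());
    let lower = ladder(dict.parse("B ~ C ~ D").unwrap());

    // overlap at "B": the shared rungs are spliced, not duplicated
    assert_eq!(
        splice_ladders(upper, lower),
        ladder(dict.parse("A ~ B ~ C ~ D").unwrap())
    );
}
```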
@ -24,112 +27,305 @@ impl TypeTerm {
/// <Seq <Digit 10>>~<Seq Char>
/// ⇒ <Seq <Digit 10>~Char>
/// ```
    pub fn param_normalize(mut self) -> Self {
        match self {
            TypeTerm::Ladder(mut rungs) => {
                if rungs.len() > 0 {
                    let mut new_rungs = Vec::new();
                    while let Some(bottom) = rungs.pop() {
                        if let Some(last_but) = rungs.last_mut() {
                            match (bottom, last_but) {
                                (TypeTerm::App(bot_args), TypeTerm::App(last_args)) => {
                                    if bot_args.len() == last_args.len() {
                                        let mut new_rung_params = Vec::new();
                                        let mut require_break = false;

                                        if bot_args.len() > 0 {
                                            if let Ok(_idx) = last_args[0].is_syntactic_subtype_of(&bot_args[0]) {
                                                for i in 0 .. bot_args.len() {
                                                    let spliced_type_ladder = splice_ladders(
                                                        last_args[i].clone().get_lnf_vec(),
                                                        bot_args[i].clone().get_lnf_vec()
                                                    );
                                                    let spliced_type =
                                                        if spliced_type_ladder.len() == 1 {
                                                            spliced_type_ladder[0].clone()
                                                        } else if spliced_type_ladder.len() > 1 {
                                                            TypeTerm::Ladder(spliced_type_ladder)
                                                        } else {
                                                            TypeTerm::unit()
                                                        };

                                                    new_rung_params.push( spliced_type.param_normalize() );
                                                }
                                            } else {
                                                new_rung_params.push(
                                                    TypeTerm::Ladder(vec![
                                                        last_args[0].clone(),
                                                        bot_args[0].clone()
                                                    ]).normalize()
                                                );

                                                for i in 1 .. bot_args.len() {
                                                    if let Ok(_idx) = last_args[i].is_syntactic_subtype_of(&bot_args[i]) {
                                                        let spliced_type_ladder = splice_ladders(
                                                            last_args[i].clone().get_lnf_vec(),
                                                            bot_args[i].clone().get_lnf_vec()
                                                        );
                                                        let spliced_type =
                                                            if spliced_type_ladder.len() == 1 {
                                                                spliced_type_ladder[0].clone()
                                                            } else if spliced_type_ladder.len() > 1 {
                                                                TypeTerm::Ladder(spliced_type_ladder)
                                                            } else {
                                                                TypeTerm::unit()
                                                            };

                                                        new_rung_params.push( spliced_type.param_normalize() );
                                                    } else {
                                                        new_rung_params.push( bot_args[i].clone() );
                                                        require_break = true;
                                                    }
                                                }
                                            }
                                        }

                                        if require_break {
                                            new_rungs.push( TypeTerm::App(new_rung_params) );
                                        } else {
                                            rungs.pop();
                                            rungs.push(TypeTerm::App(new_rung_params));
                                        }
                                    } else {
                                        new_rungs.push( TypeTerm::App(bot_args) );
                                    }
                                }
                                (bottom, last_buf) => {
                                    new_rungs.push( bottom );
                                }
                            }
                        } else {
                            new_rungs.push( bottom );
                        }
                    }

                    new_rungs.reverse();

                    if new_rungs.len() > 1 {
                        TypeTerm::Ladder(new_rungs)
                    } else if new_rungs.len() == 1 {
                        new_rungs[0].clone()
                    } else {
                        TypeTerm::unit()
                    }
                } else {
                    TypeTerm::unit()
                }
            }

            TypeTerm::App(params) => {
                TypeTerm::App(
                    params.into_iter()
                        .map(|p| p.param_normalize())
                        .collect())
            }

            atomic => atomic
        }
    }

    pub fn normalize(mut self) -> Self {
        match self {
            TypeTerm::Ladder(mut rungs) => {
                if rungs.len() == 0 {
                    return TypeTerm::unit();
                } else if rungs.len() == 1 {
                    return rungs.pop().unwrap().normalize();
                }

                let mut new_rungs = Vec::new();
                let mut r2 = rungs.pop().unwrap().strip();

                while let Some(r1) = rungs.pop() {
                    let r1 = r1.strip();
                    match (r1.clone(), r2.clone()) {
                        (TypeTerm::Seq { seq_repr: seq_repr1, items: items1 },
                         TypeTerm::Seq { seq_repr: seq_repr2, items: items2 })
                        => {
                            r2 = TypeTerm::Seq {
                                seq_repr:
                                    if seq_repr1.is_some() || seq_repr2.is_some() {
                                        let sr1 = if let Some(seq_repr1) = seq_repr1 { *seq_repr1.clone() }
                                                  else { TypeTerm::unit() };
                                        let sr2 = if let Some(seq_repr2) = seq_repr2 { *seq_repr2 }
                                                  else { TypeTerm::unit() };

                                        Some(Box::new(
                                            if sr1 == sr2 {
                                                sr1
                                            } else if sr1 == TypeTerm::unit() {
                                                sr2
                                            } else {
                                                TypeTerm::Ladder(vec![ sr1, sr2 ]).normalize()
                                            }))
                                    } else {
                                        None
                                    },
                                items:
                                    items1.into_iter()
                                        .zip(items2.into_iter())
                                        .map(|(item1, item2)| {
                                            if item1 == item2 {
                                                item1
                                            } else {
                                                TypeTerm::Ladder(vec![ item1.clone(), item2 ])
                                            }
                                        })
                                        .collect()
                            };
                        }

                        (TypeTerm::Seq { seq_repr, items },
                         TypeTerm::Spec( mut args )
                        ) => {
                            if args.len() == items.len()+1 {
                                r2 = TypeTerm::Seq {
                                    seq_repr: Some(Box::new(TypeTerm::Ladder(vec![
                                        if let Some(seq_repr) = seq_repr {
                                            *seq_repr.clone()
                                        } else {
                                            TypeTerm::unit()
                                        },
                                        args.remove(0)
                                    ]).normalize())),
                                    items: items.into_iter()
                                        .zip(args.into_iter())
                                        .map(|(i1, i2)| {
                                            if i1 == i2 {
                                                i1
                                            } else {
                                                TypeTerm::Ladder(vec![ i1, i2 ]).normalize()
                                            }
                                        })
                                        .collect()
                                };
                            } else {
                                new_rungs.push(r2);
                                r2 = r1;
                            }
                        }

                        (TypeTerm::Struct { struct_repr: struct_repr1, members: members1 },
                         TypeTerm::Struct { struct_repr: struct_repr2, members: members2 }) => {
                            let mut condensed_struct_repr = None;
                            let mut condensed_members = Vec::new();
                            let mut require_break = false;

                            if let Some(struct_repr1) = struct_repr1 {
                                if let Some(struct_repr2) = struct_repr2 {
                                    condensed_struct_repr = Some(Box::new(TypeTerm::Ladder(
                                        vec![
                                            struct_repr1.as_ref().clone(),
                                            struct_repr2.as_ref().clone()
                                        ]
                                    ).normalize()))
                                } else {
                                    condensed_struct_repr = Some(Box::new(struct_repr1.as_ref().clone()));
                                }
                            } else {
                                condensed_struct_repr = struct_repr2.clone();
                            }

                            for StructMember{ symbol: symbol2, ty: ty2 } in members2.iter() {
                                let mut found = false;
                                for StructMember{ symbol: symbol1, ty: ty1 } in members1.iter() {
                                    if symbol2 == symbol1 {
                                        condensed_members.push(StructMember {
                                            symbol: symbol1.clone(),
                                            ty: TypeTerm::Ladder(vec![
                                                ty1.clone(),
                                                ty2.clone()
                                            ]).normalize()
                                        });
                                        found = true;
                                        break;
                                    }
                                }
                                if ! found {
                                    require_break = true;
                                }
                            }

                            if require_break {
                                new_rungs.push(r2);
                                r2 = r1;
                            } else {
                                r2 = TypeTerm::Struct {
                                    struct_repr: condensed_struct_repr,
                                    members: condensed_members
                                };
                            }
                        }

                        (TypeTerm::Enum { enum_repr: enum_repr1, variants: variants1 },
                         TypeTerm::Enum { enum_repr: enum_repr2, variants: variants2 }) => {
                            let mut condensed_enum_repr = None;
                            let mut condensed_variants = Vec::new();
                            let mut require_break = false;

                            if let Some(enum_repr1) = enum_repr1 {
                                if let Some(enum_repr2) = enum_repr2 {
                                    condensed_enum_repr = Some(Box::new(TypeTerm::Ladder(
                                        vec![
                                            enum_repr1.as_ref().clone(),
                                            enum_repr2.as_ref().clone()
                                        ]
                                    ).normalize()))
                                } else {
                                    condensed_enum_repr = Some(Box::new(enum_repr1.as_ref().clone()));
                                }
                            } else {
                                condensed_enum_repr = enum_repr2.clone();
                            }

                            for EnumVariant{ symbol: symbol2, ty: ty2 } in variants2.iter() {
                                let mut found = false;
                                for EnumVariant{ symbol: symbol1, ty: ty1 } in variants1.iter() {
                                    if symbol2 == symbol1 {
                                        condensed_variants.push(EnumVariant {
                                            symbol: symbol1.clone(),
                                            ty: TypeTerm::Ladder(vec![
                                                ty1.clone(),
                                                ty2.clone()
                                            ]).normalize()
                                        });
                                        found = true;
                                        break;
                                    }
                                }
                                if ! found {
                                    require_break = true;
                                }
                            }

                            if require_break {
                                new_rungs.push(r2);
                                r2 = r1;
                            } else {
                                r2 = TypeTerm::Enum {
                                    enum_repr: condensed_enum_repr,
                                    variants: condensed_variants
                                };
                            }
                        }

                        (TypeTerm::Spec(args1), TypeTerm::Spec(args2)) => {
                            if args1.len() == args2.len() {
                                if let Ok((ψ,σ)) = constraint_system::subtype_unify(&args1[0], &args2[0]) {
                                    let mut new_args = Vec::new();
                                    for (a1, a2) in args1.into_iter().zip(args2.into_iter()) {
                                        new_args.push(TypeTerm::Ladder(vec![ a1, a2 ]).normalize());
                                    }
                                    r2 = TypeTerm::Spec(new_args);
                                    //new_rungs.push(r2.clone());
                                } else {
                                    new_rungs.push(r2);
                                    r2 = r1;
                                }
                            } else {
                                new_rungs.push(r2);
                                r2 = r1;
                            }
                        }

                        (TypeTerm::Univ(bound1, args1), TypeTerm::Univ(bound2, args2)) => {
                            todo!();
                        }
                        (TypeTerm::Func(args1), TypeTerm::Func(args2)) => {
                            todo!();
                        }
                        (TypeTerm::Morph(src1,dst1), TypeTerm::Morph(src2,dst2)) => {
                            todo!();
                        }

                        (TypeTerm::Ladder(rr1), TypeTerm::Ladder(rr2)) => {
                            if rr1.len() > 0 {
                                let l = splice_ladders(rr1, rr2);
                                r2 = TypeTerm::Ladder(l).normalize();
                            }
                        }
                        (atomic1, TypeTerm::Ladder(mut rr2)) => {
                            if !atomic1.is_empty() {
                                if rr2.first() != Some(&atomic1) {
                                    rr2.insert(0, atomic1);
                                }
                            }
                            r2 = TypeTerm::Ladder(rr2).normalize();
                        }
                        (TypeTerm::Ladder(mut rr1), atomic2) => {
                            if !atomic2.is_empty() {
                                if rr1.last() != Some(&atomic2) {
                                    rr1.push(atomic2);
                                }
                            }
                            r2 = TypeTerm::Ladder(rr1).normalize();
                        }

                        (atomic1, atomic2) => {
                            if atomic1.is_empty() {
                            } else if atomic1 == atomic2 {
                            } else if atomic2.is_empty() {
                                r2 = atomic1;
                            } else {
                                new_rungs.push(atomic2);
                                r2 = atomic1;
                            }
                        }
                    }
                }

                if new_rungs.len() > 0 {
                    new_rungs.push(r2);
                    new_rungs.reverse();
                    return TypeTerm::Ladder(new_rungs);
                } else {
                    return r2;
                }
            }

            TypeTerm::Spec(params) => {
                TypeTerm::Spec(
                    params.into_iter()
                        .map(|p| p.normalize())
                        .collect())
            }

            TypeTerm::Seq { seq_repr, items } => TypeTerm::Seq {
                seq_repr: if let Some(seq_repr) = seq_repr { Some(Box::new(seq_repr.normalize())) } else { None },
                items: items.into_iter().map(|p| p.normalize()).collect()
            },

            TypeTerm::Struct { struct_repr, members } => TypeTerm::Struct {
                struct_repr: if let Some(struct_repr) = struct_repr { Some(Box::new(struct_repr.normalize())) } else { None },
                members: members.into_iter()
                    .map(|StructMember{symbol, ty}|
                        StructMember{ symbol, ty: ty.normalize() })
                    .collect()
            },

            TypeTerm::Enum { enum_repr, variants } => TypeTerm::Enum {
                enum_repr: if let Some(enum_repr) = enum_repr { Some(Box::new(enum_repr.normalize())) } else { None },
                variants: variants.into_iter()
                    .map(|EnumVariant{symbol, ty}|
                        EnumVariant{ symbol, ty: ty.normalize() })
                    .collect()
            },

            atomic => atomic
        }
    }
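The condensation described in the doc comment above can be exercised directly. A sketch in the style of the crate's unit tests (test name made up; both sides are normalized so the comparison does not depend on the exact rung layout):

```rust
#[test]
fn sketch_normalize_seq() {
    use crate::{dict::*, parser::*};

    let mut dict = BimapTypeDict::new();

    // <Seq <Digit 10>>~<Seq Char>  ⇒  <Seq <Digit 10>~Char>
    assert_eq!(
        dict.parse("<Seq <Digit 10>>~<Seq Char>").unwrap().normalize(),
        dict.parse("<Seq <Digit 10>~Char>").unwrap().normalize()
    );
}
```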

View file

@ -1,14 +1,37 @@
use {
crate::{TypeDict, dict::TypeID},
crate::sugar::SugaredTypeTerm,
crate::{dict::TypeID, term::TypeTerm, EnumVariant, StructMember, TypeDict, VariableConstraint},
tiny_ansi::TinyAnsi
};
impl SugaredTypeTerm {
pub fn pretty(&self, dict: &TypeDict, indent: u64) -> String {
impl StructMember {
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
format!("{}: {}", self.symbol, self.ty.pretty(dict, indent+1))
}
}
impl EnumVariant {
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
format!("{}: {}", self.symbol, self.ty.pretty(dict, indent+1))
}
}
impl VariableConstraint {
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
match self {
VariableConstraint::UnconstrainedType => format!(""),
VariableConstraint::Subtype(τ) => format!(":<= {}", τ.pretty(dict, indent)),
VariableConstraint::Trait(τ) => format!(":>< {}", τ.pretty(dict, indent)),
VariableConstraint::Parallel(τ) => format!(":|| {}", τ.pretty(dict, indent)),
VariableConstraint::ValueUInt => format!(": "),
}
}
}
impl TypeTerm {
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
let indent_width = 4;
match self {
SugaredTypeTerm::TypeID(id) => {
TypeTerm::TypeID(id) => {
match id {
TypeID::Var(varid) => {
format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_magenta()
@ -19,11 +42,11 @@ impl SugaredTypeTerm {
}
},
SugaredTypeTerm::Num(n) => {
TypeTerm::Num(n) => {
format!("{}", n).green().bold()
}
SugaredTypeTerm::Char(c) => {
TypeTerm::Char(c) => {
match c {
'\0' => format!("'\\0'"),
'\n' => format!("'\\n'"),
@ -31,15 +54,16 @@ impl SugaredTypeTerm {
}
}
SugaredTypeTerm::Univ(t) => {
format!("{} {} . {}",
TypeTerm::Univ(bound, t) => {
format!("{} {}{} . {}",
"".yellow().bold(),
dict.get_varname(0).unwrap_or("??".into()).bright_blue(),
bound.pretty(dict, indent),
t.pretty(dict,indent)
)
}
SugaredTypeTerm::Spec(args) => {
TypeTerm::Spec(args) => {
let mut s = String::new();
s.push_str(&"<".yellow());
for i in 0..args.len() {
@ -53,15 +77,20 @@ impl SugaredTypeTerm {
s
}
SugaredTypeTerm::Struct(args) => {
TypeTerm::Struct{ struct_repr, members } => {
let mut s = String::new();
s.push_str(&"{".yellow().bold());
for arg in args {
if let Some(struct_repr) = struct_repr {
s.push_str(&format!("{}{} ", "~".yellow(), struct_repr.pretty(dict, indent+1)));
}
for member in members {
s.push('\n');
for x in 0..(indent+1)*indent_width {
s.push(' ');
}
s.push_str(&arg.pretty(dict, indent + 1));
s.push_str(&member.pretty(dict, indent + 1));
s.push_str(&";\n".bright_yellow());
}
@ -73,11 +102,16 @@ impl SugaredTypeTerm {
s
}
SugaredTypeTerm::Enum(args) => {
TypeTerm::Enum{ enum_repr, variants } => {
let mut s = String::new();
s.push_str(&"(".yellow().bold());
for i in 0..args.len() {
let arg = &args[i];
if let Some(enum_repr) = enum_repr {
s.push_str(&format!("{}{} ", "~".yellow(), enum_repr.pretty(dict, indent+1)));
}
for (i,variant) in variants.iter().enumerate() {
s.push('\n');
for x in 0..(indent+1)*indent_width {
s.push(' ');
@ -85,7 +119,7 @@ impl SugaredTypeTerm {
if i > 0 {
s.push_str(&"| ".yellow().bold());
}
s.push_str(&arg.pretty(dict, indent + 1));
s.push_str(&variant.pretty(dict, indent + 1));
}
s.push('\n');
@ -96,30 +130,34 @@ impl SugaredTypeTerm {
s
}
SugaredTypeTerm::Seq(args) => {
TypeTerm::Seq{ seq_repr, items } => {
let mut s = String::new();
s.push_str(&"[ ".yellow().bold());
for i in 0..args.len() {
let arg = &args[i];
s.push_str(&"[".yellow().bold());
if let Some(seq_repr) = seq_repr {
s.push_str(&format!("{}{}", "~".yellow(), seq_repr.pretty(dict, indent+1)));
}
s.push(' ');
for (i, item) in items.iter().enumerate() {
if i > 0 {
s.push(' ');
}
s.push_str(&arg.pretty(dict, indent+1));
s.push_str(&item.pretty(dict, indent+1));
}
s.push_str(&" ]".yellow().bold());
s
}
            SugaredTypeTerm::Morph(args) => {
                let mut s = String::new();
                for arg in args {
                    s.push_str(&" ~~morph~~> ".bright_yellow());
                    s.push_str(&arg.pretty(dict, indent));
                }
                s
            }

            TypeTerm::Morph(src,dst) => {
                let mut s = String::new();
                s.push_str(&src.pretty(dict, indent));
                s.push_str(&" ~~morph~~> ".bright_yellow());
                s.push_str(&dst.pretty(dict, indent));
                s
            }
SugaredTypeTerm::Func(args) => {
TypeTerm::Func(args) => {
let mut s = String::new();
for i in 0..args.len() {
let arg = &args[i];
@ -137,7 +175,7 @@ impl SugaredTypeTerm {
s
}
SugaredTypeTerm::Ladder(rungs) => {
TypeTerm::Ladder(rungs) => {
let mut s = String::new();
for i in 0..rungs.len() {
let rung = &rungs[i];

View file

@ -1,25 +1,48 @@
use std::ops::DerefMut;
use crate::{
TypeID,
TypeTerm
DesugaredTypeTerm
};
use crate::term::*;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
pub trait Substitution {
    fn get(&self, t: &TypeID) -> Option< TypeTerm >;
}

impl<S: Fn(&TypeID)->Option<TypeTerm>> Substitution for S {
    fn get(&self, t: &TypeID) -> Option< TypeTerm > {
        (self)(t)
    }
}

pub trait Substitution {
    fn get(&self, t: &TypeID) -> Option< TypeTerm >;
    fn add(&mut self, tyid: TypeID, val: TypeTerm);
    fn append(self, rhs: &Self) -> Self;
}
impl Substitution for std::collections::HashMap< TypeID, TypeTerm > {
fn get(&self, t: &TypeID) -> Option< TypeTerm > {
(self as &std::collections::HashMap< TypeID, TypeTerm >).get(t).cloned()
}
fn add(&mut self, tyid: TypeID, val: TypeTerm) {
if let TypeID::Var(id) = tyid {
if !val.contains_var(id) {
self.insert(tyid, val.normalize());
} else {
eprintln!("substitution cannot contain loop");
}
}
}
fn append(self, rhs: &Self) -> Self {
let mut new_σ = std::collections::HashMap::new();
for (v, tt) in self.iter() {
let mut tt = tt.clone().normalize();
tt.apply_subst(rhs);
tt.apply_subst(&self);
new_σ.add(v.clone(), tt);
}
for (v, tt) in rhs.iter() {
new_σ.add(v.clone(), tt.clone().normalize());
}
new_σ
}
}
pub type HashMapSubst = std::collections::HashMap< TypeID, TypeTerm >;
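`append` composes two substitutions: every binding of the left-hand map is rewritten under the right-hand one before the right-hand bindings are merged in. A sketch of the intended use (test name and variable names are made up; it assumes `Substitution` and `HashMapSubst` are reachable under `crate::substitution`):

```rust
#[test]
fn sketch_substitution_append() {
    use crate::{dict::*, parser::*, substitution::{Substitution, HashMapSubst}};

    let mut dict = BimapTypeDict::new();
    dict.add_varname("T".into());
    dict.add_varname("U".into());

    let mut σ1 = HashMapSubst::new();
    σ1.add(dict.get_typeid(&"T".into()).unwrap(), dict.parse("<Seq U>").unwrap());

    let mut σ2 = HashMapSubst::new();
    σ2.add(dict.get_typeid(&"U".into()).unwrap(), dict.parse("Char").unwrap());

    // composing σ1 with σ2: T now maps to <Seq Char>
    let σ = σ1.append(&σ2);
    assert_eq!(
        σ.get(&dict.get_typeid(&"T".into()).unwrap()),
        Some(dict.parse("<Seq Char>").unwrap())
    );
}
```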
@ -40,25 +63,61 @@ impl TypeTerm {
σ: &impl Substitution
) -> &mut Self {
match self {
TypeTerm::Num(_) => {},
TypeTerm::Char(_) => {},
TypeTerm::TypeID(typid) => {
if let Some(t) = σ.get(typid) {
*self = t;
}
}
            TypeTerm::Ladder(rungs) => {
                for r in rungs.iter_mut() {
                    r.apply_subst(σ);
                }
            }
            TypeTerm::App(args) => {
                for r in args.iter_mut() {
                    r.apply_subst(σ);
                }
            }
            _ => {}

            TypeTerm::Ladder(args) |
            TypeTerm::Spec(args) |
            TypeTerm::Func(args)
            => {
                for r in args.iter_mut() {
                    r.apply_subst(σ);
                }
            }
TypeTerm::Univ(bound, t) => {
bound.apply_subst(σ);
t.apply_subst(σ);
}
TypeTerm::Morph(src, dst) => {
src.apply_subst(σ);
dst.apply_subst(σ);
}
TypeTerm::Struct { struct_repr, members } => {
if let Some(struct_repr) = struct_repr.as_mut() {
struct_repr.apply_subst(σ);
}
for StructMember{ symbol:_, ty } in members.iter_mut() {
ty.apply_subst(σ);
}
},
TypeTerm::Enum { enum_repr, variants } => {
if let Some(enum_repr) = enum_repr.as_mut() {
enum_repr.apply_subst(σ);
}
for EnumVariant{ symbol:_, ty } in variants.iter_mut() {
ty.apply_subst(σ);
}
}
TypeTerm::Seq { seq_repr, items } => {
if let Some(seq_repr) = seq_repr {
seq_repr.apply_subst(σ);
}
for ty in items.iter_mut() {
ty.apply_subst(σ);
}
},
}
self
}
}
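With the arms above, `apply_subst` rewrites variables everywhere inside a sugared term, including inside sequence items and struct members. A brief sketch (test name is made up):

```rust
#[test]
fn sketch_apply_subst() {
    use crate::{dict::*, parser::*, substitution::HashMapSubst};

    let mut dict = BimapTypeDict::new();
    dict.add_varname("T".into());

    let mut σ = HashMapSubst::new();
    σ.insert(dict.get_typeid(&"T".into()).unwrap(), dict.parse("Char").unwrap());

    // T is replaced inside the sequence item's ladder
    let mut t = dict.parse("<Seq T~Ascii>").unwrap();
    t.apply_subst(&σ);

    assert_eq!(t, dict.parse("<Seq Char~Ascii>").unwrap());
}
```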
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

View file

@ -1,16 +1,16 @@
use crate::term::TypeTerm;
use crate::term::DesugaredTypeTerm;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
impl TypeTerm {
impl DesugaredTypeTerm {
// returns ladder-step of first match and provided representation-type
pub fn is_semantic_subtype_of(&self, expected_type: &TypeTerm) -> Option<(usize, TypeTerm)> {
pub fn is_semantic_subtype_of(&self, expected_type: &DesugaredTypeTerm) -> Option<(usize, DesugaredTypeTerm)> {
let provided_lnf = self.clone().get_lnf_vec();
let expected_lnf = expected_type.clone().get_lnf_vec();
for i in 0..provided_lnf.len() {
if provided_lnf[i] == expected_lnf[0] {
return Some((i, TypeTerm::Ladder(
return Some((i, DesugaredTypeTerm::Ladder(
provided_lnf[i..].into_iter().cloned().collect()
)))
}
@ -19,7 +19,7 @@ impl TypeTerm {
None
}
pub fn is_syntactic_subtype_of(&self, expected_type: &TypeTerm) -> Result<usize, (usize, usize)> {
pub fn is_syntactic_subtype_of(&self, expected_type: &DesugaredTypeTerm) -> Result<usize, (usize, usize)> {
if let Some((first_match, provided_type)) = self.is_semantic_subtype_of( expected_type ) {
let provided_lnf = provided_type.get_lnf_vec();
let expected_lnf = expected_type.clone().get_lnf_vec();
@ -39,13 +39,38 @@ impl TypeTerm {
// supertype analogs
pub fn is_semantic_supertype_of(&self, t: &TypeTerm) -> Option<(usize, TypeTerm)> {
pub fn is_semantic_supertype_of(&self, t: &DesugaredTypeTerm) -> Option<(usize, DesugaredTypeTerm)> {
t.is_semantic_subtype_of(self)
}
pub fn is_syntactic_supertype_of(&self, t: &TypeTerm) -> Result<usize, (usize, usize)> {
pub fn is_syntactic_supertype_of(&self, t: &DesugaredTypeTerm) -> Result<usize, (usize, usize)> {
t.is_syntactic_subtype_of(self)
}
}
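These two helpers work on desugared ladders: the semantic variant looks for the first rung of the expected type inside the provided ladder, and the syntactic variant additionally requires the remaining rungs to line up. A sketch with placeholder type names (the equality loop is cut off by this hunk, so the exact error payload is not shown here):

```rust
#[test]
fn sketch_syntactic_subtype() {
    use crate::{dict::*, parser::*};

    let mut dict = BimapTypeDict::new();

    let provided = dict.parse_desugared("A~B~C").unwrap();
    let expected = dict.parse_desugared("B~C").unwrap();

    // the match starts at rung index 1 of the provided ladder
    assert_eq!(provided.is_syntactic_subtype_of(&expected), Ok(1));
}
```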
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
use crate::sugar::*;
impl TypeTerm {
pub fn is_compatible(&self, supertype: TypeTerm) -> bool {
match (self, supertype) {
(TypeTerm::TypeID(idl), TypeTerm::TypeID(idr)) => {
if *idl == idr {
true
} else {
false
}
}
(TypeTerm::Ladder(l_rungs), TypeTerm::Ladder(r_rungs)) => {
false
}
_ => {
false
}
}
}
}

View file

@ -1,114 +0,0 @@
use {
crate::{TypeTerm, TypeID, parser::ParseLadderType}
};
#[derive(Clone)]
pub enum SugaredTypeTerm {
TypeID(TypeID),
Num(i64),
Char(char),
Univ(Box< SugaredTypeTerm >),
Spec(Vec< SugaredTypeTerm >),
Func(Vec< SugaredTypeTerm >),
Morph(Vec< SugaredTypeTerm >),
Ladder(Vec< SugaredTypeTerm >),
Struct(Vec< SugaredTypeTerm >),
Enum(Vec< SugaredTypeTerm >),
Seq(Vec< SugaredTypeTerm >)
}
impl TypeTerm {
pub fn sugar(self: TypeTerm, dict: &mut impl crate::TypeDict) -> SugaredTypeTerm {
match self {
TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
TypeTerm::Char(c) => SugaredTypeTerm::Char(c),
TypeTerm::App(args) => if let Some(first) = args.first() {
if first == &dict.parse("Func").unwrap() {
SugaredTypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Morph").unwrap() {
SugaredTypeTerm::Morph( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Struct").unwrap() {
SugaredTypeTerm::Struct( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Enum").unwrap() {
SugaredTypeTerm::Enum( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Seq").unwrap() {
SugaredTypeTerm::Seq( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Spec").unwrap() {
SugaredTypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse("Univ").unwrap() {
SugaredTypeTerm::Univ(Box::new(
SugaredTypeTerm::Spec(
args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
)
))
}
else {
SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
}
} else {
SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
},
TypeTerm::Ladder(rungs) =>
SugaredTypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
}
}
}
impl SugaredTypeTerm {
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> TypeTerm {
match self {
SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
SugaredTypeTerm::Char(c) => TypeTerm::Char(c),
SugaredTypeTerm::Univ(t) => t.desugar(dict),
SugaredTypeTerm::Spec(ts) => TypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
SugaredTypeTerm::Ladder(ts) => TypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
SugaredTypeTerm::Func(ts) => TypeTerm::App(
std::iter::once( dict.parse("Func").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Morph(ts) => TypeTerm::App(
std::iter::once( dict.parse("Morph").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Struct(ts) => TypeTerm::App(
std::iter::once( dict.parse("Struct").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Enum(ts) => TypeTerm::App(
std::iter::once( dict.parse("Enum").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
SugaredTypeTerm::Seq(ts) => TypeTerm::App(
std::iter::once( dict.parse("Seq").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
}
}
pub fn is_empty(&self) -> bool {
match self {
SugaredTypeTerm::TypeID(_) => false,
SugaredTypeTerm::Num(_) => false,
SugaredTypeTerm::Char(_) => false,
SugaredTypeTerm::Univ(t) => t.is_empty(),
SugaredTypeTerm::Spec(ts) |
SugaredTypeTerm::Ladder(ts) |
SugaredTypeTerm::Func(ts) |
SugaredTypeTerm::Morph(ts) |
SugaredTypeTerm::Struct(ts) |
SugaredTypeTerm::Enum(ts) |
SugaredTypeTerm::Seq(ts) => {
ts.iter().fold(true, |s,t|s&&t.is_empty())
}
}
}
}

View file

@ -1,88 +1,387 @@
use crate::TypeID;
use {
crate::{parser::ParseLadderType, subtype_unify, DesugaredTypeTerm, MorphismType, Substitution, TypeDict, TypeID}, std::{f32::consts::TAU, ops::Deref}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum TypeTerm {
/* Atomic Terms */
// Base types from dictionary
TypeID(TypeID),
// Literals
Num(i64),
Char(char),
/* Complex Terms */
// Type Parameters
// avoid currying to save space & indirection
App(Vec< TypeTerm >),
// Type Ladders
Ladder(Vec< TypeTerm >),
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum VariableConstraint {
UnconstrainedType,
Subtype(TypeTerm),
Trait(TypeTerm),
Parallel(TypeTerm),
ValueUInt,
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct StructMember {
pub symbol: String,
pub ty: TypeTerm
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct EnumVariant {
pub symbol: String,
pub ty: TypeTerm
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum TypeTerm {
TypeID(TypeID),
Num(i64),
Char(char),
Univ(Box< VariableConstraint >, Box< TypeTerm >),
Spec(Vec< TypeTerm >),
Func(Vec< TypeTerm >),
Morph(Box< TypeTerm >, Box< TypeTerm >),
Ladder(Vec< TypeTerm >),
Struct{
struct_repr: Option< Box<TypeTerm> >,
members: Vec< StructMember >
},
Enum{
enum_repr: Option<Box< TypeTerm >>,
variants: Vec< EnumVariant >
},
Seq{
seq_repr: Option<Box< TypeTerm >>,
items: Vec< TypeTerm >
},
/*
Todo: Ref, RefMut
*/
}
impl TypeTerm {
pub fn into_morphism_type(self) -> Option< MorphismType > {
match self.normalize() {
TypeTerm::Univ(bound, τ) => {
let mut m = τ.into_morphism_type()?;
m.bounds.push(bound.deref().clone());
Some(m)
}
TypeTerm::Morph(src,dst) => {
Some(MorphismType {
bounds: Vec::new(),
src_type: src.deref().clone(),
dst_type: dst.deref().clone()
})
},
_ => None
}
}
}
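`into_morphism_type` unwraps nested universal quantifiers into the `bounds` list of a `MorphismType` and splits the innermost `Morph` into source and destination type. A sketch (test name made up; the bound here is just the unconstrained placeholder):

```rust
#[test]
fn sketch_into_morphism_type() {
    use crate::{dict::*, parser::*, term::*};

    let mut dict = BimapTypeDict::new();

    let t = TypeTerm::Univ(
        Box::new(VariableConstraint::UnconstrainedType),
        Box::new(TypeTerm::Morph(
            Box::new(dict.parse("A").unwrap()),
            Box::new(dict.parse("B").unwrap())
        ))
    );

    let m = t.into_morphism_type().expect("a morphism type");
    assert_eq!(m.src_type, dict.parse("A").unwrap());
    assert_eq!(m.dst_type, dict.parse("B").unwrap());
    assert_eq!(m.bounds.len(), 1);
}
```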
impl VariableConstraint {
pub fn normalize(&self) -> Self {
match self {
VariableConstraint::UnconstrainedType => VariableConstraint::UnconstrainedType,
VariableConstraint::Subtype(τ) => VariableConstraint::Subtype(τ.clone().normalize()),
VariableConstraint::Trait(τ) => VariableConstraint::Trait(τ.clone().normalize()),
VariableConstraint::Parallel(τ) => VariableConstraint::Parallel(τ.clone().normalize()),
VariableConstraint::ValueUInt => VariableConstraint::ValueUInt
}
}
pub fn apply_subst(&mut self, σ: &impl Substitution) -> &mut Self {
match self {
VariableConstraint::Subtype(type_term) => { type_term.apply_subst(σ); },
VariableConstraint::Trait(type_term) => { type_term.apply_subst(σ); },
VariableConstraint::Parallel(type_term) => { type_term.clone().apply_subst(σ); },
_ => {}
}
self
}
}
impl StructMember {
pub fn parse( dict: &mut impl TypeDict, ty: &DesugaredTypeTerm ) -> Option<Self> {
match ty {
DesugaredTypeTerm::App(args) => {
if args.len() != 2 {
return None;
}
/*
if args[0] != dict.parse("Struct.Field").expect("parse") {
return None;
}
*/
let symbol = match args[0] {
DesugaredTypeTerm::Char(c) => c.to_string(),
DesugaredTypeTerm::TypeID(id) => dict.get_typename(&id).expect("cant get member name"),
_ => {
return None;
}
};
let ty = args[1].clone().sugar(dict);
Some(StructMember { symbol, ty })
}
_ => {
None
}
}
}
}
impl EnumVariant {
pub fn parse( dict: &mut impl TypeDict, ty: &DesugaredTypeTerm ) -> Option<Self> {
match ty {
DesugaredTypeTerm::App(args) => {
if args.len() != 2 {
return None;
}
/*
if args[0] != dict.parse("Enum.Variant").expect("parse") {
return None;
}
*/
let symbol = match args[0] {
DesugaredTypeTerm::Char(c) => c.to_string(),
DesugaredTypeTerm::TypeID(id) => dict.get_typename(&id).expect("cant get member name"),
_ => {
return None;
}
};
let ty = args[1].clone().sugar(dict);
Some(EnumVariant { symbol, ty })
}
_ => {
None
}
}
}
}
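Struct members and enum variants are encoded as two-element applications `<symbol type>`; the parsers above pick the symbol out of the first slot. A sketch of what the sugared result of a struct type looks like (field names `x`/`y` and the member type are placeholders):

```rust
#[test]
fn sketch_struct_sugar() {
    use crate::{dict::*, parser::*, term::TypeTerm};

    let mut dict = BimapTypeDict::new();

    match dict.parse("<Struct <x UInt64> <y UInt64>>").unwrap() {
        TypeTerm::Struct { struct_repr: None, members } => {
            assert_eq!(members.len(), 2);
            assert_eq!(members[0].symbol, "x");
            assert_eq!(members[1].symbol, "y");
            assert_eq!(members[0].ty, dict.parse("UInt64").unwrap());
        }
        _ => panic!("expected a sugared struct type")
    }
}
```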
impl DesugaredTypeTerm {
pub fn sugar(self: DesugaredTypeTerm, dict: &mut impl crate::TypeDict) -> TypeTerm {
dict.add_varname("StructRepr".into());
dict.add_varname("EnumRepr".into());
dict.add_varname("SeqRepr".into());
match self {
DesugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
DesugaredTypeTerm::Num(n) => TypeTerm::Num(n),
DesugaredTypeTerm::Char(c) => TypeTerm::Char(c),
DesugaredTypeTerm::App(args) => if let Some(first) = args.first() {
if first == &dict.parse_desugared("Func").unwrap() {
TypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse_desugared("Morph").unwrap() {
if args.len() == 3 {
TypeTerm::Morph(
Box::new(args[1].clone().sugar(dict)),
Box::new(args[2].clone().sugar(dict)),
)
} else {
panic!("sugar: invalid arguments for morphism type")
}
}
else if first == &dict.parse_desugared("Seq").unwrap() {
TypeTerm::Seq{
seq_repr: None,
items: args[1..].into_iter()
.map(|t| t.clone().sugar(dict))
.collect()
}
}
else if first == &dict.parse_desugared("Struct").unwrap() {
TypeTerm::Struct{
struct_repr: None,
members: args[1..].into_iter()
.map(|t| StructMember::parse(dict, t).expect("cant parse field"))
.collect()
}
}
else if first == &dict.parse_desugared("Enum").unwrap() {
TypeTerm::Enum{
enum_repr: None,
variants: args[1..].into_iter()
.map(|t| EnumVariant::parse(dict, t).expect("cant parse variant"))
.collect()
}
}
else if let DesugaredTypeTerm::Ladder(mut rungs) = first.clone() {
if rungs.len() > 0 {
match rungs.remove(0) {
DesugaredTypeTerm::TypeID(tyid) => {
if tyid == dict.get_typeid(&"Seq".into()).expect("") {
TypeTerm::Seq {
seq_repr:
if rungs.len() > 0 {
Some(Box::new(
TypeTerm::Ladder(rungs.into_iter()
.map(|r| r.clone().sugar(dict))
.collect()
).normalize()
))
} else {
None
},
items: args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
}
} else if tyid == dict.get_typeid(&"Struct".into()).expect("") {
TypeTerm::Struct {
struct_repr:
if rungs.len() > 0 {
Some(Box::new(
TypeTerm::Ladder(rungs.into_iter()
.map(|r| r.clone().sugar(dict))
.collect()
).normalize()
))
} else {
None
},
members: args[1..].into_iter()
.map(|t| StructMember::parse(dict, t).expect("cant parse field"))
.collect()
}
} else if tyid == dict.get_typeid(&"Enum".into()).expect("") {
TypeTerm::Enum {
enum_repr:
if rungs.len() > 0 {
Some(Box::new(
TypeTerm::Ladder(rungs.into_iter()
.map(|r| r.clone().sugar(dict))
.collect()
).normalize()
))
} else {
None
},
variants: args[1..].into_iter()
.map(|t| EnumVariant::parse(dict, t).expect("cant parse field"))
.collect()
}
} else {
TypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
}
}
_ => {
unreachable!();
}
}
} else {
unreachable!();
}
}
else if first == &dict.parse_desugared("Spec").unwrap() {
TypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
}
else if first == &dict.parse_desugared("Univ").unwrap() {
TypeTerm::Univ(
// fixme: ignored bound, will be superseded by new parser
Box::new(VariableConstraint::UnconstrainedType),
Box::new(TypeTerm::Spec(args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()))
)
}
else {
TypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
}
} else {
TypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
},
DesugaredTypeTerm::Ladder(rungs) =>
TypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
}
}
}
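When the head of an application is itself a ladder, the rungs below `Seq`/`Struct`/`Enum` become the representation type of the container. A sketch of the `Seq` case (test name made up; the representation type is taken from the subtype tests above):

```rust
#[test]
fn sketch_seq_repr_sugar() {
    use crate::{dict::*, parser::*, term::TypeTerm};

    let mut dict = BimapTypeDict::new();

    match dict.parse("<Seq~<LengthPrefix x86.UInt64> Char>").unwrap() {
        TypeTerm::Seq { seq_repr: Some(repr), items } => {
            assert_eq!(*repr, dict.parse("<LengthPrefix x86.UInt64>").unwrap());
            assert_eq!(items, vec![ dict.parse("Char").unwrap() ]);
        }
        _ => panic!("expected a sequence type with explicit representation")
    }
}
```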
impl StructMember {
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> DesugaredTypeTerm {
DesugaredTypeTerm::App(vec![
//dict.parse("Struct.Field").expect("parse"),
dict.parse_desugared(&self.symbol).expect("parse"),
self.ty.desugar(dict)
])
}
}
impl EnumVariant {
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> DesugaredTypeTerm {
DesugaredTypeTerm::App(vec![
//dict.parse("Enum.Variant").expect("parse"),
dict.parse_desugared(&self.symbol).expect("parse"),
self.ty.desugar(dict)
])
}
}
impl TypeTerm {
pub fn unit() -> Self {
TypeTerm::Ladder(vec![])
}
pub fn new(id: TypeID) -> Self {
TypeTerm::TypeID(id)
}
pub fn arg(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> DesugaredTypeTerm {
match self {
TypeTerm::App(args) => {
args.push(t.into());
}
_ => {
*self = TypeTerm::App(vec![
self.clone(),
t.into()
])
}
TypeTerm::TypeID(id) => DesugaredTypeTerm::TypeID(id),
TypeTerm::Num(n) => DesugaredTypeTerm::Num(n),
TypeTerm::Char(c) => DesugaredTypeTerm::Char(c),
TypeTerm::Univ(bound, t) => t.desugar(dict), // <- fixme: missing bound
TypeTerm::Spec(ts) => DesugaredTypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
TypeTerm::Ladder(ts) => DesugaredTypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
TypeTerm::Func(ts) => DesugaredTypeTerm::App(
std::iter::once( dict.parse_desugared("Func").unwrap() ).chain(
ts.into_iter().map(|t| t.desugar(dict))
).collect()),
TypeTerm::Morph(src,dst) => DesugaredTypeTerm::App(vec![
dict.parse_desugared("Morph").unwrap(),
src.desugar(dict),
dst.desugar(dict)
]),
TypeTerm::Struct{ struct_repr, members } => DesugaredTypeTerm::App(
std::iter::once(
if let Some(sr) = struct_repr {
DesugaredTypeTerm::Ladder(vec![
dict.parse_desugared("Struct").unwrap(),
sr.desugar(dict)
])
} else {
dict.parse_desugared("Struct").unwrap()
}
).chain(
members.into_iter().map(|t| t.desugar(dict))
).collect()),
TypeTerm::Enum{ enum_repr, variants } => DesugaredTypeTerm::App(
std::iter::once(
if let Some(sr) = enum_repr {
DesugaredTypeTerm::Ladder(vec![
dict.parse_desugared("Enum").unwrap(),
sr.desugar(dict)
])
} else {
dict.parse_desugared("Enum").unwrap()
}
).chain(
variants.into_iter().map(|t| t.desugar(dict))
).collect()),
TypeTerm::Seq{ seq_repr, items } => DesugaredTypeTerm::App(
std::iter::once(
if let Some(sr) = seq_repr {
DesugaredTypeTerm::Ladder(vec![
dict.parse_desugared("Seq").unwrap(),
sr.desugar(dict)
])
} else {
dict.parse_desugared("Seq").unwrap()
}
).chain(
items.into_iter().map(|t| t.desugar(dict))
).collect()),
}
self
}
pub fn repr_as(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
match self {
TypeTerm::Ladder(rungs) => {
rungs.push(t.into());
}
_ => {
*self = TypeTerm::Ladder(vec![
self.clone(),
t.into()
])
}
}
self
}
pub fn num_arg(&mut self, v: i64) -> &mut Self {
self.arg(TypeTerm::Num(v))
}
pub fn char_arg(&mut self, c: char) -> &mut Self {
self.arg(TypeTerm::Char(c))
}
pub fn contains_var(&self, var_id: u64) -> bool {
match self {
TypeTerm::TypeID(TypeID::Var(v)) => (&var_id == v),
TypeTerm::App(args) |
TypeTerm::Spec(args) |
TypeTerm::Func(args) |
TypeTerm::Ladder(args) => {
for a in args.iter() {
if a.contains_var(var_id) {
@ -91,15 +390,67 @@ impl TypeTerm {
}
false
}
_ => false
TypeTerm::Morph(src,dst) => {
src.contains_var(var_id) || dst.contains_var(var_id)
}
TypeTerm::Univ(bound,t) => {
// todo: capture avoidance (via debruijn)
t.contains_var(var_id)
}
TypeTerm::Struct { struct_repr, members } => {
if let Some(struct_repr) = struct_repr {
if struct_repr.contains_var(var_id) {
return true;
}
}
for StructMember{ symbol, ty } in members {
if ty.contains_var(var_id) {
return true;
}
}
false
}
TypeTerm::Enum { enum_repr, variants } => {
if let Some(enum_repr) = enum_repr {
if enum_repr.contains_var(var_id) {
return true;
}
}
for EnumVariant{ symbol, ty } in variants {
if ty.contains_var(var_id) {
return true;
}
}
false
}
TypeTerm::Seq { seq_repr, items } => {
if let Some(seq_repr) = seq_repr {
if seq_repr.contains_var(var_id) {
return true;
}
}
for ty in items {
if ty.contains_var(var_id) {
return true;
}
}
false
}
TypeTerm::Num(_) |
TypeTerm::Char(_) |
TypeTerm::TypeID(TypeID::Fun(_)) => false
}
}
pub fn strip(self) -> TypeTerm {
if self.is_empty() {
return TypeTerm::unit();
}
/* strip away empty ladders
* & unwrap singletons
*/
pub fn strip(self) -> Self {
match self {
TypeTerm::Ladder(rungs) => {
let mut rungs :Vec<_> = rungs.into_iter()
@ -122,16 +473,51 @@ impl TypeTerm {
TypeTerm::Ladder(rungs)
}
},
TypeTerm::App(args) => {
TypeTerm::Spec(args) => {
let mut args :Vec<_> = args.into_iter().map(|arg| arg.strip()).collect();
if args.len() == 0 {
TypeTerm::unit()
} else if args.len() == 1 {
args.pop().unwrap()
} else {
TypeTerm::App(args)
TypeTerm::Spec(args)
}
}
TypeTerm::Func(args) => TypeTerm::Func(args.into_iter().map(|arg| arg.strip()).collect()),
TypeTerm::Morph(src, dst) => TypeTerm::Morph(Box::new(src.strip()), Box::new(dst.strip())),
TypeTerm::Seq{ mut seq_repr, mut items } => {
if let Some(seq_repr) = seq_repr.as_mut() {
*seq_repr = Box::new(seq_repr.clone().strip());
}
for i in items.iter_mut() {
*i = i.clone().strip();
}
TypeTerm::Seq { seq_repr, items }
}
TypeTerm::Struct { mut struct_repr, mut members } => {
if let Some(struct_repr) = struct_repr.as_mut() {
*struct_repr = Box::new(struct_repr.clone().strip());
}
for m in members.iter_mut() {
m.ty = m.ty.clone().strip();
}
TypeTerm::Struct { struct_repr, members }
},
TypeTerm::Enum { mut enum_repr, mut variants } => {
if let Some(enum_repr) = enum_repr.as_mut() {
*enum_repr = Box::new(enum_repr.clone().strip());
}
for v in variants.iter_mut() {
v.ty = v.ty.clone().strip();
}
TypeTerm::Enum { enum_repr, variants }
},
atom => atom
}
}
@ -145,12 +531,147 @@ impl TypeTerm {
TypeTerm::unit()
}
}
TypeTerm::App(args) => {
TypeTerm::App(args.iter().map(|a| a.get_interface_type()).collect())
TypeTerm::Spec(args)
=> TypeTerm::Spec(args.iter().map(|a| a.get_interface_type()).collect()),
TypeTerm::Func(args)
=> TypeTerm::Func(args.iter().map(|a| a.get_interface_type()).collect()),
TypeTerm::Morph(src,dst)
=> TypeTerm::Morph(
Box::new(src.get_interface_type()),
Box::new(dst.get_interface_type())
),
TypeTerm::Univ(bound, t)
=> TypeTerm::Univ(bound.clone(), Box::new(t.get_interface_type())),
TypeTerm::Seq { seq_repr, items } => {
TypeTerm::Seq {
seq_repr: if let Some(sr) = seq_repr {
Some(Box::new(sr.clone().get_interface_type()))
} else { None },
items: items.iter().map(|t| t.get_interface_type()).collect()
}
}
TypeTerm::Struct { struct_repr, members } => {
TypeTerm::Struct {
struct_repr: if let Some(sr) = struct_repr {
Some(Box::new(sr.clone().get_interface_type()))
} else { None },
members: members.iter()
.map(|StructMember{symbol,ty}|
StructMember {symbol:symbol.clone(), ty:ty.get_interface_type() })
.collect()
}
}
TypeTerm::Enum { enum_repr, variants } => {
TypeTerm::Enum {
enum_repr: if let Some(sr) = enum_repr {
Some(Box::new(sr.clone().get_interface_type()))
} else { None },
variants: variants.iter()
.map(|EnumVariant{symbol,ty}|
EnumVariant{ symbol:symbol.clone(), ty:ty.get_interface_type() })
.collect()
}
}
TypeTerm::TypeID(tyid) => TypeTerm::TypeID(tyid.clone()),
TypeTerm::Num(n) => TypeTerm::Num(*n),
TypeTerm::Char(c) => TypeTerm::Char(*c)
}
}
pub fn get_floor_type(&self) -> (TypeTerm, TypeTerm) {
match self.clone() {
TypeTerm::Ladder(mut rungs) => {
if let Some(bot) = rungs.pop() {
let (bot_ψ, bot_floor) = bot.get_floor_type();
rungs.push(bot_ψ);
(TypeTerm::Ladder(rungs).strip(), bot_floor.strip())
} else {
(TypeTerm::unit(), TypeTerm::unit())
}
}
/*
SugaredTypeTerm::Spec(args)
=> (SugaredTypeTerm::SugaredTypeTerm::Spec(args.iter().map(|a| a.get_floor_type()).collect()),
SugaredTypeTerm::Func(args)
=> SugaredTypeTerm::Func(args.iter().map(|a| a.get_floor_type()).collect()),
SugaredTypeTerm::Morph(args)
=> SugaredTypeTerm::Spec(args.iter().map(|a| a.get_floor_type()).collect()),
SugaredTypeTerm::Univ(t)
=> SugaredTypeTerm::Univ(Box::new(t.get_floor_type())),
SugaredTypeTerm::Seq { seq_repr, items } => {
SugaredTypeTerm::Seq {
seq_repr: if let Some(sr) = seq_repr {
Some(Box::new(sr.clone().get_floor_type()))
} else { None },
items: items.iter().map(|t| t.get_floor_type()).collect()
}
}
SugaredTypeTerm::Struct { struct_repr, members } => {
SugaredTypeTerm::Struct {
struct_repr: if let Some(sr) = struct_repr {
Some(Box::new(sr.clone().get_floor_type()))
} else { None },
members: members.iter()
.map(|SugaredStructMember{symbol,ty}|
SugaredStructMember {symbol:symbol.clone(), ty:ty.get_floor_type() })
.collect()
}
}
SugaredTypeTerm::Enum { enum_repr, variants } => {
SugaredTypeTerm::Enum {
enum_repr: if let Some(sr) = enum_repr {
Some(Box::new(sr.clone().get_floor_type()))
} else { None },
variants: variants.iter()
.map(|SugaredEnumVariant{symbol,ty}|
SugaredEnumVariant{ symbol:symbol.clone(), ty:ty.get_floor_type() })
.collect()
}
}
SugaredTypeTerm::TypeID(tyid) => SugaredTypeTerm::TypeID(tyid.clone()),
SugaredTypeTerm::Num(n) => SugaredTypeTerm::Num(*n),
SugaredTypeTerm::Char(c) => SugaredTypeTerm::Char(*c)
*/
other => (TypeTerm::unit(), other.clone().strip())
}
}
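`get_floor_type` peels the bottom rung off a ladder and returns it together with the remaining upper part ψ. A sketch (test name and type names made up; it relies on `strip()` removing the empty placeholder rung, as in the ladder case above):

```rust
#[test]
fn sketch_floor_type() {
    use crate::{dict::*, parser::*};

    let mut dict = BimapTypeDict::new();

    let (ψ, floor) = dict.parse("A ~ B ~ C").unwrap().get_floor_type();
    assert_eq!(ψ, dict.parse("A ~ B").unwrap());
    assert_eq!(floor, dict.parse("C").unwrap());
}
```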
pub fn is_empty(&self) -> bool {
match self {
TypeTerm::TypeID(_) => false,
TypeTerm::Num(_) => false,
TypeTerm::Char(_) => false,
TypeTerm::Univ(bound, t) => t.is_empty(),
TypeTerm::Spec(ts) |
TypeTerm::Ladder(ts) |
TypeTerm::Func(ts) => {
ts.iter().fold(true, |s,t| s && t.is_empty() )
}
TypeTerm::Morph(src,dst) => {
src.is_empty() && dst.is_empty()
}
TypeTerm::Seq{ seq_repr, items } => {
items.iter().fold(true, |s,t| s && t.is_empty() )
}
TypeTerm::Struct{ struct_repr, members } => {
members.iter()
.fold(true, |s,member_decl| s && member_decl.ty.is_empty() )
}
TypeTerm::Enum{ enum_repr, variants } => {
variants.iter()
.fold(true, |s,variant_decl| s && variant_decl.ty.is_empty() )
}
}
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

View file

@ -0,0 +1,158 @@
use {
crate::{dict::*, parser::*,
constraint_system::{
ConstraintSystem,
ConstraintPair,
ConstraintError
}
}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
let mut dict = BimapTypeDict::new();
dict.add_varname(String::from("T"));
dict.add_varname(String::from("U"));
dict.add_varname(String::from("V"));
dict.add_varname(String::from("W"));
let mut t1 = dict.parse(ts1).unwrap();
let mut t2 = dict.parse(ts2).unwrap();
let σ = crate::unify( &t1, &t2 );
if expect_unificator {
assert!(σ.is_ok());
let σ = σ.unwrap();
assert_eq!(
t1.apply_subst(&σ),
t2.apply_subst(&σ)
);
} else {
assert!(! σ.is_ok());
}
}
#[test]
fn test_unification_error() {
let mut dict = BimapTypeDict::new();
dict.add_varname(String::from("T"));
assert_eq!(
crate::unify(
&dict.parse("<A T>").unwrap(),
&dict.parse("<B T>").unwrap()
),
Err(ConstraintError {
addr: vec![0],
t1: dict.parse("A").unwrap(),
t2: dict.parse("B").unwrap()
})
);
assert_eq!(
crate::unify(
&dict.parse("<V <U A> T>").unwrap(),
&dict.parse("<V <U B> T>").unwrap()
),
Err(ConstraintError {
addr: vec![1, 1],
t1: dict.parse("A").unwrap(),
t2: dict.parse("B").unwrap()
})
);
assert_eq!(
crate::unify(
&dict.parse("T").unwrap(),
&dict.parse("<Seq T>").unwrap()
),
Err(ConstraintError {
addr: vec![],
t1: dict.parse("T").unwrap(),
t2: dict.parse("<Seq T>").unwrap()
})
);
}
#[test]
fn test_unification() {
test_unify("A", "A", true);
test_unify("A", "B", false);
test_unify("<Seq T>", "<Seq Ascii~Char>", true);
// this worked easily with desugared terms,
// but is a weird edge case with sugared terms
// not relevant now
//test_unify("<Seq T>", "<U Char>", true);
test_unify(
"<Seq Path~<Seq Char>>~<SepSeq Char '\\n'>~<Seq Char>",
"<Seq T~<Seq Char>>~<SepSeq Char '\\n'>~<Seq Char>",
true
);
let mut dict = BimapTypeDict::new();
dict.add_varname(String::from("T"));
dict.add_varname(String::from("U"));
dict.add_varname(String::from("V"));
dict.add_varname(String::from("W"));
assert_eq!(
ConstraintSystem::new_eq(vec![
ConstraintPair {
addr: Vec::new(),
lhs: dict.parse("U").unwrap(),
rhs: dict.parse("<Seq Char>").unwrap()
},
ConstraintPair {
addr: Vec::new(),
lhs: dict.parse("T").unwrap(),
rhs: dict.parse("<Seq U>").unwrap()
}
]).solve(),
Ok((
vec![],
vec![
// T
(TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),
// U
(TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
].into_iter().collect()
))
);
assert_eq!(
ConstraintSystem::new_eq(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Seq T>").unwrap(),
rhs : dict.parse("<Seq W~<Seq Char>>").unwrap()
},
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Seq >").unwrap(),
rhs : dict.parse("<Seq W>").unwrap(),
}
]).solve(),
Ok((
vec![],
vec![
// W
(TypeID::Var(3), dict.parse("").unwrap()),
// T
(TypeID::Var(0), dict.parse("~<Seq Char>").unwrap())
].into_iter().collect()
))
);
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

View file

@ -0,0 +1,59 @@
use {
crate::{dict::*, parser::*,
constraint_system::{
ConstraintSystem,
ConstraintPair,
ConstraintError
}
}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[test]
fn test_mixed_trait_bounds() {
let mut dict = BimapTypeDict::new();
dict.add_varname("T".into());
dict.add_varname("Radix".into());
assert_eq!(
ConstraintSystem::new(
// eq constraints
vec![],
// sub constraints
vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>~Char~native.UInt8>").unwrap(),
rhs : dict.parse("<Seq T>").unwrap()
}
],
// trait constraints
vec![
ConstraintPair {
addr: Vec::new(),
lhs: dict.parse("T").unwrap(),
rhs: dict.parse("<Digit Radix>").unwrap(),
}
],
// parallel constraints
vec![]
).solve(),
Ok((
// ψ
vec![
dict.parse(" ~ <PosInt 10 BigEndian>").unwrap()
],
// σ
vec![
(dict.get_typeid(&"T".into()).unwrap(), dict.parse("<Digit 10>~Char~native.UInt8").unwrap()),
(dict.get_typeid(&"Radix".into()).unwrap(), dict.parse("10").unwrap())
].into_iter().collect()
))
);
}

View file

@ -0,0 +1,6 @@
pub mod eq_constraint;
pub mod sub_constraint;
pub mod trait_constraint;
pub mod par_cornstraint;
pub mod value_constraint;
pub mod mixed;

View file

@ -0,0 +1,11 @@
use {
crate::{dict::*, parser::*,
constraint_system::{
ConstraintSystem,
ConstraintPair,
ConstraintError
}
}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

View file

@ -1,145 +1,31 @@
use {
crate::{dict::*, parser::*, unparser::*, term::*, unification::*},
std::iter::FromIterator
crate::{dict::*, term::*, parser::*,
constraint_system::{
subtype_unify,
ConstraintSystem,
ConstraintPair,
ConstraintError
}
}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
let mut dict = BimapTypeDict::new();
dict.add_varname(String::from("T"));
dict.add_varname(String::from("U"));
dict.add_varname(String::from("V"));
dict.add_varname(String::from("W"));
let mut t1 = dict.parse(ts1).unwrap();
let mut t2 = dict.parse(ts2).unwrap();
let σ = crate::unify( &t1, &t2 );
if expect_unificator {
assert!(σ.is_ok());
let σ = σ.unwrap();
assert_eq!(
t1.apply_subst(&σ),
t2.apply_subst(&σ)
);
} else {
assert!(! σ.is_ok());
}
}
#[test]
fn test_unification_error() {
let mut dict = BimapTypeDict::new();
dict.add_varname(String::from("T"));
assert_eq!(
crate::unify(
&dict.parse("<A T>").unwrap(),
&dict.parse("<B T>").unwrap()
),
Err(UnificationError {
addr: vec![0],
t1: dict.parse("A").unwrap(),
t2: dict.parse("B").unwrap()
})
);
assert_eq!(
crate::unify(
&dict.parse("<V <U A> T>").unwrap(),
&dict.parse("<V <U B> T>").unwrap()
),
Err(UnificationError {
addr: vec![1, 1],
t1: dict.parse("A").unwrap(),
t2: dict.parse("B").unwrap()
})
);
assert_eq!(
crate::unify(
&dict.parse("T").unwrap(),
&dict.parse("<Seq T>").unwrap()
),
Err(UnificationError {
addr: vec![],
t1: dict.parse("T").unwrap(),
t2: dict.parse("<Seq T>").unwrap()
})
);
}
#[test]
fn test_unification() {
test_unify("A", "A", true);
test_unify("A", "B", false);
test_unify("<Seq T>", "<Seq Ascii~Char>", true);
test_unify("<Seq T>", "<U Char>", true);
test_unify(
"<Seq Path~<Seq Char>>~<SepSeq Char '\n'>~<Seq Char>",
"<Seq T~<Seq Char>>~<SepSeq Char '\n'>~<Seq Char>",
true
);
let mut dict = BimapTypeDict::new();
dict.add_varname(String::from("T"));
dict.add_varname(String::from("U"));
dict.add_varname(String::from("V"));
dict.add_varname(String::from("W"));
assert_eq!(
UnificationProblem::new_eq(vec![
(dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
(dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
]).solve(),
Ok((
vec![],
vec![
// T
(TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),
// U
(TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
].into_iter().collect()
))
);
assert_eq!(
UnificationProblem::new_eq(vec![
(dict.parse("<Seq T>").unwrap(), dict.parse("<Seq W~<Seq Char>>").unwrap()),
(dict.parse("<Seq >").unwrap(), dict.parse("<Seq W>").unwrap()),
]).solve(),
Ok((
vec![],
vec![
// W
(TypeID::Var(3), dict.parse("").unwrap()),
// T
(TypeID::Var(0), dict.parse("~<Seq Char>").unwrap())
].into_iter().collect()
))
);
}
/*
Only Ladders
*/
#[test]
fn test_subtype_unification1() {
let mut dict = BimapTypeDict::new();
dict.add_varname(String::from("T"));
assert_eq!(
UnificationProblem::new_sub(vec![
(dict.parse("A ~ B").unwrap(),
dict.parse("B").unwrap()),
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A ~ B").unwrap(),
rhs : dict.parse("B").unwrap()
}
]).solve(),
Ok((
vec![ dict.parse("A").unwrap() ],
@ -148,9 +34,12 @@ fn test_subtype_unification1() {
);
assert_eq!(
UnificationProblem::new_sub(vec![
(dict.parse("A ~ B ~ C ~ D").unwrap(),
dict.parse("C ~ D").unwrap()),
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A ~ B ~ C ~ D").unwrap(),
rhs : dict.parse("C ~ D").unwrap()
}
]).solve(),
Ok((
vec![ dict.parse("A ~ B").unwrap() ],
@ -159,22 +48,29 @@ fn test_subtype_unification1() {
);
assert_eq!(
UnificationProblem::new_sub(vec![
(dict.parse("A ~ B ~ C ~ D").unwrap(),
dict.parse("T ~ D").unwrap()),
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A ~ B ~ C ~ D").unwrap(),
rhs : dict.parse("T ~ D").unwrap()
}
]).solve(),
Ok((
vec![ TypeTerm::unit() ],
vec![
(dict.get_typeid(&"T".into()).unwrap(), dict.parse("A ~ B ~ C").unwrap())
(dict.get_typeid(&"T".into()).unwrap(),
dict.parse("A ~ B ~ C").unwrap())
].into_iter().collect()
))
);
assert_eq!(
UnificationProblem::new_sub(vec![
(dict.parse("A ~ B ~ C ~ D").unwrap(),
dict.parse("B ~ T ~ D").unwrap()),
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A ~ B ~ C ~ D").unwrap(),
rhs : dict.parse("B ~ T ~ D").unwrap(),
}
]).solve(),
Ok((
vec![ dict.parse("A").unwrap() ],
@ -185,6 +81,9 @@ fn test_subtype_unification1() {
);
}
/*
Variables
*/
#[test]
fn test_subtype_unification2() {
let mut dict = BimapTypeDict::new();
@ -195,9 +94,12 @@ fn test_subtype_unification2() {
dict.add_varname(String::from("W"));
assert_eq!(
UnificationProblem::new_sub(vec![
(dict.parse("<Seq~T <Digit 10> ~ Char ~ Ascii>").unwrap(),
dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap()),
ConstraintSystem::new_sub(vec![
ConstraintPair{
addr: Vec::new(),
lhs: dict.parse("<Seq~T <Digit 10> ~ Char ~ Ascii>").unwrap(),
rhs: dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap(),
}
]).solve(),
Ok((
vec![
@ -211,9 +113,17 @@ fn test_subtype_unification2() {
);
assert_eq!(
UnificationProblem::new_sub(vec![
(dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
(dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs: dict.parse("U").unwrap(),
rhs: dict.parse("<Seq Char>").unwrap()
},
ConstraintPair {
addr : Vec::new(),
lhs : dict.parse("T").unwrap(),
rhs : dict.parse("<Seq U>").unwrap(),
}
]).solve(),
Ok((
vec![
@ -231,11 +141,17 @@ fn test_subtype_unification2() {
);
assert_eq!(
UnificationProblem::new_sub(vec![
(dict.parse("<Seq T>").unwrap(),
dict.parse("<Seq W~<Seq Char>>").unwrap()),
(dict.parse("<Seq~<LengthPrefix x86.UInt64> ~<PosInt 10 BigEndian>>").unwrap(),
dict.parse("<<LengthPrefix x86.UInt64> W>").unwrap()),
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Seq T>").unwrap(),
rhs : dict.parse("<Seq W~<Seq Char>>").unwrap(),
},
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Seq~<LengthPrefix x86.UInt64> ~<PosInt 10 BigEndian>>").unwrap(),
rhs : dict.parse("<<LengthPrefix x86.UInt64> W>").unwrap()
}
]).solve(),
Ok((
vec![
@ -275,7 +191,59 @@ fn test_subtype_unification2() {
);
}
/*
Subtypes in some rungs
*/
#[test]
fn test_subtype_unification3() {
let mut dict = BimapTypeDict::new();
assert_eq!(
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs: dict.parse("<A1~A2 B C D1~D2 E F1~F2>").expect("parse"),
rhs: dict.parse("<A2 B C D2 E F2>").expect("parse")
}
]).solve(),
Ok((
// halo
vec![
dict.parse("<A1~A2 B C D1~D2 E F1>").expect("parse")
],
// subst
vec![
].into_iter().collect()
))
);
assert_eq!(
ConstraintSystem::new_sub(vec![
ConstraintPair {
addr: Vec::new(),
lhs: dict.parse("<Seq~List B C D1~D2 E F1~F2>").expect("parse"),
rhs: dict.parse("<List B C D2 E F2>").expect("parse")
}
]).solve(),
Ok((
// halo
vec![
dict.parse("<Seq~List B C D1~D2 E F1>").expect("parse")
],
// subst
vec![
].into_iter().collect()
))
);
}
/*
Not a Subtype!
*/
#[test]
fn test_trait_not_subtype() {
let mut dict = BimapTypeDict::new();
@ -285,7 +253,7 @@ fn test_trait_not_subtype() {
&dict.parse("A ~ B").expect(""),
&dict.parse("A ~ B ~ C").expect("")
),
Err(UnificationError {
Err(ConstraintError {
addr: vec![1],
t1: dict.parse("B").expect(""),
t2: dict.parse("C").expect("")
@ -297,14 +265,17 @@ fn test_trait_not_subtype() {
&dict.parse("<Seq~List~Vec <Digit 10>~Char>").expect(""),
&dict.parse("<Seq~List~Vec Char~ReprTree>").expect("")
),
Err(UnificationError {
addr: vec![1,1],
Err(ConstraintError {
addr: vec![1],
t1: dict.parse("Char").expect(""),
t2: dict.parse("ReprTree").expect("")
})
);
}
/*
subtype inside a sequence item
*/
#[test]
fn test_reprtree_list_subtype() {
let mut dict = BimapTypeDict::new();
@ -333,29 +304,31 @@ pub fn test_subtype_delim() {
dict.add_varname(String::from("Delim"));
assert_eq!(
UnificationProblem::new_sub(vec![
ConstraintSystem::new_sub(vec![
(
//given type
dict.parse("
ConstraintPair {
addr: Vec::new(),
// given type
lhs : dict.parse("
< Seq <Seq <Digit 10>~Char~Ascii~UInt8> >
~ < ValueSep ':' Char~Ascii~UInt8 >
~ < Seq~<LengthPrefix UInt64> Char~Ascii~UInt8 >
").expect(""),
//expected type
dict.parse("
// expected type
rhs : dict.parse("
< Seq <Seq T> >
~ < ValueSep Delim T >
~ < Seq~<LengthPrefix UInt64> T >
").expect("")
),
},
// subtype bounds
(
dict.parse("T").expect(""),
dict.parse("UInt8").expect("")
),
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("T").expect(""),
rhs : dict.parse("UInt8").expect("")
},
/* todo
(
dict.parse("<TypeOf Delim>").expect(""),
@ -0,0 +1,142 @@
use {
crate::{dict::*, parser::*,
constraint_system::{
ConstraintSystem,
ConstraintPair,
ConstraintError
}
}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[test]
fn test_trait_bound1() {
let mut dict = BimapTypeDict::new();
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A ~ B").unwrap(),
rhs : dict.parse("A").unwrap()
}
]).solve(),
Ok((
vec![],
vec![].into_iter().collect()
))
);
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A ~ B").unwrap(),
rhs : dict.parse("B").unwrap()
}
]).solve(),
Ok((
vec![],
vec![].into_iter().collect()
))
);
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A").unwrap(),
rhs : dict.parse("B").unwrap()
}
]).solve(),
Err(ConstraintError { addr: vec![], t1: dict.parse("A").unwrap(), t2: dict.parse("B").unwrap() })
);
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A").unwrap(),
rhs : dict.parse("A~B").unwrap()
}
]).solve(),
Err(ConstraintError { addr: vec![], t1: dict.parse("A").unwrap(), t2: dict.parse("A~B").unwrap() })
);
}
#[test]
fn test_trait_bound_spec() {
let mut dict = BimapTypeDict::new();
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("A ~ <B~C D> ~ E").unwrap(),
rhs : dict.parse("<B D>").unwrap()
}
]).solve(),
Ok((
vec![],
vec![].into_iter().collect()
))
);
}
#[test]
fn test_trait_bound_struct() {
let mut dict = BimapTypeDict::new();
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
rhs : dict.parse("<Struct <a S> <b T>>").unwrap()
}
]).solve(),
Ok((
vec![],
vec![].into_iter().collect()
))
);
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
rhs : dict.parse("<Struct <a S>>").unwrap()
}
]).solve(),
Ok((
vec![],
vec![].into_iter().collect()
))
);
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
rhs : dict.parse("<Struct <a A>>").unwrap()
}
]).solve(),
Ok((
vec![],
vec![].into_iter().collect()
))
);
assert_eq!(
ConstraintSystem::new_trait(vec![
ConstraintPair {
addr: Vec::new(),
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
rhs : dict.parse("<Struct <a T>>").unwrap()
}
]).solve(),
Err(ConstraintError { addr: vec![0], t1: dict.parse("S~A").unwrap(), t2: dict.parse("T").unwrap() })
);
}
@ -0,0 +1,11 @@
use {
crate::{dict::*, parser::*,
constraint_system::{
ConstraintSystem,
ConstraintPair,
ConstraintError
}
}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@ -9,25 +9,25 @@ fn test_curry() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("<A B C>").unwrap().curry(),
dict.parse("<<A B> C>").unwrap()
dict.parse_desugared("<A B C>").unwrap().curry(),
dict.parse_desugared("<<A B> C>").unwrap()
);
assert_eq!(
dict.parse("<A B C D>").unwrap().curry(),
dict.parse("<<<A B> C> D>").unwrap()
dict.parse_desugared("<A B C D>").unwrap().curry(),
dict.parse_desugared("<<<A B> C> D>").unwrap()
);
assert_eq!(
dict.parse("<A B C D E F G H I J K>").unwrap().curry(),
dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
dict.parse_desugared("<A B C D E F G H I J K>").unwrap().curry(),
dict.parse_desugared("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
);
assert_eq!(
dict.parse("<A~X B C>").unwrap().curry(),
dict.parse("<<A~X B> C>").unwrap()
dict.parse_desugared("<A~X B C>").unwrap().curry(),
dict.parse_desugared("<<A~X B> C>").unwrap()
);
assert_eq!(
dict.parse("<A B C~Y~Z> ~ K").unwrap().curry(),
dict.parse("< <A B> C~Y~Z > ~ K").unwrap()
dict.parse_desugared("<A B C~Y~Z> ~ K").unwrap().curry(),
dict.parse_desugared("< <A B> C~Y~Z > ~ K").unwrap()
);
}
@ -36,25 +36,25 @@ fn test_decurry() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("<<A B> C>").unwrap().decurry(),
dict.parse("<A B C>").unwrap()
dict.parse_desugared("<<A B> C>").unwrap().decurry(),
dict.parse_desugared("<A B C>").unwrap()
);
assert_eq!(
dict.parse("<<<A B> C> D>").unwrap().decurry(),
dict.parse("<A B C D>").unwrap(),
dict.parse_desugared("<<<A B> C> D>").unwrap().decurry(),
dict.parse_desugared("<A B C D>").unwrap(),
);
assert_eq!(
dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
dict.parse("<A B C D E F G H I J K>").unwrap()
dict.parse_desugared("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
dict.parse_desugared("<A B C D E F G H I J K>").unwrap()
);
assert_eq!(
dict.parse("<<A~X B> C>").unwrap().decurry(),
dict.parse("<A~X B C>").unwrap()
dict.parse_desugared("<<A~X B> C>").unwrap().decurry(),
dict.parse_desugared("<A~X B C>").unwrap()
);
assert_eq!(
dict.parse("<<A~X B> C~Y>~K").unwrap().decurry(),
dict.parse("<A~X B C~Y> ~K").unwrap()
dict.parse_desugared("<<A~X B> C~Y>~K").unwrap().decurry(),
dict.parse_desugared("<A~X B C~Y> ~K").unwrap()
);
}
@ -1,56 +0,0 @@
use crate::{dict::{BimapTypeDict}, parser::*};
#[test]
fn test_flat() {
let mut dict = BimapTypeDict::new();
assert!( dict.parse("A").expect("parse error").is_flat() );
assert!( dict.parse("10").expect("parse error").is_flat() );
assert!( dict.parse("'x'").expect("parse error").is_flat() );
assert!( dict.parse("<A B 23>").expect("parse error").is_flat() );
assert!( dict.parse("<A <B C 'x' D>>").expect("parse error").is_flat() );
assert!( ! dict.parse("A~B").expect("parse error").is_flat() );
assert!( ! dict.parse("<A B~C>").expect("parse error").is_flat() );
assert!( ! dict.parse("<A <B C~X D>>").expect("parse error").is_flat() );
}
#[test]
fn test_normalize() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("A~B~C").expect("parse error").normalize(),
dict.parse("A~B~C").expect("parse errror"),
);
assert_eq!(
dict.parse("<A B>~C").expect("parse error").normalize(),
dict.parse("<A B>~C").expect("parse errror"),
);
assert_eq!(
dict.parse("<A B~C>").expect("parse error").normalize(),
dict.parse("<A B>~<A C>").expect("parse errror"),
);
assert_eq!(
dict.parse("<A B~C D~E>").expect("parse error").normalize(),
dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror"),
);
assert_eq!(
dict.parse("<Seq <Digit 10>~Char>").expect("parse error").normalize(),
dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror"),
);
assert_eq!(
dict.parse("<A <B C~D~E> F~G H H>").expect("parse error").normalize(),
dict.parse("<A <B C> F H H>
~<A <B D> F H H>
~<A <B E> F H H>
~<A <B E> G H H>").expect("parse errror"),
);
}
@ -2,10 +2,7 @@
pub mod lexer;
pub mod parser;
pub mod curry;
pub mod lnf;
pub mod pnf;
pub mod subtype;
pub mod substitution;
pub mod unification;
pub mod morphism;
pub mod constraint_system;
@ -1,5 +1,11 @@
use {
crate::{dict::*, morphism::*, parser::*, unparser::*, TypeTerm, morphism_base::*, morphism_path::*}
crate::{dict::*, morphism_base::MorphismBase,
morphism_path::ShortestPathProblem,
morphism::{MorphismInstance, Morphism, MorphismType},
parser::*, TypeTerm,
DesugaredTypeTerm
},
std::collections::HashMap
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@ -10,7 +16,7 @@ fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl
for (k,v) in m.iter() {
eprintln!(" {} --> {}",
dict.get_typename(k).unwrap(),
dict.unparse(v)
v.pretty(dict, 0)
);
}
@ -20,47 +26,30 @@ fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl
fn print_path(dict: &mut impl TypeDict, path: &Vec<MorphismInstance<DummyMorphism>>) {
for n in path.iter() {
eprintln!("
ψ = {}
morph {}
--> {}
with
",
n.halo.clone().sugar(dict).pretty(dict, 0),
n.m.get_type().src_type.sugar(dict).pretty(dict, 0),
n.m.get_type().dst_type.sugar(dict).pretty(dict, 0),
n.get_type().src_type.pretty(dict, 0),
n.get_type().dst_type.pretty(dict, 0),
);
print_subst(&n.σ, dict)
print_subst(&n.get_subst(), dict)
}
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug, PartialEq, Eq)]
struct DummyMorphism(MorphismType);
impl Morphism for DummyMorphism {
fn get_type(&self) -> MorphismType {
self.0.clone().normalize()
}
fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> {
Some(DummyMorphism(MorphismType {
src_type: TypeTerm::App(vec![
seq_type.clone(),
self.0.src_type.clone()
]),
dst_type: TypeTerm::App(vec![
seq_type.clone(),
self.0.dst_type.clone()
])
}))
self.0.clone()
}
}
fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
let mut dict = BimapTypeDict::new();
let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] );
let mut base = MorphismBase::<DummyMorphism>::new();
dict.add_varname("Radix".into());
dict.add_varname("SrcRadix".into());
@ -68,32 +57,37 @@ fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
dst_type: dict.parse("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap()
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict)
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap(),
dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict)
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap()
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap().sugar(&mut dict)
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap()
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~_2^64~machine.UInt64>").unwrap().sugar(&mut dict)
})
);
base.add_morphism(
DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~_2^64~machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~_2^64~machine.UInt64>").unwrap()
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~_2^64~machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~_2^64~machine.UInt64>").unwrap().sugar(&mut dict)
})
);
@ -105,22 +99,24 @@ fn test_morphism_path1() {
let (mut dict, mut base) = morphism_test_setup();
let path = ShortestPathProblem::new(&base, MorphismType {
src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
dst_type: dict.parse("<Digit 10> ~ _2^64 ~ machine.UInt64").unwrap(),
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit 10> ~ Char").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit 10> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict),
}).solve();
assert_eq!(
path,
Some(
vec![
MorphismInstance {
MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
halo: TypeTerm::unit(),
m: DummyMorphism(MorphismType {
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
dst_type: dict.parse("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap()
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict)
}),
}
]
@ -133,23 +129,29 @@ fn test_morphism_path2() {
let (mut dict, mut base) = morphism_test_setup();
let path = ShortestPathProblem::new(&base, MorphismType {
src_type: dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dst_type: dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>").unwrap(),
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
}).solve();
assert_eq!(
path,
Some(
vec![
MorphismInstance {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
halo: dict.parse(" ~ <PosInt 10 BigEndian>").expect(""),
m: DummyMorphism(MorphismType {
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
dst_type: dict.parse("<Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap()
}),
MorphismInstance::MapSeq {
ψ: dict.parse_desugared(" ~ <PosInt 10 BigEndian>").expect("").sugar(&mut dict),
seq_repr: None,
item_morph: Box::new(MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict)
}),
})
}
]
));
@ -161,8 +163,9 @@ fn test_morphism_path3() {
let (mut dict, mut base) = morphism_test_setup();
let path = ShortestPathProblem::new(&base, MorphismType {
src_type: dict.parse(" ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dst_type: dict.parse(" ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ _2^64 ~ machine.UInt64>").unwrap(),
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
}).solve();
if let Some(path) = path.as_ref() {
@ -173,26 +176,32 @@ fn test_morphism_path3() {
path,
Some(
vec![
MorphismInstance {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
halo: dict.parse(" ~ <PosInt 10 LittleEndian>").expect(""),
m: DummyMorphism(MorphismType {
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
dst_type: dict.parse("<Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap()
}),
MorphismInstance::MapSeq {
ψ: dict.parse_desugared(" ~ <PosInt 10 LittleEndian>").expect("").sugar(&mut dict),
seq_repr: None,
item_morph: Box::new(MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict)
}),
})
},
MorphismInstance {
MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
].into_iter().collect(),
halo: TypeTerm::unit(),
m: DummyMorphism(MorphismType {
src_type: dict.parse(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ _2^64 ~ machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ _2^64 ~ machine.UInt64>").unwrap()
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
}),
}
]
@ -206,8 +215,9 @@ fn test_morphism_path4() {
let (mut dict, mut base) = morphism_test_setup();
let path = ShortestPathProblem::new(&base, MorphismType {
src_type: dict.parse(" ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dst_type: dict.parse(" ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().sugar(&mut dict)
}).solve();
if let Some(path) = path.as_ref() {
@ -218,40 +228,51 @@ fn test_morphism_path4() {
path,
Some(
vec![
MorphismInstance {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
halo: dict.parse(" ~ <PosInt 10 LittleEndian>").expect(""),
m: DummyMorphism(MorphismType {
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
dst_type: dict.parse("<Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap()
}),
MorphismInstance::MapSeq {
ψ: dict.parse_desugared(" ~ <PosInt 10 LittleEndian>").expect("").sugar(&mut dict),
seq_repr: None,
item_morph: Box::new(MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict)
}),
})
},
MorphismInstance {
MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
].into_iter().collect(),
halo: TypeTerm::unit(),
m: DummyMorphism(MorphismType {
src_type: dict.parse(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ _2^64 ~ machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ _2^64 ~ machine.UInt64>").unwrap()
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
}),
},
MorphismInstance {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
].into_iter().collect(),
halo: dict.parse(" ~ <PosInt 16 LittleEndian>").expect(""),
m: DummyMorphism(MorphismType {
src_type: dict.parse("<Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap(),
dst_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap()
}),
MorphismInstance::MapSeq {
ψ: dict.parse_desugared(" ~ <PosInt 16 LittleEndian>").expect("").sugar(&mut dict),
seq_repr: None,
item_morph: Box::new(MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
].into_iter().collect(),
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict)
}),
})
},
]
));
}
@ -264,8 +285,9 @@ fn test_morphism_path_posint() {
let (mut dict, mut base) = morphism_test_setup();
let path = ShortestPathProblem::new(&base, MorphismType {
src_type: dict.parse(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
dst_type: dict.parse(" ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap(),
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().sugar(&mut dict),
}).solve();
if let Some(path) = path.as_ref() {
@ -276,57 +298,73 @@ fn test_morphism_path_posint() {
path,
Some(
vec![
MorphismInstance {
MorphismInstance::MapSeq {
ψ: dict.parse_desugared(" ~ <PosInt 10 BigEndian>").expect("").sugar(&mut dict),
seq_repr: None,
item_morph: Box::new(MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict)
}),
})
},
MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
halo: dict.parse(" ~ <PosInt 10 BigEndian>").unwrap(),
m: DummyMorphism(MorphismType {
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
dst_type: dict.parse("<Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap()
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType{
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
}),
},
MorphismInstance {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
].into_iter().collect(),
halo: TypeTerm::unit(),
m: DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap()
}),
},
MorphismInstance {
MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
].into_iter().collect(),
halo: TypeTerm::unit(),
m: DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ _2^64 ~ machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ _2^64 ~ machine.UInt64>").unwrap()
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType{
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
}),
},
MorphismInstance {
MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
].into_iter().collect(),
halo: TypeTerm::unit(),
m: DummyMorphism(MorphismType{
src_type: dict.parse(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap(),
dst_type: dict.parse(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap(),
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType{
bounds: Vec::new(),
src_type: dict.parse_desugared(" ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared(" ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
}),
},
MorphismInstance {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16))
].into_iter().collect(),
halo: dict.parse(" ~ <PosInt 16 BigEndian>").unwrap(),
m: DummyMorphism(MorphismType{
src_type: dict.parse("<Seq <Digit Radix> ~ _2^64 ~ machine.UInt64>").unwrap(),
dst_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap()
MorphismInstance::MapSeq {
ψ: dict.parse_desugared(" ~ <PosInt 16 BigEndian>").expect("").sugar(&mut dict),
seq_repr: None,
item_morph: Box::new(MorphismInstance::Primitive {
σ: vec![
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
].into_iter().collect(),
ψ: TypeTerm::unit(),
morph: DummyMorphism(MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Digit Radix> ~ _2^64 ~ machine.UInt64").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict)
}),
})
}
},
]
)
);
@ -376,6 +414,40 @@ fn test_morphism_path_posint() {
*/
}
#[test]
fn morphism_test_seq_repr() {
let mut dict = BimapTypeDict::new();
let mut base = MorphismBase::<DummyMorphism>::new();
base.add_morphism(
DummyMorphism(MorphismType{
bounds: Vec::new(),
src_type: dict.parse_desugared("<Seq~<ValueTerminated 0> native.UInt8>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Seq~<LengthPrefix native.UInt64> native.UInt8>").unwrap().sugar(&mut dict)
})
);
assert_eq!(
base.get_morphism_instance(&MorphismType {
bounds: Vec::new(),
src_type: dict.parse_desugared("<Seq~<ValueTerminated 0> Char~Ascii~native.UInt8>").expect("parse").sugar(&mut dict),
dst_type: dict.parse_desugared("<Seq~<LengthPrefix native.UInt64> Char~Ascii~native.UInt8>").expect("parse").sugar(&mut dict)
}),
Some(
MorphismInstance::Primitive {
ψ: dict.parse_desugared("<Seq Char~Ascii>").expect("").sugar(&mut dict),
σ: HashMap::new(),
morph: DummyMorphism(MorphismType{
bounds: Vec::new(),
src_type: dict.parse_desugared("<Seq~<ValueTerminated 0> native.UInt8>").unwrap().sugar(&mut dict),
dst_type: dict.parse_desugared("<Seq~<LengthPrefix native.UInt64> native.UInt8>").unwrap().sugar(&mut dict)
})
}
)
);
}
/*
use std::collections::HashMap;
#[test]
@ -469,3 +541,4 @@ fn test_morphism_path_listedit()
])
);
}
*/
@ -1,6 +1,6 @@
use {
crate::{term::*, dict::*, parser::*}
crate::{desugared_term::*, dict::*, parser::*, TypeTerm}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@ -42,14 +42,14 @@ fn test_parser_char() {
fn test_parser_app() {
assert_eq!(
BimapTypeDict::new().parse("<A B>"),
Ok(TypeTerm::App(vec![
Ok(TypeTerm::Spec(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::TypeID(TypeID::Fun(1)),
]))
);
assert_eq!(
BimapTypeDict::new().parse("<A B C>"),
Ok(TypeTerm::App(vec![
Ok(TypeTerm::Spec(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::TypeID(TypeID::Fun(1)),
TypeTerm::TypeID(TypeID::Fun(2)),
@ -97,7 +97,7 @@ fn test_parser_ladder_outside() {
assert_eq!(
BimapTypeDict::new().parse("<A B>~C"),
Ok(TypeTerm::Ladder(vec![
TypeTerm::App(vec![
TypeTerm::Spec(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::TypeID(TypeID::Fun(1)),
]),
@ -110,7 +110,7 @@ fn test_parser_ladder_outside() {
fn test_parser_ladder_inside() {
assert_eq!(
BimapTypeDict::new().parse("<A B~C>"),
Ok(TypeTerm::App(vec![
Ok(TypeTerm::Spec(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::Ladder(vec![
TypeTerm::TypeID(TypeID::Fun(1)),
@ -124,11 +124,11 @@ fn test_parser_ladder_inside() {
fn test_parser_ladder_between() {
assert_eq!(
BimapTypeDict::new().parse("<A B~<C D>>"),
Ok(TypeTerm::App(vec![
Ok(TypeTerm::Spec(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::Ladder(vec![
TypeTerm::TypeID(TypeID::Fun(1)),
TypeTerm::App(vec![
TypeTerm::Spec(vec![
TypeTerm::TypeID(TypeID::Fun(2)),
TypeTerm::TypeID(TypeID::Fun(3)),
])
@ -141,7 +141,7 @@ fn test_parser_ladder_between() {
#[test]
fn test_parser_ladder_large() {
assert_eq!(
BimapTypeDict::new().parse(
BimapTypeDict::new().parse_desugared(
"<Seq Date
~<TimeSince UnixEpoch>
~<Duration Seconds>
@ -154,50 +154,50 @@ fn test_parser_ladder_large() {
~<Seq Byte>"),
Ok(
TypeTerm::Ladder(vec![
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::Ladder(vec![
TypeTerm::TypeID(TypeID::Fun(1)),
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(2)),
TypeTerm::TypeID(TypeID::Fun(3))
DesugaredTypeTerm::Ladder(vec![
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
DesugaredTypeTerm::Ladder(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(1)),
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(2)),
DesugaredTypeTerm::TypeID(TypeID::Fun(3))
]),
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(4)),
TypeTerm::TypeID(TypeID::Fun(5))
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(4)),
DesugaredTypeTerm::TypeID(TypeID::Fun(5))
]),
TypeTerm::TypeID(TypeID::Fun(6)),
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(7)),
TypeTerm::Num(10),
TypeTerm::TypeID(TypeID::Fun(8))
DesugaredTypeTerm::TypeID(TypeID::Fun(6)),
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(7)),
DesugaredTypeTerm::Num(10),
DesugaredTypeTerm::TypeID(TypeID::Fun(8))
]),
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::Ladder(vec![
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(9)),
TypeTerm::Num(10)
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
DesugaredTypeTerm::Ladder(vec![
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(9)),
DesugaredTypeTerm::Num(10)
]),
TypeTerm::TypeID(TypeID::Fun(10))
DesugaredTypeTerm::TypeID(TypeID::Fun(10))
])
])
])
]),
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(11)),
TypeTerm::TypeID(TypeID::Fun(10)),
TypeTerm::Char(':')
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(11)),
DesugaredTypeTerm::TypeID(TypeID::Fun(10)),
DesugaredTypeTerm::Char(':')
]),
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::TypeID(TypeID::Fun(10))
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
DesugaredTypeTerm::TypeID(TypeID::Fun(10))
]),
TypeTerm::TypeID(TypeID::Fun(12)),
TypeTerm::App(vec![
TypeTerm::TypeID(TypeID::Fun(0)),
TypeTerm::TypeID(TypeID::Fun(13))
DesugaredTypeTerm::TypeID(TypeID::Fun(12)),
DesugaredTypeTerm::App(vec![
DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
DesugaredTypeTerm::TypeID(TypeID::Fun(13))
])
])
)
@ -1,58 +1,117 @@
use crate::{dict::BimapTypeDict, parser::*};
#[test]
fn test_param_normalize() {
fn test_normalize_id() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("A~B~C").expect("parse error"),
dict.parse("A~B~C").expect("parse error").param_normalize(),
dict.parse("A~B~C").expect("parse error").normalize(),
);
assert_eq!(
dict.parse("<A B>~C").expect("parse error"),
dict.parse("<A B>~C").expect("parse error").param_normalize(),
dict.parse("<A B>~C").expect("parse error").normalize(),
);
}
#[test]
fn test_normalize_spec() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("<A B~C>").expect("parse error"),
dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
dict.parse("<A B>~<A C>").expect("parse error").normalize(),
);
assert_eq!(
dict.parse("<A~Y B>").expect("parse error"),
dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
dict.parse("<A~Y B>~<Y B>").expect("parse error").normalize(),
);
assert_eq!(
dict.parse("<A B~C D~E>").expect("parse error"),
dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").param_normalize(),
dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").normalize(),
);
assert_eq!(
dict.parse("<A~X B~C D~E>").expect("parse error"),
dict.parse("<A B D>~<A B~C E>~<X C E>").expect("parse errror").param_normalize(),
);
assert_eq!(
dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
);
assert_eq!(
dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~x86.UInt8>").expect("parse error").param_normalize(),
dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
);
assert_eq!(
dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> x86.UInt8>").expect("parse error").param_normalize(),
dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
);
assert_eq!(
dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
dict.parse("<A <B C> F H H>
~<A <B D> F H H>
~<A~Y <B E> F H H>").expect("parse errror")
.param_normalize(),
dict.parse("<A~X B D>~<A~X B~C E>~<X C E>").expect("parse errror").normalize(),
);
}
#[test]
fn test_normalize_seq() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("<Seq Char~Ascii>").expect("parse error"),
dict.parse("<Seq Char>~<Seq Ascii>").expect("parse errror").normalize(),
);
eprintln!("---------------");
assert_eq!(
dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").normalize(),
);
eprintln!("---------------");
assert_eq!(
dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~native.UInt8>").expect("parse error"),
dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~native.UInt8>").expect("parse error").normalize(),
);
eprintln!("---------------");
assert_eq!(
dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~native.UInt8>").expect("parse error"),
dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> native.UInt8>").expect("parse error").normalize(),
);
}
#[test]
fn test_normalize_complex_spec() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
dict.parse("<A~Y <B C> F H H>
~<A~Y <B D> F H H>
~<Y <B E> F H H>").expect("parse errror")
.normalize(),
);
}
#[test]
fn test_normalize_struct() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("< Struct~Aligned
< a TimePoint~<TimeSince UnixEpoch>~Seconds~native.UInt64 >
< b Angle ~ Degrees ~ ~ native.Float32 >
>
").expect("parse error"),
dict.parse("
< Struct <a TimePoint> <b Angle> >
~ < Struct <a <TimeSince UnixEpoch>~Seconds> <b Angle~Degrees~> >
~ < Struct~Aligned <a native.UInt64> <b native.Float32> >
").expect("parse errror")
.normalize(),
);
}
#[test]
fn test_normalize_enum() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("< Enum
< a TimePoint~<TimeSince UnixEpoch>~Seconds~native.UInt64 >
< b Angle ~ Degrees ~ ~ native.Float32 >
>
").expect("parse error"),
dict.parse("
< Enum <a TimePoint> <b Angle> >
~ < Enum <a <TimeSince UnixEpoch>~Seconds> <b Angle~Degrees~> >
~ < Enum <a native.UInt64> <b native.Float32> >
").expect("parse errror")
.normalize(),
);
}
@ -1,6 +1,6 @@
use {
crate::{dict::*, term::*, parser::*, unparser::*, substitution::*},
crate::{dict::*, desugared_term::*, parser::*, unparser::*, substitution::*},
std::iter::FromIterator,
};
@ -15,16 +15,16 @@ fn test_subst() {
// T -->
σ.insert
(dict.add_varname(String::from("T")),
dict.parse("").unwrap());
dict.parse_desugared("").unwrap().sugar(&mut dict));
// U --> <Seq Char>
σ.insert
(dict.add_varname(String::from("U")),
dict.parse("<Seq Char>").unwrap());
dict.parse_desugared("<Seq Char>").unwrap().sugar(&mut dict));
assert_eq!(
dict.parse("<Seq T~U>").unwrap().apply_subst(&σ).clone(),
dict.parse("<Seq ~<Seq Char>>").unwrap()
dict.parse_desugared("<Seq T~U>").unwrap().sugar(&mut dict).apply_subst(&σ).clone(),
dict.parse_desugared("<Seq ~<Seq Char>>").unwrap().sugar(&mut dict)
);
}
@ -1,96 +0,0 @@
use crate::{dict::BimapTypeDict, parser::*, unparser::*};
#[test]
fn test_semantic_subtype() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("A~B~C").expect("parse error")
.is_semantic_subtype_of(
&dict.parse("A~B~C").expect("parse errror")
),
Some((0, dict.parse("A~B~C").expect("parse errror")))
);
assert_eq!(
dict.parse("A~B1~C1").expect("parse error")
.is_semantic_subtype_of(
&dict.parse("A~B2~C2").expect("parse errror")
),
Some((0, dict.parse("A~B1~C1").expect("parse errror")))
);
assert_eq!(
dict.parse("A~B~C1").expect("parse error")
.is_semantic_subtype_of(
&dict.parse("B~C2").expect("parse errror")
),
Some((1, dict.parse("B~C1").expect("parse errror")))
);
}
#[test]
fn test_syntactic_subtype() {
let mut dict = BimapTypeDict::new();
assert_eq!(
dict.parse("A~B~C").expect("parse error")
.is_syntactic_subtype_of(
&dict.parse("A~B~C").expect("parse errror")
),
Ok(0)
);
assert_eq!(
dict.parse("A~B~C").expect("parse error")
.is_syntactic_subtype_of(
&dict.parse("B~C").expect("parse errror")
),
Ok(1)
);
assert_eq!(
dict.parse("A~B~C~D~E").expect("parse error")
.is_syntactic_subtype_of(
&dict.parse("C~D").expect("parse errror")
),
Ok(2)
);
assert_eq!(
dict.parse("A~B~C~D~E").expect("parse error")
.is_syntactic_subtype_of(
&dict.parse("C~G").expect("parse errror")
),
Err((2,3))
);
assert_eq!(
dict.parse("A~B~C~D~E").expect("parse error")
.is_syntactic_subtype_of(
&dict.parse("G~F~K").expect("parse errror")
),
Err((0,0))
);
assert_eq!(
dict.parse("<Duration Seconds>~").expect("parse error")
.is_syntactic_subtype_of(
&dict.parse("").expect("parse errror")
),
Ok(1)
);
assert_eq!(
dict.parse("
<Duration Seconds>
~
~<PosInt 10 BigEndian>
~< Seq <Digit 10> ~ Char >"
).expect("parse error")
.is_syntactic_subtype_of(
&dict.parse("<Seq Char>").expect("parse errror")
),
Ok(4)
);
}
@ -1,529 +0,0 @@
use {
crate::{dict::*, term::*}, std::{collections::HashMap}
};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct UnificationError {
pub addr: Vec<usize>,
pub t1: TypeTerm,
pub t2: TypeTerm
}
#[derive(Clone, Debug)]
pub struct UnificationPair {
addr: Vec<usize>,
halo: TypeTerm,
lhs: TypeTerm,
rhs: TypeTerm,
}
#[derive(Debug)]
pub struct UnificationProblem {
σ: HashMap<TypeID, TypeTerm>,
upper_bounds: HashMap< u64, TypeTerm >,
lower_bounds: HashMap< u64, TypeTerm >,
equal_pairs: Vec<UnificationPair>,
subtype_pairs: Vec<UnificationPair>,
trait_pairs: Vec<UnificationPair>
}
impl UnificationProblem {
pub fn new(
equal_pairs: Vec<(TypeTerm, TypeTerm)>,
subtype_pairs: Vec<(TypeTerm, TypeTerm)>,
trait_pairs: Vec<(TypeTerm, TypeTerm)>
) -> Self {
UnificationProblem {
σ: HashMap::new(),
equal_pairs: equal_pairs.into_iter().map(|(lhs,rhs)|
UnificationPair{
lhs,rhs,
halo: TypeTerm::unit(),
addr: Vec::new()
}).collect(),
subtype_pairs: subtype_pairs.into_iter().map(|(lhs,rhs)|
UnificationPair{
lhs,rhs,
halo: TypeTerm::unit(),
addr: Vec::new()
}).collect(),
trait_pairs: trait_pairs.into_iter().map(|(lhs,rhs)|
UnificationPair{
lhs,rhs,
halo: TypeTerm::unit(),
addr: Vec::new()
}).collect(),
upper_bounds: HashMap::new(),
lower_bounds: HashMap::new(),
}
}
pub fn new_eq(eqs: Vec<(TypeTerm, TypeTerm)>) -> Self {
UnificationProblem::new( eqs, Vec::new(), Vec::new() )
}
pub fn new_sub(subs: Vec<(TypeTerm, TypeTerm)>) -> Self {
UnificationProblem::new( Vec::new(), subs, Vec::new() )
}
/// update all values in substitution
pub fn reapply_subst(&mut self) {
let mut new_σ = HashMap::new();
for (v, tt) in self.σ.iter() {
let mut tt = tt.clone().normalize();
tt.apply_subst(&self.σ);
tt = tt.normalize();
//eprintln!("update σ : {:?} --> {:?}", v, tt);
new_σ.insert(v.clone(), tt);
}
self.σ = new_σ;
}
pub fn eval_equation(&mut self, unification_pair: UnificationPair) -> Result<(), UnificationError> {
match (&unification_pair.lhs, &unification_pair.rhs) {
(TypeTerm::TypeID(TypeID::Var(varid)), t) |
(t, TypeTerm::TypeID(TypeID::Var(varid))) => {
if ! t.contains_var( *varid ) {
self.σ.insert(TypeID::Var(*varid), t.clone());
self.reapply_subst();
Ok(())
} else if t == &TypeTerm::TypeID(TypeID::Var(*varid)) {
Ok(())
} else {
Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(*varid)), t2: t.clone() })
}
}
(TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
if a1 == a2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
if n1 == n2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
if c1 == c2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) |
(TypeTerm::App(a1), TypeTerm::App(a2)) => {
if a1.len() == a2.len() {
for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
self.equal_pairs.push(
UnificationPair {
lhs: x,
rhs: y,
halo: TypeTerm::unit(),
addr: new_addr
});
}
Ok(())
} else {
Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
_ => Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
pub fn add_lower_subtype_bound(&mut self, v: u64, new_lower_bound: TypeTerm) -> Result<(),()> {
if new_lower_bound == TypeTerm::TypeID(TypeID::Var(v)) {
return Ok(());
}
if new_lower_bound.contains_var(v) {
// loop
return Err(());
}
if let Some(lower_bound) = self.lower_bounds.get(&v).cloned() {
// eprintln!("var already exists. check max. type");
if let Ok(halo) = self.eval_subtype(
UnificationPair {
lhs: lower_bound.clone(),
rhs: new_lower_bound.clone(),
halo: TypeTerm::unit(),
addr: vec![]
}
) {
// eprintln!("found more general lower bound");
// eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
// generalize variable type to supertype
self.lower_bounds.insert(v, new_lower_bound);
Ok(())
} else if let Ok(halo) = self.eval_subtype(
UnificationPair{
lhs: new_lower_bound,
rhs: lower_bound,
halo: TypeTerm::unit(),
addr: vec![]
}
) {
// eprintln!("OK, is already larger type");
Ok(())
} else {
// eprintln!("violated subtype restriction");
Err(())
}
} else {
// eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
self.lower_bounds.insert(v, new_lower_bound);
Ok(())
}
}
pub fn add_upper_subtype_bound(&mut self, v: u64, new_upper_bound: TypeTerm) -> Result<(),()> {
if new_upper_bound == TypeTerm::TypeID(TypeID::Var(v)) {
return Ok(());
}
if new_upper_bound.contains_var(v) {
// loop
return Err(());
}
if let Some(upper_bound) = self.upper_bounds.get(&v).cloned() {
if let Ok(_halo) = self.eval_subtype(
UnificationPair {
lhs: new_upper_bound.clone(),
rhs: upper_bound,
halo: TypeTerm::unit(),
addr: vec![]
}
) {
// found a lower upper bound
self.upper_bounds.insert(v, new_upper_bound);
Ok(())
} else {
Err(())
}
} else {
self.upper_bounds.insert(v, new_upper_bound);
Ok(())
}
}
pub fn eval_subtype(&mut self, unification_pair: UnificationPair) -> Result<
// ok: halo type
TypeTerm,
// error
UnificationError
> {
// eprintln!("eval_subtype {:?} <=? {:?}", unification_pair.lhs, unification_pair.rhs);
match (unification_pair.lhs.clone(), unification_pair.rhs.clone()) {
/*
Variables
*/
(t, TypeTerm::TypeID(TypeID::Var(v))) => {
//eprintln!("t <= variable");
if self.add_lower_subtype_bound(v, t.clone()).is_ok() {
Ok(TypeTerm::unit())
} else {
Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
}
}
(TypeTerm::TypeID(TypeID::Var(v)), t) => {
//eprintln!("variable <= t");
if self.add_upper_subtype_bound(v, t.clone()).is_ok() {
Ok(TypeTerm::unit())
} else {
Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
}
}
/*
Atoms
*/
(TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
if a1 == a2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs}) }
}
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
if n1 == n2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
if c1 == c2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
}
/*
Ladders
*/
(TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
let mut l1_iter = a1.into_iter().enumerate().rev();
let mut l2_iter = a2.into_iter().rev();
let mut halo_ladder = Vec::new();
while let Some(rhs) = l2_iter.next() {
//eprintln!("take rhs = {:?}", rhs);
if let Some((i, lhs)) = l1_iter.next() {
//eprintln!("take lhs ({}) = {:?}", i, lhs);
let mut addr = unification_pair.addr.clone();
addr.push(i);
//eprintln!("addr = {:?}", addr);
match (lhs.clone(), rhs.clone()) {
(t, TypeTerm::TypeID(TypeID::Var(v))) => {
if self.add_upper_subtype_bound(v,t.clone()).is_ok() {
let mut new_upper_bound_ladder = vec![ t ];
if let Some(next_rhs) = l2_iter.next() {
// TODO
} else {
// take everything
while let Some((i,t)) = l1_iter.next() {
new_upper_bound_ladder.push(t);
}
}
new_upper_bound_ladder.reverse();
if self.add_upper_subtype_bound(v, TypeTerm::Ladder(new_upper_bound_ladder)).is_ok() {
// ok
} else {
return Err(UnificationError {
addr,
t1: lhs,
t2: rhs
});
}
} else {
return Err(UnificationError {
addr,
t1: lhs,
t2: rhs
});
}
}
(lhs, rhs) => {
if let Ok(ψ) = self.eval_subtype(
UnificationPair {
lhs: lhs.clone(), rhs: rhs.clone(),
addr:addr.clone(), halo: TypeTerm::unit()
}
) {
// ok.
//eprintln!("rungs are subtypes. continue");
halo_ladder.push(ψ);
} else {
return Err(UnificationError {
addr,
t1: lhs,
t2: rhs
});
}
}
}
} else {
// not a subtype,
return Err(UnificationError {
addr: vec![],
t1: unification_pair.lhs,
t2: unification_pair.rhs
});
}
}
//eprintln!("left ladder fully consumed");
for (i,t) in l1_iter {
halo_ladder.push(t);
}
halo_ladder.reverse();
Ok(TypeTerm::Ladder(halo_ladder).strip().param_normalize())
},
(t, TypeTerm::Ladder(a1)) => {
Err(UnificationError{ addr: unification_pair.addr, t1: t, t2: TypeTerm::Ladder(a1) })
}
(TypeTerm::Ladder(mut a1), t) => {
let mut new_addr = unification_pair.addr.clone();
new_addr.push( a1.len() -1 );
if let Ok(halo) = self.eval_subtype(
UnificationPair {
lhs: a1.pop().unwrap(),
rhs: t.clone(),
halo: TypeTerm::unit(),
addr: new_addr
}
) {
a1.push(halo);
if a1.len() == 1 {
Ok(a1.pop().unwrap())
} else {
Ok(TypeTerm::Ladder(a1))
}
} else {
Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::Ladder(a1), t2: t })
}
}
/*
Application
*/
(TypeTerm::App(a1), TypeTerm::App(a2)) => {
if a1.len() == a2.len() {
let mut halo_args = Vec::new();
let mut n_halos_required = 0;
for (i, (mut x, mut y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
let mut new_addr = unification_pair.addr.clone();
new_addr.push(i);
x = x.strip();
// eprintln!("before strip: {:?}", y);
y = y.strip();
// eprintln!("after strip: {:?}", y);
// eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);
match self.eval_subtype(
UnificationPair {
lhs: x.clone(),
rhs: y.clone(),
halo: TypeTerm::unit(),
addr: new_addr,
}
) {
Ok(halo) => {
if halo == TypeTerm::unit() {
let mut y = y.clone();
y.apply_subst(&self.σ);
y = y.strip();
let mut top = y.get_lnf_vec().first().unwrap().clone();
halo_args.push(top.clone());
//eprintln!("add top {:?}", top);
} else {
//eprintln!("add halo {:?}", halo);
if n_halos_required > 0 {
let x = &mut halo_args[n_halos_required-1];
if let TypeTerm::Ladder(argrs) = x {
let mut a = a2[n_halos_required-1].clone();
a.apply_subst(&self.σ);
a = a.get_lnf_vec().first().unwrap().clone();
argrs.push(a);
} else {
*x = TypeTerm::Ladder(vec![
x.clone(),
a2[n_halos_required-1].clone().get_lnf_vec().first().unwrap().clone()
]);
x.apply_subst(&self.σ);
}
}
halo_args.push(halo);
n_halos_required += 1;
}
},
Err(err) => { return Err(err); }
}
}
if n_halos_required > 0 {
//eprintln!("halo args : {:?}", halo_args);
Ok(TypeTerm::App(halo_args))
} else {
Ok(TypeTerm::unit())
}
} else {
Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
_ => Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
}
}
pub fn solve(mut self) -> Result<(Vec<TypeTerm>, HashMap<TypeID, TypeTerm>), UnificationError> {
// solve equations
while let Some( mut equal_pair ) = self.equal_pairs.pop() {
equal_pair.lhs.apply_subst(&self.σ);
equal_pair.rhs.apply_subst(&self.σ);
self.eval_equation(equal_pair)?;
}
// solve subtypes
// eprintln!("------ SOLVE SUBTYPES ---- ");
for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
subtype_pair.lhs.apply_subst(&self.σ);
subtype_pair.rhs.apply_subst(&self.σ);
let _halo = self.eval_subtype( subtype_pair.clone() )?.strip();
}
// add variables from subtype bounds
for (var_id, t) in self.upper_bounds.iter() {
// eprintln!("VAR {} upper bound {:?}", var_id, t);
self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
}
for (var_id, t) in self.lower_bounds.iter() {
// eprintln!("VAR {} lower bound {:?}", var_id, t);
self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
}
self.reapply_subst();
// eprintln!("------ MAKE HALOS -----");
let mut halo_types = Vec::new();
for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
subtype_pair.lhs = subtype_pair.lhs.apply_subst(&self.σ).clone().strip();
subtype_pair.rhs = subtype_pair.rhs.apply_subst(&self.σ).clone().strip();
let halo = self.eval_subtype( subtype_pair.clone() )?.strip();
halo_types.push(halo);
}
// solve traits
while let Some( trait_pair ) = self.trait_pairs.pop() {
unimplemented!();
}
Ok((halo_types, self.σ))
}
}
pub fn unify(
t1: &TypeTerm,
t2: &TypeTerm
) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
let unification = UnificationProblem::new_eq(vec![ (t1.clone(), t2.clone()) ]);
Ok(unification.solve()?.1)
}
pub fn subtype_unify(
t1: &TypeTerm,
t2: &TypeTerm
) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
let unification = UnificationProblem::new_sub(vec![ (t1.clone(), t2.clone()) ]);
unification.solve().map( |(halos,σ)| ( halos.first().cloned().unwrap_or(TypeTerm::unit()), σ) )
}
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
@ -1,24 +1,24 @@
use crate::{dict::*, term::*};
use crate::{dict::*, desugared_term::*};
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
pub trait UnparseLadderType {
fn unparse(&self, t: &TypeTerm) -> String;
fn unparse(&self, t: &DesugaredTypeTerm) -> String;
}
impl<T: TypeDict> UnparseLadderType for T {
fn unparse(&self, t: &TypeTerm) -> String {
fn unparse(&self, t: &DesugaredTypeTerm) -> String {
match t {
TypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
TypeTerm::Num(n) => format!("{}", n),
TypeTerm::Char(c) => match c {
DesugaredTypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
DesugaredTypeTerm::Num(n) => format!("{}", n),
DesugaredTypeTerm::Char(c) => match c {
'\0' => "'\\0'".into(),
'\n' => "'\\n'".into(),
'\t' => "'\\t'".into(),
'\'' => "'\\''".into(),
c => format!("'{}'", c)
},
TypeTerm::Ladder(rungs) => {
DesugaredTypeTerm::Ladder(rungs) => {
let mut s = String::new();
let mut first = true;
for r in rungs.iter() {
@ -30,7 +30,7 @@ impl<T: TypeDict> UnparseLadderType for T {
}
s
}
TypeTerm::App(args) => {
DesugaredTypeTerm::App(args) => {
let mut s = String::new();
s.push('<');
let mut first = true;