Compare commits: dev...topic-morp (33 commits)
Commits (abbreviated SHA1):
abc8e1fc1e, 7f844f2a2d, e2f7a363d7, 93634c2656, 5af2b59278, 1e29653c61, 3e1fee2ee4, 821d233094,
c73818c620, 41e425cfd9, d5ef37a433, b41cef5df3, 10306eebb6, 9d996be6fa, c9b4d1c908, 8212174cc4,
314f2141d8, 6ee2e447d4, cb9b62b00c, 0cd80c16d5, e8615aa42b, bf5b2447be, 8cb3b1b8ec, 936ea9a649,
17ba81c817, a1dd6a5121, c8b2ffae95, 10947ce791, 7b25b4472d, c75cf43900, c8b3e4f4a2, 646dfcfc2a,
fd1515c0fb
55 changed files with 6192 additions and 2807 deletions

Changed files:
Cargo.toml, README.md, unification.rs
src/: constraint_system/, context/, dict.rs, lib.rs, lnf.rs, morphism.rs, morphism/, morphism_base.rs, morphism_path.rs, pnf.rs, substitution.rs, subtype.rs, sugar.rs, term/
src/test/: constraint_system/, context/, curry.rs, heuristic.rs, lnf.rs, mod.rs, morphism.rs, morphism_graph.rs, parser.rs, pnf.rs, substitution.rs, subtype.rs, unification.rs
Cargo.toml
@@ -7,5 +7,8 @@ version = "0.1.0"

[dependencies]
tiny-ansi = { version = "0.1.0", optional = true }

[dev-dependencies]
pretty_assertions = "*"

[features]
pretty = ["dep:tiny-ansi"]
README.md (21 changed lines)

@@ -1,6 +1,6 @@
# lib-laddertypes

Rust Implementation of Ladder-Types (parsing, unification, rewriting, etc)
<hr/>

## Ladder Types

@@ -121,16 +121,15 @@ fn main() {

- [x] (Un-)Parsing
- [x] (De-)Currying
- [x] Unification
- [x] Ladder-Normal-Form
- [x] Parameter-Normal-Form
- [ ] (De-)Sugaring
  - [ ] Seq
  - [ ] Enum
  - [ ] Struct
  - [ ] References
  - [ ] Function
- [x] Normal-Form
- [x] Pretty Debug
- [ ] Sugared Parser
- [ ] Universal Types, Function Types
- [x] Constraint Solving (Unification, Subtype Satisfaction)
- [x] Morphism Graph
  - [x] Complex Morphisms
  - [x] Find Shortest Path
  - [x] Approximate Steiner Tree

## License

[GPLv3](COPYING)
src/constraint_system/eval_eq.rs (new file, +145)
@@ -0,0 +1,145 @@
use {
|
||||
crate::{
|
||||
term::TypeTerm, EnumVariant, StructMember,
|
||||
ConstraintSystem, ConstraintPair, ConstraintError
|
||||
}
|
||||
};
|
||||
|
||||
impl ConstraintSystem {
|
||||
|
||||
|
||||
pub fn eval_equation(&mut self, unification_pair: ConstraintPair) -> Result<(), ConstraintError> {
|
||||
match (&unification_pair.lhs, &unification_pair.rhs) {
|
||||
(TypeTerm::Var(varid), t) |
|
||||
(t, TypeTerm::Var(varid)) => {
|
||||
if ! t.contains_var( *varid ) {
|
||||
self.σ.insert(*varid, t.clone());
|
||||
self.reapply_subst();
|
||||
Ok(())
|
||||
} else if t == &TypeTerm::Var(*varid) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::Var(*varid), t2: t.clone() })
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Id(a1), TypeTerm::Id(a2)) => {
|
||||
if a1 == a2 { Ok(()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
|
||||
}
|
||||
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
|
||||
if n1 == n2 { Ok(()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
|
||||
}
|
||||
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
|
||||
if c1 == c2 { Ok(()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
|
||||
}
|
||||
|
||||
(TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) |
|
||||
(TypeTerm::Spec(a1), TypeTerm::Spec(a2)) => {
|
||||
if a1.len() == a2.len() {
|
||||
for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
self.equal_pairs.push(
|
||||
ConstraintPair {
|
||||
lhs: x,
|
||||
rhs: y,
|
||||
addr: new_addr
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Seq{ seq_repr: lhs_seq_repr, items: lhs_items },
|
||||
TypeTerm::Seq { seq_repr: rhs_seq_repr, items: rhs_items })
|
||||
=> {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(0);
|
||||
|
||||
if let Some(rhs_seq_repr) = rhs_seq_repr.as_ref() {
|
||||
if let Some(lhs_seq_repr) = lhs_seq_repr.as_ref() {
|
||||
let _seq_repr_ψ = self.eval_equation(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_seq_repr.clone(), rhs: *rhs_seq_repr.clone() })?;
|
||||
} else {
|
||||
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if lhs_items.len() == rhs_items.len() {
|
||||
for (i, (lhs_ty, rhs_ty)) in lhs_items.into_iter().zip(rhs_items.into_iter()).enumerate()
|
||||
{
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
self.equal_pairs.push( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } );
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
(TypeTerm::Struct{ struct_repr: lhs_struct_repr, members: lhs_members },
|
||||
TypeTerm::Struct{ struct_repr: rhs_struct_repr, members: rhs_members })
|
||||
=> {
|
||||
let new_addr = unification_pair.addr.clone();
|
||||
if let Some(rhs_struct_repr) = rhs_struct_repr.as_ref() {
|
||||
if let Some(lhs_struct_repr) = lhs_struct_repr.as_ref() {
|
||||
let _struct_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_struct_repr.clone(), rhs: *rhs_struct_repr.clone() })?;
|
||||
} else {
|
||||
return Err(ConstraintError{ addr: new_addr.clone(), t1: unification_pair.lhs, t2: unification_pair.rhs });
|
||||
}
|
||||
}
|
||||
|
||||
if lhs_members.len() == rhs_members.len() {
|
||||
for (i,
|
||||
(StructMember{ symbol: lhs_symbol, ty: lhs_ty},
|
||||
StructMember{ symbol: rhs_symbol, ty: rhs_ty })
|
||||
) in
|
||||
lhs_members.into_iter().zip(rhs_members.into_iter()).enumerate()
|
||||
{
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
self.equal_pairs.push( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } );
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
(TypeTerm::Enum{ enum_repr: lhs_enum_repr, variants: lhs_variants },
|
||||
TypeTerm::Enum{ enum_repr: rhs_enum_repr, variants: rhs_variants })
|
||||
=> {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
if let Some(rhs_enum_repr) = rhs_enum_repr.as_ref() {
|
||||
if let Some(lhs_enum_repr) = lhs_enum_repr.as_ref() {
|
||||
let _enum_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_enum_repr.clone(), rhs: *rhs_enum_repr.clone() })?;
|
||||
} else {
|
||||
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if lhs_variants.len() == rhs_variants.len() {
|
||||
for (i,
|
||||
(EnumVariant{ symbol: lhs_symbol, ty: lhs_ty },
|
||||
EnumVariant{ symbol: rhs_symbol, ty: rhs_ty })
|
||||
) in
|
||||
lhs_variants.into_iter().zip(rhs_variants.into_iter()).enumerate()
|
||||
{
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
self.equal_pairs.push( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } );
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
|
||||
_ => Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
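The equation solver above only binds a variable when the occurs check (`t.contains_var(varid)`) passes; nested terms are decomposed into per-argument `ConstraintPair`s whose `addr` records the position of a mismatch. A minimal sketch of how this surfaces through the `unify` helper defined later in `constraint_system/mod.rs`; it assumes the crate is consumed as `laddertypes` and that `TypeTerm` variants such as `Var`, `Num`, `Char` and `Spec` can be constructed directly:

```rust
use laddertypes::{unify, TypeTerm};

fn main() {
    // Sketch only: assumes `laddertypes` re-exports these items at the crate root.

    // X ≐ <10 'x'> : the variable does not occur on the right, so it gets bound.
    let x = TypeTerm::Var(0);
    let rhs = TypeTerm::Spec(vec![ TypeTerm::Num(10), TypeTerm::Char('x') ]);

    let σ = unify(&x, &rhs).expect("unification should succeed");
    assert!(σ.contains_key(&0)); // 0 ↦ <10 'x'>

    // X ≐ <X 10> fails the occurs check and is reported as a ConstraintError.
    let cyclic = TypeTerm::Spec(vec![ TypeTerm::Var(0), TypeTerm::Num(10) ]);
    assert!(unify(&x, &cyclic).is_err());
}
```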
src/constraint_system/eval_parallel.rs (new file, empty)

src/constraint_system/eval_sub.rs (new file, +514)
@@ -0,0 +1,514 @@
use {
|
||||
crate::{
|
||||
term::TypeTerm, EnumVariant, StructMember,
|
||||
ConstraintSystem, ConstraintPair, ConstraintError
|
||||
}
|
||||
};
|
||||
|
||||
impl ConstraintSystem {
|
||||
pub fn add_lower_subtype_bound(&mut self, v: u64, new_lower_bound: TypeTerm) -> Result<(),()> {
|
||||
|
||||
if new_lower_bound == TypeTerm::Var(v) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if new_lower_bound.contains_var(v) {
|
||||
// loop
|
||||
return Err(());
|
||||
}
|
||||
|
||||
if let Some(lower_bound) = self.lower_bounds.get(&v).cloned() {
|
||||
//eprintln!("var already exists. check max. type");
|
||||
if let Ok(halo) = self.eval_subtype(
|
||||
ConstraintPair {
|
||||
lhs: lower_bound.clone(),
|
||||
rhs: new_lower_bound.clone(),
|
||||
addr: vec![]
|
||||
}
|
||||
) {
|
||||
//eprintln!("found more general lower bound");
|
||||
//eprintln!("set var {}'s lowerbound to {:?}", v, new_lower_bound.clone());
|
||||
// generalize variable type to supertype
|
||||
self.lower_bounds.insert(v, new_lower_bound);
|
||||
Ok(())
|
||||
} else if let Ok(halo) = self.eval_subtype(
|
||||
ConstraintPair{
|
||||
lhs: new_lower_bound,
|
||||
rhs: lower_bound,
|
||||
addr: vec![]
|
||||
}
|
||||
) {
|
||||
//eprintln!("OK, is already larger type");
|
||||
Ok(())
|
||||
} else {
|
||||
//eprintln!("violated subtype restriction");
|
||||
Err(())
|
||||
}
|
||||
} else {
|
||||
//eprintln!("set var {}'s lowerbound to {:?}", v, new_lower_bound.clone());
|
||||
self.lower_bounds.insert(v, new_lower_bound);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn add_upper_subtype_bound(&mut self, v: u64, new_upper_bound: TypeTerm) -> Result<(),()> {
|
||||
if new_upper_bound == TypeTerm::Var(v) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if new_upper_bound.contains_var(v) {
|
||||
// loop
|
||||
return Err(());
|
||||
}
|
||||
|
||||
if let Some(upper_bound) = self.upper_bounds.get(&v).cloned() {
|
||||
if let Ok(_halo) = self.eval_subtype(
|
||||
ConstraintPair {
|
||||
lhs: new_upper_bound.clone(),
|
||||
rhs: upper_bound,
|
||||
addr: vec![]
|
||||
}
|
||||
) {
|
||||
//println!("found a lower upper bound: {} <= {:?}", v, new_upper_bound);
|
||||
// found a lower upper bound
|
||||
self.upper_bounds.insert(v, new_upper_bound);
|
||||
Ok(())
|
||||
} else {
|
||||
//println!("new upper bound violates subtype restriction");
|
||||
Err(())
|
||||
}
|
||||
} else {
|
||||
//eprintln!("set upper bound: {} <= {:?}", v, new_upper_bound);
|
||||
self.upper_bounds.insert(v, new_upper_bound);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
pub fn eval_subtype(&mut self, unification_pair: ConstraintPair) -> Result<
|
||||
// ok: halo type
|
||||
TypeTerm,
|
||||
// error
|
||||
ConstraintError
|
||||
> {
|
||||
match (unification_pair.lhs.clone().strip(), unification_pair.rhs.clone().strip()) {
|
||||
|
||||
/*
|
||||
Variables
|
||||
*/
|
||||
|
||||
(TypeTerm::Var(v), t) => {
|
||||
//eprintln!("variable <= t");
|
||||
if self.add_upper_subtype_bound(v, t.clone()).is_ok() {
|
||||
Ok(TypeTerm::unit())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::Var(v), t2: t })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
(t, TypeTerm::Var(v)) => {
|
||||
//eprintln!("t <= variable");
|
||||
if self.add_lower_subtype_bound(v, t.clone()).is_ok() {
|
||||
Ok(TypeTerm::unit())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::Var(v), t2: t })
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Atoms
|
||||
*/
|
||||
(TypeTerm::Id(a1), TypeTerm::Id(a2)) => {
|
||||
if a1 == a2 { Ok(TypeTerm::unit()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs}) }
|
||||
}
|
||||
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
|
||||
if n1 == n2 { Ok(TypeTerm::unit()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
|
||||
}
|
||||
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
|
||||
if c1 == c2 { Ok(TypeTerm::unit()) } else { Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
|
||||
}
|
||||
|
||||
/*
|
||||
Complex Types
|
||||
*/
|
||||
|
||||
(TypeTerm::Seq{ seq_repr: lhs_seq_repr, items: lhs_items },
|
||||
TypeTerm::Seq { seq_repr: rhs_seq_repr, items: rhs_items })
|
||||
=> {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(0);
|
||||
|
||||
if let Some(rhs_seq_repr) = rhs_seq_repr.as_ref() {
|
||||
//eprintln!("subtype unify: rhs has seq-repr: {:?}", rhs_seq_repr);
|
||||
if let Some(lhs_seq_repr) = lhs_seq_repr.as_ref() {
|
||||
//eprintln!("check if it maches lhs seq-repr: {:?}", lhs_seq_repr);
|
||||
let _seq_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_seq_repr.clone(), rhs: *rhs_seq_repr.clone() })?;
|
||||
//eprintln!("..yes!");
|
||||
} else {
|
||||
//eprintln!("...but lhs has none.");
|
||||
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
|
||||
}
|
||||
}
|
||||
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(1);
|
||||
if lhs_items.len() == rhs_items.len() && lhs_items.len() > 0 {
|
||||
match self.eval_subtype( ConstraintPair { addr: new_addr.clone(), lhs: lhs_items[0].clone(), rhs: rhs_items[0].clone() } ) {
|
||||
Ok(ψ) => Ok(TypeTerm::Seq {
|
||||
seq_repr: None, // <<- todo
|
||||
items: vec![ψ]
|
||||
}.strip()),
|
||||
Err(e) => Err(ConstraintError{
|
||||
addr: new_addr,
|
||||
t1: e.t1,
|
||||
t2: e.t2,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
(TypeTerm::Struct{ struct_repr: lhs_struct_repr, members: lhs_members },
|
||||
TypeTerm::Struct{ struct_repr: rhs_struct_repr, members: rhs_members })
|
||||
=> {
|
||||
let new_addr = unification_pair.addr.clone();
|
||||
if let Some(rhs_struct_repr) = rhs_struct_repr.as_ref() {
|
||||
if let Some(lhs_struct_repr) = lhs_struct_repr.as_ref() {
|
||||
let _struct_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_struct_repr.clone(), rhs: *rhs_struct_repr.clone() })?;
|
||||
} else {
|
||||
return Err(ConstraintError{ addr: new_addr.clone(), t1: unification_pair.lhs, t2: unification_pair.rhs });
|
||||
}
|
||||
}
|
||||
|
||||
if lhs_members.len() == rhs_members.len() {
|
||||
let mut halo_members = Vec::new();
|
||||
for (i,
|
||||
(StructMember{ symbol: lhs_symbol, ty: lhs_ty},
|
||||
StructMember{ symbol: rhs_symbol, ty: rhs_ty })
|
||||
) in
|
||||
lhs_members.into_iter().zip(rhs_members.into_iter()).enumerate()
|
||||
{
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
|
||||
let ψ = self.eval_subtype( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } )?;
|
||||
halo_members.push(StructMember { symbol: lhs_symbol, ty: ψ });
|
||||
}
|
||||
Ok(TypeTerm::Struct {
|
||||
struct_repr: None,
|
||||
members: halo_members
|
||||
})
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
(TypeTerm::Enum{ enum_repr: lhs_enum_repr, variants: lhs_variants },
|
||||
TypeTerm::Enum{ enum_repr: rhs_enum_repr, variants: rhs_variants })
|
||||
=> {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
if let Some(rhs_enum_repr) = rhs_enum_repr.as_ref() {
|
||||
if let Some(lhs_enum_repr) = lhs_enum_repr.as_ref() {
|
||||
let _enum_repr_ψ = self.eval_subtype(ConstraintPair { addr: new_addr.clone(), lhs: *lhs_enum_repr.clone(), rhs: *rhs_enum_repr.clone() })?;
|
||||
} else {
|
||||
return Err(ConstraintError{ addr: new_addr, t1: unification_pair.lhs, t2: unification_pair.rhs });
|
||||
}
|
||||
}
|
||||
|
||||
if lhs_variants.len() == rhs_variants.len() {
|
||||
let mut halo_variants = Vec::new();
|
||||
for (i,
|
||||
(EnumVariant{ symbol: lhs_symbol, ty: lhs_ty },
|
||||
EnumVariant{ symbol: rhs_symbol, ty: rhs_ty })
|
||||
) in
|
||||
lhs_variants.into_iter().zip(rhs_variants.into_iter()).enumerate()
|
||||
{
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
let ψ = self.eval_subtype( ConstraintPair { addr: new_addr, lhs: lhs_ty.clone(), rhs: rhs_ty.clone() } )?;
|
||||
halo_variants.push(EnumVariant { symbol: lhs_symbol, ty: ψ });
|
||||
}
|
||||
Ok(TypeTerm::Enum {
|
||||
enum_repr: None,
|
||||
variants: halo_variants
|
||||
})
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/*
|
||||
Ladders
|
||||
*/
|
||||
|
||||
(TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
|
||||
|
||||
let mut l1_iter = a1.into_iter().enumerate().rev();
|
||||
let mut l2_iter = a2.into_iter().rev();
|
||||
|
||||
let mut halo_ladder = Vec::new();
|
||||
|
||||
while let Some(rhs) = l2_iter.next() {
|
||||
//eprintln!("take rhs = {:?}", rhs);
|
||||
if let Some((i, lhs)) = l1_iter.next() {
|
||||
//eprintln!("take lhs ({}) = {:?}", i, lhs);
|
||||
let mut addr = unification_pair.addr.clone();
|
||||
addr.push(i);
|
||||
//eprintln!("addr = {:?}", addr);
|
||||
|
||||
match (lhs.clone(), rhs.clone()) {
|
||||
(t, TypeTerm::Var(v)) => {
|
||||
|
||||
if self.add_upper_subtype_bound(v,t.clone()).is_ok() {
|
||||
let mut new_upper_bound_ladder = vec![ t ];
|
||||
|
||||
if let Some(next_rhs) = l2_iter.next() {
|
||||
|
||||
} else {
|
||||
// ladder of rhs is empty
|
||||
// take everything
|
||||
|
||||
while let Some((i,t)) = l1_iter.next() {
|
||||
new_upper_bound_ladder.push(t);
|
||||
}
|
||||
}
|
||||
|
||||
new_upper_bound_ladder.reverse();
|
||||
if self.add_upper_subtype_bound(v, TypeTerm::Ladder(new_upper_bound_ladder)).is_ok() {
|
||||
// ok
|
||||
} else {
|
||||
return Err(ConstraintError {
|
||||
addr,
|
||||
t1: lhs,
|
||||
t2: rhs
|
||||
});
|
||||
}
|
||||
} else {
|
||||
return Err(ConstraintError {
|
||||
addr,
|
||||
t1: lhs,
|
||||
t2: rhs
|
||||
});
|
||||
}
|
||||
}
|
||||
(lhs, rhs) => {
|
||||
if let Ok(ψ) = self.eval_subtype(
|
||||
ConstraintPair {
|
||||
lhs: lhs.clone(),
|
||||
rhs: rhs.clone(),
|
||||
addr:addr.clone(),
|
||||
}
|
||||
) {
|
||||
// ok.
|
||||
//eprintln!("rungs are subtypes. continue");
|
||||
halo_ladder.push(ψ);
|
||||
} else {
|
||||
return Err(ConstraintError {
|
||||
addr,
|
||||
t1: lhs,
|
||||
t2: rhs
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// not a subtype,
|
||||
return Err(ConstraintError {
|
||||
addr: vec![],
|
||||
t1: unification_pair.lhs,
|
||||
t2: unification_pair.rhs
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
//eprintln!("left ladder fully consumed");
|
||||
|
||||
for (i,t) in l1_iter {
|
||||
//!("push {} to halo ladder", t.pretty(self.dict,0));
|
||||
halo_ladder.push(t);
|
||||
}
|
||||
halo_ladder.reverse();
|
||||
Ok(TypeTerm::Ladder(halo_ladder).strip())//.param_normalize())
|
||||
},
|
||||
|
||||
(TypeTerm::Seq { seq_repr, items }, TypeTerm::Spec(mut args)) => {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
|
||||
if args.len() > 1 {
|
||||
if let Some(seq_repr) = seq_repr {
|
||||
let repr_rhs = args.remove(0);
|
||||
let reprψinterface = repr_rhs.get_interface_type();
|
||||
let mut reprψ = self.eval_subtype(ConstraintPair{
|
||||
addr: new_addr.clone(),
|
||||
lhs: seq_repr.as_ref().clone(),
|
||||
rhs: repr_rhs
|
||||
})?;
|
||||
|
||||
let mut itemsψ = Vec::new();
|
||||
let mut n_halos_required = 0;
|
||||
let mut next_arg_with_common_rung = 0;
|
||||
|
||||
for (i,(item, arg)) in items.iter().zip(args.iter()).enumerate() {
|
||||
let mut new_addr = new_addr.clone();
|
||||
new_addr.push(i);
|
||||
let ψ = self.eval_subtype(ConstraintPair {
|
||||
addr: new_addr,
|
||||
lhs: item.clone(),
|
||||
rhs: arg.clone()
|
||||
})?;
|
||||
|
||||
if ψ.is_empty() {
|
||||
itemsψ.push(item.get_interface_type());
|
||||
} else {
|
||||
while next_arg_with_common_rung < i {
|
||||
let x = &mut itemsψ[next_arg_with_common_rung];
|
||||
*x = TypeTerm::Ladder(vec![
|
||||
x.clone(),
|
||||
args[next_arg_with_common_rung].get_interface_type()
|
||||
]).normalize();
|
||||
x.apply_subst(&self.σ);
|
||||
next_arg_with_common_rung += 1;
|
||||
}
|
||||
|
||||
n_halos_required += 1;
|
||||
itemsψ.push(ψ);
|
||||
}
|
||||
}
|
||||
eprintln!("itemsψ = {:?}", itemsψ);
|
||||
|
||||
if n_halos_required > 0 {
|
||||
reprψ = TypeTerm::Ladder(vec![
|
||||
reprψ,
|
||||
reprψinterface
|
||||
]);
|
||||
}
|
||||
|
||||
Ok(
|
||||
TypeTerm::Seq {
|
||||
seq_repr: if reprψ.is_empty() { None }
|
||||
else { Some(Box::new(reprψ)) },
|
||||
items: itemsψ
|
||||
}
|
||||
)
|
||||
} else {
|
||||
Err(ConstraintError {
|
||||
addr: new_addr,
|
||||
t1: unification_pair.lhs,
|
||||
t2: unification_pair.rhs
|
||||
})
|
||||
}
|
||||
} else {
|
||||
Err(ConstraintError {
|
||||
addr: unification_pair.addr,
|
||||
t1: unification_pair.lhs,
|
||||
t2: unification_pair.rhs
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
(t, TypeTerm::Ladder(a1)) => {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: t, t2: TypeTerm::Ladder(a1) })
|
||||
}
|
||||
|
||||
(TypeTerm::Ladder(mut a1), t) => {
|
||||
if a1.len() > 0 {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push( a1.len() - 1 );
|
||||
if let Ok(halo) = self.eval_subtype(
|
||||
ConstraintPair {
|
||||
lhs: a1.pop().unwrap(),
|
||||
rhs: t.clone(),
|
||||
addr: new_addr
|
||||
}
|
||||
) {
|
||||
a1.push(halo);
|
||||
if a1.len() == 1 {
|
||||
Ok(a1.pop().unwrap())
|
||||
} else {
|
||||
Ok(TypeTerm::Ladder(a1).normalize())
|
||||
}
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: TypeTerm::Ladder(a1), t2: t })
|
||||
}
|
||||
} else if t == TypeTerm::unit() {
|
||||
Ok(TypeTerm::unit())
|
||||
} else {
|
||||
Err(ConstraintError { addr: unification_pair.addr, t1: TypeTerm::unit(), t2: t })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
Application
|
||||
*/
|
||||
|
||||
(TypeTerm::Spec(a1), TypeTerm::Spec(a2)) => {
|
||||
if a1.len() == a2.len() {
|
||||
let mut halo_args = Vec::new();
|
||||
let mut n_halos_required = 0;
|
||||
let mut next_arg_with_common_rung = 0;
|
||||
|
||||
for (i, (mut x, mut y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
|
||||
let mut new_addr = unification_pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
|
||||
x = x.strip();
|
||||
|
||||
// eprintln!("before strip: {:?}", y);
|
||||
y = y.strip();
|
||||
// eprintln!("after strip: {:?}", y);
|
||||
// eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);
|
||||
|
||||
match self.eval_subtype(
|
||||
ConstraintPair {
|
||||
lhs: x.clone(),
|
||||
rhs: y.clone(),
|
||||
addr: new_addr,
|
||||
}
|
||||
) {
|
||||
Ok(halo) => {
|
||||
if halo.is_empty() {
|
||||
let mut y = y.clone();
|
||||
y.apply_subst(&self.σ);
|
||||
y = y.strip();
|
||||
|
||||
let top = y.get_interface_type();
|
||||
halo_args.push(top);
|
||||
} else {
|
||||
//println!("add halo {}", halo.pretty(self.dict, 0));
|
||||
while next_arg_with_common_rung < i {
|
||||
let x = &mut halo_args[next_arg_with_common_rung];
|
||||
*x = TypeTerm::Ladder(vec![
|
||||
x.clone(),
|
||||
a2[next_arg_with_common_rung].get_interface_type()
|
||||
]).normalize();
|
||||
x.apply_subst(&self.σ);
|
||||
next_arg_with_common_rung += 1;
|
||||
}
|
||||
|
||||
halo_args.push(halo);
|
||||
n_halos_required += 1;
|
||||
}
|
||||
},
|
||||
Err(err) => { return Err(err); }
|
||||
}
|
||||
}
|
||||
|
||||
if n_halos_required > 0 {
|
||||
Ok(TypeTerm::Spec(halo_args))
|
||||
} else {
|
||||
Ok(TypeTerm::unit())
|
||||
}
|
||||
} else {
|
||||
Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
|
||||
_ => Err(ConstraintError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
|
||||
}
|
||||
}
|
||||
}
|
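A sketch of the ladder rule in `eval_subtype`: rungs are matched from the bottom of both ladders, and whatever remains of the left-hand side is returned as the "halo" type. This assumes the public `subtype_unify` wrapper from `constraint_system/mod.rs` and direct `TypeTerm` construction, as in the example above:

```rust
use laddertypes::{subtype_unify, TypeTerm};

fn main() {
    // Sketch only: assumes `laddertypes` re-exports these items at the crate root.

    // <10 'x'> ~ Char  is accepted as a subtype of its upper rung Char;
    // the lower rung comes back as the halo.
    let lhs = TypeTerm::Ladder(vec![
        TypeTerm::Spec(vec![ TypeTerm::Num(10), TypeTerm::Char('x') ]),
        TypeTerm::Char('c'),
    ]);
    let rhs = TypeTerm::Char('c');

    let (halo, σ) = subtype_unify(&lhs, &rhs).expect("subtype relation should hold");
    println!("halo = {:?}, σ = {:?}", halo, σ);

    // The reverse direction is rejected: a single rung is not below a longer ladder.
    assert!(subtype_unify(&rhs, &lhs).is_err());
}
```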
src/constraint_system/eval_trait.rs (new file, +179)
@@ -0,0 +1,179 @@
use {
|
||||
crate::{
|
||||
term::TypeTerm, ConstraintError, ConstraintPair, ConstraintSystem
|
||||
}, std::ops::Deref
|
||||
};
|
||||
|
||||
impl ConstraintSystem {
|
||||
/* check if lhs has the trait given by rhs
|
||||
*/
|
||||
pub fn eval_trait(
|
||||
&mut self,
|
||||
pair: ConstraintPair
|
||||
) -> Result<(), ConstraintError> {
|
||||
match (pair.lhs.clone().strip(), pair.rhs.clone().strip()) {
|
||||
|
||||
// check if at least some rung of the ladder has trait τ
|
||||
(TypeTerm::Ladder(r1), τ) => {
|
||||
for (i, rung) in r1.iter().enumerate() {
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(i);
|
||||
|
||||
if self.eval_trait(ConstraintPair{
|
||||
addr,
|
||||
lhs: rung.clone(),
|
||||
rhs: τ.clone()
|
||||
}).is_ok() {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
Err(ConstraintError { addr: pair.addr, t1: pair.lhs, t2: pair.rhs })
|
||||
}
|
||||
|
||||
// otherwise check for equality
|
||||
|
||||
(TypeTerm::Var(varid), t) |
|
||||
(t, TypeTerm::Var(varid)) => {
|
||||
if ! t.contains_var( varid ) {
|
||||
self.σ.insert(varid, t.clone());
|
||||
self.reapply_subst();
|
||||
Ok(())
|
||||
} else if t == TypeTerm::Var(varid) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError{
|
||||
addr: pair.addr,
|
||||
t1: TypeTerm::Var(varid),
|
||||
t2: t.clone()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Id(a1), TypeTerm::Id(a2)) => {
|
||||
if a1 == a2 { Ok(()) } else { Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs }) }
|
||||
}
|
||||
(TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
|
||||
if n1 == n2 { Ok(()) } else { Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs }) }
|
||||
}
|
||||
(TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
|
||||
if c1 == c2 { Ok(()) } else { Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs }) }
|
||||
}
|
||||
|
||||
(TypeTerm::Spec(a1), TypeTerm::Spec(a2)) => {
|
||||
if a1.len() == a2.len() {
|
||||
for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
|
||||
let mut new_addr = pair.addr.clone();
|
||||
new_addr.push(i);
|
||||
self.trait_pairs.push(
|
||||
ConstraintPair {
|
||||
lhs: x,
|
||||
rhs: y,
|
||||
addr: new_addr
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
Err(ConstraintError{ addr: pair.addr, t1: pair.lhs, t2: pair.rhs })
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Seq { seq_repr: lhs_sr, items: lhs_it },
|
||||
TypeTerm::Seq { seq_repr: rhs_sr, items: rhs_it })
|
||||
=> {
|
||||
{
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(0);
|
||||
if let Some(rhs_sr) = rhs_sr {
|
||||
if let Some(lhs_sr) = lhs_sr {
|
||||
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_sr.deref().clone(), rhs: rhs_sr.deref().clone() });
|
||||
} else {
|
||||
return Err(ConstraintError{ addr, t1: TypeTerm::unit(), t2: rhs_sr.deref().clone() });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (i, (lhs_member, rhs_member)) in lhs_it.into_iter().zip(rhs_it.into_iter()).enumerate() {
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(0);
|
||||
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_member, rhs: rhs_member });
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
(TypeTerm::Struct { struct_repr: lhs_sr, members: lhs_it },
|
||||
TypeTerm::Struct { struct_repr: rhs_sr, members: rhs_it })
|
||||
=> {
|
||||
{
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(0);
|
||||
if let Some(rhs_sr) = rhs_sr {
|
||||
if let Some(lhs_sr) = lhs_sr {
|
||||
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_sr.deref().clone(), rhs: rhs_sr.deref().clone() });
|
||||
} else {
|
||||
return Err(ConstraintError{ addr, t1: TypeTerm::unit(), t2: rhs_sr.deref().clone() });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (i, rhs_member) in rhs_it.into_iter().enumerate() {
|
||||
let mut found = false;
|
||||
for lhs_member in lhs_it.iter() {
|
||||
if lhs_member.symbol == rhs_member.symbol {
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(0);
|
||||
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_member.ty.clone(), rhs: rhs_member.ty.clone() });
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ! found {
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(i);
|
||||
return Err(ConstraintError { addr, t1: TypeTerm::unit(), t2: rhs_member.ty })
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
(TypeTerm::Enum { enum_repr: lhs_sr, variants: lhs_it },
|
||||
TypeTerm::Enum { enum_repr: rhs_sr, variants: rhs_it })
|
||||
=> {
|
||||
{
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(0);
|
||||
if let Some(rhs_sr) = rhs_sr {
|
||||
if let Some(lhs_sr) = lhs_sr {
|
||||
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_sr.deref().clone(), rhs: rhs_sr.deref().clone() });
|
||||
} else {
|
||||
return Err(ConstraintError{ addr, t1: TypeTerm::unit(), t2: rhs_sr.deref().clone() });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (i, rhs_member) in rhs_it.into_iter().enumerate() {
|
||||
let mut found = false;
|
||||
for lhs_member in lhs_it.iter() {
|
||||
if lhs_member.symbol == rhs_member.symbol {
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(0);
|
||||
self.trait_pairs.push(ConstraintPair { addr, lhs: lhs_member.ty.clone(), rhs: rhs_member.ty.clone() });
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ! found {
|
||||
let mut addr = pair.addr.clone();
|
||||
addr.push(i);
|
||||
return Err(ConstraintError { addr, t1: TypeTerm::unit(), t2: rhs_member.ty })
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(ConstraintError { addr: pair.addr, t1: pair.lhs, t2: pair.rhs })
|
||||
}
|
||||
}
|
||||
|
||||
}
|
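The trait rule accepts as soon as one rung of a ladder on the left matches the constraint on the right. A small sketch using `ConstraintSystem::new_trait` from `constraint_system/mod.rs` (crate name and direct `TypeTerm` construction assumed as above):

```rust
use laddertypes::{ConstraintSystem, ConstraintPair, TypeTerm};

fn main() {
    // Sketch only: assumes `laddertypes` re-exports these items at the crate root.

    // Does <10 'x'> ~ Char provide the rung Char ?  One matching rung is enough.
    let lhs = TypeTerm::Ladder(vec![
        TypeTerm::Spec(vec![ TypeTerm::Num(10), TypeTerm::Char('x') ]),
        TypeTerm::Char('c'),
    ]);
    let rhs = TypeTerm::Char('c');

    let cs = ConstraintSystem::new_trait(vec![ ConstraintPair::new(lhs, rhs) ]);
    assert!(cs.solve().is_ok());
}
```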
src/constraint_system/mod.rs (new file, +164)
@@ -0,0 +1,164 @@
use {
|
||||
crate::{
|
||||
term::TypeTerm, Substitution,
|
||||
context::*,
|
||||
},
|
||||
std::{collections::HashMap}
|
||||
};
|
||||
|
||||
pub mod eval_eq;
|
||||
pub mod eval_sub;
|
||||
pub mod eval_trait;
|
||||
pub mod eval_parallel;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, Eq, PartialEq, Debug)]
|
||||
pub struct ConstraintError {
|
||||
pub addr: Vec<usize>,
|
||||
pub t1: TypeTerm,
|
||||
pub t2: TypeTerm
|
||||
}
|
||||
|
||||
#[derive(Clone, Eq, PartialEq, Debug)]
|
||||
pub struct ConstraintPair {
|
||||
pub addr: Vec<usize>,
|
||||
pub lhs: TypeTerm,
|
||||
pub rhs: TypeTerm,
|
||||
}
|
||||
|
||||
impl ConstraintPair {
|
||||
pub fn new(lhs: TypeTerm, rhs: TypeTerm) -> Self {
|
||||
ConstraintPair {
|
||||
lhs,rhs, addr:vec![]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ConstraintSystem {
|
||||
σ: HashMapSubst,
|
||||
upper_bounds: HashMap< u64, TypeTerm >,
|
||||
lower_bounds: HashMap< u64, TypeTerm >,
|
||||
|
||||
equal_pairs: Vec<ConstraintPair>,
|
||||
subtype_pairs: Vec<ConstraintPair>,
|
||||
trait_pairs: Vec<ConstraintPair>,
|
||||
parallel_pairs: Vec<ConstraintPair>
|
||||
}
|
||||
|
||||
impl ConstraintSystem {
|
||||
pub fn new(
|
||||
equal_pairs: Vec<ConstraintPair>,
|
||||
subtype_pairs: Vec<ConstraintPair>,
|
||||
trait_pairs: Vec<ConstraintPair>,
|
||||
parallel_pairs: Vec<ConstraintPair>
|
||||
) -> Self {
|
||||
ConstraintSystem {
|
||||
σ: HashMapSubst::new(),
|
||||
|
||||
equal_pairs,
|
||||
subtype_pairs,
|
||||
trait_pairs,
|
||||
parallel_pairs,
|
||||
|
||||
upper_bounds: HashMap::new(),
|
||||
lower_bounds: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_eq(eqs: Vec<ConstraintPair>) -> Self {
|
||||
ConstraintSystem::new( eqs, Vec::new(), Vec::new(), Vec::new() )
|
||||
}
|
||||
|
||||
pub fn new_sub(subs: Vec<ConstraintPair>) -> Self {
|
||||
ConstraintSystem::new( Vec::new(), subs, Vec::new(), Vec::new() )
|
||||
}
|
||||
|
||||
pub fn new_trait(traits: Vec<ConstraintPair>) -> Self {
|
||||
ConstraintSystem::new( Vec::new(), Vec::new(), traits, Vec::new() )
|
||||
}
|
||||
|
||||
pub fn new_parallel(parallels: Vec<ConstraintPair>) -> Self {
|
||||
ConstraintSystem::new( Vec::new(), Vec::new(), Vec::new(), parallels )
|
||||
}
|
||||
|
||||
|
||||
/// update all values in substitution
|
||||
pub fn reapply_subst(&mut self) {
|
||||
self.σ.saturate();
|
||||
}
|
||||
|
||||
pub fn solve(mut self) -> Result<(Vec<TypeTerm>, HashMapSubst), ConstraintError> {
|
||||
// solve equations
|
||||
while let Some( mut equal_pair ) = self.equal_pairs.pop() {
|
||||
equal_pair.lhs.apply_subst(&self.σ);
|
||||
equal_pair.rhs.apply_subst(&self.σ);
|
||||
|
||||
self.eval_equation(equal_pair)?;
|
||||
}
|
||||
|
||||
// solve subtypes
|
||||
//eprintln!("------ SOLVE SUBTYPES ---- ");
|
||||
for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
|
||||
subtype_pair.lhs.apply_subst(&self.σ);
|
||||
subtype_pair.rhs.apply_subst(&self.σ);
|
||||
let _halo = self.eval_subtype( subtype_pair.clone() )?.strip();
|
||||
}
|
||||
|
||||
// add variables from subtype bounds
|
||||
for (var_id, t) in self.upper_bounds.iter() {
|
||||
self.σ.insert(*var_id, t.clone().strip());
|
||||
}
|
||||
|
||||
for (var_id, t) in self.lower_bounds.iter() {
|
||||
self.σ.insert(*var_id, t.clone().strip());
|
||||
}
|
||||
|
||||
self.reapply_subst();
|
||||
|
||||
//eprintln!("------ MAKE HALOS -----");
|
||||
let mut halo_types = Vec::new();
|
||||
for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
|
||||
subtype_pair.lhs = subtype_pair.lhs.apply_subst(&self.σ).clone();
|
||||
subtype_pair.rhs = subtype_pair.rhs.apply_subst(&self.σ).clone();
|
||||
|
||||
let halo = self.eval_subtype( subtype_pair.clone() )?.strip();
|
||||
halo_types.push(halo);
|
||||
}
|
||||
|
||||
// solve traits
|
||||
while let Some( mut trait_pair ) = self.trait_pairs.pop() {
|
||||
trait_pair.lhs.apply_subst(&self.σ);
|
||||
trait_pair.rhs.apply_subst(&self.σ);
|
||||
self.eval_trait(trait_pair)?;
|
||||
}
|
||||
|
||||
Ok((halo_types, self.σ))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unify(
|
||||
t1: &TypeTerm,
|
||||
t2: &TypeTerm
|
||||
) -> Result<HashMapSubst, ConstraintError> {
|
||||
let unification = ConstraintSystem::new_eq( vec![ ConstraintPair{ lhs: t1.clone(), rhs: t2.clone(), addr:vec![] } ]);
|
||||
Ok(unification.solve()?.1)
|
||||
}
|
||||
|
||||
pub fn subtype_unify(
|
||||
t1: &TypeTerm,
|
||||
t2: &TypeTerm
|
||||
) -> Result<(TypeTerm, HashMapSubst), ConstraintError> {
|
||||
let unification = ConstraintSystem::new_sub(vec![ ConstraintPair{ lhs: t1.clone(), rhs: t2.clone(), addr:vec![] } ]);
|
||||
unification.solve().map(|(halos, σ)| (halos.first().cloned().unwrap_or(TypeTerm::unit()), σ) )
|
||||
}
|
||||
|
||||
pub fn parallel_unify(
|
||||
t1: &TypeTerm,
|
||||
t2: &TypeTerm
|
||||
) -> Result<(TypeTerm, HashMapSubst), ConstraintError> {
|
||||
let unification = ConstraintSystem::new_parallel(vec![ ConstraintPair{ lhs: t1.clone(), rhs: t2.clone(), addr:vec![] } ]);
|
||||
unification.solve().map(|(halos, σ)| (halos.first().cloned().unwrap_or(TypeTerm::unit()), σ) )
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
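`ConstraintSystem::solve` first discharges the equations, then the subtype pairs (recording upper/lower bounds for variables and exporting them into σ), and finally the trait pairs; the `unify`, `subtype_unify` and `parallel_unify` helpers just wrap a single pair. A hedged sketch of solving one subtype constraint against a variable:

```rust
use laddertypes::{ConstraintSystem, ConstraintPair, TypeTerm};

fn main() {
    // Sketch only: assumes `laddertypes` re-exports these items at the crate root.

    // <10 'x'>  ≤  X : the left-hand side becomes a lower bound for X,
    // which solve() then exports into the substitution.
    let pair = ConstraintPair::new(
        TypeTerm::Spec(vec![ TypeTerm::Num(10), TypeTerm::Char('x') ]),
        TypeTerm::Var(0),
    );

    let (halos, σ) = ConstraintSystem::new_sub(vec![ pair ])
        .solve()
        .expect("constraint system should be satisfiable");

    assert_eq!(halos.len(), 1);  // one halo type per subtype pair
    assert!(σ.contains_key(&0)); // X was bound through its lower bound
}
```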
src/context/dict.rs (new file, +117)
@@ -0,0 +1,117 @@
use crate::{bimap::Bimap, TypeKind};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
|
||||
pub enum TypeID {
|
||||
Fun(u64),
|
||||
Var(u64)
|
||||
}
|
||||
|
||||
pub trait TypeDict : Send + Sync {
|
||||
fn add_typename(&mut self, tn: &str) -> u64;
|
||||
fn get_typeid(&self, tn: &str) -> Option<TypeID>;
|
||||
fn get_typename(&self, tid: u64) -> Option<String>;
|
||||
fn get_varname(&self, var_id: u64) -> Option<String>;
|
||||
fn get_varkind(&self, var_id: u64) -> Option<TypeKind>;
|
||||
|
||||
fn get_name(&self, id: TypeID) -> Option<String> {
|
||||
match id {
|
||||
TypeID::Fun(id) => self.get_typename(id),
|
||||
TypeID::Var(id) => self.get_varname(id)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_varid(&self, varname: &str) -> Option<u64> {
|
||||
match self.get_typeid(varname) {
|
||||
Some(TypeID::Var(id)) => Some(id),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
fn get_typeid_creat(&mut self, tn: &str) -> TypeID {
|
||||
if let Some(id) = self.get_typeid(tn) {
|
||||
id
|
||||
} else {
|
||||
TypeID::Fun(self.add_typename(tn))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BimapTypeDict {
|
||||
typenames: Bimap<String, TypeID>,
|
||||
type_lit_counter: u64,
|
||||
type_var_counter: u64,
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl BimapTypeDict {
|
||||
pub fn new() -> Self {
|
||||
BimapTypeDict {
|
||||
typenames: Bimap::new(),
|
||||
type_lit_counter: 0,
|
||||
type_var_counter: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, name: &str, id: TypeID) {
|
||||
self.typenames.insert(name.into(), id);
|
||||
}
|
||||
pub fn add_varname(&mut self, tn: &str) -> u64 {
|
||||
let tyid = self.type_var_counter;
|
||||
self.type_var_counter += 1;
|
||||
self.insert(tn.into(), TypeID::Var(tyid));
|
||||
tyid
|
||||
}
|
||||
}
|
||||
|
||||
impl TypeDict for BimapTypeDict {
|
||||
fn add_typename(&mut self, tn: &str) -> u64 {
|
||||
let tyid = self.type_lit_counter;
|
||||
self.type_lit_counter += 1;
|
||||
self.insert(tn.into(), TypeID::Fun(tyid));
|
||||
tyid
|
||||
}
|
||||
|
||||
fn get_typename(&self, id: u64) -> Option<String> {
|
||||
self.typenames.my.get(&TypeID::Fun(id)).cloned()
|
||||
}
|
||||
fn get_varname(&self, id: u64) -> Option<String> {
|
||||
self.typenames.my.get(&TypeID::Var(id)).cloned()
|
||||
}
|
||||
fn get_typeid(&self, tn: &str) -> Option<TypeID> {
|
||||
self.typenames.mλ.get(tn).cloned()
|
||||
}
|
||||
|
||||
fn get_varkind(&self, id: u64) -> Option<TypeKind> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
|
||||
|
||||
use std::sync::{Arc,RwLock};
|
||||
|
||||
impl<T: TypeDict> TypeDict for Arc<RwLock<T>> {
|
||||
fn add_typename(&mut self, tn: &str) -> u64 {
|
||||
self.write().unwrap().add_typename(tn)
|
||||
}
|
||||
fn get_typename(&self, id: u64)-> Option<String> {
|
||||
self.read().unwrap().get_typename(id)
|
||||
}
|
||||
fn get_varname(&self, id: u64)-> Option<String> {
|
||||
self.read().unwrap().get_varname(id)
|
||||
}
|
||||
fn get_typeid(&self, tn: &str) -> Option<TypeID> {
|
||||
self.read().unwrap().get_typeid(tn)
|
||||
}
|
||||
fn get_varkind(&self, id: u64) -> Option<TypeKind> {
|
||||
self.read().unwrap().get_varkind(id)
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
|
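The new `TypeDict` keeps separate counters for type literals (`TypeID::Fun`) and variables (`TypeID::Var`). A minimal sketch of the `BimapTypeDict` API shown above; it assumes these items are re-exported at the crate root, as the new `pub use context::*;` in `src/lib.rs` suggests:

```rust
use laddertypes::{BimapTypeDict, TypeDict, TypeID};

fn main() {
    // Sketch only: assumes `laddertypes` re-exports these items at the crate root.
    let mut dict = BimapTypeDict::new();

    // Type names and variable names are numbered independently, both starting at 0.
    let seq = dict.add_typename("Seq"); // first type literal  -> 0
    let x   = dict.add_varname("X");    // first type variable -> 0

    assert_eq!(dict.get_typeid("Seq"), Some(TypeID::Fun(seq)));
    assert_eq!(dict.get_typename(seq).as_deref(), Some("Seq"));
    assert_eq!(dict.get_varname(x).as_deref(), Some("X"));
}
```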
src/context/mod.rs (new file, +342)
@@ -0,0 +1,342 @@
use {
|
||||
crate::{
|
||||
TypeTerm
|
||||
},
|
||||
std::{sync::{Arc, RwLock}}
|
||||
};
|
||||
|
||||
pub mod bimap;
|
||||
pub mod dict;
|
||||
pub mod substitution;
|
||||
|
||||
pub use {
|
||||
bimap::*,
|
||||
dict::*,
|
||||
substitution::*
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum TypeKind {
|
||||
Type,
|
||||
Arrow( Box<TypeKind>, Box<TypeKind> ),
|
||||
ValueUInt
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ContextEntry {
|
||||
pub symbol: String,
|
||||
pub kind: TypeKind,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ContextPtr(pub Arc<RwLock<Context>>);
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct Context {
|
||||
ctxname: String,
|
||||
sub_count: u64,
|
||||
|
||||
parent: Option<ContextPtr>,
|
||||
names: Vec< String >,
|
||||
pub γ: Vec<ContextEntry>,
|
||||
pub σ: HashMapSubst,
|
||||
}
|
||||
|
||||
static count: RwLock<u64> = RwLock::new(0);
|
||||
|
||||
impl Context {
|
||||
pub fn new() -> ContextPtr {
|
||||
let mut c = count.write().unwrap();
|
||||
*c += 1;
|
||||
ContextPtr(Arc::new(RwLock::new(Context {
|
||||
ctxname: format!("Ctx {}", *c),
|
||||
sub_count: 0,
|
||||
|
||||
parent: None,
|
||||
names: Vec::new(),
|
||||
γ: Vec::new(),
|
||||
σ: HashMapSubst::new(),
|
||||
})))
|
||||
}
|
||||
|
||||
pub fn n_variables(&self) -> u64 {
|
||||
self.γ.len() as u64
|
||||
+ if let Some(p) = self.parent.as_ref() {
|
||||
p.0.read().unwrap().n_variables()
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextPtr {
|
||||
|
||||
pub fn pretty(&self) -> String {
|
||||
let locked_self = self.0.read().unwrap();
|
||||
let mut s = String::new();
|
||||
|
||||
s.push_str(&format!("({}) ∀{{", self.get_ctxname()));
|
||||
for entry in locked_self.γ.iter() {
|
||||
s.push_str(&format!("{} ↦ {:?};", entry.symbol, entry.kind));
|
||||
}
|
||||
s.push_str("}");
|
||||
|
||||
s.push_str(",σ={");
|
||||
for (v,t) in locked_self.σ.iter() {
|
||||
s.push_str(&format!("{}({}) ↦ {};",
|
||||
self.get_varname(*v).unwrap_or("??".into()),
|
||||
v,
|
||||
t.pretty(&mut self.clone(), 0)));
|
||||
}
|
||||
s.push_str("}");
|
||||
|
||||
if let Some(p) = locked_self.parent.as_ref() {
|
||||
s.push_str(".");
|
||||
s.push_str(&p.pretty());
|
||||
}
|
||||
|
||||
s
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for ContextPtr {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let locked_lhs = self.0.read().unwrap();
|
||||
let locked_rhs = other.0.read().unwrap();
|
||||
|
||||
match (locked_lhs.clone(),locked_rhs.clone()) {
|
||||
(Context { ctxname:_, sub_count: _, parent:p1, names:n1, σ:σ1, γ:γ1 },
|
||||
Context { ctxname:_, sub_count: _, parent:p2, names:n2, σ:σ2, γ:γ2 }) => {
|
||||
|
||||
if let (Some(p1),Some(p2)) = (&p1,&p2) {
|
||||
if p1 == p2 && σ1==σ2 {
|
||||
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else if p1.is_none() && p2.is_none() {
|
||||
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
( n1 == n2 ) && (γ1 == γ2)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextPtr {
|
||||
pub fn get_ctxname(&self) -> String {
|
||||
self.0.read().unwrap().ctxname.clone()
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeDict for ContextPtr {
|
||||
fn add_typename(&mut self, tn: &str) -> u64 {
|
||||
let mut locked_self = self.0.write().unwrap();
|
||||
if let Some(parent) = locked_self.parent.as_mut() {
|
||||
parent.add_typename(tn) + locked_self.names.len() as u64
|
||||
} else {
|
||||
let idx = locked_self.names.len();
|
||||
locked_self.names.push(tn.into());
|
||||
idx as u64
|
||||
}
|
||||
}
|
||||
|
||||
fn get_typeid(&self, tn: &str) -> Option<TypeID> {
|
||||
let locked_self = self.0.read().unwrap();
|
||||
|
||||
for (i,n) in locked_self.γ.iter().enumerate() {
|
||||
if n.symbol == tn {
|
||||
return Some(TypeID::Var(i as u64));
|
||||
}
|
||||
}
|
||||
|
||||
for (i,n) in locked_self.names.iter().enumerate() {
|
||||
if n == tn {
|
||||
return Some(TypeID::Fun(i as u64));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(parent) = locked_self.parent.as_ref() {
|
||||
match parent.get_typeid(tn) {
|
||||
Some(TypeID::Fun(i)) => Some(TypeID::Fun(i + locked_self.names.len() as u64)),
|
||||
Some(TypeID::Var(i)) => Some(TypeID::Var(i + locked_self.γ.len() as u64)),
|
||||
None => None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn get_typename(&self, tid: u64) -> Option<String> {
|
||||
let locked_self = self.0.read().unwrap();
|
||||
if (tid as usize) < locked_self.names.len() {
|
||||
Some(locked_self.names[tid as usize].clone())
|
||||
} else {
|
||||
if let Some(parent) = locked_self.parent.as_ref() {
|
||||
parent.get_typename(tid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_varname(&self, var_id: u64) -> Option<String> {
|
||||
let locked_self = self.0.read().unwrap();
|
||||
let l = locked_self.γ.len() as u64;
|
||||
if var_id < l {
|
||||
Some(locked_self.γ[var_id as usize].symbol.clone())
|
||||
} else {
|
||||
if let Some(parent) = locked_self.parent.as_ref() {
|
||||
parent.get_varname(var_id - l)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_varkind(&self, var: u64) -> Option<TypeKind> {
|
||||
let mut locked_self = self.0.read().unwrap();
|
||||
if var < locked_self.γ.len() as u64 {
|
||||
Some(locked_self.γ[var as usize].kind.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl Substitution for ContextPtr {
|
||||
fn saturate(&mut self) {
|
||||
let mut locked_self = self.0.read().unwrap();
|
||||
for ContextEntry{ symbol, kind } in locked_self.γ.iter() {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, var: u64) -> Result< crate::TypeTerm, SubstError > {
|
||||
let locked_self = self.0.read().unwrap();
|
||||
let l = locked_self.γ.len() as u64;
|
||||
if let Some(t) = locked_self.σ.get(&var) {
|
||||
return Ok(t.clone())
|
||||
} else {
|
||||
if var >= l {
|
||||
if let Some(parent) = locked_self.parent.clone() {
|
||||
parent.get(var - l)
|
||||
} else {
|
||||
Err(SubstError::InvalidVariable)
|
||||
}
|
||||
} else {
|
||||
Err(SubstError::UnassignedVariable)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub trait LayeredContext {
|
||||
fn add_variable(&self, symbol: &str, kind: TypeKind ) -> u64;
|
||||
fn bind(&self, var: u64, val: TypeTerm) -> Result<(), SubstError>;
|
||||
fn scope(&self) -> Self;
|
||||
|
||||
fn shift_variables(&self, other: &ContextPtr) -> HashMapSubst;
|
||||
fn shift_from_parent(&self) -> HashMapSubst;
|
||||
}
|
||||
|
||||
impl LayeredContext for ContextPtr {
|
||||
/*
|
||||
* take all variables declared in context `other`
|
||||
* and copy them over to self.
|
||||
* Return a substitution to map terms under context `other`
|
||||
* to context `self`.
|
||||
*/
|
||||
fn shift_variables(&self, other: &ContextPtr) -> HashMapSubst {
|
||||
eprintln!("shift Vars from {:?} to {:?}", other.get_ctxname(), self.get_ctxname());
|
||||
|
||||
// substitution mapping Variables of `other` to variables of `self`
|
||||
let mut σs = HashMapSubst::new();
|
||||
|
||||
// number of local variables in `self`
|
||||
let l = self.0.read().unwrap().γ.len() as u64;
|
||||
|
||||
for (i, entry) in other.0.read().unwrap().γ.iter().enumerate() {
|
||||
let i = i as u64;
|
||||
|
||||
// make variable name unique
|
||||
let mut s = entry.symbol.clone();
|
||||
while let Some(id) = self.get_typeid(&s) {
|
||||
eprintln!("already have {} -> {:?}", s, id);
|
||||
s.push_str("'");
|
||||
}
|
||||
|
||||
eprintln!("add {} ({} -> {})", s, i, i+l);
|
||||
|
||||
self.add_variable(&s, entry.kind.clone());
|
||||
σs.insert(i, TypeTerm::Var(i + l));
|
||||
}
|
||||
σs
|
||||
}
|
||||
|
||||
fn shift_from_parent(&self) -> HashMapSubst {
|
||||
let mut σss = HashMapSubst::new();
|
||||
let locked_self = self.0.read().unwrap();
|
||||
let l = locked_self.γ.len() as u64;
|
||||
|
||||
if let Some(p) = locked_self.parent.as_ref() {
|
||||
for i in 0..p.0.read().unwrap().n_variables() {
|
||||
σss.insert( i as u64, TypeTerm::Var(l+i as u64) );
|
||||
}
|
||||
}
|
||||
|
||||
σss
|
||||
}
|
||||
|
||||
fn add_variable(&self, symbol: &str, kind: TypeKind ) -> u64 {
|
||||
//self.write().unwrap().dict.add_varname(symbol.into());
|
||||
let mut locked_self = self.0.write().unwrap();
|
||||
|
||||
let idx = locked_self.γ.len();
|
||||
eprintln!("Ctx {}, add {} : {:?} = {}", locked_self.ctxname, symbol, kind, idx);
|
||||
locked_self.γ.push(ContextEntry{
|
||||
symbol: symbol.into(),
|
||||
kind
|
||||
});
|
||||
idx as u64
|
||||
}
|
||||
|
||||
fn bind(&self, var: u64, val: TypeTerm) -> Result<(), SubstError> {
|
||||
let mut locked_self = self.0.write().unwrap();
|
||||
let l = locked_self.γ.len() as u64;
|
||||
locked_self.σ.insert(var, val.clone());
|
||||
/*
|
||||
if var >= l {
|
||||
if let Some(parent) = locked_self.parent.as_ref() {
|
||||
parent.bind(var - l, val)?;
|
||||
}
|
||||
}
|
||||
*/
|
||||
// todo check if var is in valid range
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn scope(&self) -> ContextPtr {
|
||||
let mut locked_self = self.0.write().unwrap();
|
||||
locked_self.sub_count += 1;
|
||||
ContextPtr(Arc::new(RwLock::new(Context{
|
||||
ctxname: format!("{}+{}", locked_self.ctxname, locked_self.sub_count),
|
||||
sub_count: 0,
|
||||
parent: Some(self.clone()),
|
||||
names: Vec::new(),
|
||||
γ: Vec::new(),
|
||||
σ: HashMapSubst::new()
|
||||
})))
|
||||
}
|
||||
}
|
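`Context` forms a chain of scopes: variable ids are resolved against the local `γ` first and then shifted into the parent, which is what `shift_variables` and `shift_from_parent` rely on. A small usage sketch under the same crate-root re-export assumption:

```rust
use laddertypes::{Context, LayeredContext, TypeDict, TypeKind, TypeTerm};

fn main() {
    // Sketch only: assumes `laddertypes` re-exports these items at the crate root.
    let root = Context::new();
    let _x = root.add_variable("X", TypeKind::Type); // variable 0 in `root`

    let child = root.scope();
    let y = child.add_variable("Y", TypeKind::Type); // variable 0 in `child`

    // Lookups hit the local scope first, then fall through to the parent,
    // shifted by the number of local variables.
    assert_eq!(child.get_varname(y), Some("Y".into()));
    assert_eq!(child.get_varname(1), Some("X".into()));

    // Bind the child's variable to a concrete term in the child's σ.
    child.bind(y, TypeTerm::Char('c')).unwrap();
}
```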
src/context/substitution.rs (new file, +141)
@@ -0,0 +1,141 @@
use crate::term::*;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum SubstError {
|
||||
InvalidVariable,
|
||||
UnassignedVariable,
|
||||
AlreadyAssigned
|
||||
}
|
||||
|
||||
pub trait Substitution {
|
||||
fn saturate(&mut self);
|
||||
fn get(&self, t: u64) -> Result<TypeTerm, SubstError>;
|
||||
}
|
||||
|
||||
|
||||
pub type HashMapSubst = std::collections::HashMap<u64, TypeTerm>;
|
||||
|
||||
pub trait SubstitutionMut {
|
||||
fn append(&mut self, other: &Self);
|
||||
fn filter(self, f: impl FnMut(&(u64, TypeTerm)) -> bool) -> Self;
|
||||
fn filter_morphtype(self, ty: &crate::MorphismType) -> Self;
|
||||
}
|
||||
|
||||
impl SubstitutionMut for HashMapSubst {
|
||||
fn append(&mut self, other: &HashMapSubst) {
|
||||
for (v,t) in other.iter() {
|
||||
self.insert(*v,t.clone());
|
||||
}
|
||||
}
|
||||
|
||||
fn filter(self, f: impl FnMut(&(u64, TypeTerm)) -> bool) -> Self {
|
||||
self.into_iter().filter(f).collect()
|
||||
}
|
||||
|
||||
fn filter_morphtype(self, ty: &crate::MorphismType) -> Self {
|
||||
self.filter(|(v,t)| {
|
||||
ty.src_type.contains_var(*v) ||
|
||||
ty.dst_type.contains_var(*v)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Substitution for HashMapSubst {
|
||||
fn saturate(&mut self) {
|
||||
let mut new_σ = std::collections::HashMap::new();
|
||||
for (id, t) in self.iter() {
|
||||
let mut t = t.clone();
|
||||
t.apply_subst(self);
|
||||
new_σ.insert(*id, t.normalize());
|
||||
}
|
||||
*self = new_σ;
|
||||
}
|
||||
|
||||
fn get(&self, t : u64) -> Result<TypeTerm, SubstError> {
|
||||
if let Some(t) = (self as &std::collections::HashMap<u64,TypeTerm>).get(&t).cloned() {
|
||||
Ok(t)
|
||||
} else {
|
||||
Err(SubstError::InvalidVariable)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeTerm {
|
||||
/// recursively apply substitution to all subterms,
|
||||
/// which will replace all occurrences of variables that map to
/// some type-term in `σ`
|
||||
pub fn apply_substitution(
|
||||
&mut self,
|
||||
σ: &impl Substitution
|
||||
) -> &mut Self {
|
||||
self.apply_subst(σ)
|
||||
}
|
||||
|
||||
pub fn apply_subst(
|
||||
&mut self,
|
||||
σ: &impl Substitution
|
||||
) -> &mut Self {
|
||||
match self {
|
||||
TypeTerm::Id(_) => {},
|
||||
TypeTerm::Num(_) => {},
|
||||
TypeTerm::Char(_) => {},
|
||||
|
||||
TypeTerm::Var(var) => {
|
||||
if let Ok(t) = σ.get(*var) {
|
||||
*self = t;
|
||||
}
|
||||
}
|
||||
TypeTerm::Ladder(args) |
|
||||
TypeTerm::Spec(args) |
|
||||
TypeTerm::Func(args)
|
||||
=> {
|
||||
for r in args.iter_mut() {
|
||||
r.apply_subst(σ);
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Univ(bound, t) => {
|
||||
bound.apply_subst(σ);
|
||||
t.apply_subst(σ);
|
||||
}
|
||||
|
||||
TypeTerm::Morph(src, dst) => {
|
||||
src.apply_subst(σ);
|
||||
dst.apply_subst(σ);
|
||||
}
|
||||
|
||||
TypeTerm::Struct { struct_repr, members } => {
|
||||
if let Some(struct_repr) = struct_repr.as_mut() {
|
||||
struct_repr.apply_subst(σ);
|
||||
}
|
||||
for StructMember{ symbol:_, ty } in members.iter_mut() {
|
||||
ty.apply_subst(σ);
|
||||
}
|
||||
},
|
||||
TypeTerm::Enum { enum_repr, variants } => {
|
||||
if let Some(enum_repr) = enum_repr.as_mut() {
|
||||
enum_repr.apply_subst(σ);
|
||||
}
|
||||
for EnumVariant{ symbol:_, ty } in variants.iter_mut() {
|
||||
ty.apply_subst(σ);
|
||||
}
|
||||
}
|
||||
TypeTerm::Seq { seq_repr, items } => {
|
||||
if let Some(seq_repr) = seq_repr {
|
||||
seq_repr.apply_subst(σ);
|
||||
}
|
||||
for ty in items.iter_mut() {
|
||||
ty.apply_subst(σ);
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
110
src/dict.rs
110
src/dict.rs
|
@ -1,110 +0,0 @@
|
|||
use crate::bimap::Bimap;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Eq, PartialEq, Hash, Clone, Copy, Debug)]
|
||||
pub enum TypeID {
|
||||
Fun(u64),
|
||||
Var(u64)
|
||||
}
|
||||
|
||||
pub trait TypeDict : Send + Sync {
|
||||
fn insert(&mut self, name: String, id: TypeID);
|
||||
fn add_varname(&mut self, vn: String) -> TypeID;
|
||||
fn add_typename(&mut self, tn: String) -> TypeID;
|
||||
fn get_typeid(&self, tn: &String) -> Option<TypeID>;
|
||||
fn get_typename(&self, tid: &TypeID) -> Option<String>;
|
||||
|
||||
fn get_varname(&self, var_id: u64) -> Option<String> {
|
||||
self.get_typename(&TypeID::Var(var_id))
|
||||
}
|
||||
|
||||
fn add_synonym(&mut self, new: String, old: String) {
|
||||
if let Some(tyid) = self.get_typeid(&old) {
|
||||
self.insert(new, tyid);
|
||||
}
|
||||
}
|
||||
|
||||
fn get_typeid_creat(&mut self, tn: &String) -> TypeID {
|
||||
if let Some(id) = self.get_typeid(tn) {
|
||||
id
|
||||
} else {
|
||||
self.add_typename(tn.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BimapTypeDict {
|
||||
typenames: Bimap<String, TypeID>,
|
||||
type_lit_counter: u64,
|
||||
type_var_counter: u64,
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl BimapTypeDict {
|
||||
pub fn new() -> Self {
|
||||
BimapTypeDict {
|
||||
typenames: Bimap::new(),
|
||||
type_lit_counter: 0,
|
||||
type_var_counter: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TypeDict for BimapTypeDict {
|
||||
fn insert(&mut self, name: String, id: TypeID) {
|
||||
self.typenames.insert(name, id);
|
||||
}
|
||||
|
||||
fn add_varname(&mut self, tn: String) -> TypeID {
|
||||
let tyid = TypeID::Var(self.type_var_counter);
|
||||
self.type_var_counter += 1;
|
||||
self.insert(tn, tyid.clone());
|
||||
tyid
|
||||
}
|
||||
|
||||
fn add_typename(&mut self, tn: String) -> TypeID {
|
||||
let tyid = TypeID::Fun(self.type_lit_counter);
|
||||
self.type_lit_counter += 1;
|
||||
self.insert(tn, tyid.clone());
|
||||
tyid
|
||||
}
|
||||
|
||||
fn get_typename(&self, tid: &TypeID) -> Option<String> {
|
||||
self.typenames.my.get(tid).cloned()
|
||||
}
|
||||
|
||||
fn get_typeid(&self, tn: &String) -> Option<TypeID> {
|
||||
self.typenames.mλ.get(tn).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::RwLock;
|
||||
|
||||
impl<T: TypeDict> TypeDict for Arc<RwLock<T>> {
|
||||
fn insert(&mut self, name: String, id: TypeID) {
|
||||
self.write().unwrap().insert(name, id);
|
||||
}
|
||||
fn add_varname(&mut self, vn: String) -> TypeID {
|
||||
self.write().unwrap().add_varname(vn)
|
||||
}
|
||||
fn add_typename(&mut self, tn: String) -> TypeID {
|
||||
self.write().unwrap().add_typename(tn)
|
||||
}
|
||||
fn get_typename(&self, tid: &TypeID)-> Option<String> {
|
||||
self.read().unwrap().get_typename(tid)
|
||||
}
|
||||
fn get_typeid(&self, tn: &String) -> Option<TypeID> {
|
||||
self.read().unwrap().get_typeid(tn)
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>
|
src/lib.rs (32 changed lines)
@@ -1,33 +1,19 @@
#![allow(mixed_script_confusables)]
|
||||
#![allow(confusable_idents)]
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
pub mod bimap;
|
||||
pub mod dict;
|
||||
pub mod term;
|
||||
pub mod substitution;
|
||||
|
||||
pub mod lexer;
|
||||
pub mod parser;
|
||||
pub mod unparser;
|
||||
pub mod sugar;
|
||||
pub mod curry;
|
||||
pub mod lnf;
|
||||
pub mod pnf;
|
||||
pub mod subtype;
|
||||
pub mod unification;
|
||||
pub mod context;
|
||||
pub mod constraint_system;
|
||||
pub mod morphism;
|
||||
pub mod morphism_base;
|
||||
pub mod morphism_path;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
#[cfg(feature = "pretty")]
|
||||
mod pretty;
|
||||
|
||||
pub use {
|
||||
dict::*,
|
||||
context::*,
|
||||
desugared_term::*,
|
||||
term::*,
|
||||
substitution::*,
|
||||
sugar::*,
|
||||
unification::*,
|
||||
morphism::*
|
||||
constraint_system::*,
|
||||
morphism::*,
|
||||
};
|
||||
|
|
src/lnf.rs
@@ -1,119 +0,0 @@
use crate::term::TypeTerm;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeTerm {
|
||||
/// does the type contain ladders (false) or is it 'flat' (true) ?
|
||||
///
|
||||
/// Example:
|
||||
/// ```<Seq <Digit 10>>``` is flat, but
|
||||
/// ```<Digit 10>~Char``` is not
|
||||
pub fn is_flat(&self) -> bool {
|
||||
match self {
|
||||
TypeTerm::TypeID(_) => true,
|
||||
TypeTerm::Num(_) => true,
|
||||
TypeTerm::Char(_) => true,
|
||||
TypeTerm::App(args) => args.iter().fold(true, |s,x| s && x.is_flat()),
|
||||
TypeTerm::Ladder(_) => false
|
||||
}
|
||||
}
|
||||
|
||||
/// transmute type into Ladder-Normal-Form (LNF)
|
||||
///
|
||||
/// Example:
|
||||
/// ```ignore
|
||||
/// <Seq <Digit 10>~Char>
|
||||
/// ⇒ <Seq <Digit 10>>~<Seq Char>
|
||||
/// ```
|
||||
pub fn normalize(self) -> Self {
|
||||
let mut new_ladder = Vec::<TypeTerm>::new();
|
||||
|
||||
match self {
|
||||
TypeTerm::Ladder(args) => {
|
||||
for x in args.into_iter() {
|
||||
match x.normalize() {
|
||||
TypeTerm::Ladder(gs) => {
|
||||
for g in gs {
|
||||
new_ladder.push(g);
|
||||
}
|
||||
}
|
||||
g => {
|
||||
new_ladder.push(g);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::App(args) => {
|
||||
let args_iter = args.into_iter();
|
||||
|
||||
new_ladder.push( TypeTerm::App(vec![]) );
|
||||
|
||||
for arg in args_iter {
|
||||
match arg.normalize() {
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
// duplicate last element for each rung
|
||||
let l = new_ladder.len();
|
||||
for _ in 1..rungs.len() {
|
||||
new_ladder.push( new_ladder.last().unwrap().clone() );
|
||||
}
|
||||
|
||||
for (i,r) in new_ladder.iter_mut().enumerate() {
|
||||
match r {
|
||||
TypeTerm::App(al) => {
|
||||
if i < l {
|
||||
al.push(rungs[0].clone());
|
||||
} else {
|
||||
al.push(rungs[i-l+1].clone());
|
||||
}
|
||||
}
|
||||
_ => unreachable!()
|
||||
}
|
||||
}
|
||||
}
|
||||
mut other => {
|
||||
other = other.normalize();
|
||||
for rung in new_ladder.iter_mut() {
|
||||
match rung {
|
||||
TypeTerm::App(al) => {
|
||||
al.push(other.clone());
|
||||
}
|
||||
_ => unreachable!()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
atom => {
|
||||
new_ladder.push(atom);
|
||||
}
|
||||
}
|
||||
|
||||
match new_ladder.len() {
|
||||
0 => TypeTerm::unit(),
|
||||
1 => new_ladder.into_iter().next().unwrap(),
|
||||
_ => TypeTerm::Ladder( new_ladder )
|
||||
}
|
||||
}
|
||||
|
||||
/// transmute type into a `Vec` containing
|
||||
/// all rungs of the type in LNF
|
||||
///
|
||||
/// Example:
|
||||
/// ```<Seq <Digit 10>~Char>``` gives
|
||||
/// ```ignore
|
||||
/// vec![ <Seq <Digit 10>>, <Seq Char> ]
|
||||
/// ```
|
||||
pub fn get_lnf_vec(self) -> Vec<TypeTerm> {
|
||||
match self.normalize() {
|
||||
TypeTerm::Ladder( v ) => {
|
||||
v
|
||||
},
|
||||
flat => vec![ flat ]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
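A short sketch of what these (now removed) LNF helpers computed, written against the pre-refactor constructors visible above (`TypeTerm::TypeID`, `App`, `Ladder`, `Num`); the `laddertypes` crate path is an assumption:

use laddertypes::{TypeDict, BimapTypeDict, TypeTerm};

fn main() {
    let mut dict = BimapTypeDict::new();
    let seq = dict.add_typename("Seq".to_string());
    let digit = dict.add_typename("Digit".to_string());
    let chr = dict.add_typename("Char".to_string());

    // <Seq <Digit 10>~Char>
    let t = TypeTerm::App(vec![
        TypeTerm::TypeID(seq),
        TypeTerm::Ladder(vec![
            TypeTerm::App(vec![ TypeTerm::TypeID(digit), TypeTerm::Num(10) ]),
            TypeTerm::TypeID(chr),
        ]),
    ]);

    // LNF pulls the inner ladder to the top: <Seq <Digit 10>> ~ <Seq Char>
    let rungs = t.get_lnf_vec();
    assert_eq!(rungs.len(), 2);
    assert!(rungs.iter().all(|r| r.is_flat()));
}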
@@ -1,63 +0,0 @@

use {
    crate::{
        subtype_unify, sugar::SugaredTypeTerm, unification::UnificationProblem, unparser::*, TypeDict, TypeID, TypeTerm
    },
    std::{collections::HashMap, u64}
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MorphismType {
    pub src_type: TypeTerm,
    pub dst_type: TypeTerm,
}

impl MorphismType {
    pub fn normalize(self) -> Self {
        MorphismType {
            src_type: self.src_type.normalize().param_normalize(),
            dst_type: self.dst_type.normalize().param_normalize()
        }
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

pub trait Morphism : Sized {
    fn get_type(&self) -> MorphismType;
    fn map_morphism(&self, seq_type: TypeTerm) -> Option< Self >;

    fn weight(&self) -> u64 {
        1
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Clone, Debug, PartialEq)]
pub struct MorphismInstance<M: Morphism + Clone> {
    pub halo: TypeTerm,
    pub m: M,
    pub σ: HashMap<TypeID, TypeTerm>
}

impl<M: Morphism + Clone> MorphismInstance<M> {
    pub fn get_type(&self) -> MorphismType {
        MorphismType {
            src_type: TypeTerm::Ladder(vec![
                self.halo.clone(),
                self.m.get_type().src_type.clone()
            ]).apply_substitution(&self.σ)
            .clone(),

            dst_type: TypeTerm::Ladder(vec![
                self.halo.clone(),
                self.m.get_type().dst_type.clone()
            ]).apply_substitution(&self.σ)
            .clone()
        }.normalize()
    }
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
src/morphism/base.rs (new file)
@@ -0,0 +1,193 @@
use {
|
||||
crate::{
|
||||
morphism::{Morphism, MorphismInstance, MorphismType}, Context, ContextPtr, HashMapSubst, LayeredContext, StructMember, TypeDict, TypeTerm
|
||||
}, std::{collections::HashMap, io::Write, sync::{Arc, RwLock}}
|
||||
};
|
||||
|
||||
pub trait MorphBase<
|
||||
Morph: Morphism + Clone,
|
||||
Weight: Eq + Ord + Default
|
||||
> {
|
||||
fn get_morphisms(&self, halo_key: &TypeTerm) -> Vec<MorphismInstance<Morph>> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn heuristic(&self, t: &MorphismType) -> Weight {
|
||||
Weight::default()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub enum DecomposedMorphismType {
|
||||
SeqMap { item: MorphismType },
|
||||
StructMap { members: Vec<(String, MorphismType)> },
|
||||
EnumMap { variants: Vec<(String, MorphismType)> }
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MorphismBase<M: Morphism + Clone> {
|
||||
Γ: ContextPtr,
|
||||
morphisms: Vec< M >
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl<M: Morphism + Clone> MorphismBase<M> {
|
||||
pub fn new(Γ: ContextPtr) -> Self {
|
||||
MorphismBase {
|
||||
Γ,
|
||||
morphisms: Vec::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ctx(&self) -> ContextPtr {
|
||||
self.Γ.clone()
|
||||
}
|
||||
|
||||
pub fn add_morphism(&mut self, m: M) {
|
||||
self.morphisms.push( m );
|
||||
}
|
||||
|
||||
/*
|
||||
given a morphism type (src/dst types),
|
||||
try to match their outer structure (Struct/Seq/Map)
|
||||
and spawn a GraphSearch for each component
|
||||
*/
|
||||
pub fn morphism_decomposition(&self, src_type: &TypeTerm, dst_type: &TypeTerm) ->
|
||||
Option< (TypeTerm, DecomposedMorphismType) >
|
||||
{
|
||||
let (src_ψ, src_floor) = src_type.get_floor_type();
|
||||
let (dst_ψ, dst_floor) = dst_type.get_floor_type();
|
||||
|
||||
if !dst_ψ.is_empty() {
|
||||
if !crate::constraint_system::subtype_unify(&src_ψ, &dst_ψ).is_ok() {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
|
||||
match (src_floor, dst_floor) {
|
||||
(TypeTerm::Struct{ struct_repr: struct_repr_lhs, members: members_lhs},
|
||||
TypeTerm::Struct { struct_repr: struct_repr_rhs, members: members_rhs })
|
||||
=> {
|
||||
// todo: optimization: check if struct repr match
|
||||
|
||||
let mut member_morph_types = Vec::new();
|
||||
let mut failed = false;
|
||||
let mut necessary = false;
|
||||
|
||||
for StructMember{ symbol: symbol_rhs, ty: ty_rhs } in members_rhs.iter() {
|
||||
let mut found_src_member = false;
|
||||
for StructMember{ symbol: symbol_lhs, ty: ty_lhs } in members_lhs.iter() {
|
||||
if symbol_rhs == symbol_lhs {
|
||||
found_src_member = true;
|
||||
|
||||
// todo: check if member-morph-type is parallel
|
||||
|
||||
member_morph_types.push((symbol_rhs.clone(), MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: ty_lhs.clone(), dst_type: ty_rhs.clone()
|
||||
}));
|
||||
|
||||
if ty_lhs != ty_rhs {
|
||||
necessary = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// member of rhs not found in lhs
|
||||
if ! found_src_member {
|
||||
failed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ! failed && necessary {
|
||||
Some((src_ψ, DecomposedMorphismType::StructMap {
|
||||
members: member_morph_types
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
(TypeTerm::Seq{ seq_repr: seq_repr_lhs, items: items_lhs },
|
||||
TypeTerm::Seq{ seq_repr: _seq_repr_rhs, items: items_rhs })
|
||||
=> {
|
||||
for (ty_lhs, ty_rhs) in items_lhs.iter().zip(items_rhs.iter()) {
|
||||
return Some((src_ψ, DecomposedMorphismType::SeqMap {
|
||||
item: MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: ty_lhs.clone(), dst_type: ty_rhs.clone() }
|
||||
}));
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
(TypeTerm::Enum { enum_repr: enum_repr_lhs, variants: variants_lhs },
|
||||
TypeTerm::Enum { enum_repr: enum_repr_rhs, variants: variants_rhs }
|
||||
) => {
|
||||
todo!()
|
||||
}
|
||||
|
||||
_ => {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn enum_morphisms_from(&self, Γ0: &ContextPtr, src_type: &TypeTerm) -> Vec< (TypeTerm, ContextPtr, HashMapSubst, M) > {
|
||||
let mut morphs = Vec::new();
|
||||
|
||||
for m in self.morphisms.iter() {
|
||||
let mut m_src_type = m.get_type().src_type.normalize();
|
||||
let mut m_dst_type = m.get_type().dst_type.normalize();
|
||||
|
||||
let Γ = Γ0.scope();
|
||||
let σs = Γ.shift_variables(&m.ctx());
|
||||
m_src_type.apply_subst(&σs);
|
||||
m_dst_type.apply_subst(&σs);
|
||||
|
||||
let mut src_type = src_type.clone();
|
||||
src_type.apply_subst(&Γ.shift_from_parent());
|
||||
|
||||
|
||||
// check if the given source type is compatible with the
|
||||
// morphisms source type,
|
||||
// i.e. check if `src_type` is a subtype of `m_src_type`
|
||||
if let Ok((ψ, σ)) = crate::constraint_system::subtype_unify(&src_type, &m_src_type) {
|
||||
for (v,t) in σ.iter() {
|
||||
Γ.bind(*v, t.clone()).expect("cant bind variable");
|
||||
}
|
||||
morphs.push((ψ, Γ, σs, m.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
morphs
|
||||
}
|
||||
|
||||
pub fn enum_complex_morphisms(&self, Γ0: &ContextPtr, src_type: &TypeTerm) -> Vec<(TypeTerm, ContextPtr, HashMapSubst, DecomposedMorphismType)> {
|
||||
let mut morphs = Vec::<(TypeTerm, ContextPtr, HashMapSubst, DecomposedMorphismType)>::new();
|
||||
for m in self.morphisms.iter() {
|
||||
let mut src_type = src_type.clone();
|
||||
let mut m_src_type = m.get_type().src_type.normalize();
|
||||
|
||||
let Γ = Γ0.scope();
|
||||
let σs = Γ.shift_variables(&m.ctx());
|
||||
m_src_type.apply_subst(&σs);
|
||||
|
||||
src_type.apply_subst(&Γ.shift_from_parent());
|
||||
|
||||
/* 2. check complex types */
|
||||
if let Some((ψ,decomposition)) = self.morphism_decomposition(&src_type, &m_src_type) {
|
||||
morphs.push((ψ,Γ,σs,decomposition));
|
||||
}
|
||||
}
|
||||
morphs
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
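As an illustration of `morphism_decomposition` above: a goal `{ x: A~B, y: C } → { x: A~B', y: C }` yields a `StructMap` whose member morphism types are `x : A~B → A~B'` and `y : C → C`. The decomposition is only emitted because at least one member actually changes (`necessary`) and every destination member has a matching source member; otherwise `None` is returned.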
src/morphism/graph.rs (new file)
@@ -0,0 +1,253 @@
use {
|
||||
crate::{
|
||||
morphism::DecomposedMorphismType, search_node::{SearchNode, SearchNodeExt, Step}, Context, ContextPtr, EnumVariant, HashMapSubst, LayeredContext, Morphism, MorphismBase, MorphismInstance, MorphismType, StructMember, SubstitutionMut, TypeDict, TypeTerm
|
||||
},
|
||||
std::{collections::HashMap, ops::Deref, sync::{Arc,RwLock}}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub struct MorphismGraph<M: Morphism+Clone> {
|
||||
solved_paths: HashMap< MorphismType, MorphismInstance<M> >,
|
||||
base: MorphismBase<M>
|
||||
}
|
||||
|
||||
pub struct GraphSearch<M: Morphism+Clone> {
|
||||
Γ: ContextPtr,
|
||||
goal: MorphismType,
|
||||
solution: Option< MorphismInstance<M> >,
|
||||
explore_queue: Vec< Arc<RwLock<SearchNode<M>>> >,
|
||||
|
||||
skip_preview: bool
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum GraphSearchState<M: Morphism+Clone> {
|
||||
Solved( MorphismInstance<M> ),
|
||||
Continue,
|
||||
Err( GraphSearchError )
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum GraphSearchError {
|
||||
NoMorphismFound
|
||||
}
|
||||
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl<M: Morphism+Clone> MorphismGraph<M> {
|
||||
pub fn new(base: MorphismBase<M>) -> Self {
|
||||
MorphismGraph {
|
||||
solved_paths: HashMap::new(),
|
||||
base
|
||||
}
|
||||
}
|
||||
|
||||
pub fn search(&self, goal: MorphismType) -> Result<
|
||||
MorphismInstance<M>,
|
||||
GraphSearchError
|
||||
>
|
||||
{
|
||||
let Γ = self.base.ctx().scope();
|
||||
eprintln!("Start search (Γ={})", Γ.get_ctxname());
|
||||
let mut search = GraphSearch::<M>::new(Γ, goal);
|
||||
loop {
|
||||
match search.advance(&self.base) {
|
||||
GraphSearchState::Solved(m) => { return Ok(m); }
|
||||
GraphSearchState::Continue => { continue; }
|
||||
GraphSearchState::Err(err) => { return Err(err); }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<M: Morphism+Clone> GraphSearch<M> {
|
||||
pub fn new(Γ: ContextPtr, goal: MorphismType) -> Self {
|
||||
GraphSearch {
|
||||
goal: goal.clone(),
|
||||
solution: None,
|
||||
Γ: Γ.clone(),
|
||||
explore_queue: vec![
|
||||
Arc::new(RwLock::new(SearchNode {
|
||||
Γ,
|
||||
pred: None,
|
||||
weight: 0,
|
||||
ty: MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: goal.src_type.clone(),
|
||||
dst_type: goal.src_type.clone()
|
||||
},
|
||||
step: Step::Id { τ: goal.src_type.clone() },
|
||||
ψ: TypeTerm::unit()
|
||||
}))
|
||||
],
|
||||
skip_preview: false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_solution(&self) -> Option< MorphismInstance<M> > {
|
||||
self.solution.clone()
|
||||
}
|
||||
|
||||
pub fn best_path_weight(&self) -> u64 {
|
||||
if let Some(best) = self.explore_queue.last() {
|
||||
best.get_weight()
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* for node `search_node` , calculate the estimated cost for completing
|
||||
* the path to fulfill the morphism type `goal`
|
||||
*/
|
||||
pub fn est_remain(goal: &MorphismType, search_node: &Arc<RwLock<SearchNode<M>>>) -> u64 {
|
||||
MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: goal.src_type.clone(),
|
||||
dst_type: search_node.get_type().src_type.clone()
|
||||
}.estimated_cost()
|
||||
+
|
||||
MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: search_node.get_type().dst_type.clone(),
|
||||
dst_type: goal.dst_type.clone()
|
||||
}.estimated_cost()
|
||||
}
|
||||
|
||||
/*
|
||||
* consider the nodes in `self.explore_queue` and take the most promising node
|
||||
*/
|
||||
pub fn choose_next_node(&mut self, dict: &mut impl TypeDict) -> Option<Arc<RwLock<SearchNode<M>>>> {
|
||||
let goal = self.goal.clone();
|
||||
|
||||
/* sort all nodes by descending weight whereby we use the sum of the
|
||||
* already manifested cost of the taken path
|
||||
* plus the estimated remaining cost to complete the path
|
||||
*/
|
||||
self.explore_queue.sort_by(
|
||||
|a,b| {
|
||||
(Self::est_remain(&goal, b) + b.get_weight() )
|
||||
.cmp(
|
||||
&(Self::est_remain(&goal, a) + a.get_weight())
|
||||
)
|
||||
}
|
||||
);
|
||||
|
||||
/*
|
||||
if !self.skip_preview {
|
||||
eprintln!("===== TOP 5 PATHS =====\nGoal:\n {} -> {}",
|
||||
goal.src_type.pretty(dict, 0),
|
||||
goal.dst_type.pretty(dict, 0)
|
||||
);
|
||||
for i in 1 ..= usize::min(self.explore_queue.len(), 5) {
|
||||
let n = &self.explore_queue[self.explore_queue.len() - i];
|
||||
eprintln!("[[ {} ]] (weight: {} + est remain: {}) --- {} --> {}", i,
|
||||
n.get_weight(),
|
||||
Self::est_remain(&goal, &n),
|
||||
n.get_type().src_type.pretty(&mut n.read().unwrap().Γ.clone(), 0),
|
||||
n.get_type().dst_type.pretty(&mut n.read().unwrap().Γ.clone(), 0));
|
||||
}
|
||||
} else {
|
||||
self.skip_preview = false;
|
||||
}
|
||||
*/
|
||||
|
||||
self.explore_queue.pop()
|
||||
}
|
||||
|
||||
pub fn add_explore_node(&mut self, node: Arc<RwLock<SearchNode<M>>>) {
|
||||
if ! node.creates_loop() {
|
||||
self.explore_queue.push(node);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* take the most promising node and iterate its search by one step
|
||||
*/
|
||||
pub fn advance(&mut self, base: &MorphismBase<M>) -> GraphSearchState<M> {
|
||||
//eprintln!("choose node...");
|
||||
if let Some(node) = self.choose_next_node(&mut self.Γ.clone()) {
|
||||
|
||||
/*
|
||||
* in case this node contains a sub-search graph,
|
||||
* advance it first
|
||||
*/
|
||||
match node.advance(base) {
|
||||
Ok(false) => {
|
||||
/* sub search solved */
|
||||
assert!( node.is_ready() );
|
||||
let w = node.to_morphism_instance().unwrap().get_weight();
|
||||
//eprintln!("set Weight of complex morph to {}", w);
|
||||
node.write().unwrap().weight = w;
|
||||
}
|
||||
Ok(true) => {
|
||||
/* sub search was continued, but still ongoing */
|
||||
if ! node.is_ready() {
|
||||
self.skip_preview = true;
|
||||
self.add_explore_node(node);
|
||||
return GraphSearchState::Continue;
|
||||
} else {
|
||||
eprintln!("node returned true even though it is ready");
|
||||
return GraphSearchState::Continue;
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
return GraphSearchState::Err(err);
|
||||
}
|
||||
}
|
||||
|
||||
/* 1. Check if goal is already reached by the current path */
|
||||
if let Ok((_ψ, σ)) = crate::constraint_system::subtype_unify( &node.get_type().dst_type, &self.goal.dst_type ) {
|
||||
for (v,t) in σ.into_iter() {
|
||||
node.read().unwrap().Γ.bind(v, t).expect("cant bind");
|
||||
}
|
||||
|
||||
/* found path */
|
||||
self.solution = node.to_morphism_instance();
|
||||
return GraphSearchState::Solved(self.get_solution().unwrap());
|
||||
}
|
||||
|
||||
let mut decompositions = base.enum_complex_morphisms(&node.read().unwrap().Γ, &node.get_type().dst_type);
|
||||
if let Some((ψ,d)) = base.morphism_decomposition(&node.get_type().dst_type, &self.goal.dst_type) {
|
||||
decompositions.push((ψ,node.read().unwrap().Γ.clone(),HashMap::new(),d));
|
||||
}
|
||||
|
||||
//eprintln!("{} decompositions", decompositions.len());
|
||||
|
||||
let mut done = Vec::new();
|
||||
for (ψ,Γ,σs,decomposition) in decompositions {
|
||||
if ! done.contains(&(ψ.clone(),σs.clone(),decomposition.clone())) {
|
||||
eprintln!("decomposition {:?} ~ {:?}", ψ,decomposition);
|
||||
let mut new_node =
|
||||
match &decomposition {
|
||||
DecomposedMorphismType::SeqMap { item } => { node.map_seq( item.clone() ) },
|
||||
DecomposedMorphismType::StructMap { members } => { node.map_struct(members.clone()) },
|
||||
DecomposedMorphismType::EnumMap { variants } => { node.map_enum(variants.clone()) },
|
||||
}.set_sub(ψ.clone());
|
||||
|
||||
new_node.write().unwrap().Γ = Γ;
|
||||
|
||||
self.add_explore_node(new_node);
|
||||
done.push((ψ, σs, decomposition));
|
||||
} else {
|
||||
eprintln!("avoid duplicate decomposition");
|
||||
}
|
||||
}
|
||||
|
||||
/* 2. Try to advance current path */
|
||||
//eprintln!("enumerate direct morphisms");
|
||||
for (ψ,Γ,σs,m) in base.enum_morphisms_from(&node.read().unwrap().Γ, &node.get_type().dst_type) {
|
||||
//eprintln!("add direct path with ψ={}, Γ={}, σs={:?}", ψ.pretty(&Γ, 0), Γ.pretty(), σs);
|
||||
self.add_explore_node( node.chain(ψ,&Γ,σs, m) );
|
||||
}
|
||||
|
||||
GraphSearchState::Continue
|
||||
} else {
|
||||
GraphSearchState::Err(GraphSearchError::NoMorphismFound)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
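`choose_next_node` above is the A*-style selection step: candidates are ordered by the sum of the weight already accumulated along the partial path and the heuristic estimate of the remaining cost, and the cheapest candidate is popped from the end of the queue. A self-contained sketch of just that ranking, using plain tuples instead of the crate's `SearchNode`:

// (label, accumulated weight, estimated remaining cost)
fn choose_next(queue: &mut Vec<(&'static str, u64, u64)>) -> Option<(&'static str, u64, u64)> {
    // sort descending by total estimate so the cheapest candidate ends up last ...
    queue.sort_by(|a, b| (b.1 + b.2).cmp(&(a.1 + a.2)));
    // ... and is returned by pop(), mirroring explore_queue.pop() above
    queue.pop()
}

fn main() {
    let mut queue = vec![
        ("long detour",    40,  0),
        ("promising",      10, 15),
        ("barely started",  5, 50),
    ];
    // 10 + 15 = 25 is the smallest total, so "promising" is expanded first
    assert_eq!(choose_next(&mut queue).unwrap().0, "promising");
}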
src/morphism/heuristic.rs (new file)
@@ -0,0 +1,59 @@

use crate::{morphism::MorphismType, TypeTerm};

impl MorphismType {

    pub fn estimated_cost(&self) -> u64 {

        if let Ok((ψ,σ)) = crate::subtype_unify(&self.src_type, &self.dst_type) {
            0
        } else {
            match (self.src_type.clone().normalize(),
                   self.dst_type.clone().normalize())
            {
                (TypeTerm::Ladder(r1),
                 TypeTerm::Ladder(r2)) => {
                    let mut cost = 10;
                    for i in 0..usize::min( r1.len(), r2.len() ) {
                        cost += MorphismType { bounds: Vec::new(), src_type: r1[i].clone(), dst_type: r2[i].clone() }.estimated_cost();
                    }
                    cost
                }
                (TypeTerm::Spec(a1),
                 TypeTerm::Spec(a2)) => {
                    let mut cost = 10;
                    for i in 0..usize::min( a1.len(), a2.len() ) {
                        cost += MorphismType { bounds: Vec::new(), src_type: a1[i].clone(), dst_type: a2[i].clone() }.estimated_cost();
                    }
                    cost
                }
                (TypeTerm::Seq{ seq_repr: sr1, items: items1 },
                 TypeTerm::Seq{ seq_repr: sr2, items: items2 }) => {
                    let mut cost = 10;
                    /* // todo : add cost seq-repr conversion?
                    estimated_morphism_cost(
                        &MorphismType { src_type: sr1, dst_type: sr2 }
                    );
                    */
                    for i in 0..usize::min( items1.len(), items2.len() ) {
                        cost += MorphismType { bounds: Vec::new(), src_type: items1[i].clone(), dst_type: items2[i].clone() }.estimated_cost();
                    }

                    cost
                }

                (TypeTerm::Var(_), x)
                | (x, TypeTerm::Var(_))
                => {
                    return 1;
                }
                (a, b) => {
                    if a == b {
                        return 0;
                    } else {
                        return 10;
                    }
                }
            }
        }
    }
}
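As a worked example of the estimate above: for `<Seq <Digit 10>> → <Seq Char>` (assuming neither the whole pair nor `<Digit 10>` vs `Char` subtype-unifies), the `Spec`/`Spec` arm contributes a base of 10, the identical `Seq` heads add 0, and the mismatched argument pair adds another 10, giving 20. Any pair involving a type variable is estimated at just 1, which keeps paths that may still specialize cheap, and equal terms cost 0, matching the zero cost of an already satisfied goal.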
src/morphism/instance.rs (new file)
@@ -0,0 +1,290 @@
|
||||
use {
|
||||
crate::{
|
||||
term::{StructMember, TypeTerm},
|
||||
context::*,
|
||||
morphism::*,
|
||||
},
|
||||
std::collections::HashMap,
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum MorphismInstance<M: Morphism + Clone> {
|
||||
Id { τ: TypeTerm },
|
||||
Primitive{ σs: HashMapSubst, m: M },
|
||||
Sub {
|
||||
ψ: TypeTerm,
|
||||
m: Box<MorphismInstance<M>>
|
||||
},
|
||||
Specialize {
|
||||
Γ: ContextPtr,
|
||||
m: Box<MorphismInstance<M>>
|
||||
},
|
||||
Chain {
|
||||
path: Vec<MorphismInstance<M>>
|
||||
},
|
||||
MapSeq {
|
||||
seq_repr: Option<Box<TypeTerm>>,
|
||||
item_morph: Box<MorphismInstance<M>>,
|
||||
},
|
||||
MapStruct {
|
||||
struct_repr: Option<Box<TypeTerm>>,
|
||||
member_morph: Vec< (String, MorphismInstance<M>) >
|
||||
},
|
||||
MapEnum {
|
||||
enum_repr: Option<Box<TypeTerm>>,
|
||||
variant_morph: Vec< (String, MorphismInstance<M>) >
|
||||
}
|
||||
}
|
||||
|
||||
impl<M: Morphism + Clone> MorphismInstance<M> {
|
||||
|
||||
#[cfg(feature = "pretty")]
|
||||
pub fn pretty(&self, Γ: &ContextPtr) -> String {
|
||||
let mut s = String::new();
|
||||
|
||||
match self {
|
||||
MorphismInstance::Id { τ } => {
|
||||
s.push_str( &τ.pretty(&mut Γ.clone(), 0) );
|
||||
},
|
||||
MorphismInstance::Primitive { σs, m } => {
|
||||
let ty = m.get_type().apply_subst(σs);//.apply_subst(Γ);
|
||||
s.push_str(&format!("{}\n -morph->\n{}\n", ty.src_type.pretty(&mut Γ.clone(), 0), ty.dst_type.pretty(&mut Γ.clone(), 0)));
|
||||
},
|
||||
MorphismInstance::Sub { ψ, m } => {
|
||||
s.push_str("Sub {\n");
|
||||
s.push_str(&format!("ψ = {}\n", ψ.pretty(&mut Γ.clone(), 0)));
|
||||
s.push_str(&m.pretty(Γ));
|
||||
s.push_str("}");
|
||||
},
|
||||
MorphismInstance::Specialize { Γ, m } => {
|
||||
s.push_str(&format!("(Γ:{})", Γ.pretty()));
|
||||
s.push_str(&m.pretty(Γ));
|
||||
},
|
||||
MorphismInstance::Chain { path } => {
|
||||
s.push_str("Chain {\n");
|
||||
for m in path.iter() {
|
||||
s.push_str(&m.pretty(Γ));
|
||||
s.push_str(",");
|
||||
}
|
||||
s.push_str("}");
|
||||
},
|
||||
MorphismInstance::MapSeq { seq_repr, item_morph } => {
|
||||
s.push_str("MapSeq {\n");
|
||||
s.push_str(&item_morph.pretty(Γ));
|
||||
s.push_str("}");
|
||||
},
|
||||
MorphismInstance::MapStruct { struct_repr, member_morph } => {
|
||||
s.push_str("MapStruct {\n");
|
||||
for m in member_morph.iter() {
|
||||
s.push_str(&format!("{} ↦ {}\n", m.0, m.1.pretty(&mut Γ.clone())));
|
||||
}
|
||||
s.push_str("}");
|
||||
},
|
||||
MorphismInstance::MapEnum { enum_repr, variant_morph } => {
|
||||
s.push_str("MapEnum {\n");
|
||||
for m in variant_morph.iter() {
|
||||
s.push_str(&format!("{} ↦ {}\n", m.0, m.1.pretty(&mut Γ.clone())));
|
||||
}
|
||||
s.push_str("}");
|
||||
},
|
||||
}
|
||||
|
||||
s
|
||||
}
|
||||
|
||||
pub fn get_action_type(&self) -> MorphismType {
|
||||
self.get_type().strip_common_rungs()
|
||||
}
|
||||
|
||||
pub fn from_chain(τ: TypeTerm, path: &Vec<MorphismInstance<M>>) -> Self {
|
||||
if path.len() == 0 {
|
||||
MorphismInstance::Id { τ }
|
||||
} else if path.len() == 1 {
|
||||
path[0].clone()
|
||||
} else {
|
||||
MorphismInstance::Chain { path: path.clone() }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_weight(&self) -> u64 {
|
||||
match self {
|
||||
MorphismInstance::Id { τ } => 0,
|
||||
MorphismInstance::Sub { ψ, m } => m.get_weight(),
|
||||
MorphismInstance::Specialize { Γ, m } => m.get_weight(),
|
||||
MorphismInstance::Primitive { σs, m } => 10,
|
||||
MorphismInstance::Chain { path } => path.iter().map(|m| m.get_weight()).sum(),
|
||||
MorphismInstance::MapSeq { seq_repr, item_morph } => item_morph.get_weight() + 15,
|
||||
MorphismInstance::MapStruct { struct_repr, member_morph } => member_morph.iter().map(|m| m.1.get_weight()).sum(),
|
||||
MorphismInstance::MapEnum { enum_repr, variant_morph } => variant_morph.iter().map(|m| m.1.get_weight()).sum()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_type(&self) -> MorphismType {
|
||||
match self {
|
||||
MorphismInstance::Id { τ } => {
|
||||
MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: τ.clone(),
|
||||
dst_type: τ.clone()
|
||||
}
|
||||
}
|
||||
MorphismInstance::Primitive { σs, m } => { m.get_type().apply_subst(σs) },
|
||||
MorphismInstance::Sub { ψ, m } =>
|
||||
MorphismType {
|
||||
bounds: m.get_type().bounds,
|
||||
src_type:
|
||||
TypeTerm::Ladder(vec![
|
||||
ψ.clone(),
|
||||
m.get_type().src_type
|
||||
]),
|
||||
dst_type: TypeTerm::Ladder(vec![
|
||||
ψ.clone(),
|
||||
m.get_type().dst_type
|
||||
]),
|
||||
},
|
||||
MorphismInstance::Specialize { Γ, m } => {
|
||||
m.get_type().apply_subst(Γ)
|
||||
}
|
||||
MorphismInstance::Chain { path } => {
|
||||
if path.len() > 0 {
|
||||
MorphismType {
|
||||
bounds: Vec::new(),//Γ.get_bounds(),
|
||||
src_type: path.first().unwrap().get_type().src_type.clone(),
|
||||
dst_type: path.last().unwrap().get_type().dst_type.clone()
|
||||
}
|
||||
} else {
|
||||
unreachable!();
|
||||
/*MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: TypeTerm::Id(45454),
|
||||
dst_type: TypeTerm::Id(45454)
|
||||
}*/
|
||||
}
|
||||
}
|
||||
MorphismInstance::MapSeq { seq_repr, item_morph } => {
|
||||
MorphismType {
|
||||
bounds: item_morph.get_type().bounds,
|
||||
src_type: TypeTerm::Seq{ seq_repr: seq_repr.clone(),
|
||||
items: vec![ item_morph.get_type().src_type ]},
|
||||
dst_type: TypeTerm::Seq{ seq_repr: seq_repr.clone(),
|
||||
items: vec![ item_morph.get_type().dst_type ]},
|
||||
}
|
||||
}
|
||||
MorphismInstance::MapStruct { struct_repr, member_morph } => {
|
||||
MorphismType {
|
||||
bounds: Vec::new(), // <-- fixme: same as with chain
|
||||
src_type: TypeTerm::Struct{
|
||||
struct_repr: struct_repr.clone(),
|
||||
members:
|
||||
member_morph.iter().map(|(symbol, morph)| {
|
||||
StructMember{ symbol:symbol.clone(), ty: morph.get_type().src_type }
|
||||
}).collect()
|
||||
},
|
||||
|
||||
dst_type: TypeTerm::Struct {
|
||||
struct_repr: struct_repr.clone(),
|
||||
members: member_morph.iter().map(|(symbol, morph)| {
|
||||
StructMember { symbol: symbol.clone(), ty: morph.get_type().dst_type}
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
MorphismInstance::MapEnum { enum_repr, variant_morph } => {
|
||||
MorphismType {
|
||||
bounds: Vec::new(), // <-- fixme: same as with chain
|
||||
src_type: TypeTerm::Struct{
|
||||
struct_repr: enum_repr.clone(),
|
||||
members:
|
||||
variant_morph.iter().map(|(symbol, morph)| {
|
||||
StructMember{ symbol:symbol.clone(), ty: morph.get_type().src_type }
|
||||
}).collect()
|
||||
},
|
||||
dst_type: TypeTerm::Struct{
|
||||
struct_repr: enum_repr.clone(),
|
||||
members: variant_morph.iter().map(|(symbol, morph)| {
|
||||
StructMember { symbol: symbol.clone(), ty: morph.get_type().dst_type}
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
}.normalize()
|
||||
}
|
||||
|
||||
pub fn get_subst(&self) -> HashMapSubst {
|
||||
match self {
|
||||
MorphismInstance::Id { τ } => HashMap::new(),
|
||||
MorphismInstance::Primitive { σs, m } => σs.clone(),
|
||||
MorphismInstance::Sub { ψ, m } => m.get_subst(),
|
||||
MorphismInstance::Specialize { Γ, m } => {
|
||||
todo!();
|
||||
HashMap::new()
|
||||
}
|
||||
MorphismInstance::Chain { path } => {
|
||||
path.iter().fold(
|
||||
std::collections::HashMap::new(),
|
||||
|mut σ, m| {
|
||||
σ.append(&m.get_subst());
|
||||
σ
|
||||
}
|
||||
)
|
||||
},
|
||||
MorphismInstance::MapSeq { seq_repr, item_morph } => {
|
||||
item_morph.get_subst()
|
||||
},
|
||||
MorphismInstance::MapStruct { struct_repr, member_morph } => {
|
||||
let mut σ = HashMap::new();
|
||||
for (symbol, m) in member_morph.iter() {
|
||||
σ.append(&mut m.get_subst());
|
||||
}
|
||||
σ
|
||||
},
|
||||
MorphismInstance::MapEnum { enum_repr, variant_morph } => {
|
||||
todo!();
|
||||
HashMap::new()
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_subst(&mut self, γ: &impl Substitution) {
|
||||
let ty = self.get_type();
|
||||
match self {
|
||||
MorphismInstance::Id { τ } => {
|
||||
τ.apply_subst( γ );
|
||||
}
|
||||
MorphismInstance::Primitive { σs, m } => { },
|
||||
MorphismInstance::Sub { ψ, m } => {
|
||||
ψ.apply_subst(γ);
|
||||
m.apply_subst(γ);
|
||||
}
|
||||
MorphismInstance::Specialize { Γ, m } => {
|
||||
todo!();
|
||||
/*
|
||||
for (v,t) in Γ.0.
|
||||
Γ.bind(*i + Γ.0.read().unwrap().γ.len() as u64, t.clone()).expect("cant bind");
|
||||
}
|
||||
*/
|
||||
}
|
||||
MorphismInstance::Chain { path } => {
|
||||
for n in path.iter_mut() {
|
||||
n.apply_subst(γ);
|
||||
}
|
||||
}
|
||||
MorphismInstance::MapSeq { seq_repr, item_morph } => {
|
||||
item_morph.apply_subst(γ);
|
||||
}
|
||||
MorphismInstance::MapStruct { struct_repr, member_morph } => {
|
||||
for (_,ty) in member_morph {
|
||||
ty.apply_subst(γ);
|
||||
}
|
||||
},
|
||||
MorphismInstance::MapEnum { enum_repr, variant_morph } => {
|
||||
for (_,ty) in variant_morph {
|
||||
ty.apply_subst(γ);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
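To make the weights above concrete: a `Chain` of two `Primitive` steps weighs 10 + 10 = 20, and lifting that chain over a sequence as a `MapSeq` adds 15, for a total of 35; `Id` contributes 0, while `Sub` and `Specialize` only pass the weight of their inner morphism through.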
src/morphism/mod.rs (new file)
@@ -0,0 +1,237 @@
pub mod base;
|
||||
pub mod search_node;
|
||||
pub mod graph;
|
||||
pub mod instance;
|
||||
pub mod heuristic;
|
||||
|
||||
pub use base::*;
|
||||
pub use graph::*;
|
||||
pub use instance::*;
|
||||
|
||||
use {
|
||||
crate::{
|
||||
constraint_system::ConstraintSystem, substitution::Substitution, term::{StructMember, TypeTerm},
|
||||
unparser::*, EnumVariant, TypeDict, TypeID, VariableConstraint,
|
||||
context::*
|
||||
},
|
||||
std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, RwLock}
|
||||
}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub trait Morphism : Sized {
|
||||
fn ctx(&self) -> ContextPtr;
|
||||
fn get_type(&self) -> MorphismType;
|
||||
fn weight(&self) -> u64 {
|
||||
1
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct MorphismType {
|
||||
pub bounds: Vec< VariableConstraint >,
|
||||
pub src_type: TypeTerm,
|
||||
pub dst_type: TypeTerm
|
||||
}
|
||||
|
||||
impl MorphismType {
|
||||
pub fn strip_common_rungs(&self) -> MorphismType {
|
||||
match (&self.src_type.clone().strip(), &self.dst_type.clone().strip()) {
|
||||
(TypeTerm::Ladder(rungs_lhs), TypeTerm::Ladder(rungs_rhs)) => {
|
||||
|
||||
let mut lhs_iter = rungs_lhs.iter();
|
||||
let mut rhs_iter = rungs_rhs.iter();
|
||||
let mut last = MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::unit(),
|
||||
dst_type: TypeTerm::unit()
|
||||
};
|
||||
|
||||
while let (Some(lhs_top), Some(rhs_top)) = (lhs_iter.next(), rhs_iter.next()) {
|
||||
last.src_type = lhs_top.clone();
|
||||
last.dst_type = rhs_top.clone();
|
||||
|
||||
if lhs_top != rhs_top {
|
||||
let x = MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: lhs_top.clone(),
|
||||
dst_type: rhs_top.clone()
|
||||
}.strip_common_rungs();
|
||||
|
||||
let mut rl : Vec<_> = lhs_iter.cloned().collect();
|
||||
rl.insert(0, x.src_type);
|
||||
let mut rr : Vec<_> = rhs_iter.cloned().collect();
|
||||
rr.insert(0, x.dst_type);
|
||||
|
||||
return MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::Ladder(rl),
|
||||
dst_type: TypeTerm::Ladder(rr)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
last
|
||||
}
|
||||
|
||||
(TypeTerm::Spec(args_lhs), TypeTerm::Spec(args_rhs)) => {
|
||||
|
||||
let (rl, rr) = args_lhs.iter().zip(args_rhs.iter()).map(
|
||||
|(al,ar)| MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: al.clone(),
|
||||
dst_type: ar.clone()
|
||||
}.strip_common_rungs()
|
||||
)
|
||||
.fold((vec![], vec![]), |(mut rl, mut rr), x| {
|
||||
rl.push(x.src_type);
|
||||
rr.push(x.dst_type);
|
||||
(rl,rr)
|
||||
});
|
||||
|
||||
MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::Spec(rl),
|
||||
dst_type: TypeTerm::Spec(rr)
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Seq { seq_repr:seq_repr_lhs, items:items_lhs },
|
||||
TypeTerm::Seq { seq_repr: seq_repr_rhs, items:items_rhs })
|
||||
=> {
|
||||
let (rl, rr) = items_lhs.iter().zip(items_rhs.iter()).map(
|
||||
|(al,ar)| MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: al.clone(),
|
||||
dst_type: ar.clone()
|
||||
}.strip_common_rungs()
|
||||
)
|
||||
.fold((vec![], vec![]), |(mut rl, mut rr), x| {
|
||||
rl.push(x.src_type);
|
||||
rr.push(x.dst_type);
|
||||
(rl,rr)
|
||||
});
|
||||
MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::Seq{ seq_repr: seq_repr_lhs.clone(), items: rl },
|
||||
dst_type: TypeTerm::Seq { seq_repr: seq_repr_rhs.clone(), items: rr }
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Struct { struct_repr:struct_repr_lhs, members:members_lhs },
|
||||
TypeTerm::Struct { struct_repr: struct_repr_rhs, members:members_rhs })
|
||||
=> {
|
||||
let mut rl = Vec::new();
|
||||
let mut rr = Vec::new();
|
||||
|
||||
for ar in members_rhs.iter() {
|
||||
let mut found = false;
|
||||
for al in members_lhs.iter() {
|
||||
if al.symbol == ar.symbol {
|
||||
let x = MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: al.ty.clone(),
|
||||
dst_type: ar.ty.clone()
|
||||
}.strip_common_rungs();
|
||||
|
||||
rl.push( StructMember{
|
||||
symbol: al.symbol.clone(),
|
||||
ty: x.src_type
|
||||
});
|
||||
rr.push( StructMember{
|
||||
symbol: ar.symbol.clone(),
|
||||
ty: x.dst_type
|
||||
});
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
return MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::Struct { struct_repr: struct_repr_lhs.clone(), members:members_lhs.clone() },
|
||||
dst_type: TypeTerm::Struct { struct_repr: struct_repr_rhs.clone(), members:members_rhs.clone() }
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::Struct{ struct_repr: struct_repr_lhs.clone(), members: rl },
|
||||
dst_type: TypeTerm::Struct{ struct_repr: struct_repr_rhs.clone(), members: rr }
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Enum { enum_repr:enum_repr_lhs, variants:variants_lhs },
|
||||
TypeTerm::Enum { enum_repr: enum_repr_rhs, variants:variants_rhs })
|
||||
=> {
|
||||
let mut rl = Vec::new();
|
||||
let mut rr = Vec::new();
|
||||
|
||||
for ar in variants_rhs.iter() {
|
||||
let mut found = false;
|
||||
for al in variants_lhs.iter() {
|
||||
if al.symbol == ar.symbol {
|
||||
let x = MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: al.ty.clone(),
|
||||
dst_type: ar.ty.clone()
|
||||
}.strip_common_rungs();
|
||||
|
||||
rl.push( EnumVariant{
|
||||
symbol: al.symbol.clone(),
|
||||
ty: x.src_type
|
||||
});
|
||||
rr.push( EnumVariant{
|
||||
symbol: ar.symbol.clone(),
|
||||
ty: x.dst_type
|
||||
});
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
return MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::Enum { enum_repr: enum_repr_lhs.clone(), variants:variants_lhs.clone() },
|
||||
dst_type: TypeTerm::Enum { enum_repr: enum_repr_rhs.clone(), variants:variants_rhs.clone() }
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
MorphismType {
|
||||
bounds: self.bounds.clone(),
|
||||
src_type: TypeTerm::Enum{ enum_repr: enum_repr_lhs.clone(), variants: rl },
|
||||
dst_type: TypeTerm::Enum { enum_repr: enum_repr_rhs.clone(), variants: rr }
|
||||
}
|
||||
}
|
||||
|
||||
(x,y) => MorphismType { bounds: self.bounds.clone(), src_type: x.clone(), dst_type: y.clone() }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_subst(&self, σ: &impl Substitution) -> MorphismType {
|
||||
MorphismType {
|
||||
bounds: self.bounds.iter().map(|b| b.clone().apply_subst(σ).clone()).collect(),
|
||||
src_type: self.src_type.clone().apply_subst(σ).clone(),
|
||||
dst_type: self.dst_type.clone().apply_subst(σ).clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn normalize(&self) -> MorphismType {
|
||||
MorphismType {
|
||||
bounds: self.bounds.iter().map(|bound| bound.normalize()).collect(),
|
||||
src_type: self.src_type.clone().normalize(),
|
||||
dst_type: self.dst_type.clone().normalize(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
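For example, `strip_common_rungs` above reduces a morphism type `A~B~C → A~B~D` to essentially `C → D`: both ladders are walked rung by rung, and only the first differing pair (itself stripped recursively) together with the remaining rungs is kept. This is what `get_action_type` in `instance.rs` relies on to report only the part of a type that a morphism actually changes.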
src/morphism/search_node.rs (new file)
@@ -0,0 +1,398 @@
use {
|
||||
crate::{
|
||||
morphism::DecomposedMorphismType, Context, ContextPtr, EnumVariant, GraphSearch, GraphSearchError, GraphSearchState, HashMapSubst, LayeredContext, Morphism, MorphismBase, MorphismInstance, MorphismType, StructMember, SubstitutionMut, TypeDict, TypeTerm
|
||||
},
|
||||
std::{collections::HashMap, ops::Deref, sync::{Arc,RwLock}}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
/// represents a partial path during search in the morphism graph
|
||||
pub struct SearchNode<M: Morphism+Clone> {
|
||||
/// predecessor node
|
||||
pub pred: Option< Arc<RwLock< SearchNode<M> >> >,
|
||||
|
||||
/// (measured) weight of the preceding path
|
||||
pub weight: u64,
|
||||
|
||||
pub Γ: ContextPtr,
|
||||
pub ty: MorphismType,
|
||||
|
||||
/// the advancement over pred
|
||||
pub step: Step<M>,
|
||||
pub ψ: TypeTerm,
|
||||
}
|
||||
|
||||
pub enum Step<M: Morphism+Clone> {
|
||||
Id { τ: TypeTerm },
|
||||
Inst { m: MorphismInstance<M> },
|
||||
MapSeq { seq_repr: Option<Box<TypeTerm>>, item: GraphSearch<M> },
|
||||
MapStruct { struct_repr: Option<Box<TypeTerm>>, members: Vec< (String, GraphSearch<M>) > },
|
||||
MapEnum { enum_repr: Option<Box<TypeTerm>>, variants: Vec< (String, GraphSearch<M>) > }
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SolvedStep<M: Morphism+Clone> {
|
||||
Id { τ: TypeTerm },
|
||||
Inst { m: MorphismInstance<M> },
|
||||
MapSeq { seq_repr: Option<Box<TypeTerm>>, item: MorphismInstance<M> },
|
||||
MapStruct { struct_repr: Option<Box<TypeTerm>>, members: Vec< (String, MorphismInstance<M>) > },
|
||||
MapEnum { enum_repr: Option<Box<TypeTerm>>, variants: Vec< (String, MorphismInstance<M>) > }
|
||||
}
|
||||
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub trait SearchNodeExt<M: Morphism+Clone> {
|
||||
// fn specialize(&self, σ: HashMapSubst) -> Arc<RwLock<SearchNode<M>>>;
|
||||
fn chain(&self, ψ: TypeTerm, Γ: &ContextPtr, σs: HashMapSubst, m: M) -> Arc<RwLock<SearchNode<M>>>;
|
||||
fn set_sub(&self, ψ: TypeTerm) -> Arc<RwLock<SearchNode<M>>>;
|
||||
fn map_seq(&self, goal: MorphismType) -> Arc<RwLock<SearchNode<M>>>;
|
||||
fn map_struct(&self, goals: Vec<(String, MorphismType)>) -> Arc<RwLock<SearchNode<M>>>;
|
||||
fn map_enum(&self, goals: Vec<(String, MorphismType)>) -> Arc<RwLock<SearchNode<M>>>;
|
||||
|
||||
fn advance(&self, base: &MorphismBase<M>) -> Result<bool, GraphSearchError>;
|
||||
fn to_morphism_instance(&self) -> Option< MorphismInstance<M> >;
|
||||
|
||||
fn is_ready(&self) -> bool;
|
||||
fn get_weight(&self) -> u64;
|
||||
fn get_type(&self) -> MorphismType;
|
||||
|
||||
fn creates_loop(&self) -> bool;
|
||||
}
|
||||
|
||||
impl<M: Morphism+Clone> SearchNodeExt<M> for Arc<RwLock<SearchNode<M>>> {
|
||||
fn get_weight(&self) -> u64 {
|
||||
self.read().unwrap().weight
|
||||
+ match &self.read().unwrap().step {
|
||||
Step::Id { τ } => 0,
|
||||
Step::Inst { m } => 1+m.get_weight(),
|
||||
Step::MapSeq { seq_repr, item } => item.best_path_weight(),
|
||||
Step::MapStruct { struct_repr, members } => members.iter().map(|(_,g)| g.best_path_weight() ).sum(),
|
||||
Step::MapEnum { enum_repr, variants } => variants.iter().map(|(_,g)| g.best_path_weight() ).max().unwrap_or(0),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_type(&self) -> MorphismType {
|
||||
let s = self.read().unwrap();
|
||||
MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: TypeTerm::Ladder(vec![ s.ψ.clone(), s.ty.src_type.clone() ]).normalize(),
|
||||
dst_type: TypeTerm::Ladder(vec![ s.ψ.clone(), s.ty.dst_type.clone() ]).normalize(),
|
||||
}.apply_subst(&s.Γ)
|
||||
}
|
||||
|
||||
// tell if this sub-search already has a solution
|
||||
fn is_ready(&self) -> bool {
|
||||
let n = self.read().unwrap();
|
||||
match &n.step {
|
||||
Step::Id { τ } => true,
|
||||
Step::MapSeq { seq_repr, item } => {
|
||||
item.get_solution().is_some()
|
||||
}
|
||||
Step::MapStruct { struct_repr, members } => {
|
||||
members.iter().map(|(s,g)| g.get_solution().is_some()).min().unwrap_or(true)
|
||||
}
|
||||
Step::MapEnum { enum_repr, variants } => {
|
||||
variants.iter().map(|(s,g)| g.get_solution().is_some()).min().unwrap_or(true)
|
||||
}
|
||||
Step::Inst { m } => true
|
||||
}
|
||||
}
|
||||
|
||||
fn creates_loop(&self) -> bool {
|
||||
let mut cur_node = self.read().unwrap().pred.clone();
|
||||
while let Some(n) = cur_node {
|
||||
let s = &n.read().unwrap().step;
|
||||
match s {
|
||||
Step::Id { τ } => {}
|
||||
_ => {
|
||||
// V- dst_type ?
|
||||
if n.get_type().src_type == self.get_type().dst_type {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cur_node = n.read().unwrap().pred.clone();
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn advance(&self, base: &MorphismBase<M>) -> Result<bool, GraphSearchError> {
|
||||
let mut n = self.write().unwrap();
|
||||
match &mut n.step {
|
||||
Step::MapSeq { seq_repr, item } => {
|
||||
//eprintln!("advance seq-map");
|
||||
match item.advance(base) {
|
||||
GraphSearchState::Solved(item_morph) => {
|
||||
//eprintln!("Sequence-Map Sub Graph Solved!!");
|
||||
n.ty = MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: TypeTerm::Seq { seq_repr: seq_repr.clone(), items: vec![ item_morph.get_type().src_type ] },
|
||||
dst_type: TypeTerm::Seq { seq_repr: seq_repr.clone(), items: vec![ item_morph.get_type().dst_type ] },
|
||||
};
|
||||
Ok(false)
|
||||
}
|
||||
GraphSearchState::Continue => Ok(true),
|
||||
GraphSearchState::Err(err) => Err(err)
|
||||
}
|
||||
}
|
||||
Step::MapStruct { struct_repr, members } => {
|
||||
for (symbol, sub_search) in members.iter_mut() {
|
||||
if sub_search.get_solution().is_none() {
|
||||
match sub_search.advance(base) {
|
||||
GraphSearchState::Solved(_) => {
|
||||
return Ok(true);
|
||||
},
|
||||
GraphSearchState::Continue => { return Ok(true); },
|
||||
GraphSearchState::Err(err) => { return Err(err); }
|
||||
}
|
||||
} else {
|
||||
// already solved, continue with next member
|
||||
}
|
||||
}
|
||||
|
||||
// all sub searches are solved
|
||||
n.ty = MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: TypeTerm::Struct { struct_repr: struct_repr.clone(), members: members.iter().map(|(s,g)| StructMember{ symbol:s.clone(), ty: g.get_solution().unwrap().get_type().src_type }).collect() },
|
||||
dst_type: TypeTerm::Struct { struct_repr: struct_repr.clone(), members: members.iter().map(|(s,g)| StructMember{ symbol:s.clone(), ty: g.get_solution().unwrap().get_type().dst_type }).collect() },
|
||||
};
|
||||
return Ok(false);
|
||||
}
|
||||
Step::MapEnum { enum_repr, variants } => {
|
||||
for (symbol, sub_search) in variants.iter_mut() {
|
||||
if sub_search.get_solution().is_none() {
|
||||
match sub_search.advance(base) {
|
||||
GraphSearchState::Solved(_) => {
|
||||
return Ok(true);
|
||||
},
|
||||
GraphSearchState::Continue => { return Ok(true); },
|
||||
GraphSearchState::Err(err) => { return Err(err); }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// all sub searches are solved
|
||||
n.ty = MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: TypeTerm::Enum { enum_repr: enum_repr.clone(), variants: variants.iter().map(|(s,g)| EnumVariant{ symbol:s.clone(), ty: g.get_solution().unwrap().get_type().src_type }).collect() },
|
||||
dst_type: TypeTerm::Enum { enum_repr: enum_repr.clone(), variants: variants.iter().map(|(s,g)| EnumVariant{ symbol:s.clone(), ty: g.get_solution().unwrap().get_type().dst_type }).collect() },
|
||||
};
|
||||
return Ok(false);
|
||||
}
|
||||
_ => Ok(false)
|
||||
}
|
||||
}
|
||||
|
||||
fn chain(&self, ψ: TypeTerm, Γinst: &ContextPtr, σs: HashMapSubst, m: M) -> Arc<RwLock<SearchNode<M>>> {
|
||||
let m = MorphismInstance::Primitive { σs: σs.clone(), m: m.clone() };
|
||||
|
||||
let mut src_type = self.get_type().src_type;
|
||||
src_type.apply_subst(&Γinst.shift_from_parent());
|
||||
|
||||
let dst_type = m.get_type().dst_type;
|
||||
|
||||
let n = Arc::new(RwLock::new(SearchNode {
|
||||
Γ: Γinst.clone(),
|
||||
pred: Some(self.clone()),
|
||||
weight: self.get_weight(),
|
||||
ty: MorphismType { bounds: Vec::new(), src_type, dst_type },
|
||||
step: Step::Inst{ m },
|
||||
ψ: TypeTerm::unit(),
|
||||
}));
|
||||
n.set_sub(ψ);
|
||||
|
||||
n
|
||||
}
|
||||
|
||||
fn set_sub(&self, ψ: TypeTerm) -> Arc<RwLock<SearchNode<M>>> {
|
||||
let oldψ = &mut self.write().unwrap().ψ;
|
||||
*oldψ = TypeTerm::Ladder(vec![ ψ, oldψ.clone() ]).normalize();
|
||||
self.clone()
|
||||
}
|
||||
|
||||
fn map_seq(&self, goal: MorphismType) -> Arc<RwLock<SearchNode<M>>> {
|
||||
|
||||
let seq_repr = match self.read().unwrap().ty.dst_type.get_floor_type().1 {
|
||||
TypeTerm::Seq { seq_repr, items } => {
|
||||
seq_repr.clone()
|
||||
}
|
||||
_ => unreachable!()
|
||||
};
|
||||
|
||||
Arc::new(RwLock::new(SearchNode {
|
||||
Γ: self.read().unwrap().Γ.clone(),
|
||||
pred: Some(self.clone()),
|
||||
weight: self.get_weight(),
|
||||
ty: MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: TypeTerm::Seq{ seq_repr: seq_repr.clone(), items: vec![goal.src_type.clone()] },
|
||||
dst_type: TypeTerm::Seq{ seq_repr: seq_repr.clone(), items: vec![goal.src_type.clone()] }
|
||||
},
|
||||
step: Step::MapSeq { seq_repr, item: GraphSearch::new(self.read().unwrap().Γ.scope(), goal) },
|
||||
ψ: self.read().unwrap().ψ.clone()
|
||||
}))
|
||||
}
|
||||
|
||||
fn map_struct(&self, goals: Vec<(String, MorphismType)>) -> Arc<RwLock<SearchNode<M>>> {
|
||||
|
||||
let struct_repr = match self.read().unwrap().ty.dst_type.get_floor_type().1 {
|
||||
TypeTerm::Struct { struct_repr, members } => {
|
||||
struct_repr.clone()
|
||||
}
|
||||
_ => unreachable!()
|
||||
};
|
||||
|
||||
Arc::new(RwLock::new(SearchNode {
|
||||
Γ: self.read().unwrap().Γ.clone(),
|
||||
pred: Some(self.clone()),
|
||||
weight: self.get_weight(),
|
||||
ty: MorphismType {
|
||||
bounds:Vec::new(),
|
||||
src_type: TypeTerm::Struct { struct_repr: struct_repr.clone(), members: goals.iter().map(|(s,t)| StructMember{ symbol: s.clone(), ty: t.src_type.clone() }).collect() },
|
||||
dst_type: TypeTerm::Struct { struct_repr: struct_repr.clone(), members: goals.iter().map(|(s,t)| StructMember{ symbol: s.clone(), ty: t.dst_type.clone() }).collect() }
|
||||
},
|
||||
step: Step::MapStruct {
|
||||
struct_repr,
|
||||
members: goals.into_iter().map(|(name,goal)| (name, GraphSearch::new(self.read().unwrap().Γ.scope(), goal))).collect() },
|
||||
ψ: self.read().unwrap().ψ.clone()
|
||||
}))
|
||||
}
|
||||
|
||||
fn map_enum(&self, goals: Vec<(String, MorphismType)>) -> Arc<RwLock<SearchNode<M>>> {
|
||||
let enum_repr = match self.read().unwrap().ty.dst_type.get_floor_type().1 {
|
||||
TypeTerm::Enum { enum_repr, variants } => {
|
||||
enum_repr.clone()
|
||||
}
|
||||
_ => unreachable!()
|
||||
};
|
||||
|
||||
Arc::new(RwLock::new(SearchNode {
|
||||
Γ: self.read().unwrap().Γ.clone(),
|
||||
pred: Some(self.clone()),
|
||||
weight: self.get_weight(),
|
||||
ty: MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: TypeTerm::Enum { enum_repr: enum_repr.clone(), variants: goals.iter().map(|(s,t)| EnumVariant{ symbol: s.clone(), ty: t.src_type.clone() }).collect() },
|
||||
dst_type: TypeTerm::Enum { enum_repr: enum_repr.clone(), variants: goals.iter().map(|(s,t)| EnumVariant{ symbol: s.clone(), ty: t.dst_type.clone() }).collect() }
|
||||
},
|
||||
step: Step::MapEnum { enum_repr, variants: goals.into_iter().map(|(name,goal)| (name, GraphSearch::new(self.read().unwrap().Γ.scope(), goal))).collect() },
|
||||
ψ: self.read().unwrap().ψ.clone()
|
||||
}))
|
||||
}
|
||||
|
||||
|
||||
fn to_morphism_instance(&self) -> Option< MorphismInstance<M> > {
|
||||
let mut steps = Vec::new();
|
||||
let mut cur_node = Some(self.clone());
|
||||
|
||||
let mut Γ = self.read().unwrap().Γ.clone();
|
||||
|
||||
let mut offset = 0;
|
||||
while let Some(n) = cur_node {
|
||||
let n = n.read().unwrap();
|
||||
steps.push((n.ψ.clone(),
|
||||
match &n.step {
|
||||
Step::Id { τ } => SolvedStep::Id { τ: τ.clone() },
|
||||
Step::Inst { m } => SolvedStep::Inst { m: m.clone() },
|
||||
Step::MapSeq { seq_repr, item } => {
|
||||
let mut item = item.get_solution().unwrap();
|
||||
//item.apply_subst(&n.Γ.0.read().unwrap().σ);
|
||||
SolvedStep::MapSeq {
|
||||
seq_repr: seq_repr.clone(),
|
||||
item,
|
||||
}
|
||||
},
|
||||
Step::MapStruct { struct_repr, members } => SolvedStep::MapStruct {
|
||||
struct_repr: struct_repr.clone(),
|
||||
members: members.iter().map(|(n,m)| (n.clone(), m.get_solution().unwrap())).collect()
|
||||
},
|
||||
Step::MapEnum { enum_repr, variants } => SolvedStep::MapEnum {
|
||||
enum_repr: enum_repr.clone(),
|
||||
variants: variants.iter().map(|(n,m)| (n.clone(), m.get_solution().unwrap())).collect()
|
||||
},
|
||||
}));
|
||||
|
||||
//let σs = Γ.shift_variables(&n.Γ);
|
||||
//let sigma = n.Γ.0.read().unwrap().σ.clone();
|
||||
//offset += n.Γ.0.read().unwrap().γ.len() as u64;
|
||||
/*
|
||||
for (v,t) in sigma.into_iter() {
|
||||
Γ.bind(v, t).expect("cant bind");
|
||||
}
|
||||
*/
|
||||
|
||||
cur_node = n.pred.clone();
|
||||
}
|
||||
|
||||
steps.reverse();
|
||||
|
||||
let mut begin = TypeTerm::unit();
|
||||
let mut path = Vec::new();
|
||||
//eprintln!("to_morph_instance:\n==");
|
||||
for (ψ, s) in steps {
|
||||
match s {
|
||||
SolvedStep::Id { τ } => {
|
||||
//eprintln!("to_morph_instance: ID {:?}", τ);
|
||||
begin = τ.clone();
|
||||
}
|
||||
SolvedStep::Inst{ m } => {
|
||||
eprintln!("to_morph_instance: Inst {:?} -- {:?}", ψ, m.get_type());
|
||||
let mut m = m.clone();
|
||||
|
||||
if ! ψ.is_empty() {
|
||||
m = MorphismInstance::Sub { ψ, m: Box::new(m) };
|
||||
}
|
||||
path.push(m.clone());
|
||||
}
|
||||
SolvedStep::MapSeq { seq_repr, item } => {
|
||||
let mut m = MorphismInstance::MapSeq {
|
||||
seq_repr: seq_repr.clone(),
|
||||
item_morph: Box::new(item)
|
||||
};
|
||||
|
||||
if ! ψ.is_empty() {
|
||||
m = MorphismInstance::Sub { ψ, m: Box::new(m) };
|
||||
}
|
||||
path.push(m);
|
||||
}
|
||||
SolvedStep::MapStruct { struct_repr, members } => {
|
||||
let mut m = MorphismInstance::MapStruct {
|
||||
struct_repr: struct_repr.clone(),
|
||||
member_morph: members
|
||||
};
|
||||
if ! ψ.is_empty() {
|
||||
m = MorphismInstance::Sub { ψ, m: Box::new(m) };
|
||||
}
|
||||
path.push(m);
|
||||
}
|
||||
SolvedStep::MapEnum { enum_repr, variants } => {
|
||||
let mut m = MorphismInstance::MapEnum {
|
||||
enum_repr: enum_repr.clone(),
|
||||
variant_morph: variants
|
||||
};
|
||||
if ! ψ.is_empty() {
|
||||
m = MorphismInstance::Sub { ψ, m: Box::new(m) };
|
||||
}
|
||||
path.push(m);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
eprintln!("to_morphism_instance: Γ: {}", Γ.pretty());
|
||||
|
||||
if Γ.0.read().unwrap().n_variables() > 0 {
|
||||
Some(
|
||||
MorphismInstance::Specialize {
|
||||
Γ: Γ.clone(),
|
||||
m: Box::new(MorphismInstance::from_chain(begin, &path))
|
||||
}
|
||||
)
|
||||
} else {
|
||||
Some(MorphismInstance::from_chain(begin, &path))
|
||||
}
|
||||
}
|
||||
}
|
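Note on `creates_loop` above: a candidate node is rejected when its destination type already occurs as the source type of an earlier non-identity step, so a path that would revisit a type (for instance A → B → A) is pruned before it ever enters the explore queue.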
@@ -1,183 +0,0 @@
use {
|
||||
crate::{
|
||||
subtype_unify, sugar::SugaredTypeTerm, unification::UnificationProblem, unparser::*, TypeDict, TypeID, TypeTerm,
|
||||
morphism::{MorphismType, Morphism, MorphismInstance}
|
||||
},
|
||||
std::{collections::HashMap, u64}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MorphismBase<M: Morphism + Clone> {
|
||||
morphisms: Vec< M >,
|
||||
seq_types: Vec< TypeTerm >
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl<M: Morphism + Clone> MorphismBase<M> {
|
||||
pub fn new(seq_types: Vec<TypeTerm>) -> Self {
|
||||
MorphismBase {
|
||||
morphisms: Vec::new(),
|
||||
seq_types
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_morphism(&mut self, m: M) {
|
||||
self.morphisms.push( m );
|
||||
}
|
||||
|
||||
pub fn enum_direct_morphisms(&self, src_type: &TypeTerm)
|
||||
-> Vec< MorphismInstance<M> >
|
||||
{
|
||||
let mut dst_types = Vec::new();
|
||||
for m in self.morphisms.iter() {
|
||||
if let Ok((halo, σ)) = crate::unification::subtype_unify(
|
||||
&src_type.clone().param_normalize(),
|
||||
&m.get_type().src_type.param_normalize(),
|
||||
) {
|
||||
dst_types.push(MorphismInstance{ halo, m: m.clone(), σ });
|
||||
}
|
||||
}
|
||||
dst_types
|
||||
}
|
||||
|
||||
pub fn enum_map_morphisms(&self, src_type: &TypeTerm)
|
||||
-> Vec< MorphismInstance<M> > {
|
||||
let src_type = src_type.clone().param_normalize();
|
||||
let mut dst_types = Vec::new();
|
||||
|
||||
// Check if we have a List type, and if so, see what the Item type is
|
||||
// TODO: function for generating fresh variables
|
||||
let item_variable = TypeID::Var(800);
|
||||
|
||||
for seq_type in self.seq_types.iter() {
|
||||
if let Ok((halo, σ)) = crate::unification::subtype_unify(
|
||||
&src_type,
|
||||
&TypeTerm::App(vec![
|
||||
seq_type.clone(),
|
||||
TypeTerm::TypeID(item_variable)
|
||||
])
|
||||
) {
|
||||
let src_item_type = σ.get(&item_variable).expect("var not in unificator").clone();
|
||||
for item_morph_inst in self.enum_morphisms( &src_item_type ) {
|
||||
|
||||
let mut dst_halo_ladder = vec![ halo.clone() ];
|
||||
if item_morph_inst.halo != TypeTerm::unit() {
|
||||
dst_halo_ladder.push(
|
||||
TypeTerm::App(vec![
|
||||
seq_type.clone().get_lnf_vec().first().unwrap().clone(),
|
||||
item_morph_inst.halo.clone()
|
||||
]));
|
||||
}
|
||||
|
||||
if let Some( map_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
|
||||
dst_types.push(
|
||||
MorphismInstance {
|
||||
halo: TypeTerm::Ladder(dst_halo_ladder).strip().param_normalize(),
|
||||
m: map_morph,
|
||||
σ: item_morph_inst.σ
|
||||
}
|
||||
);
|
||||
} else {
|
||||
eprintln!("could not get map morphism");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dst_types
|
||||
}
|
||||
|
||||
pub fn enum_morphisms(&self, src_type: &TypeTerm) -> Vec< MorphismInstance<M> > {
|
||||
let mut dst_types = Vec::new();
|
||||
dst_types.append(&mut self.enum_direct_morphisms(src_type));
|
||||
dst_types.append(&mut self.enum_map_morphisms(src_type));
|
||||
dst_types
|
||||
}
|
||||
|
||||
pub fn find_direct_morphism(&self,
|
||||
ty: &MorphismType,
|
||||
dict: &mut impl TypeDict
|
||||
) -> Option< MorphismInstance<M> > {
|
||||
eprintln!("find direct morph");
|
||||
for m in self.morphisms.iter() {
|
||||
let ty = ty.clone().normalize();
|
||||
let morph_type = m.get_type().normalize();
|
||||
|
||||
eprintln!("find direct morph:\n {} <= {}",
|
||||
dict.unparse(&ty.src_type), dict.unparse(&morph_type.src_type),
|
||||
);
|
||||
|
||||
if let Ok((halo, σ)) = subtype_unify(&ty.src_type, &morph_type.src_type) {
|
||||
eprintln!("halo: {}", dict.unparse(&halo));
|
||||
|
||||
let dst_type = TypeTerm::Ladder(vec![
|
||||
halo.clone(),
|
||||
morph_type.dst_type.clone()
|
||||
]).normalize().param_normalize();
|
||||
|
||||
eprintln!("-----------> {} <= {}",
|
||||
dict.unparse(&dst_type), dict.unparse(&ty.dst_type)
|
||||
);
|
||||
|
||||
if let Ok((halo2, σ2)) = subtype_unify(&dst_type, &ty.dst_type) {
|
||||
eprintln!("match. halo2 = {}", dict.unparse(&halo2));
|
||||
return Some(MorphismInstance {
|
||||
m: m.clone(),
|
||||
halo,
|
||||
σ,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn find_map_morphism(&self, ty: &MorphismType, dict: &mut impl TypeDict) -> Option< MorphismInstance<M> > {
|
||||
for seq_type in self.seq_types.iter() {
|
||||
if let Ok((halos, σ)) = UnificationProblem::new_sub(vec![
|
||||
(ty.src_type.clone().param_normalize(),
|
||||
TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(100)) ])),
|
||||
|
||||
(TypeTerm::App(vec![ seq_type.clone(), TypeTerm::TypeID(TypeID::Var(101)) ]),
|
||||
ty.dst_type.clone().param_normalize()),
|
||||
]).solve() {
|
||||
// TODO: use real fresh variable names
|
||||
let item_morph_type = MorphismType {
|
||||
src_type: σ.get(&TypeID::Var(100)).unwrap().clone(),
|
||||
dst_type: σ.get(&TypeID::Var(101)).unwrap().clone(),
|
||||
}.normalize();
|
||||
|
||||
//eprintln!("Map Morph: try to find item-morph with type {:?}", item_morph_type);
|
||||
if let Some(item_morph_inst) = self.find_morphism( &item_morph_type, dict ) {
|
||||
if let Some( list_morph ) = item_morph_inst.m.map_morphism( seq_type.clone() ) {
|
||||
return Some( MorphismInstance {
|
||||
m: list_morph,
|
||||
σ,
|
||||
halo: halos[0].clone()
|
||||
} );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn find_morphism(&self, ty: &MorphismType,
|
||||
dict: &mut impl TypeDict
|
||||
)
|
||||
-> Option< MorphismInstance<M> > {
|
||||
if let Some(m) = self.find_direct_morphism(ty, dict) {
|
||||
return Some(m);
|
||||
}
|
||||
if let Some(m) = self.find_map_morphism(ty, dict) {
|
||||
return Some(m);
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
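// Illustrative sketch (not part of the diffed source): how the MorphismBase
// shown above is typically populated and queried. `M` is any user type
// implementing the `Morphism` trait; only the trait items used above
// (get_type, map_morphism, weight) are relied on. `TypeDict` and
// `ParseLadderType` are assumed to be in scope, and the concrete type names
// passed to `dict.parse` are placeholders.
fn morphism_base_sketch<M: Morphism + Clone>(
    dict: &mut impl TypeDict,
    digit_to_uint: M
) -> Option< MorphismInstance<M> > {
    // registering "Seq" as sequence type enables mapped morphisms over <Seq _>
    let mut base = MorphismBase::new(vec![ dict.parse("Seq").expect("parse") ]);
    base.add_morphism(digit_to_uint);

    // look up a direct or mapped morphism between two ladder types
    base.find_morphism(
        &MorphismType {
            src_type: dict.parse("<Seq <Digit 10>~Char>").expect("parse"),
            dst_type: dict.parse("<Seq <Digit 10>~UInt>").expect("parse")
        },
        dict
    )
}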
|
|
@ -1,136 +0,0 @@
|
|||
use {
|
||||
crate::{
|
||||
morphism::{MorphismType, Morphism, MorphismInstance},
|
||||
morphism_base::MorphismBase,
|
||||
dict::*,
|
||||
term::*
|
||||
}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct MorphismPath<M: Morphism + Clone> {
|
||||
pub weight: u64,
|
||||
pub cur_type: TypeTerm,
|
||||
pub morphisms: Vec< MorphismInstance<M> >
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub struct ShortestPathProblem<'a, M: Morphism + Clone> {
|
||||
pub morphism_base: &'a MorphismBase<M>,
|
||||
pub goal: TypeTerm,
|
||||
queue: Vec< MorphismPath<M> >
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl<'a, M:Morphism+Clone> ShortestPathProblem<'a, M> {
|
||||
pub fn new(morphism_base: &'a MorphismBase<M>, ty: MorphismType) -> Self {
|
||||
ShortestPathProblem {
|
||||
morphism_base,
|
||||
queue: vec![
|
||||
MorphismPath::<M> { weight: 0, cur_type: ty.src_type, morphisms: vec![] }
|
||||
],
|
||||
goal: ty.dst_type
|
||||
}
|
||||
}
|
||||
|
||||
pub fn solve(&mut self) -> Option< Vec<MorphismInstance<M>> > {
|
||||
while ! self.queue.is_empty() {
|
||||
self.queue.sort_by( |p1,p2| p2.weight.cmp(&p1.weight));
|
||||
|
||||
if let Some(mut cur_path) = self.queue.pop() {
|
||||
if let Ok((halo, σ)) = crate::unification::subtype_unify( &cur_path.cur_type, &self.goal ) {
|
||||
/* found a path;
 * now apply the substitution and trim it to the variables
 * occurring in the types of each step
 */
|
||||
for n in cur_path.morphisms.iter_mut() {
|
||||
let src_type = n.m.get_type().src_type;
|
||||
let dst_type = n.m.get_type().dst_type;
|
||||
|
||||
let mut new_σ = std::collections::HashMap::new();
|
||||
for (k,v) in σ.iter() {
|
||||
if let TypeID::Var(varid) = k {
|
||||
if src_type.contains_var(*varid)
|
||||
|| dst_type.contains_var(*varid) {
|
||||
new_σ.insert(
|
||||
k.clone(),
|
||||
v.clone().apply_substitution(&σ).clone().strip()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (k,v) in n.σ.iter() {
|
||||
if let TypeID::Var(varid) = k {
|
||||
if src_type.contains_var(*varid)
|
||||
|| dst_type.contains_var(*varid) {
|
||||
new_σ.insert(
|
||||
k.clone(),
|
||||
v.clone().apply_substitution(&σ).clone().strip()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
n.halo = n.halo.clone().apply_substitution(&σ).clone().strip().param_normalize();
|
||||
|
||||
n.σ = new_σ;
|
||||
}
|
||||
|
||||
return Some(cur_path.morphisms);
|
||||
}
|
||||
|
||||
//eprintln!("cur path (w ={}) : @ {:?}", cur_path.weight, cur_path.cur_type);//.clone().sugar(type_dict).pretty(type_dict, 0) );
|
||||
for mut next_morph_inst in self.morphism_base.enum_morphisms(&cur_path.cur_type) {
|
||||
let dst_type = next_morph_inst.get_type().dst_type;
|
||||
// eprintln!("try morph to {}", dst_type.clone().sugar(type_dict).pretty(type_dict, 0));
|
||||
|
||||
let mut creates_loop = false;
|
||||
|
||||
let mut new_path = cur_path.clone();
|
||||
for n in new_path.morphisms.iter_mut() {
|
||||
let mut new_σ = std::collections::HashMap::new();
|
||||
|
||||
for (k,v) in next_morph_inst.σ.iter() {
|
||||
new_σ.insert(
|
||||
k.clone(),
|
||||
v.clone().apply_substitution(&next_morph_inst.σ).clone()
|
||||
);
|
||||
}
|
||||
|
||||
for (k,v) in n.σ.iter() {
|
||||
new_σ.insert(
|
||||
k.clone(),
|
||||
v.clone().apply_substitution(&next_morph_inst.σ).clone()
|
||||
);
|
||||
}
|
||||
|
||||
n.halo = n.halo.clone().apply_substitution(&next_morph_inst.σ).clone().strip().param_normalize();
|
||||
|
||||
n.σ = new_σ;
|
||||
}
|
||||
|
||||
for m in new_path.morphisms.iter() {
|
||||
if m.get_type().src_type == dst_type {
|
||||
creates_loop = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ! creates_loop {
|
||||
new_path.weight += next_morph_inst.m.weight();
|
||||
new_path.cur_type = dst_type;
|
||||
|
||||
new_path.morphisms.push(next_morph_inst);
|
||||
self.queue.push(new_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
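// Illustrative sketch (not part of the diffed source): running the shortest
// path search defined above. Given a populated MorphismBase, solve() pops the
// cheapest open path, extends it with every enumerated morphism that does not
// close a loop, and returns the morphism sequence once the goal type is
// reached via subtype unification.
fn shortest_path_sketch<M: Morphism + Clone>(
    base: &MorphismBase<M>,
    src_type: TypeTerm,
    dst_type: TypeTerm
) -> Option< Vec<MorphismInstance<M>> > {
    ShortestPathProblem::new(base, MorphismType { src_type, dst_type }).solve()
}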
|
138 src/pnf.rs
|
@ -1,138 +0,0 @@
|
|||
use crate::term::TypeTerm;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm > ) -> Vec< TypeTerm > {
|
||||
for i in 0 .. upper.len() {
|
||||
if upper[i] == lower[0] {
|
||||
let mut result_ladder = Vec::<TypeTerm>::new();
|
||||
result_ladder.append(&mut upper[0..i].iter().cloned().collect());
|
||||
result_ladder.append(&mut lower);
|
||||
return result_ladder;
|
||||
}
|
||||
}
|
||||
|
||||
upper.append(&mut lower);
|
||||
upper
|
||||
}
|
||||
|
||||
impl TypeTerm {
|
||||
/// transmute type into Parameter-Normal-Form (PNF)
|
||||
///
|
||||
/// Example:
|
||||
/// ```ignore
|
||||
/// <Seq <Digit 10>>~<Seq Char>
|
||||
/// ⇒ <Seq <Digit 10>~Char>
|
||||
/// ```
|
||||
pub fn param_normalize(mut self) -> Self {
|
||||
match self {
|
||||
TypeTerm::Ladder(mut rungs) => {
|
||||
if rungs.len() > 0 {
|
||||
let mut new_rungs = Vec::new();
|
||||
while let Some(bottom) = rungs.pop() {
|
||||
if let Some(last_but) = rungs.last_mut() {
|
||||
match (bottom, last_but) {
|
||||
(TypeTerm::App(bot_args), TypeTerm::App(last_args)) => {
|
||||
if bot_args.len() == last_args.len() {
|
||||
let mut new_rung_params = Vec::new();
|
||||
let mut require_break = false;
|
||||
|
||||
if bot_args.len() > 0 {
|
||||
if let Ok(_idx) = last_args[0].is_syntactic_subtype_of(&bot_args[0]) {
|
||||
for i in 0 .. bot_args.len() {
|
||||
|
||||
let spliced_type_ladder = splice_ladders(
|
||||
last_args[i].clone().get_lnf_vec(),
|
||||
bot_args[i].clone().get_lnf_vec()
|
||||
);
|
||||
let spliced_type =
|
||||
if spliced_type_ladder.len() == 1 {
|
||||
spliced_type_ladder[0].clone()
|
||||
} else if spliced_type_ladder.len() > 1 {
|
||||
TypeTerm::Ladder(spliced_type_ladder)
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
};
|
||||
|
||||
new_rung_params.push( spliced_type.param_normalize() );
|
||||
}
|
||||
|
||||
} else {
|
||||
new_rung_params.push(
|
||||
TypeTerm::Ladder(vec![
|
||||
last_args[0].clone(),
|
||||
bot_args[0].clone()
|
||||
]).normalize()
|
||||
);
|
||||
|
||||
for i in 1 .. bot_args.len() {
|
||||
if let Ok(_idx) = last_args[i].is_syntactic_subtype_of(&bot_args[i]) {
|
||||
let spliced_type_ladder = splice_ladders(
|
||||
last_args[i].clone().get_lnf_vec(),
|
||||
bot_args[i].clone().get_lnf_vec()
|
||||
);
|
||||
let spliced_type =
|
||||
if spliced_type_ladder.len() == 1 {
|
||||
spliced_type_ladder[0].clone()
|
||||
} else if spliced_type_ladder.len() > 1 {
|
||||
TypeTerm::Ladder(spliced_type_ladder)
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
};
|
||||
|
||||
new_rung_params.push( spliced_type.param_normalize() );
|
||||
} else {
|
||||
new_rung_params.push( bot_args[i].clone() );
|
||||
require_break = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if require_break {
|
||||
new_rungs.push( TypeTerm::App(new_rung_params) );
|
||||
} else {
|
||||
rungs.pop();
|
||||
rungs.push(TypeTerm::App(new_rung_params));
|
||||
}
|
||||
|
||||
} else {
|
||||
new_rungs.push( TypeTerm::App(bot_args) );
|
||||
}
|
||||
}
|
||||
(bottom, _last_but) => {
|
||||
new_rungs.push( bottom );
|
||||
}
|
||||
}
|
||||
} else {
|
||||
new_rungs.push( bottom );
|
||||
}
|
||||
}
|
||||
|
||||
new_rungs.reverse();
|
||||
|
||||
if new_rungs.len() > 1 {
|
||||
TypeTerm::Ladder(new_rungs)
|
||||
} else if new_rungs.len() == 1 {
|
||||
new_rungs[0].clone()
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
}
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::App(params) => {
|
||||
TypeTerm::App(
|
||||
params.into_iter()
|
||||
.map(|p| p.param_normalize())
|
||||
.collect())
|
||||
}
|
||||
|
||||
atomic => atomic
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
|
@ -1,64 +0,0 @@
|
|||
|
||||
use crate::{
|
||||
TypeID,
|
||||
TypeTerm
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub trait Substitution {
|
||||
fn get(&self, t: &TypeID) -> Option< TypeTerm >;
|
||||
}
|
||||
|
||||
impl<S: Fn(&TypeID)->Option<TypeTerm>> Substitution for S {
|
||||
fn get(&self, t: &TypeID) -> Option< TypeTerm > {
|
||||
(self)(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl Substitution for std::collections::HashMap< TypeID, TypeTerm > {
|
||||
fn get(&self, t: &TypeID) -> Option< TypeTerm > {
|
||||
(self as &std::collections::HashMap< TypeID, TypeTerm >).get(t).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
pub type HashMapSubst = std::collections::HashMap< TypeID, TypeTerm >;
|
||||
|
||||
impl TypeTerm {
|
||||
/// recursively apply the substitution to all subterms,
/// replacing every occurrence of a variable that is mapped
/// to some type-term in `σ`
|
||||
pub fn apply_substitution(
|
||||
&mut self,
|
||||
σ: &impl Substitution
|
||||
) -> &mut Self {
|
||||
self.apply_subst(σ)
|
||||
}
|
||||
|
||||
pub fn apply_subst(
|
||||
&mut self,
|
||||
σ: &impl Substitution
|
||||
) -> &mut Self {
|
||||
match self {
|
||||
TypeTerm::TypeID(typid) => {
|
||||
if let Some(t) = σ.get(typid) {
|
||||
*self = t;
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
for r in rungs.iter_mut() {
|
||||
r.apply_subst(σ);
|
||||
}
|
||||
}
|
||||
TypeTerm::App(args) => {
|
||||
for r in args.iter_mut() {
|
||||
r.apply_subst(σ);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
}
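// Minimal sketch (not part of the diffed source) of the Substitution trait
// above: a HashMap (or a closure) acts as the substitution σ, and applying it
// replaces the occurrences of the mapped variables.
fn substitution_sketch() {
    let mut σ = std::collections::HashMap::new();
    σ.insert(TypeID::Var(0), TypeTerm::Char('x'));

    let mut t = TypeTerm::App(vec![
        TypeTerm::TypeID(TypeID::Fun(1)),
        TypeTerm::TypeID(TypeID::Var(0))
    ]);
    t.apply_substitution(&σ);

    assert_eq!(t, TypeTerm::App(vec![
        TypeTerm::TypeID(TypeID::Fun(1)),
        TypeTerm::Char('x')
    ]));
}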
|
|
@ -1,51 +0,0 @@
|
|||
use crate::term::TypeTerm;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeTerm {
|
||||
// returns ladder-step of first match and provided representation-type
|
||||
pub fn is_semantic_subtype_of(&self, expected_type: &TypeTerm) -> Option<(usize, TypeTerm)> {
|
||||
let provided_lnf = self.clone().get_lnf_vec();
|
||||
let expected_lnf = expected_type.clone().get_lnf_vec();
|
||||
|
||||
for i in 0..provided_lnf.len() {
|
||||
if provided_lnf[i] == expected_lnf[0] {
|
||||
return Some((i, TypeTerm::Ladder(
|
||||
provided_lnf[i..].into_iter().cloned().collect()
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn is_syntactic_subtype_of(&self, expected_type: &TypeTerm) -> Result<usize, (usize, usize)> {
|
||||
if let Some((first_match, provided_type)) = self.is_semantic_subtype_of( expected_type ) {
|
||||
let provided_lnf = provided_type.get_lnf_vec();
|
||||
let expected_lnf = expected_type.clone().get_lnf_vec();
|
||||
|
||||
for i in 0 .. usize::min( provided_lnf.len(), expected_lnf.len() ) {
|
||||
if provided_lnf[i] != expected_lnf[i] {
|
||||
return Err((first_match, first_match+i))
|
||||
}
|
||||
}
|
||||
|
||||
Ok(first_match)
|
||||
} else {
|
||||
Err((0,0))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// supertype analogs
|
||||
|
||||
pub fn is_semantic_supertype_of(&self, t: &TypeTerm) -> Option<(usize, TypeTerm)> {
|
||||
t.is_semantic_subtype_of(self)
|
||||
}
|
||||
|
||||
pub fn is_syntactic_supertype_of(&self, t: &TypeTerm) -> Result<usize, (usize, usize)> {
|
||||
t.is_syntactic_subtype_of(self)
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
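// Minimal sketch (not part of the diffed source) of the subtype checks above,
// assuming TypeDict and ParseLadderType are in scope and that get_lnf_vec()
// splits a ladder into its rungs: "A~B~C" matches "B~C" one rung below its top.
fn subtype_sketch(dict: &mut impl crate::TypeDict) {
    let provided = dict.parse("A~B~C").expect("parse");
    let expected = dict.parse("B~C").expect("parse");

    // first match is found at ladder-step 1, and all further rungs agree
    assert_eq!(provided.is_syntactic_subtype_of(&expected), Ok(1));
}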
|
114 src/sugar.rs
|
@ -1,114 +0,0 @@
|
|||
use {
|
||||
crate::{TypeTerm, TypeID, parser::ParseLadderType}
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum SugaredTypeTerm {
|
||||
TypeID(TypeID),
|
||||
Num(i64),
|
||||
Char(char),
|
||||
Univ(Box< SugaredTypeTerm >),
|
||||
Spec(Vec< SugaredTypeTerm >),
|
||||
Func(Vec< SugaredTypeTerm >),
|
||||
Morph(Vec< SugaredTypeTerm >),
|
||||
Ladder(Vec< SugaredTypeTerm >),
|
||||
Struct(Vec< SugaredTypeTerm >),
|
||||
Enum(Vec< SugaredTypeTerm >),
|
||||
Seq(Vec< SugaredTypeTerm >)
|
||||
}
|
||||
|
||||
impl TypeTerm {
|
||||
pub fn sugar(self: TypeTerm, dict: &mut impl crate::TypeDict) -> SugaredTypeTerm {
|
||||
match self {
|
||||
TypeTerm::TypeID(id) => SugaredTypeTerm::TypeID(id),
|
||||
TypeTerm::Num(n) => SugaredTypeTerm::Num(n),
|
||||
TypeTerm::Char(c) => SugaredTypeTerm::Char(c),
|
||||
TypeTerm::App(args) => if let Some(first) = args.first() {
|
||||
if first == &dict.parse("Func").unwrap() {
|
||||
SugaredTypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse("Morph").unwrap() {
|
||||
SugaredTypeTerm::Morph( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse("Struct").unwrap() {
|
||||
SugaredTypeTerm::Struct( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse("Enum").unwrap() {
|
||||
SugaredTypeTerm::Enum( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse("Seq").unwrap() {
|
||||
SugaredTypeTerm::Seq( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse("Spec").unwrap() {
|
||||
SugaredTypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse("Univ").unwrap() {
|
||||
SugaredTypeTerm::Univ(Box::new(
|
||||
SugaredTypeTerm::Spec(
|
||||
args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
|
||||
)
|
||||
))
|
||||
}
|
||||
else {
|
||||
SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
|
||||
}
|
||||
} else {
|
||||
SugaredTypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
|
||||
},
|
||||
TypeTerm::Ladder(rungs) =>
|
||||
SugaredTypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SugaredTypeTerm {
|
||||
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> TypeTerm {
|
||||
match self {
|
||||
SugaredTypeTerm::TypeID(id) => TypeTerm::TypeID(id),
|
||||
SugaredTypeTerm::Num(n) => TypeTerm::Num(n),
|
||||
SugaredTypeTerm::Char(c) => TypeTerm::Char(c),
|
||||
SugaredTypeTerm::Univ(t) => t.desugar(dict),
|
||||
SugaredTypeTerm::Spec(ts) => TypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
|
||||
SugaredTypeTerm::Ladder(ts) => TypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
|
||||
SugaredTypeTerm::Func(ts) => TypeTerm::App(
|
||||
std::iter::once( dict.parse("Func").unwrap() ).chain(
|
||||
ts.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
SugaredTypeTerm::Morph(ts) => TypeTerm::App(
|
||||
std::iter::once( dict.parse("Morph").unwrap() ).chain(
|
||||
ts.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
SugaredTypeTerm::Struct(ts) => TypeTerm::App(
|
||||
std::iter::once( dict.parse("Struct").unwrap() ).chain(
|
||||
ts.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
SugaredTypeTerm::Enum(ts) => TypeTerm::App(
|
||||
std::iter::once( dict.parse("Enum").unwrap() ).chain(
|
||||
ts.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
SugaredTypeTerm::Seq(ts) => TypeTerm::App(
|
||||
std::iter::once( dict.parse("Seq").unwrap() ).chain(
|
||||
ts.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
SugaredTypeTerm::TypeID(_) => false,
|
||||
SugaredTypeTerm::Num(_) => false,
|
||||
SugaredTypeTerm::Char(_) => false,
|
||||
SugaredTypeTerm::Univ(t) => t.is_empty(),
|
||||
SugaredTypeTerm::Spec(ts) |
|
||||
SugaredTypeTerm::Ladder(ts) |
|
||||
SugaredTypeTerm::Func(ts) |
|
||||
SugaredTypeTerm::Morph(ts) |
|
||||
SugaredTypeTerm::Struct(ts) |
|
||||
SugaredTypeTerm::Enum(ts) |
|
||||
SugaredTypeTerm::Seq(ts) => {
|
||||
ts.iter().fold(true, |s,t|s&&t.is_empty())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,12 +1,12 @@
|
|||
use crate::term::*;
|
||||
use crate::desugared_term::*;
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeTerm {
|
||||
impl DesugaredTypeTerm {
|
||||
/// transform term to have at max 2 entries in Application list
|
||||
pub fn curry(self) -> TypeTerm {
|
||||
pub fn curry(self) -> DesugaredTypeTerm {
|
||||
match self {
|
||||
TypeTerm::App(args) => {
|
||||
DesugaredTypeTerm::App(args) => {
|
||||
if args.len() >= 2 {
|
||||
let mut old_args = args.into_iter();
|
||||
let mut new_args = vec![
|
||||
|
@ -16,19 +16,19 @@ impl TypeTerm {
|
|||
|
||||
for x in old_args {
|
||||
new_args = vec![
|
||||
TypeTerm::App(new_args),
|
||||
DesugaredTypeTerm::App(new_args),
|
||||
x
|
||||
];
|
||||
}
|
||||
|
||||
TypeTerm::App(new_args)
|
||||
DesugaredTypeTerm::App(new_args)
|
||||
} else {
|
||||
TypeTerm::App(args)
|
||||
DesugaredTypeTerm::App(args)
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
TypeTerm::Ladder(rungs.into_iter().map(|r| r.curry()).collect())
|
||||
DesugaredTypeTerm::Ladder(rungs) => {
|
||||
DesugaredTypeTerm::Ladder(rungs.into_iter().map(|r| r.curry()).collect())
|
||||
}
|
||||
|
||||
_ => self
|
||||
|
@ -38,11 +38,11 @@ impl TypeTerm {
|
|||
/// summarize all curried applications into one vec
|
||||
pub fn decurry(self) -> Self {
|
||||
match self {
|
||||
TypeTerm::App(mut args) => {
|
||||
DesugaredTypeTerm::App(mut args) => {
|
||||
if args.len() > 0 {
|
||||
let a0 = args.remove(0).decurry();
|
||||
match a0 {
|
||||
TypeTerm::App(sub_args) => {
|
||||
DesugaredTypeTerm::App(sub_args) => {
|
||||
for (i,x) in sub_args.into_iter().enumerate() {
|
||||
args.insert(i, x);
|
||||
}
|
||||
|
@ -50,10 +50,10 @@ impl TypeTerm {
|
|||
other => { args.insert(0, other); }
|
||||
}
|
||||
}
|
||||
TypeTerm::App(args)
|
||||
DesugaredTypeTerm::App(args)
|
||||
}
|
||||
TypeTerm::Ladder(args) => {
|
||||
TypeTerm::Ladder(args.into_iter().map(|a| a.decurry()).collect())
|
||||
DesugaredTypeTerm::Ladder(args) => {
|
||||
DesugaredTypeTerm::Ladder(args.into_iter().map(|a| a.decurry()).collect())
|
||||
}
|
||||
_ => self
|
||||
}
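// Minimal sketch (not part of the diffed source): decurry() re-flattens a
// curried application, i.e. <<A B> C> becomes <A B C> again, while curry()
// goes the other way and keeps at most two entries per application.
fn decurry_sketch() {
    let curried = DesugaredTypeTerm::App(vec![
        DesugaredTypeTerm::App(vec![
            DesugaredTypeTerm::Num(1),
            DesugaredTypeTerm::Num(2)
        ]),
        DesugaredTypeTerm::Num(3)
    ]);

    assert_eq!(
        curried.decurry(),
        DesugaredTypeTerm::App(vec![
            DesugaredTypeTerm::Num(1),
            DesugaredTypeTerm::Num(2),
            DesugaredTypeTerm::Num(3)
        ])
    );
}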
|
|
@ -3,7 +3,7 @@ use crate::TypeID;
|
|||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub enum TypeTerm {
|
||||
pub enum DesugaredTypeTerm {
|
||||
|
||||
/* Atomic Terms */
|
||||
|
||||
|
@ -14,54 +14,51 @@ pub enum TypeTerm {
|
|||
Num(i64),
|
||||
Char(char),
|
||||
|
||||
|
||||
|
||||
/* Complex Terms */
|
||||
|
||||
// Type Parameters
|
||||
// avoid currying to save space & indirection
|
||||
App(Vec< TypeTerm >),
|
||||
App(Vec< DesugaredTypeTerm >),
|
||||
|
||||
// Type Ladders
|
||||
Ladder(Vec< TypeTerm >),
|
||||
Ladder(Vec< DesugaredTypeTerm >),
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl TypeTerm {
|
||||
impl DesugaredTypeTerm {
|
||||
pub fn unit() -> Self {
|
||||
TypeTerm::Ladder(vec![])
|
||||
DesugaredTypeTerm::Ladder(vec![])
|
||||
}
|
||||
|
||||
pub fn new(id: TypeID) -> Self {
|
||||
TypeTerm::TypeID(id)
|
||||
DesugaredTypeTerm::TypeID(id)
|
||||
}
|
||||
|
||||
pub fn arg(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
|
||||
pub fn arg(&mut self, t: impl Into<DesugaredTypeTerm>) -> &mut Self {
|
||||
match self {
|
||||
TypeTerm::App(args) => {
|
||||
DesugaredTypeTerm::App(args) => {
|
||||
args.push(t.into());
|
||||
}
|
||||
|
||||
_ => {
|
||||
*self = TypeTerm::App(vec![
|
||||
*self = DesugaredTypeTerm::App(vec![
|
||||
self.clone(),
|
||||
t.into()
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub fn repr_as(&mut self, t: impl Into<TypeTerm>) -> &mut Self {
|
||||
pub fn repr_as(&mut self, t: impl Into<DesugaredTypeTerm>) -> &mut Self {
|
||||
match self {
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
DesugaredTypeTerm::Ladder(rungs) => {
|
||||
rungs.push(t.into());
|
||||
}
|
||||
|
||||
_ => {
|
||||
*self = TypeTerm::Ladder(vec![
|
||||
*self = DesugaredTypeTerm::Ladder(vec![
|
||||
self.clone(),
|
||||
t.into()
|
||||
])
|
||||
|
@ -72,18 +69,18 @@ impl TypeTerm {
|
|||
}
|
||||
|
||||
pub fn num_arg(&mut self, v: i64) -> &mut Self {
|
||||
self.arg(TypeTerm::Num(v))
|
||||
self.arg(DesugaredTypeTerm::Num(v))
|
||||
}
|
||||
|
||||
pub fn char_arg(&mut self, c: char) -> &mut Self {
|
||||
self.arg(TypeTerm::Char(c))
|
||||
self.arg(DesugaredTypeTerm::Char(c))
|
||||
}
|
||||
|
||||
pub fn contains_var(&self, var_id: u64) -> bool {
|
||||
match self {
|
||||
TypeTerm::TypeID(TypeID::Var(v)) => (&var_id == v),
|
||||
TypeTerm::App(args) |
|
||||
TypeTerm::Ladder(args) => {
|
||||
DesugaredTypeTerm::TypeID(TypeID::Var(v)) => &var_id == v,
|
||||
DesugaredTypeTerm::App(args) |
|
||||
DesugaredTypeTerm::Ladder(args) => {
|
||||
for a in args.iter() {
|
||||
if a.contains_var(var_id) {
|
||||
return true;
|
||||
|
@ -101,13 +98,13 @@ impl TypeTerm {
|
|||
*/
|
||||
pub fn strip(self) -> Self {
|
||||
match self {
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
DesugaredTypeTerm::Ladder(rungs) => {
|
||||
let mut rungs :Vec<_> = rungs.into_iter()
|
||||
.filter_map(|mut r| {
|
||||
r = r.strip();
|
||||
if r != TypeTerm::unit() {
|
||||
if r != DesugaredTypeTerm::unit() {
|
||||
Some(match r {
|
||||
TypeTerm::Ladder(r) => r,
|
||||
DesugaredTypeTerm::Ladder(r) => r,
|
||||
a => vec![ a ]
|
||||
})
|
||||
}
|
||||
|
@ -119,34 +116,36 @@ impl TypeTerm {
|
|||
if rungs.len() == 1 {
|
||||
rungs.pop().unwrap()
|
||||
} else {
|
||||
TypeTerm::Ladder(rungs)
|
||||
DesugaredTypeTerm::Ladder(rungs)
|
||||
}
|
||||
},
|
||||
TypeTerm::App(args) => {
|
||||
DesugaredTypeTerm::App(args) => {
|
||||
let mut args :Vec<_> = args.into_iter().map(|arg| arg.strip()).collect();
|
||||
if args.len() == 0 {
|
||||
TypeTerm::unit()
|
||||
DesugaredTypeTerm::unit()
|
||||
} else if args.len() == 1 {
|
||||
args.pop().unwrap()
|
||||
} else {
|
||||
TypeTerm::App(args)
|
||||
DesugaredTypeTerm::App(args)
|
||||
}
|
||||
}
|
||||
atom => atom
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_interface_type(&self) -> TypeTerm {
|
||||
|
||||
|
||||
pub fn get_interface_type(&self) -> DesugaredTypeTerm {
|
||||
match self {
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
DesugaredTypeTerm::Ladder(rungs) => {
|
||||
if let Some(top) = rungs.first() {
|
||||
top.get_interface_type()
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
DesugaredTypeTerm::unit()
|
||||
}
|
||||
}
|
||||
TypeTerm::App(args) => {
|
||||
TypeTerm::App(args.iter().map(|a| a.get_interface_type()).collect())
|
||||
DesugaredTypeTerm::App(args) => {
|
||||
DesugaredTypeTerm::App(args.iter().map(|a| a.get_interface_type()).collect())
|
||||
}
|
||||
atom => atom.clone()
|
||||
}
|
707 src/term/mod.rs Normal file
|
@ -0,0 +1,707 @@
|
|||
pub mod lexer;
|
||||
pub mod parser; // todo sugared variant
|
||||
pub mod curry; // todo: sugared variant
|
||||
pub mod unparser;
|
||||
pub mod desugared_term; // deprecated
|
||||
pub mod pnf;
|
||||
|
||||
#[cfg(feature = "pretty")]
|
||||
mod pretty;
|
||||
|
||||
|
||||
use {
|
||||
crate::{
|
||||
parser::ParseLadderType,
|
||||
DesugaredTypeTerm,
|
||||
MorphismType,
|
||||
Substitution,
|
||||
TypeDict,
|
||||
TypeID},
|
||||
std::{ops::Deref}
|
||||
};
|
||||
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub enum VariableConstraint {
|
||||
UnconstrainedType, // <<- add TypeKind here ?
|
||||
Subtype(TypeTerm),
|
||||
Trait(TypeTerm),
|
||||
Parallel(TypeTerm),
|
||||
ValueUInt,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub struct StructMember {
|
||||
pub symbol: String,
|
||||
pub ty: TypeTerm
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub struct EnumVariant {
|
||||
pub symbol: String,
|
||||
pub ty: TypeTerm
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub enum TypeTerm {
|
||||
Id(u64),
|
||||
Var(u64),
|
||||
Num(i64),
|
||||
Char(char),
|
||||
Univ(Box< VariableConstraint >, Box< TypeTerm >),
|
||||
Spec(Vec< TypeTerm >),
|
||||
Func(Vec< TypeTerm >),
|
||||
Morph(Box< TypeTerm >, Box< TypeTerm >),
|
||||
Ladder(Vec< TypeTerm >),
|
||||
Struct{
|
||||
struct_repr: Option< Box<TypeTerm> >,
|
||||
members: Vec< StructMember >
|
||||
},
|
||||
Enum{
|
||||
enum_repr: Option<Box< TypeTerm >>,
|
||||
variants: Vec< EnumVariant >
|
||||
},
|
||||
Seq{
|
||||
seq_repr: Option<Box< TypeTerm >>,
|
||||
items: Vec< TypeTerm >
|
||||
},
|
||||
|
||||
/*
|
||||
Todo: Ref, RefMut
|
||||
*/
|
||||
}
|
||||
|
||||
impl TypeTerm {
|
||||
pub fn into_morphism_type(self) -> Option< MorphismType > {
|
||||
match self.normalize() {
|
||||
TypeTerm::Univ(bound, τ) => {
|
||||
let mut m = τ.into_morphism_type()?;
|
||||
m.bounds.push(bound.deref().clone());
|
||||
Some(m)
|
||||
}
|
||||
TypeTerm::Morph(src,dst) => {
|
||||
Some(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: src.deref().clone(),
|
||||
dst_type: dst.deref().clone()
|
||||
})
|
||||
},
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl VariableConstraint {
|
||||
pub fn normalize(&self) -> Self {
|
||||
match self {
|
||||
VariableConstraint::UnconstrainedType => VariableConstraint::UnconstrainedType,
|
||||
VariableConstraint::Subtype(τ) => VariableConstraint::Subtype(τ.clone().normalize()),
|
||||
VariableConstraint::Trait(τ) => VariableConstraint::Trait(τ.clone().normalize()),
|
||||
VariableConstraint::Parallel(τ) => VariableConstraint::Parallel(τ.clone().normalize()),
|
||||
VariableConstraint::ValueUInt => VariableConstraint::ValueUInt
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_subst(&mut self, σ: &impl Substitution) -> &mut Self {
|
||||
match self {
|
||||
VariableConstraint::Subtype(type_term) => { type_term.apply_subst(σ); },
|
||||
VariableConstraint::Trait(type_term) => { type_term.apply_subst(σ); },
|
||||
VariableConstraint::Parallel(type_term) => { type_term.clone().apply_subst(σ); },
|
||||
_ => {}
|
||||
}
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl StructMember {
|
||||
pub fn parse( dict: &mut impl TypeDict, ty: &DesugaredTypeTerm ) -> Option<Self> {
|
||||
match ty {
|
||||
DesugaredTypeTerm::App(args) => {
|
||||
if args.len() != 2 {
|
||||
return None;
|
||||
}
|
||||
/*
|
||||
if args[0] != dict.parse("Struct.Field").expect("parse") {
|
||||
return None;
|
||||
}
|
||||
*/
|
||||
let symbol = match args[0] {
|
||||
DesugaredTypeTerm::Char(c) => c.to_string(),
|
||||
DesugaredTypeTerm::TypeID(TypeID::Fun(id)) => dict.get_typename(id).expect("cant get member name"),
|
||||
DesugaredTypeTerm::TypeID(TypeID::Var(id)) => dict.get_varname(id).expect("cant get member name"),
|
||||
_ => {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let ty = args[1].clone().sugar(dict);
|
||||
|
||||
Some(StructMember { symbol, ty })
|
||||
}
|
||||
_ => {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EnumVariant {
|
||||
pub fn parse( dict: &mut impl TypeDict, ty: &DesugaredTypeTerm ) -> Option<Self> {
|
||||
match ty {
|
||||
DesugaredTypeTerm::App(args) => {
|
||||
if args.len() != 2 {
|
||||
return None;
|
||||
}
|
||||
/*
|
||||
if args[0] != dict.parse("Enum.Variant").expect("parse") {
|
||||
return None;
|
||||
}
|
||||
*/
|
||||
let symbol = match args[0] {
|
||||
DesugaredTypeTerm::Char(c) => c.to_string(),
|
||||
DesugaredTypeTerm::TypeID(TypeID::Fun(id)) =>
|
||||
dict.get_typename(id).expect("cant get member name"),
|
||||
DesugaredTypeTerm::TypeID(TypeID::Var(id)) =>
|
||||
dict.get_varname(id).expect("cant get member name"),
|
||||
_ => {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let ty = args[1].clone().sugar(dict);
|
||||
|
||||
Some(EnumVariant { symbol, ty })
|
||||
}
|
||||
_ => {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DesugaredTypeTerm {
|
||||
pub fn sugar(self: DesugaredTypeTerm, dict: &mut impl crate::TypeDict) -> TypeTerm {
|
||||
//dict.add_varname("StructRepr".into());
|
||||
//dict.add_varname("EnumRepr".into());
|
||||
//dict.add_varname("SeqRepr".into());
|
||||
|
||||
match self {
|
||||
DesugaredTypeTerm::TypeID(id) => match id {
|
||||
TypeID::Fun(id) => TypeTerm::Id(id),
|
||||
TypeID::Var(id) => TypeTerm::Var(id)
|
||||
},
|
||||
DesugaredTypeTerm::Num(n) => TypeTerm::Num(n),
|
||||
DesugaredTypeTerm::Char(c) => TypeTerm::Char(c),
|
||||
DesugaredTypeTerm::App(args) => if let Some(first) = args.first() {
|
||||
if first == &dict.parse_desugared("Func").unwrap() {
|
||||
TypeTerm::Func( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse_desugared("Morph").unwrap() {
|
||||
if args.len() == 3 {
|
||||
TypeTerm::Morph(
|
||||
Box::new(args[1].clone().sugar(dict)),
|
||||
Box::new(args[2].clone().sugar(dict)),
|
||||
)
|
||||
} else {
|
||||
panic!("sugar: invalid arguments for morphism type")
|
||||
}
|
||||
}
|
||||
else if first == &dict.parse_desugared("Seq").unwrap() {
|
||||
TypeTerm::Seq{
|
||||
seq_repr: None,
|
||||
items: args[1..].into_iter()
|
||||
.map(|t| t.clone().sugar(dict))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
else if first == &dict.parse_desugared("Struct").unwrap() {
|
||||
TypeTerm::Struct{
|
||||
struct_repr: None,
|
||||
members: args[1..].into_iter()
|
||||
.map(|t| StructMember::parse(dict, t).expect("cant parse field"))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
else if first == &dict.parse_desugared("Enum").unwrap() {
|
||||
TypeTerm::Enum{
|
||||
enum_repr: None,
|
||||
variants: args[1..].into_iter()
|
||||
.map(|t| EnumVariant::parse(dict, t).expect("cant parse variant"))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
else if let DesugaredTypeTerm::Ladder(mut rungs) = first.clone() {
|
||||
if rungs.len() > 0 {
|
||||
match rungs.remove(0) {
|
||||
DesugaredTypeTerm::TypeID(tyid) => {
|
||||
if tyid == dict.get_typeid("Seq").expect("") {
|
||||
TypeTerm::Seq {
|
||||
seq_repr:
|
||||
if rungs.len() > 0 {
|
||||
Some(Box::new(
|
||||
TypeTerm::Ladder(rungs.into_iter()
|
||||
.map(|r| r.clone().sugar(dict))
|
||||
.collect()
|
||||
).normalize()
|
||||
))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
items: args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()
|
||||
}
|
||||
} else if tyid == dict.get_typeid("Struct").expect("") {
|
||||
TypeTerm::Struct {
|
||||
struct_repr:
|
||||
if rungs.len() > 0 {
|
||||
Some(Box::new(
|
||||
TypeTerm::Ladder(rungs.into_iter()
|
||||
.map(|r| r.clone().sugar(dict))
|
||||
.collect()
|
||||
).normalize()
|
||||
))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
members: args[1..].into_iter()
|
||||
.map(|t| StructMember::parse(dict, t).expect("cant parse field"))
|
||||
.collect()
|
||||
}
|
||||
} else if tyid == dict.get_typeid("Enum").expect("") {
|
||||
TypeTerm::Enum {
|
||||
enum_repr:
|
||||
if rungs.len() > 0 {
|
||||
Some(Box::new(
|
||||
TypeTerm::Ladder(rungs.into_iter()
|
||||
.map(|r| r.clone().sugar(dict))
|
||||
.collect()
|
||||
).normalize()
|
||||
))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
variants: args[1..].into_iter()
|
||||
.map(|t| EnumVariant::parse(dict, t).expect("cant parse field"))
|
||||
.collect()
|
||||
}
|
||||
} else {
|
||||
TypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
else if first == &dict.parse_desugared("Spec").unwrap() {
|
||||
TypeTerm::Spec( args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect() )
|
||||
}
|
||||
else if first == &dict.parse_desugared("Univ").unwrap() {
|
||||
TypeTerm::Univ(
|
||||
// fixme: ignored bound, will be superseded by new parser
|
||||
Box::new(VariableConstraint::UnconstrainedType),
|
||||
|
||||
Box::new(TypeTerm::Spec(args[1..].into_iter().map(|t| t.clone().sugar(dict)).collect()))
|
||||
)
|
||||
}
|
||||
else {
|
||||
TypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
|
||||
}
|
||||
} else {
|
||||
TypeTerm::Spec(args.into_iter().map(|t| t.sugar(dict)).collect())
|
||||
},
|
||||
DesugaredTypeTerm::Ladder(rungs) =>
|
||||
TypeTerm::Ladder(rungs.into_iter().map(|t| t.sugar(dict)).collect())
|
||||
}
|
||||
}
|
||||
}
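// Minimal sketch (not part of the diffed source): sugaring a parsed,
// desugared term yields the structured representation above, e.g. a
// <Seq ...> application becomes TypeTerm::Seq with no explicit seq_repr.
fn sugar_sketch(dict: &mut impl crate::TypeDict) {
    let desugared = dict.parse_desugared("<Seq Char>").expect("parse");

    match desugared.sugar(dict) {
        TypeTerm::Seq{ seq_repr, items } => {
            assert!(seq_repr.is_none());
            assert_eq!(items.len(), 1);
        }
        _ => panic!("expected a Seq term")
    }
}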
|
||||
|
||||
|
||||
impl StructMember {
|
||||
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> DesugaredTypeTerm {
|
||||
DesugaredTypeTerm::App(vec![
|
||||
//dict.parse("Struct.Field").expect("parse"),
|
||||
dict.parse_desugared(&self.symbol).expect("parse"),
|
||||
self.ty.desugar(dict)
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
impl EnumVariant {
|
||||
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> DesugaredTypeTerm {
|
||||
DesugaredTypeTerm::App(vec![
|
||||
//dict.parse("Enum.Variant").expect("parse"),
|
||||
dict.parse_desugared(&self.symbol).expect("parse"),
|
||||
self.ty.desugar(dict)
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
impl TypeTerm {
|
||||
pub fn unit() -> Self {
|
||||
TypeTerm::Ladder(vec![])
|
||||
}
|
||||
|
||||
pub fn desugar(self, dict: &mut impl crate::TypeDict) -> DesugaredTypeTerm {
|
||||
match self {
|
||||
TypeTerm::Id(id) => DesugaredTypeTerm::TypeID(TypeID::Fun(id)),
|
||||
TypeTerm::Var(id) => DesugaredTypeTerm::TypeID(TypeID::Var(id)),
|
||||
TypeTerm::Num(n) => DesugaredTypeTerm::Num(n),
|
||||
TypeTerm::Char(c) => DesugaredTypeTerm::Char(c),
|
||||
TypeTerm::Univ(bound, t) => t.desugar(dict), // <- fixme: missing bound
|
||||
TypeTerm::Spec(ts) => DesugaredTypeTerm::App(ts.into_iter().map(|t| t.desugar(dict)).collect()),
|
||||
TypeTerm::Ladder(ts) => DesugaredTypeTerm::Ladder(ts.into_iter().map(|t|t.desugar(dict)).collect()),
|
||||
TypeTerm::Func(ts) => DesugaredTypeTerm::App(
|
||||
std::iter::once( dict.parse_desugared("Func").unwrap() ).chain(
|
||||
ts.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
TypeTerm::Morph(src,dst) => DesugaredTypeTerm::App(vec![
|
||||
dict.parse_desugared("Morph").unwrap(),
|
||||
src.desugar(dict),
|
||||
dst.desugar(dict)
|
||||
]),
|
||||
TypeTerm::Struct{ struct_repr, members } => DesugaredTypeTerm::App(
|
||||
std::iter::once(
|
||||
if let Some(sr) = struct_repr {
|
||||
DesugaredTypeTerm::Ladder(vec![
|
||||
dict.parse_desugared("Struct").unwrap(),
|
||||
sr.desugar(dict)
|
||||
])
|
||||
} else {
|
||||
dict.parse_desugared("Struct").unwrap()
|
||||
}
|
||||
).chain(
|
||||
members.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
TypeTerm::Enum{ enum_repr, variants } => DesugaredTypeTerm::App(
|
||||
std::iter::once(
|
||||
if let Some(sr) = enum_repr {
|
||||
DesugaredTypeTerm::Ladder(vec![
|
||||
dict.parse_desugared("Enum").unwrap(),
|
||||
sr.desugar(dict)
|
||||
])
|
||||
} else {
|
||||
dict.parse_desugared("Enum").unwrap()
|
||||
}
|
||||
).chain(
|
||||
variants.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
TypeTerm::Seq{ seq_repr, items } => DesugaredTypeTerm::App(
|
||||
std::iter::once(
|
||||
if let Some(sr) = seq_repr {
|
||||
DesugaredTypeTerm::Ladder(vec![
|
||||
dict.parse_desugared("Seq").unwrap(),
|
||||
sr.desugar(dict)
|
||||
])
|
||||
} else {
|
||||
dict.parse_desugared("Seq").unwrap()
|
||||
}
|
||||
).chain(
|
||||
items.into_iter().map(|t| t.desugar(dict))
|
||||
).collect()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn contains_var(&self, var_id: u64) -> bool {
|
||||
match self {
|
||||
TypeTerm::Var(v) => &var_id == v,
|
||||
TypeTerm::Spec(args) |
|
||||
TypeTerm::Func(args) |
|
||||
TypeTerm::Ladder(args) => {
|
||||
for a in args.iter() {
|
||||
if a.contains_var(var_id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
TypeTerm::Morph(src,dst) => {
|
||||
src.contains_var(var_id) || dst.contains_var(var_id)
|
||||
}
|
||||
TypeTerm::Univ(bound,t) => {
|
||||
// todo: capture avoidance (via debruijn)
|
||||
t.contains_var(var_id)
|
||||
}
|
||||
TypeTerm::Struct { struct_repr, members } => {
|
||||
if let Some(struct_repr) = struct_repr {
|
||||
if struct_repr.contains_var(var_id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
for StructMember{ symbol, ty } in members {
|
||||
if ty.contains_var(var_id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
TypeTerm::Enum { enum_repr, variants } => {
|
||||
if let Some(enum_repr) = enum_repr {
|
||||
if enum_repr.contains_var(var_id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
for EnumVariant{ symbol, ty } in variants {
|
||||
if ty.contains_var(var_id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
TypeTerm::Seq { seq_repr, items } => {
|
||||
if let Some(seq_repr) = seq_repr {
|
||||
if seq_repr.contains_var(var_id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
for ty in items {
|
||||
if ty.contains_var(var_id) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
TypeTerm::Num(_) |
|
||||
TypeTerm::Char(_) |
|
||||
TypeTerm::Id(_) => false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn strip(self) -> TypeTerm {
|
||||
if self.is_empty() {
|
||||
return TypeTerm::unit();
|
||||
}
|
||||
|
||||
match self {
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
let mut rungs :Vec<_> = rungs.into_iter()
|
||||
.filter_map(|mut r| {
|
||||
r = r.strip();
|
||||
if r != TypeTerm::unit() {
|
||||
Some(match r {
|
||||
TypeTerm::Ladder(r) => r,
|
||||
a => vec![ a ]
|
||||
})
|
||||
}
|
||||
else { None }
|
||||
})
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
if rungs.len() == 1 {
|
||||
rungs.pop().unwrap()
|
||||
} else {
|
||||
TypeTerm::Ladder(rungs)
|
||||
}
|
||||
},
|
||||
TypeTerm::Spec(args) => {
|
||||
let mut args :Vec<_> = args.into_iter().map(|arg| arg.strip()).collect();
|
||||
if args.len() == 0 {
|
||||
TypeTerm::unit()
|
||||
} else if args.len() == 1 {
|
||||
args.pop().unwrap()
|
||||
} else {
|
||||
TypeTerm::Spec(args)
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Func(args) => TypeTerm::Func(args.into_iter().map(|arg| arg.strip()).collect()),
|
||||
TypeTerm::Morph(src, dst) => TypeTerm::Morph(Box::new(src.strip()), Box::new(dst.strip())),
|
||||
|
||||
TypeTerm::Seq{ mut seq_repr, mut items } => {
|
||||
if let Some(seq_repr) = seq_repr.as_mut() {
|
||||
*seq_repr = Box::new(seq_repr.clone().strip());
|
||||
}
|
||||
for i in items.iter_mut() {
|
||||
*i = i.clone().strip();
|
||||
}
|
||||
|
||||
TypeTerm::Seq { seq_repr, items }
|
||||
}
|
||||
TypeTerm::Struct { mut struct_repr, mut members } => {
|
||||
if let Some(struct_repr) = struct_repr.as_mut() {
|
||||
*struct_repr = Box::new(struct_repr.clone().strip());
|
||||
}
|
||||
for m in members.iter_mut() {
|
||||
m.ty = m.ty.clone().strip();
|
||||
}
|
||||
|
||||
TypeTerm::Struct { struct_repr, members }
|
||||
},
|
||||
TypeTerm::Enum { mut enum_repr, mut variants } => {
|
||||
if let Some(enum_repr) = enum_repr.as_mut() {
|
||||
*enum_repr = Box::new(enum_repr.clone().strip());
|
||||
}
|
||||
for v in variants.iter_mut() {
|
||||
v.ty = v.ty.clone().strip();
|
||||
}
|
||||
|
||||
TypeTerm::Enum { enum_repr, variants }
|
||||
},
|
||||
|
||||
atom => atom
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_interface_type(&self) -> TypeTerm {
|
||||
match self {
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
if let Some(top) = rungs.first() {
|
||||
top.get_interface_type()
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
}
|
||||
}
|
||||
TypeTerm::Spec(args)
|
||||
=> TypeTerm::Spec(args.iter().map(|a| a.get_interface_type()).collect()),
|
||||
|
||||
TypeTerm::Func(args)
|
||||
=> TypeTerm::Func(args.iter().map(|a| a.get_interface_type()).collect()),
|
||||
|
||||
TypeTerm::Morph(src,dst)
|
||||
=> TypeTerm::Morph(
|
||||
Box::new(src.get_interface_type()),
|
||||
Box::new(dst.get_interface_type())
|
||||
),
|
||||
|
||||
TypeTerm::Univ(bound, t)
|
||||
=> TypeTerm::Univ(bound.clone(), Box::new(t.get_interface_type())),
|
||||
|
||||
TypeTerm::Seq { seq_repr, items } => {
|
||||
TypeTerm::Seq {
|
||||
seq_repr: if let Some(sr) = seq_repr {
|
||||
Some(Box::new(sr.clone().get_interface_type()))
|
||||
} else { None },
|
||||
items: items.iter().map(|t| t.get_interface_type()).collect()
|
||||
}
|
||||
}
|
||||
TypeTerm::Struct { struct_repr, members } => {
|
||||
TypeTerm::Struct {
|
||||
struct_repr: if let Some(sr) = struct_repr {
|
||||
Some(Box::new(sr.clone().get_interface_type()))
|
||||
} else { None },
|
||||
members: members.iter()
|
||||
.map(|StructMember{symbol,ty}|
|
||||
StructMember {symbol:symbol.clone(), ty:ty.get_interface_type() })
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
TypeTerm::Enum { enum_repr, variants } => {
|
||||
TypeTerm::Enum {
|
||||
enum_repr: if let Some(sr) = enum_repr {
|
||||
Some(Box::new(sr.clone().get_interface_type()))
|
||||
} else { None },
|
||||
variants: variants.iter()
|
||||
.map(|EnumVariant{symbol,ty}|
|
||||
EnumVariant{ symbol:symbol.clone(), ty:ty.get_interface_type() })
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Var(varid) => TypeTerm::Var(*varid),
|
||||
TypeTerm::Id(tyid) => TypeTerm::Id(*tyid),
|
||||
TypeTerm::Num(n) => TypeTerm::Num(*n),
|
||||
TypeTerm::Char(c) => TypeTerm::Char(*c)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_floor_type(&self) -> (TypeTerm, TypeTerm) {
|
||||
match self.clone() {
|
||||
TypeTerm::Ladder(mut rungs) => {
|
||||
if let Some(bot) = rungs.pop() {
|
||||
let (bot_ψ, bot_floor) = bot.get_floor_type();
|
||||
rungs.push(bot_ψ);
|
||||
(TypeTerm::Ladder(rungs).strip(), bot_floor.strip())
|
||||
} else {
|
||||
(TypeTerm::unit(), TypeTerm::unit())
|
||||
}
|
||||
}
|
||||
/*
|
||||
SugaredTypeTerm::Spec(args)
|
||||
=> (SugaredTypeTerm::SugaredTypeTerm::Spec(args.iter().map(|a| a.get_floor_type()).collect()),
|
||||
|
||||
SugaredTypeTerm::Func(args)
|
||||
=> SugaredTypeTerm::Func(args.iter().map(|a| a.get_floor_type()).collect()),
|
||||
|
||||
SugaredTypeTerm::Morph(args)
|
||||
=> SugaredTypeTerm::Spec(args.iter().map(|a| a.get_floor_type()).collect()),
|
||||
|
||||
SugaredTypeTerm::Univ(t)
|
||||
=> SugaredTypeTerm::Univ(Box::new(t.get_floor_type())),
|
||||
|
||||
SugaredTypeTerm::Seq { seq_repr, items } => {
|
||||
SugaredTypeTerm::Seq {
|
||||
seq_repr: if let Some(sr) = seq_repr {
|
||||
Some(Box::new(sr.clone().get_floor_type()))
|
||||
} else { None },
|
||||
items: items.iter().map(|t| t.get_floor_type()).collect()
|
||||
}
|
||||
}
|
||||
SugaredTypeTerm::Struct { struct_repr, members } => {
|
||||
SugaredTypeTerm::Struct {
|
||||
struct_repr: if let Some(sr) = struct_repr {
|
||||
Some(Box::new(sr.clone().get_floor_type()))
|
||||
} else { None },
|
||||
members: members.iter()
|
||||
.map(|SugaredStructMember{symbol,ty}|
|
||||
SugaredStructMember {symbol:symbol.clone(), ty:ty.get_floor_type() })
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
SugaredTypeTerm::Enum { enum_repr, variants } => {
|
||||
SugaredTypeTerm::Enum {
|
||||
enum_repr: if let Some(sr) = enum_repr {
|
||||
Some(Box::new(sr.clone().get_floor_type()))
|
||||
} else { None },
|
||||
variants: variants.iter()
|
||||
.map(|SugaredEnumVariant{symbol,ty}|
|
||||
SugaredEnumVariant{ symbol:symbol.clone(), ty:ty.get_floor_type() })
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
SugaredTypeTerm::TypeID(tyid) => SugaredTypeTerm::TypeID(tyid.clone()),
|
||||
SugaredTypeTerm::Num(n) => SugaredTypeTerm::Num(*n),
|
||||
SugaredTypeTerm::Char(c) => SugaredTypeTerm::Char(*c)
|
||||
*/
|
||||
|
||||
other => (TypeTerm::unit(), other.clone().strip())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
TypeTerm::Id(_) => false,
|
||||
TypeTerm::Var(_) => false,
|
||||
TypeTerm::Num(_) => false,
|
||||
TypeTerm::Char(_) => false,
|
||||
TypeTerm::Univ(bound, t) => t.is_empty(),
|
||||
TypeTerm::Spec(ts) |
|
||||
TypeTerm::Ladder(ts) |
|
||||
TypeTerm::Func(ts) => {
|
||||
ts.iter().fold(true, |s,t| s && t.is_empty() )
|
||||
}
|
||||
TypeTerm::Morph(src,dst) => {
|
||||
src.is_empty() && dst.is_empty()
|
||||
}
|
||||
TypeTerm::Seq{ seq_repr, items } => {
|
||||
items.iter().fold(true, |s,t| s && t.is_empty() )
|
||||
}
|
||||
TypeTerm::Struct{ struct_repr, members } => {
|
||||
members.iter()
|
||||
.fold(true, |s,member_decl| s && member_decl.ty.is_empty() )
|
||||
}
|
||||
TypeTerm::Enum{ enum_repr, variants } => {
|
||||
variants.iter()
|
||||
.fold(true, |s,variant_decl| s && variant_decl.ty.is_empty() )
|
||||
}
|
||||
}
|
||||
}
|
||||
}
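// Minimal sketch (not part of the diffed source): strip() drops empty (unit)
// rungs and unwraps single-rung ladders.
fn strip_sketch() {
    let t = TypeTerm::Ladder(vec![ TypeTerm::unit(), TypeTerm::Id(7) ]);
    assert_eq!(t.strip(), TypeTerm::Id(7));
}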
|
|
@ -1,10 +1,7 @@
|
|||
use {
|
||||
std::iter::Peekable,
|
||||
crate::{
|
||||
dict::*,
|
||||
term::*,
|
||||
lexer::*
|
||||
}
|
||||
context::dict::*, desugared_term::*, lexer::*, term::*
|
||||
}, std::{iter::Peekable}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
@ -19,22 +16,28 @@ pub enum ParseError {
|
|||
}
|
||||
|
||||
pub trait ParseLadderType {
|
||||
fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError>;
|
||||
|
||||
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
|
||||
fn parse(&mut self, s:&str) -> Result<TypeTerm, ParseError>;
|
||||
|
||||
fn parse_desugared(&mut self, s: &str) -> Result<DesugaredTypeTerm, ParseError>;
|
||||
|
||||
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
|
||||
where It: Iterator<Item = char>;
|
||||
|
||||
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
|
||||
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
|
||||
where It: Iterator<Item = char>;
|
||||
|
||||
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
|
||||
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
|
||||
where It: Iterator<Item = char>;
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
impl<T: TypeDict> ParseLadderType for T {
|
||||
fn parse(&mut self, s: &str) -> Result<TypeTerm, ParseError> {
|
||||
fn parse(&mut self, s:&str) -> Result<TypeTerm, ParseError> {
|
||||
Ok(self.parse_desugared(s)?.sugar(self))
|
||||
}
|
||||
|
||||
fn parse_desugared(&mut self, s: &str) -> Result<DesugaredTypeTerm, ParseError> {
|
||||
let mut tokens = LadderTypeLexer::from(s.chars()).peekable();
|
||||
|
||||
match self.parse_ladder(&mut tokens) {
|
||||
|
@ -49,7 +52,7 @@ impl<T: TypeDict> ParseLadderType for T {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
|
||||
fn parse_app<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
|
||||
where It: Iterator<Item = char>
|
||||
{
|
||||
let mut args = Vec::new();
|
||||
|
@ -57,7 +60,7 @@ impl<T: TypeDict> ParseLadderType for T {
|
|||
match tok {
|
||||
Ok(LadderTypeToken::Close) => {
|
||||
tokens.next();
|
||||
return Ok(TypeTerm::App(args));
|
||||
return Ok(DesugaredTypeTerm::App(args));
|
||||
}
|
||||
_ => {
|
||||
match self.parse_ladder(tokens) {
|
||||
|
@ -70,7 +73,7 @@ impl<T: TypeDict> ParseLadderType for T {
|
|||
Err(ParseError::UnexpectedEnd)
|
||||
}
|
||||
|
||||
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
|
||||
fn parse_rung<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
|
||||
where It: Iterator<Item = char>
|
||||
{
|
||||
match tokens.next() {
|
||||
|
@ -78,21 +81,21 @@ impl<T: TypeDict> ParseLadderType for T {
|
|||
Some(Ok(LadderTypeToken::Close)) => Err(ParseError::UnexpectedClose),
|
||||
Some(Ok(LadderTypeToken::Ladder)) => Err(ParseError::UnexpectedLadder),
|
||||
Some(Ok(LadderTypeToken::Symbol(s))) =>
|
||||
Ok(TypeTerm::TypeID(
|
||||
if let Some(tyid) = self.get_typeid(&s) {
|
||||
Ok(DesugaredTypeTerm::TypeID(
|
||||
if let Some(tyid) = self.get_typeid(s.as_str()) {
|
||||
tyid
|
||||
} else {
|
||||
self.add_typename(s)
|
||||
TypeID::Fun(self.add_typename(s.as_str()))
|
||||
}
|
||||
)),
|
||||
Some(Ok(LadderTypeToken::Char(c))) => Ok(TypeTerm::Char(c)),
|
||||
Some(Ok(LadderTypeToken::Num(n))) => Ok(TypeTerm::Num(n)),
|
||||
Some(Ok(LadderTypeToken::Char(c))) => Ok(DesugaredTypeTerm::Char(c)),
|
||||
Some(Ok(LadderTypeToken::Num(n))) => Ok(DesugaredTypeTerm::Num(n)),
|
||||
Some(Err(err)) => Err(ParseError::LexError(err)),
|
||||
None => Err(ParseError::UnexpectedEnd)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<TypeTerm, ParseError>
|
||||
fn parse_ladder<It>(&mut self, tokens: &mut Peekable<LadderTypeLexer<It>>) -> Result<DesugaredTypeTerm, ParseError>
|
||||
where It: Iterator<Item = char>
|
||||
{
|
||||
let mut rungs = Vec::new();
|
||||
|
@ -101,7 +104,7 @@ impl<T: TypeDict> ParseLadderType for T {
|
|||
Ok(t) => { rungs.push(t); }
|
||||
Err(err) => { return Err(err); }
|
||||
}
|
||||
|
||||
|
||||
while let Some(tok) = tokens.peek() {
|
||||
match tok {
|
||||
Ok(LadderTypeToken::Ladder) => {
|
||||
|
@ -113,7 +116,7 @@ impl<T: TypeDict> ParseLadderType for T {
|
|||
Err(err) => { return Err(err); }
|
||||
}
|
||||
} else {
|
||||
return Err(ParseError::UnexpectedLadder);
|
||||
return Err(ParseError::UnexpectedLadder);
|
||||
}
|
||||
}
|
||||
Err(lexerr) => {
|
||||
|
@ -128,7 +131,7 @@ impl<T: TypeDict> ParseLadderType for T {
|
|||
match rungs.len() {
|
||||
0 => Err(ParseError::UnexpectedEnd),
|
||||
1 => Ok(rungs[0].clone()),
|
||||
_ => Ok(TypeTerm::Ladder(rungs)),
|
||||
_ => Ok(DesugaredTypeTerm::Ladder(rungs)),
|
||||
}
|
||||
}
|
||||
}
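// Minimal sketch (not part of the diffed source): with this change, parse()
// returns the sugared TypeTerm directly, while parse_desugared() keeps the
// previous raw representation; both describe the same type.
fn parse_sketch(dict: &mut impl TypeDict) {
    let sugared = dict.parse("<Seq Char>").expect("parse");
    let desugared = dict.parse_desugared("<Seq Char>").expect("parse");

    assert_eq!(sugared, desugared.sugar(dict));
}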
|
336 src/term/pnf.rs Normal file
|
@ -0,0 +1,336 @@
|
|||
use crate::{term::TypeTerm, constraint_system, EnumVariant, StructMember};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm > ) -> Vec< TypeTerm > {
|
||||
//eprintln!("splice ladders {:?} <<<====>>> {:?} ", upper, lower);
|
||||
// check for overlap
|
||||
if lower.len() > 0 {
|
||||
for i in 0 .. upper.len() {
|
||||
if upper[i] == lower[0] {
|
||||
let mut result_ladder = Vec::<TypeTerm>::new();
|
||||
result_ladder.append(&mut upper[0..i].iter().cloned().collect());
|
||||
result_ladder.append(&mut lower);
|
||||
return result_ladder;
|
||||
}
|
||||
}
|
||||
|
||||
// no overlap found, just concatenate ladders
|
||||
upper.append(&mut lower);
|
||||
}
|
||||
upper
|
||||
}
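// Minimal check (not part of the diffed source) of splice_ladders above:
// if the lower ladder's first rung occurs in the upper ladder, the two
// ladders are joined at that rung, otherwise they are simply concatenated.
fn splice_ladders_sketch() {
    assert_eq!(
        splice_ladders(
            vec![ TypeTerm::Id(0), TypeTerm::Id(1) ],
            vec![ TypeTerm::Id(1), TypeTerm::Id(2) ]
        ),
        vec![ TypeTerm::Id(0), TypeTerm::Id(1), TypeTerm::Id(2) ]
    );
}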
|
||||
|
||||
impl TypeTerm {
|
||||
/// transmute type into Parameter-Normal-Form (PNF)
|
||||
///
|
||||
/// Example:
|
||||
/// ```ignore
|
||||
/// <Seq <Digit 10>>~<Seq Char>
|
||||
/// ⇒ <Seq <Digit 10>~Char>
|
||||
/// ```
|
||||
pub fn normalize(mut self) -> Self {
|
||||
match self {
|
||||
TypeTerm::Ladder(mut rungs) => {
|
||||
if rungs.len() == 0 {
|
||||
return TypeTerm::unit();
|
||||
} else if rungs.len() == 1 {
|
||||
return rungs.pop().unwrap().normalize();
|
||||
}
|
||||
|
||||
let mut new_rungs = Vec::new();
|
||||
let mut r2 = rungs.pop().unwrap().strip();
|
||||
while let Some(r1) = rungs.pop() {
|
||||
let r1 = r1.strip();
|
||||
match (r1.clone(), r2.clone()) {
|
||||
(TypeTerm::Seq { seq_repr: seq_repr1, items: items1 },
|
||||
TypeTerm::Seq { seq_repr: seq_repr2, items: items2 })
|
||||
=> {
|
||||
r2 = TypeTerm::Seq {
|
||||
seq_repr:
|
||||
if seq_repr1.is_some() || seq_repr2.is_some() {
|
||||
let sr1 = if let Some(seq_repr1) = seq_repr1 { *seq_repr1.clone() }
|
||||
else { TypeTerm::unit() };
|
||||
let sr2 = if let Some(seq_repr2) = seq_repr2 { *seq_repr2 }
|
||||
else { TypeTerm::unit() };
|
||||
|
||||
Some(Box::new(
|
||||
if sr1 == sr2 {
|
||||
sr1
|
||||
} else if sr1 == TypeTerm::unit() {
|
||||
sr2
|
||||
} else {
|
||||
TypeTerm::Ladder(vec![ sr1, sr2 ]).normalize()
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
items:
|
||||
items1.into_iter()
|
||||
.zip(items2.into_iter())
|
||||
.map(|(item1, item2)| {
|
||||
if item1 == item2 {
|
||||
item1
|
||||
} else {
|
||||
TypeTerm::Ladder(vec![ item1.clone(), item2 ])
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
}
|
||||
|
||||
(TypeTerm::Seq { seq_repr, items },
|
||||
TypeTerm::Spec( mut args )
|
||||
) => {
|
||||
if args.len() == items.len()+1 {
|
||||
r2 = TypeTerm::Seq {
|
||||
seq_repr: Some(Box::new(TypeTerm::Ladder(vec![
|
||||
if let Some(seq_repr) = seq_repr {
|
||||
*seq_repr.clone()
|
||||
} else {
|
||||
TypeTerm::unit()
|
||||
},
|
||||
args.remove(0)
|
||||
]).normalize())),
|
||||
|
||||
items: items.into_iter()
|
||||
.zip(args.into_iter())
|
||||
.map(|(i1, i2)| {
|
||||
if i1 == i2 {
|
||||
i1
|
||||
} else {
|
||||
TypeTerm::Ladder(vec![ i1, i2 ]).normalize()
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
} else {
|
||||
new_rungs.push(r2);
|
||||
r2 = r1;
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Struct { struct_repr: struct_repr1, members: members1 },
|
||||
TypeTerm::Struct { struct_repr: struct_repr2, members: members2 }) => {
|
||||
|
||||
let mut condensed_struct_repr = None;
|
||||
let mut condensed_members = Vec::new();
|
||||
let mut require_break = false;
|
||||
|
||||
|
||||
if let Some(struct_repr1) = struct_repr1 {
|
||||
if let Some(struct_repr2) = struct_repr2 {
|
||||
condensed_struct_repr = Some(Box::new(TypeTerm::Ladder(
|
||||
vec![
|
||||
struct_repr1.as_ref().clone(),
|
||||
struct_repr2.as_ref().clone()
|
||||
]
|
||||
).normalize()))
|
||||
} else {
|
||||
condensed_struct_repr = Some(Box::new(struct_repr1.as_ref().clone()));
|
||||
}
|
||||
} else {
|
||||
condensed_struct_repr = struct_repr2.clone();
|
||||
}
|
||||
|
||||
for StructMember{ symbol: symbol2, ty: ty2 } in members2.iter() {
|
||||
let mut found = false;
|
||||
for StructMember{ symbol: symbol1, ty: ty1 } in members1.iter() {
|
||||
if symbol2 == symbol1 {
|
||||
condensed_members.push(StructMember {
|
||||
symbol: symbol1.clone(),
|
||||
ty: TypeTerm::Ladder(vec![
|
||||
ty1.clone(),
|
||||
ty2.clone()
|
||||
]).normalize()
|
||||
});
|
||||
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ! found {
|
||||
require_break = true;
|
||||
}
|
||||
}
|
||||
|
||||
if require_break {
|
||||
new_rungs.push(r2);
|
||||
r2 = r1;
|
||||
} else {
|
||||
r2 = TypeTerm::Struct {
|
||||
struct_repr: condensed_struct_repr,
|
||||
members: condensed_members
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Enum { enum_repr: enum_repr1, variants: variants1 },
|
||||
TypeTerm::Enum { enum_repr: enum_repr2, variants: variants2 }) => {
|
||||
let mut condensed_enum_repr = None;
|
||||
let mut condensed_variants = Vec::new();
|
||||
let mut require_break = false;
|
||||
|
||||
if let Some(enum_repr1) = enum_repr1 {
|
||||
if let Some(enum_repr2) = enum_repr2 {
|
||||
condensed_enum_repr = Some(Box::new(TypeTerm::Ladder(
|
||||
vec![
|
||||
enum_repr1.as_ref().clone(),
|
||||
enum_repr2.as_ref().clone()
|
||||
]
|
||||
).normalize()))
|
||||
} else {
|
||||
condensed_enum_repr = Some(Box::new(enum_repr1.as_ref().clone()));
|
||||
}
|
||||
} else {
|
||||
condensed_enum_repr = enum_repr2.clone();
|
||||
}
|
||||
|
||||
for EnumVariant{ symbol: symbol2, ty: ty2 } in variants2.iter() {
|
||||
let mut found = false;
|
||||
for EnumVariant{ symbol: symbol1, ty: ty1 } in variants1.iter() {
|
||||
if symbol2 == symbol1 {
|
||||
condensed_variants.push(EnumVariant {
|
||||
symbol: symbol1.clone(),
|
||||
ty: TypeTerm::Ladder(vec![
|
||||
ty1.clone(),
|
||||
ty2.clone()
|
||||
]).normalize()
|
||||
});
|
||||
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ! found {
|
||||
require_break = true;
|
||||
}
|
||||
}
|
||||
|
||||
if require_break {
|
||||
new_rungs.push(r2);
|
||||
r2 = r1;
|
||||
} else {
|
||||
r2 = TypeTerm::Enum {
|
||||
enum_repr: condensed_enum_repr,
|
||||
variants: condensed_variants
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Spec(args1), TypeTerm::Spec(args2)) => {
|
||||
if args1.len() == args2.len() {
|
||||
if let Ok((ψ,σ)) = constraint_system::subtype_unify(&args1[0], &args2[0]) {
|
||||
let mut new_args = Vec::new();
|
||||
|
||||
for (a1, a2) in args1.into_iter().zip(args2.into_iter()) {
|
||||
new_args.push(TypeTerm::Ladder(vec![ a1, a2 ]).normalize());
|
||||
}
|
||||
|
||||
r2 = TypeTerm::Spec(new_args);
|
||||
//new_rungs.push(r2.clone());
|
||||
} else {
|
||||
new_rungs.push(r2);
|
||||
r2 = r1;
|
||||
}
|
||||
} else {
|
||||
new_rungs.push(r2);
|
||||
r2 = r1;
|
||||
}
|
||||
}
|
||||
|
||||
(TypeTerm::Univ(bound1, args1), TypeTerm::Univ(bound2, args2)) => {
|
||||
todo!();
|
||||
}
|
||||
|
||||
(TypeTerm::Func(args1), TypeTerm::Func(args2)) => {
|
||||
todo!();
|
||||
}
|
||||
|
||||
(TypeTerm::Morph(src1,dst1), TypeTerm::Morph(src2,dst2)) => {
|
||||
todo!();
|
||||
}
|
||||
|
||||
(TypeTerm::Ladder(rr1), TypeTerm::Ladder(rr2)) => {
|
||||
if rr1.len() > 0 {
|
||||
let l = splice_ladders(rr1, rr2);
|
||||
r2 = TypeTerm::Ladder(l).normalize();
|
||||
}
|
||||
}
|
||||
|
||||
(atomic1, TypeTerm::Ladder(mut rr2)) => {
|
||||
if !atomic1.is_empty() {
|
||||
if rr2.first() != Some(&atomic1) {
|
||||
rr2.insert(0, atomic1);
|
||||
}
|
||||
}
|
||||
r2 = TypeTerm::Ladder(rr2).normalize();
|
||||
}
|
||||
|
||||
|
||||
(TypeTerm::Ladder(mut rr1), atomic2) => {
|
||||
if !atomic2.is_empty() {
|
||||
if rr1.last() != Some(&atomic2) {
|
||||
rr1.push(atomic2);
|
||||
}
|
||||
}
|
||||
r2 = TypeTerm::Ladder(rr1).normalize();
|
||||
}
|
||||
|
||||
|
||||
(atomic1, atomic2) => {
|
||||
if atomic1.is_empty() {
|
||||
} else if atomic1 == atomic2 {
|
||||
} else if atomic2.is_empty() {
|
||||
r2 = atomic1;
|
||||
} else {
|
||||
new_rungs.push(atomic2);
|
||||
r2 = atomic1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if new_rungs.len() > 0 {
|
||||
new_rungs.push(r2);
|
||||
new_rungs.reverse();
|
||||
return TypeTerm::Ladder(new_rungs);
|
||||
} else {
|
||||
return r2;
|
||||
}
|
||||
}
|
||||
|
||||
TypeTerm::Spec(params) => {
|
||||
TypeTerm::Spec(
|
||||
params.into_iter()
|
||||
.map(|p| p.normalize())
|
||||
.collect())
|
||||
}
|
||||
|
||||
TypeTerm::Seq { seq_repr, items } => TypeTerm::Seq {
|
||||
seq_repr: if let Some(seq_repr) = seq_repr { Some(Box::new(seq_repr.normalize())) } else { None },
|
||||
items: items.into_iter().map(|p| p.normalize()).collect()
|
||||
},
|
||||
TypeTerm::Struct { struct_repr, members } => TypeTerm::Struct {
|
||||
struct_repr: if let Some(struct_repr) = struct_repr { Some(Box::new(struct_repr.normalize())) } else { None },
|
||||
members: members.into_iter()
|
||||
.map(|StructMember{symbol, ty}|
|
||||
StructMember{ symbol, ty: ty.normalize() })
|
||||
.collect()
|
||||
},
|
||||
TypeTerm::Enum { enum_repr, variants } => TypeTerm::Enum {
|
||||
enum_repr: if let Some(enum_repr) = enum_repr { Some(Box::new(enum_repr.normalize())) } else { None },
|
||||
variants: variants.into_iter()
|
||||
.map(|EnumVariant{symbol, ty}|
|
||||
EnumVariant{ symbol, ty: ty.normalize() })
|
||||
.collect()
|
||||
},
|
||||
|
||||
atomic => atomic
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
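The condensation above keeps merging the representation rungs of two adjacent terms. A minimal, self-contained sketch of that merge rule on a toy term type (`Ty` and `merge_repr` are illustrative names, not the crate's API): identical rungs collapse into one, the unit type acts as a neutral element, and any other pair is kept as a two-rung ladder.

```rust
#[derive(Clone, PartialEq, Debug)]
enum Ty {
    Unit,
    Id(&'static str),
    Ladder(Vec<Ty>),
}

// merge two optional representation rungs, as the Seq/Struct/Enum cases above do
fn merge_repr(a: Option<Ty>, b: Option<Ty>) -> Option<Ty> {
    match (a, b) {
        (None, None) => None,
        (Some(x), None) | (None, Some(x)) => Some(x),
        (Some(x), Some(y)) => Some(
            if x == y {
                x                      // identical rungs collapse
            } else if x == Ty::Unit {
                y                      // the unit type is neutral
            } else if y == Ty::Unit {
                x
            } else {
                Ty::Ladder(vec![x, y]) // otherwise keep both, upper rung first
            }
        ),
    }
}

fn main() {
    assert_eq!(
        merge_repr(Some(Ty::Unit), Some(Ty::Id("LengthPrefix"))),
        Some(Ty::Id("LengthPrefix"))
    );
    assert_eq!(merge_repr(None, None), None);
}
```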
@ -1,29 +1,48 @@
|
|||
use {
|
||||
crate::{TypeDict, dict::TypeID},
|
||||
crate::sugar::SugaredTypeTerm,
|
||||
crate::{term::TypeTerm, EnumVariant, StructMember, TypeDict, VariableConstraint},
|
||||
tiny_ansi::TinyAnsi
|
||||
};
|
||||
|
||||
impl SugaredTypeTerm {
|
||||
pub fn pretty(&self, dict: &TypeDict, indent: u64) -> String {
|
||||
|
||||
impl StructMember {
|
||||
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
|
||||
format!("{}: {}", self.symbol, self.ty.pretty(dict, indent+1))
|
||||
}
|
||||
}
|
||||
impl EnumVariant {
|
||||
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
|
||||
format!("{}: {}", self.symbol, self.ty.pretty(dict, indent+1))
|
||||
}
|
||||
}
|
||||
|
||||
impl VariableConstraint {
|
||||
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
|
||||
match self {
|
||||
VariableConstraint::UnconstrainedType => format!(""),
|
||||
VariableConstraint::Subtype(τ) => format!(":<= {}", τ.pretty(dict, indent)),
|
||||
VariableConstraint::Trait(τ) => format!(":>< {}", τ.pretty(dict, indent)),
|
||||
VariableConstraint::Parallel(τ) => format!(":|| {}", τ.pretty(dict, indent)),
|
||||
VariableConstraint::ValueUInt => format!(": ℤ"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TypeTerm {
|
||||
pub fn pretty(&self, dict: &impl TypeDict, indent: u64) -> String {
|
||||
let indent_width = 4;
|
||||
match self {
|
||||
SugaredTypeTerm::TypeID(id) => {
|
||||
match id {
|
||||
TypeID::Var(varid) => {
|
||||
format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).bright_magenta()
|
||||
},
|
||||
TypeID::Fun(funid) => {
|
||||
format!("{}", dict.get_typename(id).unwrap_or("??".bright_red())).blue().bold()
|
||||
}
|
||||
}
|
||||
TypeTerm::Id(id) => {
|
||||
format!("{}", dict.get_typename(*id).unwrap_or("??".bright_red())).blue().bold()
|
||||
}
|
||||
TypeTerm::Var(id) => {
|
||||
format!("{}({})", dict.get_varname(*id).unwrap_or("??".bright_red()).bright_magenta(), id)
|
||||
},
|
||||
|
||||
SugaredTypeTerm::Num(n) => {
|
||||
TypeTerm::Num(n) => {
|
||||
format!("{}", n).green().bold()
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Char(c) => {
|
||||
TypeTerm::Char(c) => {
|
||||
match c {
|
||||
'\0' => format!("'\\0'"),
|
||||
'\n' => format!("'\\n'"),
|
||||
|
@ -31,15 +50,16 @@ impl SugaredTypeTerm {
|
|||
}
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Univ(t) => {
|
||||
format!("{} {} . {}",
|
||||
TypeTerm::Univ(bound, t) => {
|
||||
format!("{} {}{} . {}",
|
||||
"∀".yellow().bold(),
|
||||
dict.get_varname(0).unwrap_or("??".into()).bright_blue(),
|
||||
bound.pretty(dict, indent),
|
||||
t.pretty(dict,indent)
|
||||
)
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Spec(args) => {
|
||||
TypeTerm::Spec(args) => {
|
||||
let mut s = String::new();
|
||||
s.push_str(&"<".yellow());
|
||||
for i in 0..args.len() {
|
||||
|
@ -53,15 +73,20 @@ impl SugaredTypeTerm {
|
|||
s
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Struct(args) => {
|
||||
TypeTerm::Struct{ struct_repr, members } => {
|
||||
let mut s = String::new();
|
||||
s.push_str(&"{".yellow().bold());
|
||||
for arg in args {
|
||||
|
||||
if let Some(struct_repr) = struct_repr {
|
||||
s.push_str(&format!("{}{} ", "~".yellow(), struct_repr.pretty(dict, indent+1)));
|
||||
}
|
||||
|
||||
for member in members {
|
||||
s.push('\n');
|
||||
for x in 0..(indent+1)*indent_width {
|
||||
s.push(' ');
|
||||
}
|
||||
s.push_str(&arg.pretty(dict, indent + 1));
|
||||
s.push_str(&member.pretty(dict, indent + 1));
|
||||
s.push_str(&";\n".bright_yellow());
|
||||
}
|
||||
|
||||
|
@ -73,11 +98,16 @@ impl SugaredTypeTerm {
|
|||
s
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Enum(args) => {
|
||||
TypeTerm::Enum{ enum_repr, variants } => {
|
||||
let mut s = String::new();
|
||||
s.push_str(&"(".yellow().bold());
|
||||
for i in 0..args.len() {
|
||||
let arg = &args[i];
|
||||
|
||||
if let Some(enum_repr) = enum_repr {
|
||||
s.push_str(&format!("{}{} ", "~".yellow(), enum_repr.pretty(dict, indent+1)));
|
||||
}
|
||||
|
||||
|
||||
for (i,variant) in variants.iter().enumerate() {
|
||||
s.push('\n');
|
||||
for x in 0..(indent+1)*indent_width {
|
||||
s.push(' ');
|
||||
|
@ -85,7 +115,7 @@ impl SugaredTypeTerm {
|
|||
if i > 0 {
|
||||
s.push_str(&"| ".yellow().bold());
|
||||
}
|
||||
s.push_str(&arg.pretty(dict, indent + 1));
|
||||
s.push_str(&variant.pretty(dict, indent + 1));
|
||||
}
|
||||
|
||||
s.push('\n');
|
||||
|
@ -96,30 +126,34 @@ impl SugaredTypeTerm {
|
|||
s
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Seq(args) => {
|
||||
TypeTerm::Seq{ seq_repr, items } => {
|
||||
let mut s = String::new();
|
||||
s.push_str(&"[ ".yellow().bold());
|
||||
for i in 0..args.len() {
|
||||
let arg = &args[i];
|
||||
s.push_str(&"[".yellow().bold());
|
||||
|
||||
if let Some(seq_repr) = seq_repr {
|
||||
s.push_str(&format!("{}{}", "~".yellow(), seq_repr.pretty(dict, indent+1)));
|
||||
}
|
||||
s.push(' ');
|
||||
|
||||
for (i, item) in items.iter().enumerate() {
|
||||
if i > 0 {
|
||||
s.push(' ');
|
||||
}
|
||||
s.push_str(&arg.pretty(dict, indent+1));
|
||||
s.push_str(&item.pretty(dict, indent+1));
|
||||
}
|
||||
s.push_str(&" ]".yellow().bold());
|
||||
s
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Morph(args) => {
|
||||
TypeTerm::Morph(src,dst) => {
|
||||
let mut s = String::new();
|
||||
for arg in args {
|
||||
s.push_str(&" ~~morph~~> ".bright_yellow());
|
||||
s.push_str(&arg.pretty(dict, indent));
|
||||
}
|
||||
s.push_str(&src.pretty(dict, indent));
|
||||
s.push_str(&" ~~morph~~> ".bright_yellow());
|
||||
s.push_str(&dst.pretty(dict, indent));
|
||||
s
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Func(args) => {
|
||||
TypeTerm::Func(args) => {
|
||||
let mut s = String::new();
|
||||
for i in 0..args.len() {
|
||||
let arg = &args[i];
|
||||
|
@ -137,7 +171,7 @@ impl SugaredTypeTerm {
|
|||
s
|
||||
}
|
||||
|
||||
SugaredTypeTerm::Ladder(rungs) => {
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
let mut s = String::new();
|
||||
for i in 0..rungs.len() {
|
||||
let rung = &rungs[i];
|
|
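For orientation, a minimal usage sketch of the pretty printer whose new `pretty(&self, dict: &impl TypeDict, indent: u64)` signature is shown above. `print_type` is a made-up helper, and the assumption that the optional tiny-ansi backed `pretty` feature has to be enabled is mine, not stated in this hunk.

```rust
use crate::{dict::*, parser::*};

// hedged sketch: parse a ladder type and render it with the pretty printer above
fn print_type() {
    let mut dict = BimapTypeDict::new();
    let t = dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10>~Char>").unwrap();
    println!("{}", t.pretty(&dict, 0));
}
```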
@ -1,24 +1,29 @@
|
|||
use crate::{dict::*, term::*};
|
||||
use crate::{dict::*, desugared_term::*};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
pub trait UnparseLadderType {
|
||||
fn unparse(&self, t: &TypeTerm) -> String;
|
||||
fn unparse(&self, t: &DesugaredTypeTerm) -> String;
|
||||
}
|
||||
|
||||
impl<T: TypeDict> UnparseLadderType for T {
|
||||
fn unparse(&self, t: &TypeTerm) -> String {
|
||||
fn unparse(&self, t: &DesugaredTypeTerm) -> String {
|
||||
match t {
|
||||
TypeTerm::TypeID(id) => self.get_typename(id).unwrap(),
|
||||
TypeTerm::Num(n) => format!("{}", n),
|
||||
TypeTerm::Char(c) => match c {
|
||||
DesugaredTypeTerm::TypeID(TypeID::Fun(id)) => {
|
||||
self.get_typename(*id).unwrap_or("?Fun?".into())
|
||||
},
|
||||
DesugaredTypeTerm::TypeID(TypeID::Var(id)) => {
|
||||
self.get_varname(*id).unwrap_or("?Var?".into())
|
||||
},
|
||||
DesugaredTypeTerm::Num(n) => format!("{}", n),
|
||||
DesugaredTypeTerm::Char(c) => match c {
|
||||
'\0' => "'\\0'".into(),
|
||||
'\n' => "'\\n'".into(),
|
||||
'\t' => "'\\t'".into(),
|
||||
'\'' => "'\\''".into(),
|
||||
c => format!("'{}'", c)
|
||||
},
|
||||
TypeTerm::Ladder(rungs) => {
|
||||
DesugaredTypeTerm::Ladder(rungs) => {
|
||||
let mut s = String::new();
|
||||
let mut first = true;
|
||||
for r in rungs.iter() {
|
||||
|
@ -30,7 +35,7 @@ impl<T: TypeDict> UnparseLadderType for T {
|
|||
}
|
||||
s
|
||||
}
|
||||
TypeTerm::App(args) => {
|
||||
DesugaredTypeTerm::App(args) => {
|
||||
let mut s = String::new();
|
||||
s.push('<');
|
||||
let mut first = true;
|
159
src/test/constraint_system/eq_constraint.rs
Normal file
|
@ -0,0 +1,159 @@
|
|||
use {
|
||||
crate::{parser::*,
|
||||
context::*,
|
||||
constraint_system::{
|
||||
ConstraintSystem,
|
||||
ConstraintPair,
|
||||
ConstraintError
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
|
||||
let mut ctx = Context::new();
|
||||
ctx.add_variable("T", TypeKind::Type);
|
||||
ctx.add_variable("U", TypeKind::Type);
|
||||
ctx.add_variable("V", TypeKind::Type);
|
||||
ctx.add_variable("W", TypeKind::Type);
|
||||
|
||||
let mut t1 = ctx.parse(ts1).unwrap();
|
||||
let mut t2 = ctx.parse(ts2).unwrap();
|
||||
let σ = crate::unify( &t1, &t2 );
|
||||
|
||||
if expect_unificator {
|
||||
assert!(σ.is_ok());
|
||||
|
||||
let σ = σ.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
t1.apply_subst(&σ),
|
||||
t2.apply_subst(&σ)
|
||||
);
|
||||
} else {
|
||||
assert!(! σ.is_ok());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_unification_error() {
|
||||
let mut ctx = Context::new();
|
||||
ctx.add_variable("T", TypeKind::Type);
|
||||
|
||||
assert_eq!(
|
||||
crate::unify(
|
||||
&ctx.parse("<A T>").unwrap(),
|
||||
&ctx.parse("<B T>").unwrap()
|
||||
),
|
||||
|
||||
Err(ConstraintError {
|
||||
addr: vec![0],
|
||||
t1: ctx.parse("A").unwrap(),
|
||||
t2: ctx.parse("B").unwrap()
|
||||
})
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
crate::unify(
|
||||
&ctx.parse("<V <U A> T>").unwrap(),
|
||||
&ctx.parse("<V <U B> T>").unwrap()
|
||||
),
|
||||
|
||||
Err(ConstraintError {
|
||||
addr: vec![1, 1],
|
||||
t1: ctx.parse("A").unwrap(),
|
||||
t2: ctx.parse("B").unwrap()
|
||||
})
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
crate::unify(
|
||||
&ctx.parse("T").unwrap(),
|
||||
&ctx.parse("<Seq T>").unwrap()
|
||||
),
|
||||
|
||||
Err(ConstraintError {
|
||||
addr: vec![],
|
||||
t1: ctx.parse("T").unwrap(),
|
||||
t2: ctx.parse("<Seq T>").unwrap()
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_unification() {
|
||||
test_unify("A", "A", true);
|
||||
// test_unify("A", "B", false);
|
||||
// test_unify("<Seq T>", "<Seq Ascii~Char>", true);
|
||||
|
||||
// this worked easily with desugared terms,
|
||||
// but is a weird edge case with sugared terms
|
||||
// not relevant now
|
||||
//test_unify("<Seq T>", "<U Char>", true);
|
||||
/*
|
||||
test_unify(
|
||||
"<Seq Path~<Seq Char>>~<SepSeq Char '\\n'>~<Seq Char>",
|
||||
"<Seq T~<Seq Char>>~<SepSeq Char '\\n'>~<Seq Char>",
|
||||
true
|
||||
);
|
||||
*/
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
dict.add_varname("T");
|
||||
dict.add_varname("U");
|
||||
dict.add_varname("V");
|
||||
dict.add_varname("W");
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_eq(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs: dict.parse("U").unwrap(),
|
||||
rhs: dict.parse("<Seq Char>").unwrap()
|
||||
},
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs: dict.parse("T").unwrap(),
|
||||
rhs: dict.parse("<Seq U>").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![
|
||||
// T
|
||||
(0, dict.parse("<Seq <Seq Char>>").unwrap()),
|
||||
|
||||
// U
|
||||
(1, dict.parse("<Seq Char>").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_eq(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Seq T>").unwrap(),
|
||||
rhs : dict.parse("<Seq W~<Seq Char>>").unwrap()
|
||||
},
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Seq ℕ>").unwrap(),
|
||||
rhs : dict.parse("<Seq W>").unwrap(),
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![
|
||||
// W
|
||||
(3, dict.parse("ℕ").unwrap()),
|
||||
|
||||
// T
|
||||
(0, dict.parse("ℕ~<Seq Char>").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
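The last error case above (`T` against `<Seq T>`) exercises the occurs check: a variable may never be bound to a term that still contains that variable, otherwise the substitution would be infinite. A self-contained sketch of the check on a toy term type (not the crate's implementation):

```rust
#[derive(Debug, PartialEq)]
enum Term {
    Id(&'static str),
    Var(u64),
    App(Vec<Term>),
}

// does the term mention variable `v` anywhere?
fn contains_var(t: &Term, v: u64) -> bool {
    match t {
        Term::Var(w) => *w == v,
        Term::App(args) => args.iter().any(|a| contains_var(a, v)),
        Term::Id(_) => false,
    }
}

fn main() {
    let var_t = 0;

    // <Seq T> mentions T, so binding T := <Seq T> would be circular and is rejected
    let circular = Term::App(vec![Term::Id("Seq"), Term::Var(var_t)]);
    assert!(contains_var(&circular, var_t));

    // <Seq Char> does not mention T, so T := <Seq Char> is a valid binding
    let fine = Term::App(vec![Term::Id("Seq"), Term::Id("Char")]);
    assert!(!contains_var(&fine, var_t));
}
```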
5
src/test/constraint_system/mod.rs
Normal file
@ -0,0 +1,5 @@
pub mod eq_constraint;
pub mod sub_constraint;
pub mod trait_constraint;
pub mod par_constraint;
pub mod value_constraint;

11
src/test/constraint_system/par_constraint.rs
Normal file
@ -0,0 +1,11 @@
use {
    crate::{dict::*, parser::*,
        constraint_system::{
            ConstraintSystem,
            ConstraintPair,
            ConstraintError
        }
    }
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

349
src/test/constraint_system/sub_constraint.rs
Normal file
|
@ -0,0 +1,349 @@
|
|||
use {
|
||||
crate::{constraint_system::{
|
||||
subtype_unify, ConstraintError, ConstraintPair, ConstraintSystem
|
||||
}, dict::*, parser::*, term::*, HashMapSubst
|
||||
}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
/*
|
||||
Only Ladders
|
||||
*/
|
||||
#[test]
|
||||
fn test_subtype_unification1() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
dict.add_varname("T");
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A ~ B").unwrap(),
|
||||
rhs : dict.parse("B").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![ dict.parse("A").unwrap() ],
|
||||
vec![].into_iter().collect::<HashMapSubst>(),
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A ~ B ~ C ~ D").unwrap(),
|
||||
rhs : dict.parse("C ~ D").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![ dict.parse("A ~ B").unwrap() ],
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A ~ B ~ C ~ D").unwrap(),
|
||||
rhs : dict.parse("T ~ D").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![ TypeTerm::unit() ],
|
||||
vec![
|
||||
(0,
|
||||
dict.parse("A ~ B ~ C").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A ~ B ~ C ~ D").unwrap(),
|
||||
rhs : dict.parse("B ~ T ~ D").unwrap(),
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![ dict.parse("A").unwrap() ],
|
||||
vec![
|
||||
(0, dict.parse("C").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
Variables
|
||||
*/
|
||||
#[test]
|
||||
fn test_subtype_unification2() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
dict.add_varname("T");
|
||||
dict.add_varname("U");
|
||||
dict.add_varname("V");
|
||||
dict.add_varname("W");
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair{
|
||||
addr: Vec::new(),
|
||||
lhs: dict.parse("<Seq~T <Digit 10> ~ Char ~ Ascii>").unwrap(),
|
||||
rhs: dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap(),
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![
|
||||
dict.parse("<Seq <Digit 10>>").unwrap()
|
||||
],
|
||||
vec![
|
||||
// T
|
||||
(0, dict.parse("<LengthPrefix x86.UInt64>").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs: dict.parse("U").unwrap(),
|
||||
rhs: dict.parse("<Seq Char>").unwrap()
|
||||
},
|
||||
ConstraintPair {
|
||||
addr : Vec::new(),
|
||||
lhs : dict.parse("T").unwrap(),
|
||||
rhs : dict.parse("<Seq U>").unwrap(),
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![
|
||||
TypeTerm::unit(),
|
||||
TypeTerm::unit(),
|
||||
],
|
||||
vec![
|
||||
// T
|
||||
(0, dict.parse("<Seq <Seq Char>>").unwrap()),
|
||||
|
||||
// U
|
||||
(1, dict.parse("<Seq Char>").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Seq T>").unwrap(),
|
||||
rhs : dict.parse("<Seq W~<Seq Char>>").unwrap(),
|
||||
},
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Seq~<LengthPrefix x86.UInt64> ℕ~<PosInt 10 BigEndian>>").unwrap(),
|
||||
rhs : dict.parse("<<LengthPrefix x86.UInt64> W>").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![
|
||||
TypeTerm::unit(),
|
||||
dict.parse("<Seq ℕ>").unwrap(),
|
||||
],
|
||||
vec![
|
||||
// W
|
||||
(3, dict.parse("ℕ~<PosInt 10 BigEndian>").unwrap()),
|
||||
|
||||
// T
|
||||
(0, dict.parse("ℕ~<PosInt 10 BigEndian>~<Seq Char>").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
subtype_unify(
|
||||
&dict.parse("<Seq~List~Vec <Digit 16>~Char>").expect(""),
|
||||
&dict.parse("<List~Vec Char>").expect("")
|
||||
),
|
||||
Ok((
|
||||
dict.parse("<Seq~List <Digit 16>>").expect(""),
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
subtype_unify(
|
||||
&dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq~List~Vec <Digit 16>~Char>").expect(""),
|
||||
&dict.parse("<List~Vec Char>").expect("")
|
||||
),
|
||||
Ok((
|
||||
dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq~List <Digit 16>>").expect(""),
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
Subtypes in some rungs
|
||||
*/
|
||||
#[test]
|
||||
fn test_subtype_unification3() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs: dict.parse("<A1~A2 B C D1~D2 E F1~F2>").expect("parse"),
|
||||
rhs: dict.parse("<A2 B C D2 E F2>").expect("parse")
|
||||
}
|
||||
]).solve(),
|
||||
|
||||
Ok((
|
||||
// halo
|
||||
vec![
|
||||
dict.parse("<A1~A2 B C D1~D2 E F1>").expect("parse")
|
||||
],
|
||||
|
||||
// subst
|
||||
vec![
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs: dict.parse("<Seq~List B C D1~D2 E F1~F2>").expect("parse"),
|
||||
rhs: dict.parse("<List B C D2 E F2>").expect("parse")
|
||||
}
|
||||
]).solve(),
|
||||
|
||||
Ok((
|
||||
// halo
|
||||
vec![
|
||||
dict.parse("<Seq~List B C D1~D2 E F1>").expect("parse")
|
||||
],
|
||||
|
||||
// subst
|
||||
vec![
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
Not a Subtype!
|
||||
*/
|
||||
#[test]
|
||||
fn test_trait_not_subtype() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
assert_eq!(
|
||||
subtype_unify(
|
||||
&dict.parse("A ~ B").expect(""),
|
||||
&dict.parse("A ~ B ~ C").expect("")
|
||||
),
|
||||
Err(ConstraintError {
|
||||
addr: vec![1],
|
||||
t1: dict.parse("B").expect(""),
|
||||
t2: dict.parse("C").expect("")
|
||||
})
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
subtype_unify(
|
||||
&dict.parse("<Seq~List~Vec <Digit 10>~Char>").expect(""),
|
||||
&dict.parse("<Seq~List~Vec Char~ReprTree>").expect("")
|
||||
),
|
||||
Err(ConstraintError {
|
||||
addr: vec![1],
|
||||
t1: dict.parse("Char").expect(""),
|
||||
t2: dict.parse("ReprTree").expect("")
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
subtype inside a sequence item
|
||||
*/
|
||||
#[test]
|
||||
fn test_reprtree_list_subtype() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
dict.add_varname("Item".into());
|
||||
|
||||
assert_eq!(
|
||||
subtype_unify(
|
||||
&dict.parse("<List~Vec <Digit 10>~Char~ReprTree>").expect(""),
|
||||
&dict.parse("<List~Vec Item~ReprTree>").expect("")
|
||||
),
|
||||
Ok((
|
||||
TypeTerm::unit(),
|
||||
vec![
|
||||
(0, dict.parse("<Digit 10>~Char").unwrap())
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_subtype_delim() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
dict.add_varname("T");
|
||||
dict.add_varname("Delim");
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_sub(vec![
|
||||
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
// given type
|
||||
lhs : dict.parse("
|
||||
< Seq <Seq <Digit 10>~Char~Ascii~UInt8> >
|
||||
~ < ValueSep ':' Char~Ascii~UInt8 >
|
||||
~ < Seq~<LengthPrefix UInt64> Char~Ascii~UInt8 >
|
||||
").expect(""),
|
||||
|
||||
// expected type
|
||||
rhs : dict.parse("
|
||||
< Seq <Seq T> >
|
||||
~ < ValueSep Delim T >
|
||||
~ < Seq~<LengthPrefix UInt64> T >
|
||||
").expect("")
|
||||
},
|
||||
|
||||
// subtype bounds
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("T").expect(""),
|
||||
rhs : dict.parse("UInt8").expect("")
|
||||
},
|
||||
/* todo
|
||||
(
|
||||
dict.parse("<TypeOf Delim>").expect(""),
|
||||
dict.parse("T").expect("")
|
||||
),
|
||||
*/
|
||||
]).solve(),
|
||||
Ok((
|
||||
// halo types for each rhs in the sub-equations
|
||||
vec![
|
||||
dict.parse("<Seq <Seq <Digit 10>>>").expect(""),
|
||||
dict.parse("Char~Ascii").expect(""),
|
||||
],
|
||||
|
||||
// variable substitution
|
||||
vec![
|
||||
(0, dict.parse("Char~Ascii~UInt8").expect("")),
|
||||
(1, TypeTerm::Char(':')),
|
||||
].into_iter().collect()
|
||||
))
|
||||
);
|
||||
}
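A short recap of what the first component of these results, the "halo", contains: the rungs of the given (more concrete) type that the expected type never mentions. The sketch below reuses only calls that already appear in this file; the concrete halo value is extrapolated from the neighbouring assertions rather than copied from the test suite.

```rust
use crate::{dict::*, parser::*, constraint_system::subtype_unify};

// hedged sketch: the extra upper rungs of `given` come back as the halo
fn halo_example() {
    let mut dict = BimapTypeDict::new();

    let given    = dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16>~Char>").unwrap();
    let expected = dict.parse("<Seq Char>").unwrap();

    assert_eq!(
        subtype_unify(&given, &expected),
        Ok((
            // everything of `given` that `expected` does not ask for
            dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16>>").unwrap(),
            vec![].into_iter().collect()
        ))
    );
}
```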
142
src/test/constraint_system/trait_constraint.rs
Normal file
|
@ -0,0 +1,142 @@
|
|||
use {
|
||||
crate::{dict::*, parser::*,
|
||||
constraint_system::{
|
||||
ConstraintSystem,
|
||||
ConstraintPair,
|
||||
ConstraintError
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[test]
|
||||
fn test_trait_bound1() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A ~ B").unwrap(),
|
||||
rhs : dict.parse("A").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A ~ B").unwrap(),
|
||||
rhs : dict.parse("B").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A").unwrap(),
|
||||
rhs : dict.parse("B").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Err(ConstraintError { addr: vec![], t1: dict.parse("A").unwrap(), t2: dict.parse("B").unwrap() })
|
||||
);
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A").unwrap(),
|
||||
rhs : dict.parse("A~B").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Err(ConstraintError { addr: vec![], t1: dict.parse("A").unwrap(), t2: dict.parse("A~B").unwrap() })
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trait_bound_spec() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("A ~ <B~C D> ~ E").unwrap(),
|
||||
rhs : dict.parse("<B D>").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trait_bound_struct() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
|
||||
rhs : dict.parse("<Struct <a S> <b T>>").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
|
||||
rhs : dict.parse("<Struct <a S>>").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
|
||||
rhs : dict.parse("<Struct <a A>>").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Ok((
|
||||
vec![],
|
||||
vec![].into_iter().collect()
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
ConstraintSystem::new_trait(vec![
|
||||
ConstraintPair {
|
||||
addr: Vec::new(),
|
||||
lhs : dict.parse("<Struct <a S~A> <b T~B>>").unwrap(),
|
||||
rhs : dict.parse("<Struct <a T>>").unwrap()
|
||||
}
|
||||
]).solve(),
|
||||
Err(ConstraintError { addr: vec![0], t1: dict.parse("S~A").unwrap(), t2: dict.parse("T").unwrap() })
|
||||
);
|
||||
}
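These assertions suggest that a trait constraint is satisfied as soon as the required type matches one rung of the given ladder, and that a struct may carry more members than the bound asks for. One more check in the same style; the concrete types are illustrative and the expected result is extrapolated from the cases above, not taken from the test suite.

```rust
use crate::{dict::*, parser::*, constraint_system::{ConstraintSystem, ConstraintPair}};

// hedged sketch: ℕ ~ <Seq Char> should satisfy a <Seq Char> trait bound,
// analogous to the A~B against B case above
fn trait_bound_on_lower_rung() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        ConstraintSystem::new_trait(vec![
            ConstraintPair {
                addr: Vec::new(),
                lhs: dict.parse("ℕ ~ <Seq Char>").unwrap(),
                rhs: dict.parse("<Seq Char>").unwrap()
            }
        ]).solve(),
        Ok(( vec![], vec![].into_iter().collect() ))
    );
}
```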
11
src/test/constraint_system/value_constraint.rs
Normal file
@ -0,0 +1,11 @@
use {
    crate::{dict::*, parser::*,
        constraint_system::{
            ConstraintSystem,
            ConstraintPair,
            ConstraintError
        }
    }
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

141
src/test/context/mod.rs
Normal file
|
@ -0,0 +1,141 @@
|
|||
pub mod substitution;
|
||||
|
||||
use crate::{
|
||||
context::{Context, LayeredContext, TypeKind}, parser::*, term::TypeTerm, ConstraintPair, ConstraintSystem, MorphismType, TypeDict, TypeID
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn test_context() {
|
||||
|
||||
|
||||
/*
|
||||
* Set up variable scopes
|
||||
*/
|
||||
|
||||
let root_ctx = Context::new();
|
||||
root_ctx.add_variable(
|
||||
"DstRadix",
|
||||
TypeKind::ValueUInt
|
||||
);
|
||||
|
||||
let mut sub1_ctx = root_ctx.scope();
|
||||
assert_eq!( sub1_ctx.add_variable("Radix", TypeKind::ValueUInt), 0 );
|
||||
|
||||
let mut sub2_ctx = root_ctx.scope();
|
||||
assert_eq!( sub2_ctx.add_variable("SrcRadix", TypeKind::ValueUInt), 0 );
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* check variable IDs
|
||||
*/
|
||||
|
||||
assert_eq!( sub1_ctx.get_typeid("Radix"), Some(TypeID::Var(0)) );
|
||||
assert_eq!( sub1_ctx.get_typeid("DstRadix"), Some(TypeID::Var(1)) );
|
||||
|
||||
assert_eq!( sub2_ctx.get_typeid("SrcRadix"), Some(TypeID::Var(0)) );
|
||||
assert_eq!( sub2_ctx.get_typeid("DstRadix"), Some(TypeID::Var(1)) );
|
||||
|
||||
assert_eq!( sub1_ctx.parse("Radix"), Ok(TypeTerm::Var(0)) );
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* assign variables in scoped context
|
||||
*/
|
||||
|
||||
// Radix
|
||||
sub1_ctx.bind(0, TypeTerm::Num(10));
|
||||
|
||||
// SrcRadix
|
||||
sub2_ctx.bind(0, TypeTerm::Num(10));
|
||||
|
||||
// Dst Radix
|
||||
sub2_ctx.bind(1, TypeTerm::Num(16));
|
||||
|
||||
|
||||
|
||||
/*
|
||||
* test that bound variables are substituted
|
||||
*/
|
||||
|
||||
assert_eq!(
|
||||
sub1_ctx
|
||||
.parse("<PosInt Radix LittleEndian> ~ <Seq <Digit Radix>>").expect("parse error")
|
||||
.apply_subst(&sub1_ctx).clone(),
|
||||
|
||||
sub1_ctx
|
||||
.parse("<PosInt 10 LittleEndian> ~ <Seq <Digit 10>>").expect("parse error")
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
sub2_ctx
|
||||
.parse("<PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>>").expect("parse error")
|
||||
.apply_subst(&sub1_ctx).clone(),
|
||||
|
||||
sub2_ctx
|
||||
.parse("<PosInt 10 LittleEndian> ~ <Seq <Digit 10>>").expect("parse error")
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
sub2_ctx
|
||||
.parse("<PosInt DstRadix LittleEndian>").expect("parse error")
|
||||
.apply_subst(&sub2_ctx).clone(),
|
||||
|
||||
sub2_ctx
|
||||
.parse("<PosInt 16 LittleEndian>").expect("parse error")
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_morphism_compat() {
|
||||
let mut ctx = Context::new();
|
||||
|
||||
let mut c1 = ctx.scope();
|
||||
c1.add_variable("T1", TypeKind::Type);
|
||||
c1.add_variable("T2", TypeKind::Type);
|
||||
let t1 = MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: c1.parse("<Seq T1>~<A T1 T2>").unwrap(),
|
||||
dst_type: c1.parse("<Seq T1>~<B T2 T2>").unwrap()
|
||||
};
|
||||
|
||||
let mut c2 = ctx.scope();
|
||||
c2.add_variable("S1", TypeKind::Type);
|
||||
c2.add_variable("T1", TypeKind::Type); //< this variable name is scoped thus a *different* variable than T1 from t1
|
||||
let t2 = MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: c2.parse("<Seq NotT>~<B S1 T1>").unwrap(),
|
||||
dst_type: c2.parse("<Seq NotT>~<C T1>").unwrap()
|
||||
};
|
||||
|
||||
// pull t1 & t2 into root ctx
|
||||
let mut t1 = t1.dst_type.clone();
|
||||
t1.apply_subst(&ctx.shift_variables(&c1));
|
||||
let mut t2 = t2.src_type.clone();
|
||||
t2.apply_subst(&ctx.shift_variables(&c2));
|
||||
|
||||
let csp = ConstraintSystem::new_sub(vec![
|
||||
ConstraintPair {
|
||||
lhs: t1.clone(),
|
||||
rhs: t2.clone(),
|
||||
addr: vec![]
|
||||
}
|
||||
]);
|
||||
|
||||
eprintln!("t1 = {:?} = {}", t1, t1.pretty(&mut ctx.clone(), 0));
|
||||
eprintln!("t2 = {:?} = {}", t2, t2.pretty(&mut ctx.clone(), 0));
|
||||
|
||||
match csp.solve() {
|
||||
Ok((Ψ,σ)) => {
|
||||
eprintln!("σ = {:?}", σ);
|
||||
assert!(true);
|
||||
}
|
||||
Err(err) => {
|
||||
assert!(false);
|
||||
}
|
||||
}
|
||||
}
|
29
src/test/context/substitution.rs
Normal file
@ -0,0 +1,29 @@
use {
    crate::{dict::*, parser::*,}
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[test]
fn test_subst() {
    let mut dict = BimapTypeDict::new();

    let mut σ = std::collections::HashMap::new();

    // T --> ℕ
    σ.insert
        (dict.add_varname("T"),
         dict.parse_desugared("ℕ").unwrap().sugar(&mut dict));

    // U --> <Seq Char>
    σ.insert
        (dict.add_varname("U"),
         dict.parse_desugared("<Seq Char>").unwrap().sugar(&mut dict));

    assert_eq!(
        dict.parse_desugared("<Seq T~U>").unwrap().sugar(&mut dict).apply_subst(&σ).clone(),
        dict.parse_desugared("<Seq ℕ~<Seq Char>>").unwrap().sugar(&mut dict)
    );
}

@ -9,25 +9,25 @@ fn test_curry() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("<A B C>").unwrap().curry(),
        dict.parse("<<A B> C>").unwrap()
        dict.parse_desugared("<A B C>").unwrap().curry(),
        dict.parse_desugared("<<A B> C>").unwrap()
    );
    assert_eq!(
        dict.parse("<A B C D>").unwrap().curry(),
        dict.parse("<<<A B> C> D>").unwrap()
        dict.parse_desugared("<A B C D>").unwrap().curry(),
        dict.parse_desugared("<<<A B> C> D>").unwrap()
    );
    assert_eq!(
        dict.parse("<A B C D E F G H I J K>").unwrap().curry(),
        dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
        dict.parse_desugared("<A B C D E F G H I J K>").unwrap().curry(),
        dict.parse_desugared("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap()
    );

    assert_eq!(
        dict.parse("<A~X B C>").unwrap().curry(),
        dict.parse("<<A~X B> C>").unwrap()
        dict.parse_desugared("<A~X B C>").unwrap().curry(),
        dict.parse_desugared("<<A~X B> C>").unwrap()
    );
    assert_eq!(
        dict.parse("<A B C~Y~Z> ~ K").unwrap().curry(),
        dict.parse("< <A B> C~Y~Z > ~ K").unwrap()
        dict.parse_desugared("<A B C~Y~Z> ~ K").unwrap().curry(),
        dict.parse_desugared("< <A B> C~Y~Z > ~ K").unwrap()
    );
}

@ -36,25 +36,25 @@ fn test_decurry() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("<<A B> C>").unwrap().decurry(),
        dict.parse("<A B C>").unwrap()
        dict.parse_desugared("<<A B> C>").unwrap().decurry(),
        dict.parse_desugared("<A B C>").unwrap()
    );
    assert_eq!(
        dict.parse("<<<A B> C> D>").unwrap().decurry(),
        dict.parse("<A B C D>").unwrap(),
        dict.parse_desugared("<<<A B> C> D>").unwrap().decurry(),
        dict.parse_desugared("<A B C D>").unwrap(),
    );
    assert_eq!(
        dict.parse("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
        dict.parse("<A B C D E F G H I J K>").unwrap()
        dict.parse_desugared("<<<<<<<<<<A B> C> D> E> F> G> H> I> J> K>").unwrap().decurry(),
        dict.parse_desugared("<A B C D E F G H I J K>").unwrap()
    );

    assert_eq!(
        dict.parse("<<A~X B> C>").unwrap().decurry(),
        dict.parse("<A~X B C>").unwrap()
        dict.parse_desugared("<<A~X B> C>").unwrap().decurry(),
        dict.parse_desugared("<A~X B C>").unwrap()
    );
    assert_eq!(
        dict.parse("<<A~X B> C~Y>~K").unwrap().decurry(),
        dict.parse("<A~X B C~Y> ~K").unwrap()
        dict.parse_desugared("<<A~X B> C~Y>~K").unwrap().decurry(),
        dict.parse_desugared("<A~X B C~Y> ~K").unwrap()
    );
}

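The assertions above pin down what (de-)currying does: an n-ary application is re-associated into nested binary applications, and ladders attached to the arguments stay where they are. A self-contained sketch of the currying direction on a toy term type (not the crate's implementation):

```rust
#[derive(Clone, Debug, PartialEq)]
enum Term {
    Id(&'static str),
    App(Vec<Term>),
}

// <A B C D ...>  ==>  <...<<A B> C> D ...>
fn curry(t: Term) -> Term {
    match t {
        Term::App(args) if args.len() > 2 => {
            let mut it = args.into_iter();
            let mut acc = it.next().unwrap();
            for arg in it {
                acc = Term::App(vec![acc, arg]);
            }
            acc
        }
        other => other,
    }
}

fn main() {
    // <A B C>  ==>  <<A B> C>
    assert_eq!(
        curry(Term::App(vec![Term::Id("A"), Term::Id("B"), Term::Id("C")])),
        Term::App(vec![
            Term::App(vec![Term::Id("A"), Term::Id("B")]),
            Term::Id("C"),
        ])
    );
}
```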
24
src/test/heuristic.rs
Normal file
@ -0,0 +1,24 @@
use crate::{heuristic::*, dict::*, parser::*, morphism::*};

#[test]
fn test_heuristic() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        MorphismType {
            bounds: Vec::new(),
            src_type: dict.parse("A").expect("parse"),
            dst_type: dict.parse("A").expect("parse")
        }.estimated_cost(),
        0
    );

    assert_eq!(
        MorphismType {
            bounds: Vec::new(),
            src_type: dict.parse("<Digit 10> ~ Char ~ Ascii ~ native.UInt8").expect("parse"),
            dst_type: dict.parse("<Digit 16> ~ native.UInt8").expect("parse")
        }.estimated_cost(),
        40
    );
}

@ -1,56 +0,0 @@
use crate::{dict::{BimapTypeDict}, parser::*};

#[test]
fn test_flat() {
    let mut dict = BimapTypeDict::new();

    assert!( dict.parse("A").expect("parse error").is_flat() );
    assert!( dict.parse("10").expect("parse error").is_flat() );
    assert!( dict.parse("'x'").expect("parse error").is_flat() );
    assert!( dict.parse("<A B 23>").expect("parse error").is_flat() );
    assert!( dict.parse("<A <B C 'x' D>>").expect("parse error").is_flat() );

    assert!( ! dict.parse("A~B").expect("parse error").is_flat() );
    assert!( ! dict.parse("<A B~C>").expect("parse error").is_flat() );
    assert!( ! dict.parse("<A <B C~X D>>").expect("parse error").is_flat() );
}

#[test]
fn test_normalize() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error").normalize(),
        dict.parse("A~B~C").expect("parse error"),
    );

    assert_eq!(
        dict.parse("<A B>~C").expect("parse error").normalize(),
        dict.parse("<A B>~C").expect("parse error"),
    );

    assert_eq!(
        dict.parse("<A B~C>").expect("parse error").normalize(),
        dict.parse("<A B>~<A C>").expect("parse error"),
    );

    assert_eq!(
        dict.parse("<A B~C D~E>").expect("parse error").normalize(),
        dict.parse("<A B D>~<A C D>~<A C E>").expect("parse error"),
    );

    assert_eq!(
        dict.parse("<Seq <Digit 10>~Char>").expect("parse error").normalize(),
        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse error"),
    );

    assert_eq!(
        dict.parse("<A <B C~D~E> F~G H H>").expect("parse error").normalize(),
        dict.parse("<A <B C> F H H>
                    ~<A <B D> F H H>
                    ~<A <B E> F H H>
                    ~<A <B E> G H H>").expect("parse error"),
    );
}

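The removed assertions describe the parameter normal form: a ladder inside an application parameter is distributed outwards so that every rung of the result is a flat application, e.g. `<A B~C>` becomes `<A B>~<A C>`. A simplified, single-parameter sketch of that distribution on a toy term type (not the crate's implementation):

```rust
#[derive(Clone, Debug, PartialEq)]
enum Term {
    Id(&'static str),
    App(Vec<Term>),
    Ladder(Vec<Term>),
}

// distribute one parameter ladder:  <A B~C>  ==>  <A B> ~ <A C>
fn distribute_param(head: Term, param_rungs: Vec<Term>) -> Term {
    Term::Ladder(
        param_rungs.into_iter()
            .map(|rung| Term::App(vec![head.clone(), rung]))
            .collect()
    )
}

fn main() {
    assert_eq!(
        distribute_param(Term::Id("A"), vec![Term::Id("B"), Term::Id("C")]),
        Term::Ladder(vec![
            Term::App(vec![Term::Id("A"), Term::Id("B")]),
            Term::App(vec![Term::Id("A"), Term::Id("C")]),
        ])
    );
}
```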
@ -2,10 +2,8 @@
pub mod lexer;
pub mod parser;
pub mod curry;
pub mod lnf;
pub mod pnf;
pub mod subtype;
pub mod substitution;
pub mod unification;
pub mod morphism;

pub mod context;
pub mod constraint_system;
pub mod morphism_graph;
pub mod heuristic;

|
@ -1,471 +0,0 @@
|
|||
use {
|
||||
crate::{dict::*, morphism::*, parser::*, unparser::*, TypeTerm, morphism_base::*, morphism_path::*}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
fn print_subst(m: &std::collections::HashMap<TypeID, TypeTerm>, dict: &mut impl TypeDict) {
|
||||
eprintln!("{{");
|
||||
|
||||
for (k,v) in m.iter() {
|
||||
eprintln!(" {} --> {}",
|
||||
dict.get_typename(k).unwrap(),
|
||||
dict.unparse(v)
|
||||
);
|
||||
}
|
||||
|
||||
eprintln!("}}");
|
||||
}
|
||||
|
||||
fn print_path(dict: &mut impl TypeDict, path: &Vec<MorphismInstance<DummyMorphism>>) {
|
||||
for n in path.iter() {
|
||||
eprintln!("
|
||||
ψ = {}
|
||||
morph {}
|
||||
--> {}
|
||||
with
|
||||
",
|
||||
n.halo.clone().sugar(dict).pretty(dict, 0),
|
||||
n.m.get_type().src_type.sugar(dict).pretty(dict, 0),
|
||||
n.m.get_type().dst_type.sugar(dict).pretty(dict, 0),
|
||||
);
|
||||
print_subst(&n.σ, dict)
|
||||
}
|
||||
}
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
struct DummyMorphism(MorphismType);
|
||||
|
||||
impl Morphism for DummyMorphism {
|
||||
fn get_type(&self) -> MorphismType {
|
||||
self.0.clone().normalize()
|
||||
}
|
||||
|
||||
fn map_morphism(&self, seq_type: TypeTerm) -> Option<DummyMorphism> {
|
||||
Some(DummyMorphism(MorphismType {
|
||||
src_type: TypeTerm::App(vec![
|
||||
seq_type.clone(),
|
||||
self.0.src_type.clone()
|
||||
]),
|
||||
|
||||
dst_type: TypeTerm::App(vec![
|
||||
seq_type.clone(),
|
||||
self.0.dst_type.clone()
|
||||
])
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
fn morphism_test_setup() -> ( BimapTypeDict, MorphismBase<DummyMorphism> ) {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("Seq").expect("") ] );
|
||||
|
||||
dict.add_varname("Radix".into());
|
||||
dict.add_varname("SrcRadix".into());
|
||||
dict.add_varname("DstRadix".into());
|
||||
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
|
||||
dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
|
||||
dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
|
||||
})
|
||||
);
|
||||
|
||||
(dict, base)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path1() {
|
||||
let (mut dict, mut base) = morphism_test_setup();
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
src_type: dict.parse("<Digit 10> ~ Char").unwrap(),
|
||||
dst_type: dict.parse("<Digit 10> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
|
||||
}).solve();
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(
|
||||
vec![
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
halo: TypeTerm::unit(),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
|
||||
dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
|
||||
}),
|
||||
}
|
||||
]
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path2() {
|
||||
let (mut dict, mut base) = morphism_test_setup();
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
}).solve();
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(
|
||||
vec![
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
halo: dict.parse("ℕ ~ <PosInt 10 BigEndian>").expect(""),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
}
|
||||
]
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path3() {
|
||||
let (mut dict, mut base) = morphism_test_setup();
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
}).solve();
|
||||
|
||||
if let Some(path) = path.as_ref() {
|
||||
print_path(&mut dict, path);
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(
|
||||
vec![
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
halo: dict.parse("ℕ ~ <PosInt 10 LittleEndian>").expect(""),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
},
|
||||
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
halo: TypeTerm::unit(),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
}
|
||||
]
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path4() {
|
||||
let (mut dict, mut base) = morphism_test_setup();
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
|
||||
}).solve();
|
||||
|
||||
if let Some(path) = path.as_ref() {
|
||||
print_path(&mut dict, path);
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(
|
||||
vec![
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
halo: dict.parse("ℕ ~ <PosInt 10 LittleEndian>").expect(""),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
},
|
||||
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
halo: TypeTerm::unit(),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
},
|
||||
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
halo: dict.parse("ℕ ~ <PosInt 16 LittleEndian>").expect(""),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap()
|
||||
}),
|
||||
},
|
||||
|
||||
]
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path_posint() {
|
||||
let (mut dict, mut base) = morphism_test_setup();
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
src_type: dict.parse("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap(),
|
||||
}).solve();
|
||||
|
||||
if let Some(path) = path.as_ref() {
|
||||
print_path(&mut dict, path);
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(
|
||||
vec![
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
halo: dict.parse("ℕ ~ <PosInt 10 BigEndian>").unwrap(),
|
||||
m: DummyMorphism(MorphismType {
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
},
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
halo: TypeTerm::unit(),
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
},
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"SrcRadix".into()).unwrap(), TypeTerm::Num(10)),
|
||||
(dict.get_typeid(&"DstRadix".into()).unwrap(), TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
halo: TypeTerm::unit(),
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
},
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
halo: TypeTerm::unit(),
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
}),
|
||||
},
|
||||
MorphismInstance {
|
||||
σ: vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(), TypeTerm::Num(16))
|
||||
].into_iter().collect(),
|
||||
halo: dict.parse("ℕ ~ <PosInt 16 BigEndian>").unwrap(),
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap()
|
||||
})
|
||||
}
|
||||
]
|
||||
)
|
||||
);
|
||||
/*
|
||||
assert_eq!(
|
||||
base.find_morphism_path(MorphismType {
|
||||
src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
|
||||
}),
|
||||
Some(
|
||||
vec![
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
|
||||
]
|
||||
)
|
||||
);
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
assert_eq!(
|
||||
base.find_morphism_with_subtyping(
|
||||
&MorphismType {
|
||||
src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}
|
||||
),
|
||||
|
||||
Some((
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian>").unwrap(),
|
||||
|
||||
vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(),
|
||||
dict.parse("10").unwrap())
|
||||
].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
|
||||
))
|
||||
);
|
||||
*/
|
||||
}
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path_listedit()
|
||||
{
|
||||
let mut dict = BimapTypeDict::new();
|
||||
let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("List").expect("") ] );
|
||||
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("Char").unwrap(),
|
||||
dst_type: dict.parse("Char ~ EditTree").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char~ReprTree>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List~Vec ReprTree>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
|
||||
})
|
||||
);
|
||||
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
src_type: dict.parse("<Seq~List~Vec <Digit 10>~Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq~List <Digit 10>~Char> ~ EditTree").unwrap(),
|
||||
}).solve();
|
||||
|
||||
if let Some(path) = path.as_ref() {
|
||||
print_path(&mut dict, path);
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(vec![
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char>").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char~ReprTree>").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List~Vec ReprTree>").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>~Char>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
])
|
||||
);
|
||||
}
|
876
src/test/morphism_graph.rs
Normal file
876
src/test/morphism_graph.rs
Normal file
@ -0,0 +1,876 @@
use {
    crate::{dict::*, morphism::*, parser::*, ConstraintError, ConstraintPair, ConstraintSystem, Context, ContextPtr, HashMapSubst, LayeredContext, TypeKind, TypeTerm
    },
    std::{collections::HashMap, sync::{Arc, RwLock}}
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[derive(Clone, Debug, PartialEq)]
pub struct DummyMorphism(ContextPtr, MorphismType);
impl Morphism for DummyMorphism {
    fn ctx(&self) -> ContextPtr {
        self.0.clone()
    }

    fn get_type(&self) -> MorphismType {
        self.1.clone()
    }
}

fn morphism_test_setup() -> MorphismBase<DummyMorphism> {
    let mut Γ = Context::new();
    let mut base = MorphismBase::<DummyMorphism>::new(Γ.clone());

    base.add_morphism({
        let mut Γ = Γ.scope();
        Γ.add_variable("Radix", TypeKind::ValueUInt);
        DummyMorphism(Γ.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γ.parse("<Digit Radix> ~ Char").unwrap(),
            dst_type: Γ.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
        })
    });

    base.add_morphism({
        let mut Γ = Γ.scope();
        Γ.add_variable("Radix", TypeKind::ValueUInt);
        DummyMorphism(Γ.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γ.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
            dst_type: Γ.parse("<Digit Radix> ~ Char").unwrap()
        })
    });

    base.add_morphism({
        let mut Γ = Γ.scope();
        Γ.add_variable("Radix", TypeKind::ValueUInt);
        DummyMorphism(Γ.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γ.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
            dst_type: Γ.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
        })
    });

    base.add_morphism({
        let mut Γ = Γ.scope();
        Γ.add_variable("Radix", TypeKind::ValueUInt);
        DummyMorphism(Γ.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γ.parse("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap(),
            dst_type: Γ.parse("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix>~ℤ_2^64~machine.UInt64>").unwrap()
        })
    });

    base.add_morphism({
        let mut Γ = Γ.scope();
        Γ.add_variable("SrcRadix", TypeKind::ValueUInt);
        Γ.add_variable("DstRadix", TypeKind::ValueUInt);
        DummyMorphism(Γ.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γ.parse("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
            dst_type: Γ.parse("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix>~ℤ_2^64~machine.UInt64>").unwrap()
        })
    });

    base.add_morphism({
        let mut Γ = Γ.scope();
        Γ.add_variable("SrcRadix", TypeKind::ValueUInt);
        DummyMorphism(Γ.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γ.parse("ℤ_2^64 ~ ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix>~ℤ_2^64~machine.UInt64>").unwrap(),
            dst_type: Γ.parse("ℤ_2^64 ~ machine.UInt64").unwrap()
        })
    });
    base.add_morphism({
        let mut Γ = Γ.scope();
        DummyMorphism(Γ.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γ.parse("ℤ_2^64 ~ machine.UInt64").unwrap(),
            dst_type: Γ.parse("ℤ_2^64 ~ ℕ ~ <PosInt 0 LittleEndian> ~ <Seq <Digit 0>~ℤ_2^64~machine.UInt64>").unwrap()
        })
    });

    base
}
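The setup above only registers primitive radix/digit conversions; every composite instance in the tests below is derived from these. A minimal sketch of how such a base is queried, reusing only names that appear in this file (the exact shape of the returned instance is checked in detail further down):

// Sketch only: ask the graph for a conversion between two concrete ladder types.
// morphism_test_setup, MorphismGraph, MorphismType, ctx() and parse() are the
// items defined or used in this test file; nothing else is assumed.
let base = morphism_test_setup();
let mut Γ = base.ctx();
let graph = MorphismGraph::new(base);
let inst = graph.search(MorphismType {
    bounds: Vec::new(),
    src_type: Γ.parse("<Digit 16> ~ Char").expect("parse"),
    dst_type: Γ.parse("<Digit 16> ~ ℤ_2^64 ~ machine.UInt64").expect("parse"),
});
assert!(inst.is_ok());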

#[test]
fn test_morphgraph_id() {
    let base = morphism_test_setup();
    let mut Γ = base.ctx();
    let morph_graph = MorphismGraph::new(base);

    assert_eq!(
        morph_graph.search(MorphismType {
            bounds: Vec::new(),
            src_type: Γ.parse("ℤ_2^64 ~ machine.UInt64").expect("parse"),
            dst_type: Γ.parse("ℤ_2^64 ~ machine.UInt64").expect("parse"),
        }),

        Ok(MorphismInstance::Id {
            τ: Γ.parse("ℤ_2^64 ~ machine.UInt64").expect("parse")
        })
    );
}

#[test]
fn test_morphgraph_prim() {
    let base = morphism_test_setup();
    let mut Γ = base.ctx();
    let morph_graph = MorphismGraph::new(base);

    let mut Γm1 = Γ.scope();
    Γm1.add_variable("Radix", TypeKind::ValueUInt);

    let mut Γ1 = Γ.scope();
    let mut Γ3 = Γ1.scope();
    let σs = Γ3.shift_variables(&Γm1);
    assert!( Γ3.bind(Γ3.get_varid("Radix").unwrap(), TypeTerm::Num(10)).is_ok() );

    assert_eq!(
        morph_graph.search(MorphismType {
            bounds: Vec::new(),
            src_type: Γ.parse("<Digit 10> ~ Char").expect("parse"),
            dst_type: Γ.parse("<Digit 10> ~ ℤ_2^64 ~ machine.UInt64").expect("parse"),
        }),

        Ok(MorphismInstance::Specialize {
            Γ: Γ3.clone(),
            m: Box::new(
                MorphismInstance::Primitive {
                    σs,
                    m: DummyMorphism(Γm1.clone(), MorphismType {
                        bounds: Vec::new(),
                        src_type: Γm1.parse("<Digit Radix> ~ Char").expect("parse"),
                        dst_type: Γm1.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").expect("parse"),
                    })
                })
        })
    );
}

#[test]
fn test_morphgraph_chain() {
    let base = morphism_test_setup();
    let mut Γ = base.ctx();
    let morph_graph = MorphismGraph::new(base);

    let mut Γm1 = Γ.scope();
    Γm1.add_variable("Radix", TypeKind::ValueUInt);
    let mut Γm2 = Γ.scope();

    let mut Γ2 = Γ.scope();

    // first instance
    let mut Γ3 = Γ2.scope();
    let σs1 = Γ.shift_variables(&Γm1);
    let σs2 = Γ.shift_variables(&Γm2);

    // second instance
    let mut Γ4 = Γ3.scope();

    assert!( Γ3.bind(Γ3.get_varid("Radix").unwrap(), TypeTerm::Num(10)).is_ok() );

    assert_eq!(
        morph_graph.search(MorphismType {
            bounds: Vec::new(),
            src_type: Γ.parse("<Digit 10> ~ Char").expect("parse"),
            dst_type: Γ.parse("<Digit 10> ~ ℤ_2^64 ~ ℕ ~ <PosInt 0 LittleEndian> ~ <Seq <Digit 0>~ℤ_2^64~machine.UInt64>").expect("parse"),
        }),

        Ok(
            MorphismInstance::Specialize {
                Γ: Γ4.clone(),
                m: Box::new(MorphismInstance::Chain {
                    path: vec![
                        MorphismInstance::Primitive {
                            σs: σs1,
                            m: DummyMorphism(Γm1.clone(), MorphismType{
                                bounds: Vec::new(),
                                src_type: Γm1.parse("<Digit Radix> ~ Char").expect("parse"),
                                dst_type: Γm1.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").expect("parse"),
                            })
                        },
                        MorphismInstance::Sub {
                            ψ: Γ3.parse("<Digit 10>").expect("parse"),
                            m: Box::new(MorphismInstance::Primitive {
                                σs: σs2,
                                m: DummyMorphism(Γm2.clone(), MorphismType{
                                    bounds: Vec::new(),
                                    src_type: Γm2.parse("ℤ_2^64 ~ machine.UInt64").unwrap(),
                                    dst_type: Γm2.parse("ℤ_2^64 ~ ℕ ~ <PosInt 0 LittleEndian> ~ <Seq <Digit 0>~ℤ_2^64~machine.UInt64>").unwrap()
                                })
                            })
                        }
                    ]
                })
            }
        )
    );
}

#[test]
fn test_morphgraph_spec1() {
    let mut base = MorphismBase::<DummyMorphism>::new(Context::new());
    let mut Γ = base.ctx();

    let mut Γm1 = Γ.scope();
    base.add_morphism({
        Γm1.add_variable("X", TypeKind::Type);
        DummyMorphism(Γm1.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γm1.parse("T ~ A").expect(""),
            dst_type: Γm1.parse("T ~ <B X> ~ U").expect("")
        })
    });

    let mut Γm2 = Γ.scope();
    base.add_morphism({
        Γm2.add_variable("Y", TypeKind::Type);
        DummyMorphism(Γm2.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γm2.parse("T ~ <B Y> ~ U").expect(""),
            dst_type: Γm2.parse("T ~ <B Y> ~ V").expect("")
        })
    });

    let morph_graph = MorphismGraph::new(base);

    let mut Γ1 = Γ.scope();
    let σs1 = Γ1.shift_variables(&Γm1);
    assert!( Γ1.clone().bind(Γ1.get_varid("X").expect(""), Γ1.parse("test").expect("")).is_ok() );

    assert_eq!(
        morph_graph.search(MorphismType {
            bounds: Vec::new(),
            src_type: Γ.parse("T ~ A").unwrap(),
            dst_type: Γ.parse("T ~ <B test> ~ U").unwrap(),
        }),
        Ok(
            MorphismInstance::Specialize {
                Γ: Γ1.clone(),
                m: Box::new(
                    MorphismInstance::Primitive {
                        σs: σs1.clone(),
                        m: DummyMorphism(Γm1.clone(),
                            MorphismType {
                                bounds: Vec::new(),
                                src_type: Γm1.parse("T ~ A").expect("parse"),
                                dst_type: Γm1.parse("T ~ <B X> ~ U").expect("parse")
                            }
                        )
                    }
                )
            }
        )
    );
}

#[test]
fn test_morphgraph_spec2() {
    let mut base = MorphismBase::<DummyMorphism>::new(Context::new());
    let mut Γ = base.ctx();

    let mut Γm1 = Γ.scope();
    base.add_morphism({
        Γm1.add_variable("X", TypeKind::Type);
        DummyMorphism(Γm1.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γm1.parse("T ~ A").expect(""),
            dst_type: Γm1.parse("T ~ <B X> ~ U").expect("")
        })
    });

    let mut Γm2 = Γ.scope();
    base.add_morphism({
        Γm2.add_variable("Y", TypeKind::Type);
        DummyMorphism(Γm2.clone(), MorphismType{
            bounds: Vec::new(),
            src_type: Γm2.parse("T ~ <B Y> ~ U").expect(""),
            dst_type: Γm2.parse("T ~ <B Y> ~ V").expect("")
        })
    });

    let morph_graph = MorphismGraph::new(base);

    let mut Γ4 = Γ.scope();
    let σs1 = Γ4.shift_variables(&Γm1);

    //let mut Γ5 = Γ4.scope();
    let σs2 = Γ4.shift_variables(&Γm2);

    //assert!( Γ4.clone().bind(Γ4.get_varid("X").expect(""), Γ4.parse("test").expect("")).is_ok() );
    assert!( Γ4.clone().bind(Γ4.get_varid("Y").expect(""), Γ4.parse("test").expect("")).is_ok() );

    let inst =
        morph_graph.search(MorphismType {
            bounds: Vec::new(),
            src_type: Γ.parse("T ~ A").unwrap(),
            dst_type: Γ.parse("T ~ <B test> ~ V").unwrap(),
        });

    if let Ok(i) = inst.as_ref() {
        eprintln!("Found morphism instance: = \n==\n{}\n========", i.pretty(&Γ));
    }

    assert_eq!(
        inst,
        Ok(
            MorphismInstance::Specialize { Γ: Γ4,
                m: Box::new(MorphismInstance::Chain {
                    path: vec![
                        MorphismInstance::Primitive {
                            σs: σs1,
                            m: DummyMorphism(Γm2.clone(), MorphismType {
                                bounds: Vec::new(),
                                src_type: Γm1.parse("T ~ A").expect("parse"),
                                dst_type: Γm1.parse("T ~ <B X> ~ U").expect("parse")
                            })
                        },
                        MorphismInstance::Primitive {
                            σs: σs2,
                            m: DummyMorphism(Γm2.clone(), MorphismType {
                                bounds: Vec::new(),
                                src_type: Γm2.parse("T ~ <B Y> ~ U").expect("parse"),
                                dst_type: Γm2.parse("T ~ <B Y> ~ V").expect("parse")
                            })
                        }
                    ]
                })
            }
        )
    );
}
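Reading the expected values in the two tests above: `Specialize` carries the scope with the solved variable bindings, `Chain` the ordered steps, `Sub` the untouched ladder prefix ψ, and `Primitive` a morphism from the base together with its variable shift σs. A hedged sketch of how a caller might walk such an instance (the variant names are taken from the asserts above; the helper itself is illustrative only, not part of the library):

// Sketch: count the primitive steps inside a MorphismInstance.
// Assumes only the enum variants that appear in the expected values above.
fn count_prims<M: Morphism + Clone>(inst: &MorphismInstance<M>) -> usize {
    match inst {
        MorphismInstance::Primitive { .. } => 1,
        MorphismInstance::Id { .. } => 0,
        MorphismInstance::Chain { path } => path.iter().map(count_prims).sum(),
        MorphismInstance::Sub { m, .. } |
        MorphismInstance::Specialize { m, .. } => count_prims(m),
        _ => 0,
    }
}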
|
||||
|
||||
/*
|
||||
#[test]
|
||||
fn test_morphgraph_map_seq() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
let mut base = MorphismBase::<DummyMorphism>::new();
|
||||
|
||||
base.add_morphism(DummyMorphism(MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("A ~ F").expect(""),
|
||||
dst_type: dict.parse("A ~ E").expect("")
|
||||
}));
|
||||
|
||||
let morph_graph = MorphismGraph::new(base);
|
||||
|
||||
assert_eq!(
|
||||
morph_graph.search(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("<Seq A ~ F>").unwrap(),
|
||||
dst_type: dict.parse("<Seq A ~ E>").unwrap(),
|
||||
}, &mut dict),
|
||||
Ok(
|
||||
MorphismInstance::MapSeq { seq_repr: None, item_morph: Box::new(
|
||||
MorphismInstance::Primitive {
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("A ~ F").unwrap(),
|
||||
dst_type: dict.parse("A ~ E").unwrap()
|
||||
})
|
||||
}
|
||||
) }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_morphgraph_map_seq_repr() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
let mut base = MorphismBase::<DummyMorphism>::new();
|
||||
|
||||
base.add_morphism(DummyMorphism(MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("A ~ F").expect(""),
|
||||
dst_type: dict.parse("A ~ E").expect("")
|
||||
}));
|
||||
|
||||
let morph_graph = MorphismGraph::new(base);
|
||||
|
||||
assert_eq!(
|
||||
morph_graph.search(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("<Seq~<StaticLength 64> A ~ F>").unwrap(),
|
||||
dst_type: dict.parse("<Seq~<StaticLength 64> A ~ E>").unwrap(),
|
||||
}, &mut dict),
|
||||
Ok(
|
||||
MorphismInstance::MapSeq {
|
||||
seq_repr: Some(Box::new(dict.parse("<StaticLength 64>").unwrap())),
|
||||
item_morph: Box::new(
|
||||
MorphismInstance::Primitive {
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("A ~ F").unwrap(),
|
||||
dst_type: dict.parse("A ~ E").unwrap()
|
||||
})
|
||||
})
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path1() {
|
||||
let (mut dict, base) = morphism_test_setup();
|
||||
|
||||
let morph_graph = MorphismGraph::new(base);
|
||||
|
||||
assert_eq!(
|
||||
morph_graph.search(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
}, &mut dict),
|
||||
Ok(
|
||||
MorphismInstance::Sub {
|
||||
ψ: dict.parse("ℕ ~ <PosInt 10 LittleEndian>").expect(""),
|
||||
m: Box::new(
|
||||
MorphismInstance::MapSeq {
|
||||
seq_repr: None,
|
||||
item_morph: Box::new(
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(0, TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
m: Box::new(MorphismInstance::Primitive {
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
|
||||
dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
|
||||
}),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path2() {
|
||||
let (mut dict, base) = morphism_test_setup();
|
||||
|
||||
let morph_graph = MorphismGraph::new(base);
|
||||
|
||||
assert_eq!(
|
||||
morph_graph.search(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap(),
|
||||
}, &mut dict),
|
||||
Ok(
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(2, TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
m: Box::new(
|
||||
MorphismInstance::Chain {
|
||||
path: vec![
|
||||
MorphismInstance::Sub {
|
||||
ψ: dict.parse("ℕ ~ <PosInt 10 LittleEndian>").expect(""),
|
||||
m: Box::new(
|
||||
MorphismInstance::MapSeq {
|
||||
seq_repr: None,
|
||||
item_morph: Box::new(
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(0, TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
m: Box::new(MorphismInstance::Primitive {
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
|
||||
dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
|
||||
}),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(1, TypeTerm::Num(10))
|
||||
].into_iter().collect(),
|
||||
m: Box::new(
|
||||
MorphismInstance::Primitive{
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
|
||||
}),
|
||||
}
|
||||
)
|
||||
}
|
||||
]
|
||||
})
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path3() {
|
||||
let (mut dict, base) = morphism_test_setup();
|
||||
|
||||
let morph_graph = MorphismGraph::new(base);
|
||||
|
||||
let result = morph_graph.search(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
|
||||
}, &mut dict);
|
||||
|
||||
eprintln!("{:#?}", result);
|
||||
|
||||
assert_eq!(
|
||||
result,
|
||||
|
||||
Ok(
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(2, TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
|
||||
m: Box::new(
|
||||
MorphismInstance::Chain {
|
||||
path: vec![
|
||||
|
||||
MorphismInstance::Sub {
|
||||
ψ: dict.parse("ℕ ~ <PosInt 10 LittleEndian>").expect(""),
|
||||
m: Box::new(
|
||||
MorphismInstance::MapSeq {
|
||||
seq_repr: None,
|
||||
item_morph: Box::new(
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(0, TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
m: Box::new(MorphismInstance::Primitive {
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("<Digit Radix> ~ Char").unwrap(),
|
||||
dst_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap()
|
||||
}),
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
},
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(1, TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
m: Box::new(
|
||||
MorphismInstance::Primitive{
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
|
||||
}),
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
MorphismInstance::Sub {
|
||||
ψ: dict.parse("ℕ ~ <PosInt DstRadix LittleEndian>").expect(""),
|
||||
m: Box::new(
|
||||
MorphismInstance::MapSeq {
|
||||
seq_repr: None,
|
||||
item_morph: Box::new(
|
||||
MorphismInstance::Specialize {
|
||||
σ: vec![
|
||||
(0, dict.parse("16").expect("")),
|
||||
].into_iter().collect(),
|
||||
m: Box::new(MorphismInstance::Primitive {
|
||||
m: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap(),
|
||||
dst_type: dict.parse("<Digit Radix> ~ Char").unwrap()
|
||||
}),
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
]
|
||||
})
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
*/
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/*
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path_posint() {
|
||||
let (mut dict, base) = morphism_test_setup();
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().sugar(&mut dict),
|
||||
}).solve();
|
||||
|
||||
if let Some(path) = path.as_ref() {
|
||||
print_path(&mut dict, path);
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(
|
||||
vec![
|
||||
MorphismInstance::MapSeq {
|
||||
ψ: dict.parse_desugared("ℕ ~ <PosInt 10 BigEndian>").expect("").sugar(&mut dict),
|
||||
seq_repr: None,
|
||||
item_morph: Box::new(MorphismInstance::Primitive {
|
||||
σ: vec![
|
||||
(0, TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
ψ: TypeTerm::unit(),
|
||||
morph: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap().sugar(&mut dict)
|
||||
}),
|
||||
})
|
||||
},
|
||||
|
||||
MorphismInstance::Primitive {
|
||||
σ: vec![
|
||||
(0, TypeTerm::Num(10)),
|
||||
].into_iter().collect(),
|
||||
ψ: TypeTerm::unit(),
|
||||
morph: DummyMorphism(MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
|
||||
}),
|
||||
},
|
||||
MorphismInstance::Primitive {
|
||||
σ: vec![
|
||||
(1, TypeTerm::Num(10)),
|
||||
(2, TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
ψ: TypeTerm::unit(),
|
||||
morph: DummyMorphism(MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("ℕ ~ <PosInt SrcRadix LittleEndian> ~ <Seq <Digit SrcRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("ℕ ~ <PosInt DstRadix LittleEndian> ~ <Seq <Digit DstRadix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict)
|
||||
}),
|
||||
},
|
||||
MorphismInstance::Primitive {
|
||||
σ: vec![
|
||||
(2, TypeTerm::Num(16)),
|
||||
(0, TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
ψ: TypeTerm::unit(),
|
||||
morph: DummyMorphism(MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("ℕ ~ <PosInt Radix LittleEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("ℕ ~ <PosInt Radix BigEndian> ~ <Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().sugar(&mut dict),
|
||||
}),
|
||||
},
|
||||
|
||||
MorphismInstance::MapSeq {
|
||||
ψ: dict.parse_desugared("ℕ ~ <PosInt 16 BigEndian>").expect("").sugar(&mut dict),
|
||||
seq_repr: None,
|
||||
item_morph: Box::new(MorphismInstance::Primitive {
|
||||
σ: vec![
|
||||
(0, TypeTerm::Num(16)),
|
||||
].into_iter().collect(),
|
||||
ψ: TypeTerm::unit(),
|
||||
morph: DummyMorphism(MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("<Digit Radix> ~ ℤ_2^64 ~ machine.UInt64").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("<Digit Radix> ~ Char").unwrap().sugar(&mut dict)
|
||||
}),
|
||||
})
|
||||
},
|
||||
]
|
||||
)
|
||||
);
|
||||
/*
|
||||
assert_eq!(
|
||||
base.find_morphism_path(MorphismType {
|
||||
src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap()
|
||||
}),
|
||||
Some(
|
||||
vec![
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 LittleEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 16 LittleEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ ℤ_2^64 ~ machine.UInt64>").unwrap().normalize(),
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 16 BigEndian> ~ <Seq <Digit 16> ~ Char>").unwrap().normalize(),
|
||||
]
|
||||
)
|
||||
);
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
assert_eq!(
|
||||
base.find_morphism_with_subtyping(
|
||||
&MorphismType {
|
||||
src_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian> ~ <Seq <Digit 10> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}
|
||||
),
|
||||
|
||||
Some((
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<Seq <Digit Radix> ~ Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq <Digit Radix> ~ ℤ_2^64 ~ machine.UInt64>").unwrap()
|
||||
}),
|
||||
|
||||
dict.parse("Symbol ~ ℕ ~ <PosInt 10 BigEndian>").unwrap(),
|
||||
|
||||
vec![
|
||||
(dict.get_typeid(&"Radix".into()).unwrap(),
|
||||
dict.parse("10").unwrap())
|
||||
].into_iter().collect::<std::collections::HashMap<TypeID, TypeTerm>>()
|
||||
))
|
||||
);
|
||||
*/
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn morphism_test_seq_repr() {
|
||||
let mut dict = BimapTypeDict::new();
|
||||
let mut base = MorphismBase::<DummyMorphism>::new();
|
||||
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("<Seq~<ValueTerminated 0> native.UInt8>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("<Seq~<LengthPrefix native.UInt64> native.UInt8>").unwrap().sugar(&mut dict)
|
||||
})
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
base.get_morphism_instance(&MorphismType {
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("<Seq~<ValueTerminated 0> Char~Ascii~native.UInt8>").expect("parse").sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("<Seq~<LengthPrefix native.UInt64> Char~Ascii~native.UInt8>").expect("parse").sugar(&mut dict)
|
||||
}),
|
||||
Some(
|
||||
MorphismInstance::Primitive {
|
||||
ψ: dict.parse_desugared("<Seq Char~Ascii>").expect("").sugar(&mut dict),
|
||||
σ: HashMap::new(),
|
||||
morph: DummyMorphism(MorphismType{
|
||||
bounds: Vec::new(),
|
||||
src_type: dict.parse_desugared("<Seq~<ValueTerminated 0> native.UInt8>").unwrap().sugar(&mut dict),
|
||||
dst_type: dict.parse_desugared("<Seq~<LengthPrefix native.UInt64> native.UInt8>").unwrap().sugar(&mut dict)
|
||||
})
|
||||
}
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[test]
|
||||
fn test_morphism_path_listedit()
|
||||
{
|
||||
let mut dict = BimapTypeDict::new();
|
||||
let mut base = MorphismBase::<DummyMorphism>::new( vec![ dict.parse("List").expect("") ] );
|
||||
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("Char").unwrap(),
|
||||
dst_type: dict.parse("Char ~ EditTree").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char~ReprTree>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List~Vec ReprTree>").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
|
||||
})
|
||||
);
|
||||
base.add_morphism(
|
||||
DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
|
||||
})
|
||||
);
|
||||
|
||||
|
||||
let path = ShortestPathProblem::new(&base, MorphismType {
|
||||
src_type: dict.parse("<Seq~List~Vec <Digit 10>~Char>").unwrap(),
|
||||
dst_type: dict.parse("<Seq~List <Digit 10>~Char> ~ EditTree").unwrap(),
|
||||
}).solve();
|
||||
|
||||
if let Some(path) = path.as_ref() {
|
||||
print_path(&mut dict, path);
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
path,
|
||||
Some(vec![
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char>").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List Char>").unwrap(),
|
||||
dst_type: dict.parse("<List Char~ReprTree>").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List~Vec ReprTree>").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>~Char>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
MorphismInstance {
|
||||
m: DummyMorphism(MorphismType{
|
||||
src_type: dict.parse("<List~Vec Char~ReprTree>").unwrap(),
|
||||
dst_type: dict.parse("<List Char> ~ EditTree").unwrap()
|
||||
}),
|
||||
halo: dict.parse("<Seq~List <Digit 10>>").unwrap(),
|
||||
σ: HashMap::new()
|
||||
},
|
||||
])
|
||||
);
|
||||
}
|
||||
*/
|
||||
*/
|
|
@ -1,6 +1,6 @@

use {
    crate::{term::*, dict::*, parser::*}
    crate::{desugared_term::*, dict::*, parser::*, Context, LayeredContext, TypeKind, TypeTerm}
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

@ -12,16 +12,33 @@ fn test_parser_id() {
    dict.add_varname("T".into());

    assert_eq!(
        Ok(TypeTerm::TypeID(TypeID::Var(0))),
        Ok(TypeTerm::Var(0)),
        dict.parse("T")
    );

    assert_eq!(
        Ok(TypeTerm::TypeID(TypeID::Fun(0))),
        Ok(TypeTerm::Id(0)),
        dict.parse("A")
    );
}

#[test]
fn test_parser_var_ctx() {
    let mut ctx = Context::new();

    ctx.add_variable("T", TypeKind::Type);

    assert_eq!(
        Ok(TypeTerm::Var(0)),
        ctx.parse("T")
    );

    assert_eq!(
        Ok(TypeTerm::Id(0)),
        ctx.parse("A")
    );
}

#[test]
fn test_parser_num() {
    assert_eq!(

@ -42,17 +59,17 @@ fn test_parser_char() {
fn test_parser_app() {
    assert_eq!(
        BimapTypeDict::new().parse("<A B>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),
        Ok(TypeTerm::Spec(vec![
            TypeTerm::Id(0),
            TypeTerm::Id(1),
        ]))
    );
    assert_eq!(
        BimapTypeDict::new().parse("<A B C>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),
            TypeTerm::TypeID(TypeID::Fun(2)),
        Ok(TypeTerm::Spec(vec![
            TypeTerm::Id(0),
            TypeTerm::Id(1),
            TypeTerm::Id(2),
        ]))
    );
}

@ -78,16 +95,16 @@ fn test_parser_ladder() {
    assert_eq!(
        BimapTypeDict::new().parse("A~B"),
        Ok(TypeTerm::Ladder(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),
            TypeTerm::Id(0),
            TypeTerm::Id(1),
        ]))
    );
    assert_eq!(
        BimapTypeDict::new().parse("A~B~C"),
        Ok(TypeTerm::Ladder(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
            TypeTerm::TypeID(TypeID::Fun(1)),
            TypeTerm::TypeID(TypeID::Fun(2)),
            TypeTerm::Id(0),
            TypeTerm::Id(1),
            TypeTerm::Id(2),
        ]))
    );
}

@ -97,11 +114,11 @@ fn test_parser_ladder_outside() {
    assert_eq!(
        BimapTypeDict::new().parse("<A B>~C"),
        Ok(TypeTerm::Ladder(vec![
            TypeTerm::App(vec![
                TypeTerm::TypeID(TypeID::Fun(0)),
                TypeTerm::TypeID(TypeID::Fun(1)),
            TypeTerm::Spec(vec![
                TypeTerm::Id(0),
                TypeTerm::Id(1),
            ]),
            TypeTerm::TypeID(TypeID::Fun(2)),
            TypeTerm::Id(2),
        ]))
    );
}

@ -110,11 +127,11 @@ fn test_parser_ladder_outside() {
fn test_parser_ladder_inside() {
    assert_eq!(
        BimapTypeDict::new().parse("<A B~C>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
        Ok(TypeTerm::Spec(vec![
            TypeTerm::Id(0),
            TypeTerm::Ladder(vec![
                TypeTerm::TypeID(TypeID::Fun(1)),
                TypeTerm::TypeID(TypeID::Fun(2)),
                TypeTerm::Id(1),
                TypeTerm::Id(2),
            ])
        ]))
    );

@ -124,13 +141,13 @@ fn test_parser_ladder_inside() {
fn test_parser_ladder_between() {
    assert_eq!(
        BimapTypeDict::new().parse("<A B~<C D>>"),
        Ok(TypeTerm::App(vec![
            TypeTerm::TypeID(TypeID::Fun(0)),
        Ok(TypeTerm::Spec(vec![
            TypeTerm::Id(0),
            TypeTerm::Ladder(vec![
                TypeTerm::TypeID(TypeID::Fun(1)),
                TypeTerm::App(vec![
                    TypeTerm::TypeID(TypeID::Fun(2)),
                    TypeTerm::TypeID(TypeID::Fun(3)),
                TypeTerm::Id(1),
                TypeTerm::Spec(vec![
                    TypeTerm::Id(2),
                    TypeTerm::Id(3),
                ])
            ])
        ]))

@ -141,7 +158,7 @@ fn test_parser_ladder_between() {
#[test]
fn test_parser_ladder_large() {
    assert_eq!(
        BimapTypeDict::new().parse(
        BimapTypeDict::new().parse_desugared(
            "<Seq Date
                  ~<TimeSince UnixEpoch>
                  ~<Duration Seconds>

@ -154,50 +171,50 @@ fn test_parser_ladder_large() {
            ~<Seq Byte>"),

        Ok(
            TypeTerm::Ladder(vec![
                TypeTerm::App(vec![
                    TypeTerm::TypeID(TypeID::Fun(0)),
                    TypeTerm::Ladder(vec![
                        TypeTerm::TypeID(TypeID::Fun(1)),
                        TypeTerm::App(vec![
                            TypeTerm::TypeID(TypeID::Fun(2)),
                            TypeTerm::TypeID(TypeID::Fun(3))
            DesugaredTypeTerm::Ladder(vec![
                DesugaredTypeTerm::App(vec![
                    DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
                    DesugaredTypeTerm::Ladder(vec![
                        DesugaredTypeTerm::TypeID(TypeID::Fun(1)),
                        DesugaredTypeTerm::App(vec![
                            DesugaredTypeTerm::TypeID(TypeID::Fun(2)),
                            DesugaredTypeTerm::TypeID(TypeID::Fun(3))
                        ]),
                        TypeTerm::App(vec![
                            TypeTerm::TypeID(TypeID::Fun(4)),
                            TypeTerm::TypeID(TypeID::Fun(5))
                        DesugaredTypeTerm::App(vec![
                            DesugaredTypeTerm::TypeID(TypeID::Fun(4)),
                            DesugaredTypeTerm::TypeID(TypeID::Fun(5))
                        ]),
                        TypeTerm::TypeID(TypeID::Fun(6)),
                        TypeTerm::App(vec![
                            TypeTerm::TypeID(TypeID::Fun(7)),
                            TypeTerm::Num(10),
                            TypeTerm::TypeID(TypeID::Fun(8))
                        DesugaredTypeTerm::TypeID(TypeID::Fun(6)),
                        DesugaredTypeTerm::App(vec![
                            DesugaredTypeTerm::TypeID(TypeID::Fun(7)),
                            DesugaredTypeTerm::Num(10),
                            DesugaredTypeTerm::TypeID(TypeID::Fun(8))
                        ]),
                        TypeTerm::App(vec![
                            TypeTerm::TypeID(TypeID::Fun(0)),
                            TypeTerm::Ladder(vec![
                                TypeTerm::App(vec![
                                    TypeTerm::TypeID(TypeID::Fun(9)),
                                    TypeTerm::Num(10)
                        DesugaredTypeTerm::App(vec![
                            DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
                            DesugaredTypeTerm::Ladder(vec![
                                DesugaredTypeTerm::App(vec![
                                    DesugaredTypeTerm::TypeID(TypeID::Fun(9)),
                                    DesugaredTypeTerm::Num(10)
                                ]),
                                TypeTerm::TypeID(TypeID::Fun(10))
                                DesugaredTypeTerm::TypeID(TypeID::Fun(10))
                            ])
                        ])
                    ])
                ]),
                TypeTerm::App(vec![
                    TypeTerm::TypeID(TypeID::Fun(11)),
                    TypeTerm::TypeID(TypeID::Fun(10)),
                    TypeTerm::Char(':')
                DesugaredTypeTerm::App(vec![
                    DesugaredTypeTerm::TypeID(TypeID::Fun(11)),
                    DesugaredTypeTerm::TypeID(TypeID::Fun(10)),
                    DesugaredTypeTerm::Char(':')
                ]),
                TypeTerm::App(vec![
                    TypeTerm::TypeID(TypeID::Fun(0)),
                    TypeTerm::TypeID(TypeID::Fun(10))
                DesugaredTypeTerm::App(vec![
                    DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
                    DesugaredTypeTerm::TypeID(TypeID::Fun(10))
                ]),
                TypeTerm::TypeID(TypeID::Fun(12)),
                TypeTerm::App(vec![
                    TypeTerm::TypeID(TypeID::Fun(0)),
                    TypeTerm::TypeID(TypeID::Fun(13))
                DesugaredTypeTerm::TypeID(TypeID::Fun(12)),
                DesugaredTypeTerm::App(vec![
                    DesugaredTypeTerm::TypeID(TypeID::Fun(0)),
                    DesugaredTypeTerm::TypeID(TypeID::Fun(13))
                ])
            ])
        )
117
src/test/pnf.rs
117
src/test/pnf.rs
@ -1,58 +1,117 @@
use crate::{dict::BimapTypeDict, parser::*};

#[test]
fn test_param_normalize() {
fn test_normalize_id() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error"),
        dict.parse("A~B~C").expect("parse error").param_normalize(),
        dict.parse("A~B~C").expect("parse error").normalize(),
    );

    assert_eq!(
        dict.parse("<A B>~C").expect("parse error"),
        dict.parse("<A B>~C").expect("parse error").param_normalize(),
        dict.parse("<A B>~C").expect("parse error").normalize(),
    );
}

#[test]
fn test_normalize_spec() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("<A B~C>").expect("parse error"),
        dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
        dict.parse("<A B>~<A C>").expect("parse error").normalize(),
    );

    assert_eq!(
        dict.parse("<A~Y B>").expect("parse error"),
        dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
        dict.parse("<A~Y B>~<Y B>").expect("parse error").normalize(),
    );

    assert_eq!(
        dict.parse("<A B~C D~E>").expect("parse error"),
        dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").param_normalize(),
        dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").normalize(),
    );

    assert_eq!(
        dict.parse("<A~X B~C D~E>").expect("parse error"),
        dict.parse("<A B D>~<A B~C E>~<X C E>").expect("parse errror").param_normalize(),
    );

    assert_eq!(
        dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
    );

    assert_eq!(
        dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~x86.UInt8>").expect("parse error").param_normalize(),
        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
    );
    assert_eq!(
        dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> x86.UInt8>").expect("parse error").param_normalize(),
        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
    );

    assert_eq!(
        dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
        dict.parse("<A <B C> F H H>
                    ~<A <B D> F H H>
                    ~<A~Y <B E> F H H>").expect("parse errror")
            .param_normalize(),
        dict.parse("<A~X B D>~<A~X B~C E>~<X C E>").expect("parse errror").normalize(),
    );
}

#[test]
fn test_normalize_seq() {
    let mut dict = BimapTypeDict::new();
    assert_eq!(
        dict.parse("<Seq Char~Ascii>").expect("parse error"),
        dict.parse("<Seq Char>~<Seq Ascii>").expect("parse errror").normalize(),
    );

    eprintln!("---------------");
    assert_eq!(
        dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
        dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").normalize(),
    );
    eprintln!("---------------");
    assert_eq!(
        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~native.UInt8>").expect("parse error"),
        dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~native.UInt8>").expect("parse error").normalize(),
    );

    eprintln!("---------------");
    assert_eq!(
        dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~native.UInt8>").expect("parse error"),
        dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> native.UInt8>").expect("parse error").normalize(),
    );
}

#[test]
fn test_normalize_complex_spec() {
    let mut dict = BimapTypeDict::new();
    assert_eq!(
        dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
        dict.parse("<A~Y <B C> F H H>
                    ~<A~Y <B D> F H H>
                    ~<Y <B E> F H H>").expect("parse errror")
            .normalize(),
    );
}

#[test]
fn test_normalize_struct() {
    let mut dict = BimapTypeDict::new();
    assert_eq!(
        dict.parse("< Struct~Aligned
                      < a TimePoint~<TimeSince UnixEpoch>~Seconds~native.UInt64 >
                      < b Angle ~ Degrees ~ ℝ ~ native.Float32 >
                    >
        ").expect("parse error"),
        dict.parse("
            < Struct <a TimePoint> <b Angle> >
            ~ < Struct <a <TimeSince UnixEpoch>~Seconds> <b Angle~Degrees~ℝ> >
            ~ < Struct~Aligned <a native.UInt64> <b native.Float32> >
        ").expect("parse errror")
            .normalize(),
    );
}

#[test]
fn test_normalize_enum() {
    let mut dict = BimapTypeDict::new();
    assert_eq!(
        dict.parse("< Enum
                      < a TimePoint~<TimeSince UnixEpoch>~Seconds~native.UInt64 >
                      < b Angle ~ Degrees ~ ℝ ~ native.Float32 >
                    >
        ").expect("parse error"),
        dict.parse("
            < Enum <a TimePoint> <b Angle> >
            ~ < Enum <a <TimeSince UnixEpoch>~Seconds> <b Angle~Degrees~ℝ> >
            ~ < Enum <a native.UInt64> <b native.Float32> >
        ").expect("parse errror")
            .normalize(),
    );
}
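All of the asserts above follow one pattern: `normalize()` merges a ladder of parameterized types into a single application whose parameters are themselves ladders. A condensed restatement of one case from test_normalize_seq, kept here only as a sketch of that contract (no names beyond `parse` and `normalize` from this file are assumed):

// Sketch: <Seq <Digit 10>> ~ <Seq Char> normalizes to <Seq <Digit 10>~Char>.
let mut dict = BimapTypeDict::new();
assert_eq!(
    dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse").normalize(),
    dict.parse("<Seq <Digit 10>~Char>").expect("parse")
);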
@ -1,30 +0,0 @@

use {
    crate::{dict::*, term::*, parser::*, unparser::*, substitution::*},
    std::iter::FromIterator,
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

#[test]
fn test_subst() {
    let mut dict = BimapTypeDict::new();

    let mut σ = std::collections::HashMap::new();

    // T --> ℕ
    σ.insert
        (dict.add_varname(String::from("T")),
         dict.parse("ℕ").unwrap());

    // U --> <Seq Char>
    σ.insert
        (dict.add_varname(String::from("U")),
         dict.parse("<Seq Char>").unwrap());

    assert_eq!(
        dict.parse("<Seq T~U>").unwrap().apply_subst(&σ).clone(),
        dict.parse("<Seq ℕ~<Seq Char>>").unwrap()
    );
}
@ -1,96 +0,0 @@
use crate::{dict::BimapTypeDict, parser::*, unparser::*};

#[test]
fn test_semantic_subtype() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error")
            .is_semantic_subtype_of(
                &dict.parse("A~B~C").expect("parse errror")
            ),
        Some((0, dict.parse("A~B~C").expect("parse errror")))
    );

    assert_eq!(
        dict.parse("A~B1~C1").expect("parse error")
            .is_semantic_subtype_of(
                &dict.parse("A~B2~C2").expect("parse errror")
            ),
        Some((0, dict.parse("A~B1~C1").expect("parse errror")))
    );

    assert_eq!(
        dict.parse("A~B~C1").expect("parse error")
            .is_semantic_subtype_of(
                &dict.parse("B~C2").expect("parse errror")
            ),
        Some((1, dict.parse("B~C1").expect("parse errror")))
    );
}

#[test]
fn test_syntactic_subtype() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        dict.parse("A~B~C").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("A~B~C").expect("parse errror")
            ),
        Ok(0)
    );

    assert_eq!(
        dict.parse("A~B~C").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("B~C").expect("parse errror")
            ),
        Ok(1)
    );

    assert_eq!(
        dict.parse("A~B~C~D~E").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("C~D").expect("parse errror")
            ),
        Ok(2)
    );

    assert_eq!(
        dict.parse("A~B~C~D~E").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("C~G").expect("parse errror")
            ),
        Err((2,3))
    );

    assert_eq!(
        dict.parse("A~B~C~D~E").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("G~F~K").expect("parse errror")
            ),
        Err((0,0))
    );

    assert_eq!(
        dict.parse("<Duration Seconds>~ℕ").expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("ℕ").expect("parse errror")
            ),
        Ok(1)
    );

    assert_eq!(
        dict.parse("
            <Duration Seconds>
            ~ℕ
            ~<PosInt 10 BigEndian>
            ~< Seq <Digit 10> ~ Char >"
        ).expect("parse error")
            .is_syntactic_subtype_of(
                &dict.parse("<Seq Char>").expect("parse errror")
            ),
        Ok(4)
    );
}
@ -1,380 +0,0 @@

use {
    crate::{dict::*, parser::*, unparser::*, term::*, unification::*},
    std::iter::FromIterator
};

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\

fn test_unify(ts1: &str, ts2: &str, expect_unificator: bool) {
    let mut dict = BimapTypeDict::new();
    dict.add_varname(String::from("T"));
    dict.add_varname(String::from("U"));
    dict.add_varname(String::from("V"));
    dict.add_varname(String::from("W"));

    let mut t1 = dict.parse(ts1).unwrap();
    let mut t2 = dict.parse(ts2).unwrap();
    let σ = crate::unify( &t1, &t2 );

    if expect_unificator {
        assert!(σ.is_ok());

        let σ = σ.unwrap();

        assert_eq!(
            t1.apply_subst(&σ),
            t2.apply_subst(&σ)
        );
    } else {
        assert!(! σ.is_ok());
    }
}

#[test]
fn test_unification_error() {
    let mut dict = BimapTypeDict::new();
    dict.add_varname(String::from("T"));

    assert_eq!(
        crate::unify(
            &dict.parse("<A T>").unwrap(),
            &dict.parse("<B T>").unwrap()
        ),

        Err(UnificationError {
            addr: vec![0],
            t1: dict.parse("A").unwrap(),
            t2: dict.parse("B").unwrap()
        })
    );

    assert_eq!(
        crate::unify(
            &dict.parse("<V <U A> T>").unwrap(),
            &dict.parse("<V <U B> T>").unwrap()
        ),

        Err(UnificationError {
            addr: vec![1, 1],
            t1: dict.parse("A").unwrap(),
            t2: dict.parse("B").unwrap()
        })
    );

    assert_eq!(
        crate::unify(
            &dict.parse("T").unwrap(),
            &dict.parse("<Seq T>").unwrap()
        ),

        Err(UnificationError {
            addr: vec![],
            t1: dict.parse("T").unwrap(),
            t2: dict.parse("<Seq T>").unwrap()
        })
    );
}

#[test]
fn test_unification() {
    test_unify("A", "A", true);
    test_unify("A", "B", false);
    test_unify("<Seq T>", "<Seq Ascii~Char>", true);
    test_unify("<Seq T>", "<U Char>", true);

    test_unify(
        "<Seq Path~<Seq Char>>~<SepSeq Char '\n'>~<Seq Char>",
        "<Seq T~<Seq Char>>~<SepSeq Char '\n'>~<Seq Char>",
        true
    );

    let mut dict = BimapTypeDict::new();

    dict.add_varname(String::from("T"));
    dict.add_varname(String::from("U"));
    dict.add_varname(String::from("V"));
    dict.add_varname(String::from("W"));

    assert_eq!(
        UnificationProblem::new_eq(vec![
            (dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
            (dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
        ]).solve(),
        Ok((
            vec![],
            vec![
                // T
                (TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),

                // U
                (TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
            ].into_iter().collect()
        ))
    );

    assert_eq!(
        UnificationProblem::new_eq(vec![
            (dict.parse("<Seq T>").unwrap(), dict.parse("<Seq W~<Seq Char>>").unwrap()),
            (dict.parse("<Seq ℕ>").unwrap(), dict.parse("<Seq W>").unwrap()),
        ]).solve(),
        Ok((
            vec![],
            vec![
                // W
                (TypeID::Var(3), dict.parse("ℕ").unwrap()),

                // T
                (TypeID::Var(0), dict.parse("ℕ~<Seq Char>").unwrap())
            ].into_iter().collect()
        ))
    );
}

#[test]
fn test_subtype_unification1() {
    let mut dict = BimapTypeDict::new();
    dict.add_varname(String::from("T"));

    assert_eq!(
        UnificationProblem::new_sub(vec![
            (dict.parse("A ~ B").unwrap(),
             dict.parse("B").unwrap()),
        ]).solve(),
        Ok((
            vec![ dict.parse("A").unwrap() ],
            vec![].into_iter().collect()
        ))
    );

    assert_eq!(
        UnificationProblem::new_sub(vec![
            (dict.parse("A ~ B ~ C ~ D").unwrap(),
             dict.parse("C ~ D").unwrap()),
        ]).solve(),
        Ok((
            vec![ dict.parse("A ~ B").unwrap() ],
            vec![].into_iter().collect()
        ))
    );

    assert_eq!(
        UnificationProblem::new_sub(vec![
            (dict.parse("A ~ B ~ C ~ D").unwrap(),
             dict.parse("T ~ D").unwrap()),
        ]).solve(),
        Ok((
            vec![ TypeTerm::unit() ],
            vec![
                (dict.get_typeid(&"T".into()).unwrap(), dict.parse("A ~ B ~ C").unwrap())
            ].into_iter().collect()
        ))
    );

    assert_eq!(
        UnificationProblem::new_sub(vec![
            (dict.parse("A ~ B ~ C ~ D").unwrap(),
             dict.parse("B ~ T ~ D").unwrap()),
        ]).solve(),
        Ok((
            vec![ dict.parse("A").unwrap() ],
            vec![
                (dict.get_typeid(&"T".into()).unwrap(), dict.parse("C").unwrap())
            ].into_iter().collect()
        ))
    );
}

#[test]
fn test_subtype_unification2() {
    let mut dict = BimapTypeDict::new();

    dict.add_varname(String::from("T"));
    dict.add_varname(String::from("U"));
    dict.add_varname(String::from("V"));
    dict.add_varname(String::from("W"));

    assert_eq!(
        UnificationProblem::new_sub(vec![
            (dict.parse("<Seq~T <Digit 10> ~ Char ~ Ascii>").unwrap(),
             dict.parse("<Seq~<LengthPrefix x86.UInt64> Char ~ Ascii>").unwrap()),
        ]).solve(),
        Ok((
            vec![
                dict.parse("<Seq <Digit 10>>").unwrap()
            ],
            vec![
                // T
                (TypeID::Var(0), dict.parse("<LengthPrefix x86.UInt64>").unwrap())
            ].into_iter().collect()
        ))
    );

    assert_eq!(
        UnificationProblem::new_sub(vec![
            (dict.parse("U").unwrap(), dict.parse("<Seq Char>").unwrap()),
            (dict.parse("T").unwrap(), dict.parse("<Seq U>").unwrap()),
        ]).solve(),
        Ok((
            vec![
                TypeTerm::unit(),
                TypeTerm::unit(),
            ],
            vec![
                // T
                (TypeID::Var(0), dict.parse("<Seq <Seq Char>>").unwrap()),

                // U
                (TypeID::Var(1), dict.parse("<Seq Char>").unwrap())
            ].into_iter().collect()
        ))
    );

    assert_eq!(
        UnificationProblem::new_sub(vec![
            (dict.parse("<Seq T>").unwrap(),
             dict.parse("<Seq W~<Seq Char>>").unwrap()),
            (dict.parse("<Seq~<LengthPrefix x86.UInt64> ℕ~<PosInt 10 BigEndian>>").unwrap(),
             dict.parse("<<LengthPrefix x86.UInt64> W>").unwrap()),
        ]).solve(),
        Ok((
            vec![
                TypeTerm::unit(),
                dict.parse("<Seq ℕ>").unwrap(),
            ],
            vec![
                // W
                (TypeID::Var(3), dict.parse("ℕ~<PosInt 10 BigEndian>").unwrap()),

                // T
                (TypeID::Var(0), dict.parse("ℕ~<PosInt 10 BigEndian>~<Seq Char>").unwrap())
            ].into_iter().collect()
        ))
    );

    assert_eq!(
        subtype_unify(
            &dict.parse("<Seq~List~Vec <Digit 16>~Char>").expect(""),
            &dict.parse("<List~Vec Char>").expect("")
        ),
        Ok((
            dict.parse("<Seq~List <Digit 16>>").expect(""),
            vec![].into_iter().collect()
        ))
    );

    assert_eq!(
        subtype_unify(
            &dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq~List~Vec <Digit 16>~Char>").expect(""),
            &dict.parse("<List~Vec Char>").expect("")
        ),
        Ok((
            dict.parse("ℕ ~ <PosInt 16 BigEndian> ~ <Seq~List <Digit 16>>").expect(""),
            vec![].into_iter().collect()
        ))
    );
}

#[test]
fn test_trait_not_subtype() {
    let mut dict = BimapTypeDict::new();

    assert_eq!(
        subtype_unify(
            &dict.parse("A ~ B").expect(""),
            &dict.parse("A ~ B ~ C").expect("")
        ),
        Err(UnificationError {
            addr: vec![1],
            t1: dict.parse("B").expect(""),
            t2: dict.parse("C").expect("")
        })
    );

    assert_eq!(
        subtype_unify(
            &dict.parse("<Seq~List~Vec <Digit 10>~Char>").expect(""),
            &dict.parse("<Seq~List~Vec Char~ReprTree>").expect("")
        ),
        Err(UnificationError {
            addr: vec![1,1],
            t1: dict.parse("Char").expect(""),
            t2: dict.parse("ReprTree").expect("")
        })
    );
}

#[test]
fn test_reprtree_list_subtype() {
    let mut dict = BimapTypeDict::new();

    dict.add_varname("Item".into());

    assert_eq!(
        subtype_unify(
            &dict.parse("<List~Vec <Digit 10>~Char~ReprTree>").expect(""),
            &dict.parse("<List~Vec Item~ReprTree>").expect("")
        ),
        Ok((
            TypeTerm::unit(),
            vec![
                (dict.get_typeid(&"Item".into()).unwrap(), dict.parse("<Digit 10>~Char").unwrap())
            ].into_iter().collect()
        ))
    );
}

#[test]
pub fn test_subtype_delim() {
    let mut dict = BimapTypeDict::new();

    dict.add_varname(String::from("T"));
    dict.add_varname(String::from("Delim"));

    assert_eq!(
        UnificationProblem::new_sub(vec![

            (
                //given type
                dict.parse("
                    < Seq <Seq <Digit 10>~Char~Ascii~UInt8> >
                    ~ < ValueSep ':' Char~Ascii~UInt8 >
                    ~ < Seq~<LengthPrefix UInt64> Char~Ascii~UInt8 >
                ").expect(""),

                //expected type
                dict.parse("
                    < Seq <Seq T> >
                    ~ < ValueSep Delim T >
                    ~ < Seq~<LengthPrefix UInt64> T >
                ").expect("")
            ),

            // subtype bounds
            (
                dict.parse("T").expect(""),
                dict.parse("UInt8").expect("")
            ),
            /* todo
            (
                dict.parse("<TypeOf Delim>").expect(""),
                dict.parse("T").expect("")
            ),
            */
        ]).solve(),
        Ok((
            // halo types for each rhs in the sub-equations
            vec![
                dict.parse("<Seq <Seq <Digit 10>>>").expect(""),
                dict.parse("Char~Ascii").expect(""),
            ],

            // variable substitution
            vec![
                (dict.get_typeid(&"T".into()).unwrap(), dict.parse("Char~Ascii~UInt8").expect("")),
                (dict.get_typeid(&"Delim".into()).unwrap(), TypeTerm::Char(':')),
            ].into_iter().collect()
        ))
    );
}
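In the removed subtype tests above, `new_sub(..).solve()` returns a pair: first the halo types, i.e. the ladder prefix of each lhs that the rhs leaves unconstrained, then the variable substitution. A condensed sketch of that contract, using only calls that appear in the tests above:

// Sketch: for "A ~ B" against expected "B", the halo is "A" and σ stays empty.
let mut dict = BimapTypeDict::new();
let (halos, σ) = UnificationProblem::new_sub(vec![
    (dict.parse("A ~ B").unwrap(), dict.parse("B").unwrap()),
]).solve().expect("subtype holds");
assert_eq!(halos, vec![ dict.parse("A").unwrap() ]);
assert!(σ.is_empty());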
@ -1,529 +0,0 @@
|
|||
use {
|
||||
crate::{dict::*, term::*}, std::{collections::HashMap}
|
||||
};
|
||||
|
||||
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
|
||||
|
||||
#[derive(Clone, Eq, PartialEq, Debug)]
|
||||
pub struct UnificationError {
|
||||
pub addr: Vec<usize>,
|
||||
pub t1: TypeTerm,
|
||||
pub t2: TypeTerm
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct UnificationPair {
|
||||
addr: Vec<usize>,
|
||||
halo: TypeTerm,
|
||||
|
||||
lhs: TypeTerm,
|
||||
rhs: TypeTerm,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UnificationProblem {
|
||||
σ: HashMap<TypeID, TypeTerm>,
|
||||
upper_bounds: HashMap< u64, TypeTerm >,
|
||||
lower_bounds: HashMap< u64, TypeTerm >,
|
||||
equal_pairs: Vec<UnificationPair>,
|
||||
subtype_pairs: Vec<UnificationPair>,
|
||||
trait_pairs: Vec<UnificationPair>
|
||||
}
|
||||
|
||||
impl UnificationProblem {
    pub fn new(
        equal_pairs: Vec<(TypeTerm, TypeTerm)>,
        subtype_pairs: Vec<(TypeTerm, TypeTerm)>,
        trait_pairs: Vec<(TypeTerm, TypeTerm)>
    ) -> Self {
        UnificationProblem {
            σ: HashMap::new(),

            equal_pairs: equal_pairs.into_iter().map(|(lhs,rhs)|
                UnificationPair{
                    lhs,rhs,
                    halo: TypeTerm::unit(),
                    addr: Vec::new()
                }).collect(),

            subtype_pairs: subtype_pairs.into_iter().map(|(lhs,rhs)|
                UnificationPair{
                    lhs,rhs,
                    halo: TypeTerm::unit(),
                    addr: Vec::new()
                }).collect(),

            trait_pairs: trait_pairs.into_iter().map(|(lhs,rhs)|
                UnificationPair{
                    lhs,rhs,
                    halo: TypeTerm::unit(),
                    addr: Vec::new()
                }).collect(),

            upper_bounds: HashMap::new(),
            lower_bounds: HashMap::new(),
        }
    }

    pub fn new_eq(eqs: Vec<(TypeTerm, TypeTerm)>) -> Self {
        UnificationProblem::new( eqs, Vec::new(), Vec::new() )
    }

    pub fn new_sub(subs: Vec<(TypeTerm, TypeTerm)>) -> Self {
        UnificationProblem::new( Vec::new(), subs, Vec::new() )
    }

    /// update all values in substitution
    pub fn reapply_subst(&mut self) {
        let mut new_σ = HashMap::new();
        for (v, tt) in self.σ.iter() {
            let mut tt = tt.clone().normalize();
            tt.apply_subst(&self.σ);
            tt = tt.normalize();
            //eprintln!("update σ : {:?} --> {:?}", v, tt);
            new_σ.insert(v.clone(), tt);
        }
        self.σ = new_σ;
    }

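    /// Evaluate a single equation `lhs = rhs`: bind variables (with an occurs
    /// check), compare atoms directly, and decompose ladders and applications
    /// into further equations that are pushed back onto `equal_pairs`.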
    pub fn eval_equation(&mut self, unification_pair: UnificationPair) -> Result<(), UnificationError> {
        match (&unification_pair.lhs, &unification_pair.rhs) {
            (TypeTerm::TypeID(TypeID::Var(varid)), t) |
            (t, TypeTerm::TypeID(TypeID::Var(varid))) => {
                if ! t.contains_var( *varid ) {
                    self.σ.insert(TypeID::Var(*varid), t.clone());
                    self.reapply_subst();
                    Ok(())
                } else if t == &TypeTerm::TypeID(TypeID::Var(*varid)) {
                    Ok(())
                } else {
                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(*varid)), t2: t.clone() })
                }
            }

            (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
                if a1 == a2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
            }
            (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
                if n1 == n2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
            }
            (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
                if c1 == c2 { Ok(()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
            }

            (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) |
            (TypeTerm::App(a1), TypeTerm::App(a2)) => {
                if a1.len() == a2.len() {
                    for (i, (x, y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate().rev() {
                        let mut new_addr = unification_pair.addr.clone();
                        new_addr.push(i);
                        self.equal_pairs.push(
                            UnificationPair {
                                lhs: x,
                                rhs: y,
                                halo: TypeTerm::unit(),
                                addr: new_addr
                            });
                    }
                    Ok(())
                } else {
                    Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
                }
            }

            _ => Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
        }
    }

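    /// Record the constraint `new_lower_bound <= Var(v)`.
    /// If a lower bound already exists, keep the more general of the two;
    /// fail on a cyclic bound or when the two bounds are not comparable.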
    pub fn add_lower_subtype_bound(&mut self, v: u64, new_lower_bound: TypeTerm) -> Result<(),()> {

        if new_lower_bound == TypeTerm::TypeID(TypeID::Var(v)) {
            return Ok(());
        }

        if new_lower_bound.contains_var(v) {
            // loop
            return Err(());
        }

        if let Some(lower_bound) = self.lower_bounds.get(&v).cloned() {
            // eprintln!("var already exists. check max. type");
            if let Ok(halo) = self.eval_subtype(
                UnificationPair {
                    lhs: lower_bound.clone(),
                    rhs: new_lower_bound.clone(),
                    halo: TypeTerm::unit(),
                    addr: vec![]
                }
            ) {
                // eprintln!("found more general lower bound");
                // eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
                // generalize variable type to supertype
                self.lower_bounds.insert(v, new_lower_bound);
                Ok(())
            } else if let Ok(halo) = self.eval_subtype(
                UnificationPair{
                    lhs: new_lower_bound,
                    rhs: lower_bound,
                    halo: TypeTerm::unit(),
                    addr: vec![]
                }
            ) {
                // eprintln!("OK, is already larger type");
                Ok(())
            } else {
                // eprintln!("violated subtype restriction");
                Err(())
            }
        } else {
            // eprintln!("set var {}'s lowerbound to {:?}", varid, t.clone());
            self.lower_bounds.insert(v, new_lower_bound);
            Ok(())
        }
    }

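    /// Record the constraint `Var(v) <= new_upper_bound`.
    /// If an upper bound already exists, only a tighter (more specific) bound
    /// is accepted; a cyclic or incomparable bound fails.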
    pub fn add_upper_subtype_bound(&mut self, v: u64, new_upper_bound: TypeTerm) -> Result<(),()> {
        if new_upper_bound == TypeTerm::TypeID(TypeID::Var(v)) {
            return Ok(());
        }

        if new_upper_bound.contains_var(v) {
            // loop
            return Err(());
        }

        if let Some(upper_bound) = self.upper_bounds.get(&v).cloned() {
            if let Ok(_halo) = self.eval_subtype(
                UnificationPair {
                    lhs: new_upper_bound.clone(),
                    rhs: upper_bound,
                    halo: TypeTerm::unit(),
                    addr: vec![]
                }
            ) {
                // found a lower upper bound
                self.upper_bounds.insert(v, new_upper_bound);
                Ok(())
            } else {
                Err(())
            }
        } else {
            self.upper_bounds.insert(v, new_upper_bound);
            Ok(())
        }
    }

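    /// Check a single subtype pair `lhs <= rhs`.
    /// On success, the returned "halo" type collects the parts of `lhs`
    /// (leftover upper ladder rungs, per-argument halos) that `rhs` does not
    /// require; the unit type means both sides matched exactly.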
    pub fn eval_subtype(&mut self, unification_pair: UnificationPair) -> Result<
        // ok: halo type
        TypeTerm,
        // error
        UnificationError
    > {
        // eprintln!("eval_subtype {:?} <=? {:?}", unification_pair.lhs, unification_pair.rhs);
        match (unification_pair.lhs.clone(), unification_pair.rhs.clone()) {

            /*
                Variables
            */

            (t, TypeTerm::TypeID(TypeID::Var(v))) => {
                //eprintln!("t <= variable");
                if self.add_lower_subtype_bound(v, t.clone()).is_ok() {
                    Ok(TypeTerm::unit())
                } else {
                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
                }
            }

            (TypeTerm::TypeID(TypeID::Var(v)), t) => {
                //eprintln!("variable <= t");
                if self.add_upper_subtype_bound(v, t.clone()).is_ok() {
                    Ok(TypeTerm::unit())
                } else {
                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::TypeID(TypeID::Var(v)), t2: t })
                }
            }

            /*
                Atoms
            */

            (TypeTerm::TypeID(a1), TypeTerm::TypeID(a2)) => {
                if a1 == a2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs}) }
            }
            (TypeTerm::Num(n1), TypeTerm::Num(n2)) => {
                if n1 == n2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
            }
            (TypeTerm::Char(c1), TypeTerm::Char(c2)) => {
                if c1 == c2 { Ok(TypeTerm::unit()) } else { Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs }) }
            }

            /*
                Ladders
            */

            (TypeTerm::Ladder(a1), TypeTerm::Ladder(a2)) => {
                let mut l1_iter = a1.into_iter().enumerate().rev();
                let mut l2_iter = a2.into_iter().rev();

                let mut halo_ladder = Vec::new();

                while let Some(rhs) = l2_iter.next() {
                    //eprintln!("take rhs = {:?}", rhs);
                    if let Some((i, lhs)) = l1_iter.next() {
                        //eprintln!("take lhs ({}) = {:?}", i, lhs);
                        let mut addr = unification_pair.addr.clone();
                        addr.push(i);
                        //eprintln!("addr = {:?}", addr);

                        match (lhs.clone(), rhs.clone()) {
                            (t, TypeTerm::TypeID(TypeID::Var(v))) => {

                                if self.add_upper_subtype_bound(v,t.clone()).is_ok() {
                                    let mut new_upper_bound_ladder = vec![ t ];

                                    if let Some(next_rhs) = l2_iter.next() {

                                        // TODO

                                    } else {
                                        // take everything

                                        while let Some((i,t)) = l1_iter.next() {
                                            new_upper_bound_ladder.push(t);
                                        }
                                    }

                                    new_upper_bound_ladder.reverse();
                                    if self.add_upper_subtype_bound(v, TypeTerm::Ladder(new_upper_bound_ladder)).is_ok() {
                                        // ok
                                    } else {
                                        return Err(UnificationError {
                                            addr,
                                            t1: lhs,
                                            t2: rhs
                                        });
                                    }
                                } else {
                                    return Err(UnificationError {
                                        addr,
                                        t1: lhs,
                                        t2: rhs
                                    });
                                }
                            }
                            (lhs, rhs) => {
                                if let Ok(ψ) = self.eval_subtype(
                                    UnificationPair {
                                        lhs: lhs.clone(), rhs: rhs.clone(),
                                        addr:addr.clone(), halo: TypeTerm::unit()
                                    }
                                ) {
                                    // ok.
                                    //eprintln!("rungs are subtypes. continue");
                                    halo_ladder.push(ψ);
                                } else {
                                    return Err(UnificationError {
                                        addr,
                                        t1: lhs,
                                        t2: rhs
                                    });
                                }
                            }
                        }
                    } else {
                        // not a subtype,
                        return Err(UnificationError {
                            addr: vec![],
                            t1: unification_pair.lhs,
                            t2: unification_pair.rhs
                        });
                    }
                }
                //eprintln!("left ladder fully consumed");

                for (i,t) in l1_iter {
                    halo_ladder.push(t);
                }
                halo_ladder.reverse();
                Ok(TypeTerm::Ladder(halo_ladder).strip().param_normalize())
            },

            (t, TypeTerm::Ladder(a1)) => {
                Err(UnificationError{ addr: unification_pair.addr, t1: t, t2: TypeTerm::Ladder(a1) })
            }

            (TypeTerm::Ladder(mut a1), t) => {
                let mut new_addr = unification_pair.addr.clone();
                new_addr.push( a1.len() -1 );
                if let Ok(halo) = self.eval_subtype(
                    UnificationPair {
                        lhs: a1.pop().unwrap(),
                        rhs: t.clone(),
                        halo: TypeTerm::unit(),
                        addr: new_addr
                    }
                ) {
                    a1.push(halo);
                    if a1.len() == 1 {
                        Ok(a1.pop().unwrap())
                    } else {
                        Ok(TypeTerm::Ladder(a1))
                    }
                } else {
                    Err(UnificationError{ addr: unification_pair.addr, t1: TypeTerm::Ladder(a1), t2: t })
                }
            }

            /*
                Application
            */

            (TypeTerm::App(a1), TypeTerm::App(a2)) => {
                if a1.len() == a2.len() {
                    let mut halo_args = Vec::new();
                    let mut n_halos_required = 0;

                    for (i, (mut x, mut y)) in a1.iter().cloned().zip(a2.iter().cloned()).enumerate() {
                        let mut new_addr = unification_pair.addr.clone();
                        new_addr.push(i);

                        x = x.strip();

                        // eprintln!("before strip: {:?}", y);
                        y = y.strip();
                        // eprintln!("after strip: {:?}", y);
                        // eprintln!("APP<> eval {:?} \n ?<=? {:?} ", x, y);

                        match self.eval_subtype(
                            UnificationPair {
                                lhs: x.clone(),
                                rhs: y.clone(),
                                halo: TypeTerm::unit(),
                                addr: new_addr,
                            }
                        ) {
                            Ok(halo) => {
                                if halo == TypeTerm::unit() {
                                    let mut y = y.clone();
                                    y.apply_subst(&self.σ);
                                    y = y.strip();
                                    let mut top = y.get_lnf_vec().first().unwrap().clone();
                                    halo_args.push(top.clone());
                                    //eprintln!("add top {:?}", top);
                                } else {
                                    //eprintln!("add halo {:?}", halo);
                                    if n_halos_required > 0 {
                                        let x = &mut halo_args[n_halos_required-1];
                                        if let TypeTerm::Ladder(argrs) = x {
                                            let mut a = a2[n_halos_required-1].clone();
                                            a.apply_subst(&self.σ);
                                            a = a.get_lnf_vec().first().unwrap().clone();
                                            argrs.push(a);
                                        } else {
                                            *x = TypeTerm::Ladder(vec![
                                                x.clone(),
                                                a2[n_halos_required-1].clone().get_lnf_vec().first().unwrap().clone()
                                            ]);

                                            x.apply_subst(&self.σ);
                                        }
                                    }

                                    halo_args.push(halo);
                                    n_halos_required += 1;
                                }
                            },
                            Err(err) => { return Err(err); }
                        }
                    }

                    if n_halos_required > 0 {
                        //eprintln!("halo args : {:?}", halo_args);
                        Ok(TypeTerm::App(halo_args))
                    } else {
                        Ok(TypeTerm::unit())
                    }
                } else {
                    Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
                }
            }

            _ => Err(UnificationError{ addr: unification_pair.addr, t1: unification_pair.lhs, t2: unification_pair.rhs })
        }
    }

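    /// Solve the constraint system: pop and evaluate all equations, evaluate
    /// the subtype pairs while collecting upper/lower variable bounds, fold
    /// those bounds into σ, and finally re-evaluate the subtype pairs under σ
    /// to obtain one halo type per pair.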
    pub fn solve(mut self) -> Result<(Vec<TypeTerm>, HashMap<TypeID, TypeTerm>), UnificationError> {
        // solve equations
        while let Some( mut equal_pair ) = self.equal_pairs.pop() {
            equal_pair.lhs.apply_subst(&self.σ);
            equal_pair.rhs.apply_subst(&self.σ);

            self.eval_equation(equal_pair)?;
        }

        // solve subtypes
        // eprintln!("------ SOLVE SUBTYPES ---- ");
        for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
            subtype_pair.lhs.apply_subst(&self.σ);
            subtype_pair.rhs.apply_subst(&self.σ);
            let _halo = self.eval_subtype( subtype_pair.clone() )?.strip();
        }

        // add variables from subtype bounds
        for (var_id, t) in self.upper_bounds.iter() {
            // eprintln!("VAR {} upper bound {:?}", var_id, t);
            self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
        }

        for (var_id, t) in self.lower_bounds.iter() {
            // eprintln!("VAR {} lower bound {:?}", var_id, t);
            self.σ.insert(TypeID::Var(*var_id), t.clone().strip());
        }

        self.reapply_subst();

        // eprintln!("------ MAKE HALOS -----");
        let mut halo_types = Vec::new();
        for mut subtype_pair in self.subtype_pairs.clone().into_iter() {
            subtype_pair.lhs = subtype_pair.lhs.apply_subst(&self.σ).clone().strip();
            subtype_pair.rhs = subtype_pair.rhs.apply_subst(&self.σ).clone().strip();

            let halo = self.eval_subtype( subtype_pair.clone() )?.strip();
            halo_types.push(halo);
        }

        // solve traits
        while let Some( trait_pair ) = self.trait_pairs.pop() {
            unimplemented!();
        }

        Ok((halo_types, self.σ))
    }
}

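/// Solve the single equation `t1 = t2` and return the resulting substitution.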
pub fn unify(
    t1: &TypeTerm,
    t2: &TypeTerm
) -> Result<HashMap<TypeID, TypeTerm>, UnificationError> {
    let unification = UnificationProblem::new_eq(vec![ (t1.clone(), t2.clone()) ]);
    Ok(unification.solve()?.1)
}

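/// Solve the single subtype constraint `t1 <= t2` and return the halo of `t1`
/// above `t2` together with the substitution; e.g. in the tests above,
/// `<List~Vec <Digit 10>~Char~ReprTree>` against `<List~Vec Item~ReprTree>`
/// yields the unit halo and the binding `Item ↦ <Digit 10>~Char`.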
pub fn subtype_unify(
    t1: &TypeTerm,
    t2: &TypeTerm
) -> Result<(TypeTerm, HashMap<TypeID, TypeTerm>), UnificationError> {
    let unification = UnificationProblem::new_sub(vec![ (t1.clone(), t2.clone()) ]);
    unification.solve().map( |(halos,σ)| ( halos.first().cloned().unwrap_or(TypeTerm::unit()), σ) )
}

//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\