Compare commits

..

22 commits

Author SHA1 Message Date
a52c01d9e9
morphism base: store vec of seq-types 2025-02-09 12:46:14 +01:00
4155310e1e
fix tests 2025-02-09 12:46:13 +01:00
a534d33b7b
pretty: output escape character for \0 and \n 2025-02-09 12:46:13 +01:00
f33ad0a7e2
steiner tree: eliminate identity loops 2025-02-09 12:46:13 +01:00
8fb20e4e18
add Send+Sync trait bound to TypeDict 2025-02-09 12:46:13 +01:00
f7b8a20299
fix find_morphism_path
* also apply substitution from src-type match
* get this substitution as result from `enum_morphisms_with_subtyping`
2025-02-09 12:46:13 +01:00
9408703cd1
check if term is empty 2025-02-09 12:46:13 +01:00
575caa7c44
add Debug for Bimap & BimapTypeDict 2025-02-09 12:46:13 +01:00
b0b14fa04c
make TypeDict a trait & BimapTypeDict an impl 2025-02-09 12:46:13 +01:00
c6edd44eac
add steiner tree solver based on shortest path 2025-02-09 12:46:13 +01:00
ffb9906209
initial implementation of solver for steiner trees 2025-02-09 12:46:12 +01:00
adc8a43b69
morphism base: find shortest path instead of just some path 2025-02-09 12:46:12 +01:00
c0c0184d97
fix returned halo type in find_morphism_with_subtyping() 2025-02-09 12:46:12 +01:00
95fc28f80e
turn Morphism into trait and add find_morphism() function 2025-02-09 12:46:10 +01:00
fd5936209c
add test for find_morphism_path() 2025-02-09 12:45:19 +01:00
967ae5f30e
initial MorphismBase with DFS to find morphism paths 2025-02-09 12:45:19 +01:00
3c5d7111bc
Merge branch 'fix-pnf' into dev 2025-02-09 12:42:40 +01:00
a9a35aed1b
rewrite param_normalize() 2025-02-09 12:42:24 +01:00
4a6a35a897
pnf: add test for collapsing first application argument 2025-02-09 12:42:23 +01:00
4aa62d4813
Merge branch 'topic-sugar' into dev 2025-02-09 12:40:11 +01:00
c6bad6046a
add sugared terms & pretty printing 2025-02-09 12:38:07 +01:00
c03db48fd2
TypeID: add Copy trait 2025-02-09 12:38:04 +01:00
3 changed files with 109 additions and 67 deletions

View file

@@ -27,4 +27,3 @@ pub use {
unification::*,
morphism::*
};

View file

@@ -2,6 +2,20 @@ use crate::term::TypeTerm;
//<<<<>>>><<>><><<>><<<*>>><<>><><<>><<<<>>>>\\
/// Splice two type ladders into one.
///
/// Searches `upper` for the first rung equal to the top rung of `lower`.
/// If found, the result is the rungs of `upper` strictly above the match
/// followed by all of `lower` (the matching rung and everything below it
/// in `upper` are replaced by `lower`). If no rung matches, `lower` is
/// simply appended below `upper`.
///
/// An empty `lower` leaves `upper` unchanged (the original indexed
/// `lower[0]` unconditionally and would panic in that case).
pub fn splice_ladders( mut upper: Vec< TypeTerm >, mut lower: Vec< TypeTerm > ) -> Vec< TypeTerm > {
    // Nothing to splice in: the upper ladder stands alone.
    if lower.is_empty() {
        return upper;
    }

    // Find the first rung of `upper` that equals the top rung of `lower`.
    if let Some(i) = upper.iter().position(|rung| *rung == lower[0]) {
        // Keep only the rungs above the match; `lower` supplies the rest.
        // `truncate` avoids the intermediate clone-collect of upper[0..i].
        upper.truncate(i);
    }

    upper.append(&mut lower);
    upper
}
impl TypeTerm {
/// transmute type into Parameter-Normal-Form (PNF)
///
@@ -10,88 +24,99 @@ impl TypeTerm {
/// <Seq <Digit 10>>~<Seq Char>
/// ⇒ <Seq <Digit 10>~Char>
/// ```
pub fn param_normalize(self) -> Self {
pub fn param_normalize(mut self) -> Self {
match self {
TypeTerm::Ladder(mut rungs) => {
if rungs.len() > 0 {
// normalize all rungs separately
for r in rungs.iter_mut() {
*r = r.clone().param_normalize();
}
let mut new_rungs = Vec::new();
while let Some(bottom) = rungs.pop() {
if let Some(last_but) = rungs.last_mut() {
match (bottom, last_but) {
(TypeTerm::App(bot_args), TypeTerm::App(last_args)) => {
if bot_args.len() == last_args.len() {
let mut new_rung_params = Vec::new();
let mut require_break = false;
// take top-rung
match rungs.remove(0) {
TypeTerm::App(params_top) => {
let mut params_ladders = Vec::new();
let mut tail : Vec<TypeTerm> = Vec::new();
if bot_args.len() > 0 {
if let Ok(_idx) = last_args[0].is_syntactic_subtype_of(&bot_args[0]) {
for i in 0 .. bot_args.len() {
// append all other rungs to ladders inside
// the application
for p in params_top {
params_ladders.push(vec![ p ]);
}
let spliced_type_ladder = splice_ladders(
last_args[i].clone().get_lnf_vec(),
bot_args[i].clone().get_lnf_vec()
);
let spliced_type =
if spliced_type_ladder.len() == 1 {
spliced_type_ladder[0].clone()
} else if spliced_type_ladder.len() > 1 {
TypeTerm::Ladder(spliced_type_ladder)
} else {
TypeTerm::unit()
};
for r in rungs {
match r {
TypeTerm::App(mut params_rung) => {
if params_rung.len() > 0 {
let mut first_param = params_rung.remove(0);
new_rung_params.push( spliced_type.param_normalize() );
}
if first_param == params_ladders[0][0] {
for (l, p) in params_ladders.iter_mut().skip(1).zip(params_rung) {
l.push(p.param_normalize());
}
} else {
params_rung.insert(0, first_param);
tail.push(TypeTerm::App(params_rung));
new_rung_params.push(
TypeTerm::Ladder(vec![
last_args[0].clone(),
bot_args[0].clone()
]).normalize()
);
for i in 1 .. bot_args.len() {
if let Ok(_idx) = last_args[i].is_syntactic_subtype_of(&bot_args[i]) {
let spliced_type_ladder = splice_ladders(
last_args[i].clone().get_lnf_vec(),
bot_args[i].clone().get_lnf_vec()
);
let spliced_type =
if spliced_type_ladder.len() == 1 {
spliced_type_ladder[0].clone()
} else if spliced_type_ladder.len() > 1 {
TypeTerm::Ladder(spliced_type_ladder)
} else {
TypeTerm::unit()
};
new_rung_params.push( spliced_type.param_normalize() );
} else {
new_rung_params.push( bot_args[i].clone() );
require_break = true;
}
}
}
}
}
TypeTerm::Ladder(mut rs) => {
for r in rs {
tail.push(r.param_normalize());
if require_break {
new_rungs.push( TypeTerm::App(new_rung_params) );
} else {
rungs.pop();
rungs.push(TypeTerm::App(new_rung_params));
}
}
atomic => {
tail.push(atomic);
} else {
new_rungs.push( TypeTerm::App(bot_args) );
}
}
(bottom, last_buf) => {
new_rungs.push( bottom );
}
}
let head = TypeTerm::App(
params_ladders.into_iter()
.map(
|mut l| {
l.dedup();
match l.len() {
0 => TypeTerm::unit(),
1 => l.remove(0),
_ => TypeTerm::Ladder(l).param_normalize()
}
}
)
.collect()
);
if tail.len() > 0 {
tail.insert(0, head);
TypeTerm::Ladder(tail)
} else {
head
}
} else {
new_rungs.push( bottom );
}
}
TypeTerm::Ladder(mut r) => {
r.append(&mut rungs);
TypeTerm::Ladder(r)
}
new_rungs.reverse();
atomic => {
rungs.insert(0, atomic);
TypeTerm::Ladder(rungs)
}
if new_rungs.len() > 1 {
TypeTerm::Ladder(new_rungs)
} else if new_rungs.len() == 1 {
new_rungs[0].clone()
} else {
TypeTerm::unit()
}
} else {
TypeTerm::unit()

View file

@@ -19,22 +19,40 @@ fn test_param_normalize() {
dict.parse("<A B>~<A C>").expect("parse error").param_normalize(),
);
assert_eq!(
dict.parse("<A~Y B>").expect("parse error"),
dict.parse("<A B>~<Y B>").expect("parse error").param_normalize(),
);
assert_eq!(
dict.parse("<A B~C D~E>").expect("parse error"),
dict.parse("<A B D>~<A C D>~<A C E>").expect("parse errror").param_normalize(),
);
assert_eq!(
dict.parse("<A~X B~C D~E>").expect("parse error"),
dict.parse("<A B D>~<A B~C E>~<X C E>").expect("parse errror").param_normalize(),
);
assert_eq!(
dict.parse("<Seq <Digit 10>~Char>").expect("parse error"),
dict.parse("<Seq <Digit 10>>~<Seq Char>").expect("parse errror").param_normalize(),
);
assert_eq!(
dict.parse("<A <B C~D~E> F~G H H>").expect("parse error"),
dict.parse("<Seq Char> ~ <<ValueDelim '\\0'> Char> ~ <<ValueDelim '\\0'> Ascii~x86.UInt8>").expect("parse error").param_normalize(),
dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
);
assert_eq!(
dict.parse("<Seq Char~Ascii> ~ <<ValueDelim '\\0'> Char~Ascii> ~ <<ValueDelim '\\0'> x86.UInt8>").expect("parse error").param_normalize(),
dict.parse("<Seq~<ValueDelim '\\0'> Char~Ascii~x86.UInt8>").expect("parse error")
);
assert_eq!(
dict.parse("<A~Y <B C~D~E> F H H>").expect("parse error"),
dict.parse("<A <B C> F H H>
~<A <B D> F H H>
~<A <B E> F H H>
~<A <B E> G H H>").expect("parse errror")
~<A~Y <B E> F H H>").expect("parse errror")
.param_normalize(),
);
}