partial_degeneralize: add a version that takes no "todegen" argument

* spot/twaalgos/degen.cc,
  spot/twaalgos/degen.hh (is_partially_degeneralizable): New function.
  (partial_degeneralize): New version without todegen argument.
* tests/python/pdegen.py: Add test cases.
Alexandre Duret-Lutz 2020-02-04 17:31:12 +01:00
parent 8df616ee0d
commit bf4a0ccffd
3 changed files with 149 additions and 4 deletions
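For illustration, here is a minimal usage sketch of the two overloads (not part of the commit; it assumes the default spot::translator turns "GFa & GFb" into a generalized-Büchi automaton with acceptance Inf(0)&Inf(1)):

    #include <iostream>
    #include <spot/tl/parse.hh>
    #include <spot/twaalgos/translate.hh>
    #include <spot/twaalgos/degen.hh>
    #include <spot/twaalgos/hoa.hh>

    int main()
    {
      // Translate "GFa & GFb"; the default translator should produce a
      // generalized-Büchi automaton whose acceptance is Inf(0)&Inf(1).
      spot::translator trans;
      spot::twa_graph_ptr aut = trans.run(spot::parse_formula("GFa & GFb"));

      // New overload: apply every possible partial degeneralization.
      spot::twa_graph_ptr all = spot::partial_degeneralize(aut);

      // Existing overload: degeneralize a single chosen mark, here the
      // one suggested by the new is_partially_degeneralizable() helper.
      if (spot::acc_cond::mark_t m = spot::is_partially_degeneralizable(aut))
        aut = spot::partial_degeneralize(aut, m);

      spot::print_hoa(std::cout, all) << '\n';
    }

On such an input both calls should yield an equivalent Büchi (single Inf) automaton; the no-argument overload simply loops until is_partially_degeneralizable() returns an empty mark.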

spot/twaalgos/degen.cc

@@ -837,7 +837,6 @@ namespace spot
    return updated;
  }

  [[noreturn]] static void
  report_invalid_partial_degen_arg(acc_cond::mark_t todegen,
                                   acc_cond::acc_code& cond)
@@ -850,10 +849,67 @@ namespace spot
}
}
  acc_cond::mark_t
  is_partially_degeneralizable(const const_twa_graph_ptr& aut,
                               bool allow_inf, bool allow_fin)
  {
    auto& code = aut->get_acceptance();
    if (code.empty())
      return {};
    if (allow_fin)
      allow_fin = is_deterministic(aut);
    acc_cond::mark_t res = {};
    unsigned res_sz = -1U;
    auto update = [&](const acc_cond::mark_t& m)
      {
        unsigned sz = m.count();
        if (sz > 1 && sz < res_sz)
          {
            res_sz = sz;
            res = m;
          }
        // If we have found a pair to degeneralize, we
        // won't find anything smaller, so we can stop looking.
        return res_sz == 2;
      };
    unsigned pos = code.size();
    do
      {
        switch (code[pos - 1].sub.op)
          {
          case acc_cond::acc_op::And:
          case acc_cond::acc_op::Or:
            --pos;
            break;
          case acc_cond::acc_op::Fin:
          case acc_cond::acc_op::FinNeg:
            pos -= 2;
            if (allow_fin)
              if (update(code[pos].mark))
                return res;
            break;
          case acc_cond::acc_op::Inf:
          case acc_cond::acc_op::InfNeg:
            pos -= 2;
            if (allow_inf)
              if (update(code[pos].mark))
                return res;
            break;
          }
      }
    while (pos > 0);
    return res;
  }

  twa_graph_ptr
  partial_degeneralize(const const_twa_graph_ptr& a,
                       acc_cond::mark_t todegen)
  {
    auto res = make_twa_graph(a->get_dict());
    res->copy_ap_of(a);
@@ -1023,6 +1079,14 @@ namespace spot
    return res;
  }

  twa_graph_ptr
  partial_degeneralize(twa_graph_ptr a)
  {
    while (acc_cond::mark_t m = is_partially_degeneralizable(a))
      a = partial_degeneralize(a, m);
    return a;
  }

  std::vector<acc_cond::mark_t>
  propagate_marks_vector(const const_twa_graph_ptr& aut,

spot/twaalgos/degen.hh

@@ -104,12 +104,47 @@ namespace spot
/// also be degeneralized if the input automaton is deterministic.
///
/// If this function is called with a value of \a todegen that does
/// not match a conjunction of Inf(.), or in a deterministic
/// automaton a disjunction of Fin(.), an std::runtime_error
/// exception is thrown.
///
/// The version of the function that has no \a todegen argument will
/// perform all possible partial degeneralizations, and may return
/// the input automaton unmodified if no partial degeneralization is
/// possible.
///
/// @{
SPOT_API twa_graph_ptr
partial_degeneralize(const const_twa_graph_ptr& a,
acc_cond::mark_t todegen);
SPOT_API twa_graph_ptr
partial_degeneralize(twa_graph_ptr a);
/// @}
/// \brief Is the automaton partially degeneralizable?
///
/// Return a mark `M={m₁, m₂, ..., mₙ}` such that either (1)
/// `Inf(m₁)&Inf(m₂)&...&Inf(mₙ)` appears in the acceptance
/// condition of \a aut, or (2) \a aut is deterministic and
/// `Fin(m₁)|Fin(m₂)|...|Fin(mₙ)` appears in its acceptance condition.
///
/// If multiple such marks exist, the smallest one is returned.
/// (This is important in case of overlapping options.  E.g., in the
/// formula `Inf(0)&Inf(1)&Inf(3) | (Inf(0)&Inf(1))&Fin(2)` we have
/// two possible degeneralization options: `{0,1,3}` and `{0,1}`.
/// Degeneralizing for `{0,1,3}` and then `{0,1}` could enlarge the
/// automaton by a factor of 6, while degeneralizing by `{0,1}` and
/// then some `{x,y}` may enlarge it only by a factor of 4.)
///
/// If the automaton is not partially degeneralizable, an empty
/// mark is returned.
///
/// The optional arguments \a allow_inf and \a allow_fin can be set
/// to false to disallow one type of match.
SPOT_API acc_cond::mark_t
is_partially_degeneralizable(const const_twa_graph_ptr& aut,
bool allow_inf = true,
bool allow_fin = true);
/// \ingroup twa_algorithms
/// \brief Propagate marks around the automaton
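To illustrate the \a allow_inf and \a allow_fin flags, here is a small hand-built sketch (not from the test suite) using a one-state deterministic automaton whose acceptance mixes a Fin(.) disjunction with an Inf(.) conjunction:

    #include <iostream>
    #include <spot/twa/twagraph.hh>
    #include <spot/twaalgos/degen.hh>

    int main()
    {
      auto dict = spot::make_bdd_dict();
      auto aut = spot::make_twa_graph(dict);
      bdd p = bdd_ithvar(aut->register_ap("p"));
      // Acceptance mixing a Fin(.) disjunction with an Inf(.) conjunction.
      spot::acc_cond::acc_code code("(Fin(0)|Fin(1)) & (Inf(2)&Inf(3))");
      aut->set_acceptance(4, code);
      aut->new_states(1);
      aut->set_init_state(0U);
      // Two complementary self-loops keep the automaton deterministic,
      // which is a prerequisite for the Fin(.) case to be considered.
      aut->new_edge(0, 0, p, {2, 3});
      aut->new_edge(0, 0, !p, {0, 1});

      // Disallowing Fin matches should report the Inf(.) conjunction {2,3};
      // disallowing Inf matches should report the Fin(.) disjunction {0,1}.
      std::cout << spot::is_partially_degeneralizable(aut, true, false) << '\n'
                << spot::is_partially_degeneralizable(aut, false, true) << '\n';
    }

Both candidate marks have size 2 here; on the aut12 example in the tests below, the default call instead prefers the smaller {0,1} over {0,1,3}.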

tests/python/pdegen.py

@@ -73,10 +73,12 @@ State: 1
--END--
""")
assert spot.is_partially_degeneralizable(a) == [0, 1]
da = spot.partial_degeneralize(a, [0, 1])
assert da.equivalent_to(a)
assert da.num_states() == 2
assert spot.is_partially_degeneralizable(b) == [0, 1]
db = spot.partial_degeneralize(b, [0, 1])
assert db.equivalent_to(b)
assert db.num_states() == 3
@@ -100,10 +102,12 @@ State: 2 "1#0" {1}
--END--"""
c = spot.automaton("randaut -A'(Fin(0)&Inf(1)&Inf(2))|Fin(2)' 1 |")
assert spot.is_partially_degeneralizable(c) == [1, 2]
dc = spot.partial_degeneralize(c, [1, 2])
assert dc.equivalent_to(c)
assert str(dc.get_acceptance()) == '(Fin(0) & Inf(2)) | Fin(1)'
assert spot.is_partially_degeneralizable(d) == []
dd = spot.partial_degeneralize(d, [])
assert dd.equivalent_to(d)
assert dd.num_states() == 1
@@ -138,6 +142,8 @@ State: 3 "2#0"
[!0] 2
--END--"""
assert spot.is_partially_degeneralizable(de) == []
try:
    df = spot.partial_degeneralize(f, [0, 1])
except RuntimeError as e:
@@ -157,6 +163,7 @@ State: 7 [0&!1&!2] 0 {4 7} --END--""")
daut5 = spot.degeneralize_tba(aut5)
assert daut5.equivalent_to(aut5)
sets = list(range(aut5.num_sets()))
assert spot.is_partially_degeneralizable(aut5) == sets
pdaut5 = spot.partial_degeneralize(aut5, sets)
assert pdaut5.equivalent_to(aut5)
assert daut5.num_states() == 9
@@ -171,6 +178,7 @@ trans-labels explicit-labels trans-acc deterministic --BODY-- State: 0
daut6 = spot.degeneralize_tba(aut6)
assert daut6.equivalent_to(aut6)
sets = list(range(aut6.num_sets()))
assert spot.is_partially_degeneralizable(aut6) == sets
pdaut6 = spot.partial_degeneralize(aut6, sets)
assert pdaut6.equivalent_to(aut6)
assert daut6.num_states() == 8
@@ -187,6 +195,7 @@ State: 0 [0&!1&2] 1 {2 3} State: 1 [0&!1&2] 0 {0 2} [0&!1&!2] 6 State: 2
daut7 = spot.degeneralize_tba(aut7)
assert daut7.equivalent_to(aut7)
sets = list(range(aut7.num_sets()))
assert spot.is_partially_degeneralizable(aut7) == sets
pdaut7 = spot.partial_degeneralize(aut7, sets)
assert pdaut7.equivalent_to(aut7)
assert daut7.num_states() == 10
@@ -202,6 +211,7 @@ State: 0 [!0&1&!2] 7 {0} State: 1 [!0&1&2] 1 {4} [0&!1&2] 6 {1 2} State: 2
daut8 = spot.degeneralize_tba(aut8)
assert daut8.equivalent_to(aut8)
sets = list(range(aut8.num_sets()))
assert spot.is_partially_degeneralizable(aut8) == sets
pdaut8 = spot.partial_degeneralize(aut8, sets)
assert pdaut8.equivalent_to(aut8)
assert daut8.num_states() == 22
@@ -228,6 +238,7 @@ State: 2
[0] 0 {1}
[!0] 1
--END--""")
assert spot.is_partially_degeneralizable(aut10) == [0, 1]
pdaut10 = spot.partial_degeneralize(aut10, [0, 1])
assert pdaut10.equivalent_to(aut10)
assert pdaut10.to_str() == """HOA: v1
@@ -262,6 +273,7 @@ State: 2
[0] 0 {1}
[!0] 1
--END--""")
assert spot.is_partially_degeneralizable(aut11) == [0, 1]
pdaut11 = spot.partial_degeneralize(aut11, [0, 1])
assert pdaut11.equivalent_to(aut11)
assert pdaut11.to_str() == """HOA: v1
@@ -280,3 +292,37 @@ State: 2
[0] 0 {2}
[!0] 1
--END--"""
aut12 = spot.automaton("""HOA: v1
States: 3
Start: 0
AP: 1 "p0"
Acceptance: 4 Inf(0)&Inf(1)&Inf(3) | (Inf(0)&Inf(1))&Fin(2)
--BODY--
State: 0
[0] 1 {0}
State: 1
[0] 2 {2}
[!0] 2
State: 2
[0] 2 {1}
[0] 0
[!0] 1 {3}
--END--""")
assert spot.is_partially_degeneralizable(aut12) == [0,1]
aut12b = spot.partial_degeneralize(aut12, [0,1])
aut12c = spot.partial_degeneralize(aut12b, [1,2])
assert aut12c.equivalent_to(aut12)
assert aut12c.num_states() == 9
aut12d = spot.partial_degeneralize(aut12, [0,1,3])
aut12e = spot.partial_degeneralize(aut12d, [0,1])
assert aut12e.equivalent_to(aut12)
assert aut12e.num_states() == 11
aut12f = spot.partial_degeneralize(aut12)
assert aut12f.equivalent_to(aut12)
assert aut12f.num_states() == 9
aut12g = spot.partial_degeneralize(aut12f)
assert aut12f == aut12g