Fix verbatim blocks of Doxygen comments.
* src/ltlast/formula.hh, src/ltlvisit/contain.hh, src/ltlvisit/randomltl.hh,
src/ltlvisit/reduce.hh, src/ltlvisit/remove_x.hh, src/ltlvisit/simplify.hh,
src/ltlvisit/snf.hh, src/misc/minato.hh, src/misc/optionmap.hh,
src/saba/sabacomplementtgba.hh, src/taalgos/emptinessta.hh,
src/taalgos/minimize.hh, src/taalgos/tgba2ta.hh, src/tgba/tgbakvcomplement.hh,
src/tgbaalgos/cycles.hh, src/tgbaalgos/eltl2tgba_lacim.hh,
src/tgbaalgos/gtec/gtec.hh, src/tgbaalgos/gv04.hh, src/tgbaalgos/ltl2taa.hh,
src/tgbaalgos/ltl2tgba_fm.hh, src/tgbaalgos/ltl2tgba_lacim.hh,
src/tgbaalgos/magic.hh, src/tgbaalgos/minimize.hh, src/tgbaalgos/randomgraph.hh,
src/tgbaalgos/se05.hh, src/tgbaalgos/simulation.hh, src/tgbaalgos/tau03.hh,
src/tgbaalgos/tau03opt.hh: Surround verbatim blocks with /** ... */ instead
of using /// on each line.  Otherwise Doxygen will output the leading "///"
tokens -- apparently this is a feature.
* src/sanity/style.test: Strip multi-line comments before checking code style.
parent 1ec9cebe58
commit 1cd9b204ed
29 changed files with 758 additions and 745 deletions
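As a condensed illustration of the rewrite applied to each header file below
(this snippet is not part of the diff; the BibTeX fields are elided with "...",
and the full entries appear in the hunks), a reference block written as

    /// \verbatim
    /// @InProceedings{ etessami.00.concur,
    ///   author = {Kousha Etessami and Gerard J. Holzmann},
    ///   ...
    /// }
    /// \endverbatim

becomes a single C-style Doxygen comment

    /** \verbatim
        @InProceedings{ etessami.00.concur,
          author = {Kousha Etessami and Gerard J. Holzmann},
          ...
        }
        \endverbatim */

so that Doxygen no longer copies the leading "///" tokens into the rendered
verbatim output.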
|
|
@ -186,20 +186,20 @@ namespace spot
|
|||
/// \brief Whether the formula is purely eventual.
|
||||
///
|
||||
/// Pure eventuality formulae are defined in
|
||||
/// \verbatim
|
||||
/// @InProceedings{ etessami.00.concur,
|
||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
/// title = {Optimizing {B\"u}chi Automata},
|
||||
/// booktitle = {Proceedings of the 11th International Conference on
|
||||
/// Concurrency Theory (Concur'2000)},
|
||||
/// pages = {153--167},
|
||||
/// year = {2000},
|
||||
/// editor = {C. Palamidessi},
|
||||
/// volume = {1877},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ etessami.00.concur,
|
||||
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
title = {Optimizing {B\"u}chi Automata},
|
||||
booktitle = {Proceedings of the 11th International Conference on
|
||||
Concurrency Theory (Concur'2000)},
|
||||
pages = {153--167},
|
||||
year = {2000},
|
||||
editor = {C. Palamidessi},
|
||||
volume = {1877},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// A word that satisfies a pure eventuality can be prefixed by
|
||||
/// anything and still satisfies the formula.
|
||||
|
|
@ -211,20 +211,20 @@ namespace spot
|
|||
/// \brief Whether a formula is purely universal.
|
||||
///
|
||||
/// Purely universal formulae are defined in
|
||||
/// \verbatim
|
||||
/// @InProceedings{ etessami.00.concur,
|
||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
/// title = {Optimizing {B\"u}chi Automata},
|
||||
/// booktitle = {Proceedings of the 11th International Conference on
|
||||
/// Concurrency Theory (Concur'2000)},
|
||||
/// pages = {153--167},
|
||||
/// year = {2000},
|
||||
/// editor = {C. Palamidessi},
|
||||
/// volume = {1877},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ etessami.00.concur,
|
||||
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
title = {Optimizing {B\"u}chi Automata},
|
||||
booktitle = {Proceedings of the 11th International Conference on
|
||||
Concurrency Theory (Concur'2000)},
|
||||
pages = {153--167},
|
||||
year = {2000},
|
||||
editor = {C. Palamidessi},
|
||||
volume = {1877},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// Any (non-empty) suffix of a word that satisfies a purely
|
||||
/// universal formula also satisfies the formula.
|
||||
|
|
|
|||
|
|
@ -83,22 +83,22 @@ namespace spot
|
|||
/// \brief Reduce a formula using language containment relationships.
|
||||
///
|
||||
/// The method is taken from table 4.1 in
|
||||
/// \verbatim
|
||||
///@TechReport{ tauriainen.03.a83,
|
||||
/// author = {Heikki Tauriainen},
|
||||
/// title = {On Translating Linear Temporal Logic into Alternating and
|
||||
/// Nondeterministic Automata},
|
||||
/// institution = {Helsinki University of Technology, Laboratory for
|
||||
/// Theoretical Computer Science},
|
||||
/// address = {Espoo, Finland},
|
||||
/// month = dec,
|
||||
/// number = {A83},
|
||||
/// pages = {132},
|
||||
/// type = {Research Report},
|
||||
/// year = {2003},
|
||||
/// note = {Reprint of Licentiate's thesis}
|
||||
///}
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@TechReport{ tauriainen.03.a83,
|
||||
author = {Heikki Tauriainen},
|
||||
title = {On Translating Linear Temporal Logic into Alternating and
|
||||
Nondeterministic Automata},
|
||||
institution = {Helsinki University of Technology, Laboratory for
|
||||
Theoretical Computer Science},
|
||||
address = {Espoo, Finland},
|
||||
month = dec,
|
||||
number = {A83},
|
||||
pages = {132},
|
||||
type = {Research Report},
|
||||
year = {2003},
|
||||
note = {Reprint of Licentiate's thesis}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// (The "dagged" cells in the tables are not handled here.)
|
||||
///
|
||||
|
|
|
|||
|
|
@ -117,24 +117,24 @@ namespace spot
|
|||
///
|
||||
/// The default priorities are defined as follows:
|
||||
///
|
||||
/// \verbatim
|
||||
/// ap n
|
||||
/// false 1
|
||||
/// true 1
|
||||
/// not 1
|
||||
/// F 1
|
||||
/// G 1
|
||||
/// X 1
|
||||
/// equiv 1
|
||||
/// implies 1
|
||||
/// xor 1
|
||||
/// R 1
|
||||
/// U 1
|
||||
/// W 1
|
||||
/// M 1
|
||||
/// and 1
|
||||
/// or 1
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
ap n
|
||||
false 1
|
||||
true 1
|
||||
not 1
|
||||
F 1
|
||||
G 1
|
||||
X 1
|
||||
equiv 1
|
||||
implies 1
|
||||
xor 1
|
||||
R 1
|
||||
U 1
|
||||
W 1
|
||||
M 1
|
||||
and 1
|
||||
or 1
|
||||
\endverbatim */
|
||||
///
|
||||
/// Where \c n is the number of atomic propositions in the
|
||||
/// set passed to the constructor.
|
||||
|
|
@ -168,17 +168,17 @@ namespace spot
|
|||
///
|
||||
/// The default priorities are defined as follows:
|
||||
///
|
||||
/// \verbatim
|
||||
/// ap n
|
||||
/// false 1
|
||||
/// true 1
|
||||
/// not 1
|
||||
/// equiv 1
|
||||
/// implies 1
|
||||
/// xor 1
|
||||
/// and 1
|
||||
/// or 1
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
ap n
|
||||
false 1
|
||||
true 1
|
||||
not 1
|
||||
equiv 1
|
||||
implies 1
|
||||
xor 1
|
||||
and 1
|
||||
or 1
|
||||
\endverbatim */
|
||||
///
|
||||
/// Where \c n is the number of atomic propositions in the
|
||||
/// set passed to the constructor.
|
||||
|
|
@ -207,19 +207,19 @@ namespace spot
|
|||
///
|
||||
/// The default priorities are defined as follows:
|
||||
///
|
||||
/// \verbatim
|
||||
/// eword 1
|
||||
/// boolform 1
|
||||
/// star 1
|
||||
/// star_b 1
|
||||
/// equal_b 1
|
||||
/// goto_b 1
|
||||
/// and 1
|
||||
/// andNLM 1
|
||||
/// or 1
|
||||
/// concat 1
|
||||
/// fusion 1
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
eword 1
|
||||
boolform 1
|
||||
star 1
|
||||
star_b 1
|
||||
equal_b 1
|
||||
goto_b 1
|
||||
and 1
|
||||
andNLM 1
|
||||
or 1
|
||||
concat 1
|
||||
fusion 1
|
||||
\endverbatim */
|
||||
///
|
||||
/// Where "boolfrom" designates a Boolean formula generated
|
||||
/// by random_boolean.
|
||||
|
|
@ -252,27 +252,27 @@ namespace spot
|
|||
///
|
||||
/// The default priorities are defined as follows:
|
||||
///
|
||||
/// \verbatim
|
||||
/// ap n
|
||||
/// false 1
|
||||
/// true 1
|
||||
/// not 1
|
||||
/// F 1
|
||||
/// G 1
|
||||
/// X 1
|
||||
/// Closure 1
|
||||
/// equiv 1
|
||||
/// implies 1
|
||||
/// xor 1
|
||||
/// R 1
|
||||
/// U 1
|
||||
/// W 1
|
||||
/// M 1
|
||||
/// and 1
|
||||
/// or 1
|
||||
/// EConcat 1
|
||||
/// UConcat 1
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
ap n
|
||||
false 1
|
||||
true 1
|
||||
not 1
|
||||
F 1
|
||||
G 1
|
||||
X 1
|
||||
Closure 1
|
||||
equiv 1
|
||||
implies 1
|
||||
xor 1
|
||||
R 1
|
||||
U 1
|
||||
W 1
|
||||
M 1
|
||||
and 1
|
||||
or 1
|
||||
EConcat 1
|
||||
UConcat 1
|
||||
\endverbatim */
|
||||
///
|
||||
/// Where \c n is the number of atomic propositions in the
|
||||
/// set passed to the constructor.
|
||||
|
|
|
|||
|
|
@ -80,20 +80,20 @@ namespace spot
|
|||
/// \brief Check whether a formula is a pure eventuality.
|
||||
///
|
||||
/// Pure eventuality formulae are defined in
|
||||
/// \verbatim
|
||||
/// @InProceedings{ etessami.00.concur,
|
||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
/// title = {Optimizing {B\"u}chi Automata},
|
||||
/// booktitle = {Proceedings of the 11th International Conference on
|
||||
/// Concurrency Theory (Concur'2000)},
|
||||
/// pages = {153--167},
|
||||
/// year = {2000},
|
||||
/// editor = {C. Palamidessi},
|
||||
/// volume = {1877},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ etessami.00.concur,
|
||||
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
title = {Optimizing {B\"u}chi Automata},
|
||||
booktitle = {Proceedings of the 11th International Conference on
|
||||
Concurrency Theory (Concur'2000)},
|
||||
pages = {153--167},
|
||||
year = {2000},
|
||||
editor = {C. Palamidessi},
|
||||
volume = {1877},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// A word that satisfies a pure eventuality can be prefixed by
|
||||
/// anything and still satisfies the formula.
|
||||
|
|
@ -109,20 +109,20 @@ namespace spot
|
|||
/// \brief Check whether a formula is purely universal.
|
||||
///
|
||||
/// Purely universal formulae are defined in
|
||||
/// \verbatim
|
||||
/// @InProceedings{ etessami.00.concur,
|
||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
/// title = {Optimizing {B\"u}chi Automata},
|
||||
/// booktitle = {Proceedings of the 11th International Conference on
|
||||
/// Concurrency Theory (Concur'2000)},
|
||||
/// pages = {153--167},
|
||||
/// year = {2000},
|
||||
/// editor = {C. Palamidessi},
|
||||
/// volume = {1877},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ etessami.00.concur,
|
||||
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
title = {Optimizing {B\"u}chi Automata},
|
||||
booktitle = {Proceedings of the 11th International Conference on
|
||||
Concurrency Theory (Concur'2000)},
|
||||
pages = {153--167},
|
||||
year = {2000},
|
||||
editor = {C. Palamidessi},
|
||||
volume = {1877},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// Any (non-empty) suffix of a word that satisfies if purely
|
||||
/// universal formula also satisfies the formula.
|
||||
|
|
|
|||
|
|
@ -31,18 +31,18 @@ namespace spot
|
|||
/// This function may also be applied to stutter-sensitive formulas,
|
||||
/// but in that case the resulting formula is not equivalent.
|
||||
///
|
||||
/// \verbatim
|
||||
/// @Article{ etessami.00.ipl,
|
||||
/// author = {Kousha Etessami},
|
||||
/// title = {A note on a question of {P}eled and {W}ilke regarding
|
||||
/// stutter-invariant {LTL}},
|
||||
/// journal = {Information Processing Letters},
|
||||
/// volume = {75},
|
||||
/// number = {6},
|
||||
/// year = {2000},
|
||||
/// pages = {261--263}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@Article{ etessami.00.ipl,
|
||||
author = {Kousha Etessami},
|
||||
title = {A note on a question of {P}eled and {W}ilke regarding
|
||||
stutter-invariant {LTL}},
|
||||
journal = {Information Processing Letters},
|
||||
volume = {75},
|
||||
number = {6},
|
||||
year = {2000},
|
||||
pages = {261--263}
|
||||
}
|
||||
\endverbatim */
|
||||
const formula* remove_x(const formula* f);
|
||||
|
||||
/// \brief Whether an LTL formula \a f is stutter-insensitive.
|
||||
|
|
@ -51,18 +51,18 @@ namespace spot
|
|||
/// <code>remove_x(f)</code> is equivalent to \a f. This only
|
||||
/// works for LTL formulas, not PSL formulas.
|
||||
///
|
||||
/// \verbatim
|
||||
/// @Article{ etessami.00.ipl,
|
||||
/// author = {Kousha Etessami},
|
||||
/// title = {A note on a question of {P}eled and {W}ilke regarding
|
||||
/// stutter-invariant {LTL}},
|
||||
/// journal = {Information Processing Letters},
|
||||
/// volume = {75},
|
||||
/// number = {6},
|
||||
/// year = {2000},
|
||||
/// pages = {261--263}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@Article{ etessami.00.ipl,
|
||||
author = {Kousha Etessami},
|
||||
title = {A note on a question of {P}eled and {W}ilke regarding
|
||||
stutter-invariant {LTL}},
|
||||
journal = {Information Processing Letters},
|
||||
volume = {75},
|
||||
number = {6},
|
||||
year = {2000},
|
||||
pages = {261--263}
|
||||
}
|
||||
\endverbatim */
|
||||
bool is_stutter_insensitive(const formula* f);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -102,19 +102,19 @@ namespace spot
|
|||
/// Returns whether \a f syntactically implies \a g.
|
||||
///
|
||||
/// This is adapted from
|
||||
/// \verbatim
|
||||
/// @InProceedings{ somenzi.00.cav,
|
||||
/// author = {Fabio Somenzi and Roderick Bloem},
|
||||
/// title = {Efficient {B\"u}chi Automata for {LTL} Formulae},
|
||||
/// booktitle = {Proceedings of the 12th International Conference on
|
||||
/// Computer Aided Verification (CAV'00)},
|
||||
/// pages = {247--263},
|
||||
/// year = {2000},
|
||||
/// volume = {1855},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ somenzi.00.cav,
|
||||
author = {Fabio Somenzi and Roderick Bloem},
|
||||
title = {Efficient {B\"u}chi Automata for {LTL} Formulae},
|
||||
booktitle = {Proceedings of the 12th International Conference on
|
||||
Computer Aided Verification (CAV'00)},
|
||||
pages = {247--263},
|
||||
year = {2000},
|
||||
volume = {1855},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
bool syntactic_implication(const formula* f, const formula* g);
|
||||
/// \brief Syntactic implication with one negated argument.
|
||||
|
|
|
|||
|
|
@ -37,16 +37,16 @@ namespace spot
|
|||
/// corresponds to the E° operation defined in the following
|
||||
/// paper.
|
||||
///
|
||||
/// \verbatim
|
||||
/// @Article{ bruggeman.96.tcs,
|
||||
/// author = {Anne Br{\"u}ggemann-Klein},
|
||||
/// title = {Regular Expressions into Finite Automata},
|
||||
/// journal = {Theoretical Computer Science},
|
||||
/// year = {1996},
|
||||
/// volume = {120},
|
||||
/// pages = {87--98}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@Article{ bruggeman.96.tcs,
|
||||
author = {Anne Br{\"u}ggemann-Klein},
|
||||
title = {Regular Expressions into Finite Automata},
|
||||
journal = {Theoretical Computer Science},
|
||||
year = {1996},
|
||||
volume = {120},
|
||||
pages = {87--98}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \param sere the SERE to rewrite
|
||||
/// \param cache an optional cache
|
||||
|
|
|
|||
|
|
@ -33,20 +33,20 @@ namespace spot
|
|||
///
|
||||
/// This algorithm implements a derecursived version the Minato-Morreale
|
||||
/// algorithm presented in the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{ minato.92.sasimi,
|
||||
/// author = {Shin-ichi Minato},
|
||||
/// title = {Fast Generation of Irredundant Sum-of-Products Forms
|
||||
/// from Binary Decision Diagrams},
|
||||
/// booktitle = {Proceedings of the third Synthesis and Simulation
|
||||
/// and Meeting International Interchange workshop
|
||||
/// (SASIMI'92)},
|
||||
/// pages = {64--73},
|
||||
/// year = {1992},
|
||||
/// address = {Kobe, Japan},
|
||||
/// month = {April}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ minato.92.sasimi,
|
||||
author = {Shin-ichi Minato},
|
||||
title = {Fast Generation of Irredundant Sum-of-Products Forms
|
||||
from Binary Decision Diagrams},
|
||||
booktitle = {Proceedings of the third Synthesis and Simulation
|
||||
and Meeting International Interchange workshop
|
||||
(SASIMI'92)},
|
||||
pages = {64--73},
|
||||
year = {1992},
|
||||
address = {Kobe, Japan},
|
||||
month = {April}
|
||||
}
|
||||
\endverbatim */
|
||||
class minato_isop
|
||||
{
|
||||
public:
|
||||
|
|
|
|||
|
|
@ -40,11 +40,11 @@ namespace spot
|
|||
/// sign). If not specified, the default value is 1.
|
||||
///
|
||||
/// The following three lines are equivalent.
|
||||
/// \verbatim
|
||||
/// optA !optB optC=4194304
|
||||
/// optA=1, optB=0, optC=4096K
|
||||
/// optC = 4M; optA !optB
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
optA !optB optC=4194304
|
||||
optA=1, optB=0, optC=4096K
|
||||
optC = 4M; optA !optB
|
||||
\endverbatim */
|
||||
///
|
||||
/// \return A non-null pointer to the option for which an expected integer
|
||||
/// value cannot be parsed.
|
||||
|
|
|
|||
|
|
@ -36,17 +36,17 @@ namespace spot
|
|||
/// are expected to reduce the size of the automaton.
|
||||
///
|
||||
/// This algorithm comes from:
|
||||
/// \verbatim
|
||||
/// @Article{ gurumurthy.03.lncs,
|
||||
/// title = {On complementing nondeterministic {B\"uchi} automata},
|
||||
/// author = {Gurumurthy, S. and Kupferman, O. and Somenzi, F. and
|
||||
/// Vardi, M.Y.},
|
||||
/// journal = {Lecture Notes in Computer Science},
|
||||
/// pages = {96--110},
|
||||
/// year = {2003},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@Article{ gurumurthy.03.lncs,
|
||||
title = {On complementing nondeterministic {B\"uchi} automata},
|
||||
author = {Gurumurthy, S. and Kupferman, O. and Somenzi, F. and
|
||||
Vardi, M.Y.},
|
||||
journal = {Lecture Notes in Computer Science},
|
||||
pages = {96--110},
|
||||
year = {2003},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// The construction is done on-the-fly, by the
|
||||
/// \c saba_complement_succ_iterator class.
|
||||
|
|
|
|||
|
|
@ -76,7 +76,20 @@ for dir in "${INCDIR-..}" "${INCDIR-..}"/../iface; do
|
|||
-0777 $file || diag "always put 'ingroup' before 'brief'"
|
||||
|
||||
# Strip comments and strings.
|
||||
sed 's,[ ]*//.*,,;s,"[^"]*","",g' < $file > $tmp
|
||||
#
|
||||
# Multi-line comments of the form
|
||||
# /* Line 1
|
||||
# Line 2
|
||||
# Line 3 */
|
||||
# are replaced by
|
||||
# //
|
||||
# //
|
||||
# //
|
||||
# to keep the line numbers correct in the diagnostics.
|
||||
perl -pe 'sub f {my $a = shift; $a =~ s:[^\n]*://:g; return "$a"}
|
||||
s,/\*(.*?)\*/,f($1),sge;
|
||||
s,//.*?\n,//\n,g;
|
||||
s,"[^"\n]*","",g' -0777 <$file >$tmp
|
||||
|
||||
grep '[ ]$' $tmp &&
|
||||
diag 'Trailing whitespace.'
|
||||
|
|
|
|||
|
|
@ -50,19 +50,19 @@ namespace spot
|
|||
/// was found empty. Otherwise the automaton accepts some run.
|
||||
///
|
||||
/// This is based on the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{ geldenhuys.06.spin,
|
||||
/// author = {Jaco Geldenhuys and Henri Hansen},
|
||||
/// title = {Larger Automata and Less Work for {LTL} Model Checking},
|
||||
/// booktitle = {Proceedings of the 13th International SPIN Workshop
|
||||
/// (SPIN'06)},
|
||||
/// year = {2006},
|
||||
/// pages = {53--70},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// volume = {3925},
|
||||
/// publisher = {Springer}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ geldenhuys.06.spin,
|
||||
author = {Jaco Geldenhuys and Henri Hansen},
|
||||
title = {Larger Automata and Less Work for {LTL} Model Checking},
|
||||
booktitle = {Proceedings of the 13th International SPIN Workshop
|
||||
(SPIN'06)},
|
||||
year = {2006},
|
||||
pages = {53--70},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
volume = {3925},
|
||||
publisher = {Springer}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// the implementation of spot::ta_check::check() is inspired from the
|
||||
/// two-pass algorithm of the paper above:
|
||||
|
|
|
|||
|
|
@ -38,22 +38,22 @@ namespace spot
|
|||
/// achieved using any algorithm based on partition refinement
|
||||
///
|
||||
/// For more detail about this type of algorithm, see the following paper:
|
||||
/// \verbatim
|
||||
/// @InProceedings{valmari.09.icatpn,
|
||||
/// author = {Antti Valmari},
|
||||
/// title = {Bisimilarity Minimization in in O(m logn) Time},
|
||||
/// booktitle = {Proceedings of the 30th International Conference on
|
||||
/// the Applications and Theory of Petri Nets
|
||||
/// (ICATPN'09)},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer},
|
||||
/// isbn = {978-3-642-02423-8},
|
||||
/// pages = {123--142},
|
||||
/// volume = 5606,
|
||||
/// url = {http://dx.doi.org/10.1007/978-3-642-02424-5_9},
|
||||
/// year = {2009}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{valmari.09.icatpn,
|
||||
author = {Antti Valmari},
|
||||
title = {Bisimilarity Minimization in in O(m logn) Time},
|
||||
booktitle = {Proceedings of the 30th International Conference on
|
||||
the Applications and Theory of Petri Nets
|
||||
(ICATPN'09)},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer},
|
||||
isbn = {978-3-642-02423-8},
|
||||
pages = {123--142},
|
||||
volume = 5606,
|
||||
url = {http://dx.doi.org/10.1007/978-3-642-02424-5_9},
|
||||
year = {2009}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \param ta_ the TA automaton to convert into a simplified TA
|
||||
ta*
|
||||
|
|
|
|||
|
|
@ -30,19 +30,19 @@ namespace spot
|
|||
/// \brief Build a spot::ta_explicit* (TA) from an LTL formula.
|
||||
///
|
||||
/// This is based on the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{ geldenhuys.06.spin,
|
||||
/// author = {Jaco Geldenhuys and Henri Hansen},
|
||||
/// title = {Larger Automata and Less Work for {LTL} Model Checking},
|
||||
/// booktitle = {Proceedings of the 13th International SPIN Workshop
|
||||
/// (SPIN'06)},
|
||||
/// year = {2006},
|
||||
/// pages = {53--70},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// volume = {3925},
|
||||
/// publisher = {Springer}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ geldenhuys.06.spin,
|
||||
author = {Jaco Geldenhuys and Henri Hansen},
|
||||
title = {Larger Automata and Less Work for {LTL} Model Checking},
|
||||
booktitle = {Proceedings of the 13th International SPIN Workshop
|
||||
(SPIN'06)},
|
||||
year = {2006},
|
||||
pages = {53--70},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
volume = {3925},
|
||||
publisher = {Springer}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \param tgba_to_convert The TGBA automaton to convert into a TA automaton
|
||||
///
|
||||
|
|
|
|||
|
|
@ -64,18 +64,18 @@ namespace spot
|
|||
/// \brief Build a complemented automaton.
|
||||
///
|
||||
/// The construction comes from:
|
||||
/// \verbatim
|
||||
/// @Article{ kupferman.05.tcs,
|
||||
/// title = {From complementation to certification},
|
||||
/// author = {Kupferman, O. and Vardi, M.Y.},
|
||||
/// journal = {Theoretical Computer Science},
|
||||
/// volume = {345},
|
||||
/// number = {1},
|
||||
/// pages = {83--100},
|
||||
/// year = {2005},
|
||||
/// publisher = {Elsevier}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@Article{ kupferman.05.tcs,
|
||||
title = {From complementation to certification},
|
||||
author = {Kupferman, O. and Vardi, M.Y.},
|
||||
journal = {Theoretical Computer Science},
|
||||
volume = {345},
|
||||
number = {1},
|
||||
pages = {83--100},
|
||||
year = {2005},
|
||||
publisher = {Elsevier}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// The original automaton is used as a States-based Generalized
|
||||
/// Büchi Automaton.
|
||||
|
|
|
|||
|
|
@ -30,19 +30,19 @@ namespace spot
|
|||
///
|
||||
/// This class implements a non-recursive version of the algorithm
|
||||
/// on page 170 of:
|
||||
/// \verbatim
|
||||
/// @Article{loizou.82.is,
|
||||
/// author = {George Loizou and Peter Thanisch},
|
||||
/// title = {Enumerating the Cycles of a Digraph: A New
|
||||
/// Preprocessing Strategy},
|
||||
/// journal = {Information Sciences},
|
||||
/// year = {1982},
|
||||
/// volume = {27},
|
||||
/// number = {3},
|
||||
/// pages = {163--182},
|
||||
/// month = aug
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@Article{loizou.82.is,
|
||||
author = {George Loizou and Peter Thanisch},
|
||||
title = {Enumerating the Cycles of a Digraph: A New
|
||||
Preprocessing Strategy},
|
||||
journal = {Information Sciences},
|
||||
year = {1982},
|
||||
volume = {27},
|
||||
number = {3},
|
||||
pages = {163--182},
|
||||
month = aug
|
||||
}
|
||||
\endverbatim */
|
||||
/// (the additional preprocessings described later in that paper are
|
||||
/// not implemented).
|
||||
///
|
||||
|
|
|
|||
|
|
@ -28,21 +28,21 @@ namespace spot
|
|||
/// \brief Build a spot::tgba_bdd_concrete from an ELTL formula.
|
||||
///
|
||||
/// This is based on the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{ couvreur.00.lacim,
|
||||
/// author = {Jean-Michel Couvreur},
|
||||
/// title = {Un point de vue symbolique sur la logique temporelle
|
||||
/// lin{\'e}aire},
|
||||
/// booktitle = {Actes du Colloque LaCIM 2000},
|
||||
/// month = {August},
|
||||
/// year = {2000},
|
||||
/// pages = {131--140},
|
||||
/// volume = {27},
|
||||
/// series = {Publications du LaCIM},
|
||||
/// publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
||||
/// editor = {Pierre Leroux}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ couvreur.00.lacim,
|
||||
author = {Jean-Michel Couvreur},
|
||||
title = {Un point de vue symbolique sur la logique temporelle
|
||||
lin{\'e}aire},
|
||||
booktitle = {Actes du Colloque LaCIM 2000},
|
||||
month = {August},
|
||||
year = {2000},
|
||||
pages = {131--140},
|
||||
volume = {27},
|
||||
series = {Publications du LaCIM},
|
||||
publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
||||
editor = {Pierre Leroux}
|
||||
}
|
||||
\endverbatim */
|
||||
/// \param f The formula to translate into an automaton.
|
||||
/// \param dict The spot::bdd_dict the constructed automata should use.
|
||||
/// \return A spot::tgba_bdd_concrete that recognizes the language of \a f.
|
||||
|
|
|
|||
|
|
@ -35,23 +35,23 @@ namespace spot
|
|||
/// \brief Check whether the language of an automate is empty.
|
||||
///
|
||||
/// This is based on the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{couvreur.99.fm,
|
||||
/// author = {Jean-Michel Couvreur},
|
||||
/// title = {On-the-fly Verification of Temporal Logic},
|
||||
/// pages = {253--271},
|
||||
/// editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
||||
/// booktitle = {Proceedings of the World Congress on Formal Methods in
|
||||
/// the Development of Computing Systems (FM'99)},
|
||||
/// publisher = {Springer-Verlag},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// volume = {1708},
|
||||
/// year = {1999},
|
||||
/// address = {Toulouse, France},
|
||||
/// month = {September},
|
||||
/// isbn = {3-540-66587-0}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{couvreur.99.fm,
|
||||
author = {Jean-Michel Couvreur},
|
||||
title = {On-the-fly Verification of Temporal Logic},
|
||||
pages = {253--271},
|
||||
editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
||||
booktitle = {Proceedings of the World Congress on Formal Methods in
|
||||
the Development of Computing Systems (FM'99)},
|
||||
publisher = {Springer-Verlag},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
volume = {1708},
|
||||
year = {1999},
|
||||
address = {Toulouse, France},
|
||||
month = {September},
|
||||
isbn = {3-540-66587-0}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// A recursive definition of the algorithm would look as follows,
|
||||
/// but the implementation is of course not recursive.
|
||||
|
|
@ -60,36 +60,36 @@ namespace spot
|
|||
/// positive DFS order or 0 if it is dead, SCC is and ACC are two
|
||||
/// stacks.)
|
||||
///
|
||||
/// \verbatim
|
||||
/// check(<Sigma, Q, delta, q, F>, H, SCC, ACC)
|
||||
/// if q is not in H // new state
|
||||
/// H[q] = H.size + 1
|
||||
/// SCC.push(<H[q], {}>)
|
||||
/// forall <a, s> : <q, _, a, s> in delta
|
||||
/// ACC.push(a)
|
||||
/// res = check(<Sigma, Q, delta, s, F>, H, SCC, ACC)
|
||||
/// if res
|
||||
/// return res
|
||||
/// <n, _> = SCC.top()
|
||||
/// if n = H[q]
|
||||
/// SCC.pop()
|
||||
/// mark_reachable_states_as_dead(<Sigma, Q, delta, q, F>, H$)
|
||||
/// return 0
|
||||
/// else
|
||||
/// if H[q] = 0 // dead state
|
||||
/// ACC.pop()
|
||||
/// return true
|
||||
/// else // state in stack: merge SCC
|
||||
/// all = {}
|
||||
/// do
|
||||
/// <n, a> = SCC.pop()
|
||||
/// all = all union a union { ACC.pop() }
|
||||
/// until n <= H[q]
|
||||
/// SCC.push(<n, all>)
|
||||
/// if all != F
|
||||
/// return 0
|
||||
/// return new emptiness_check_result(necessary data)
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
check(<Sigma, Q, delta, q, F>, H, SCC, ACC)
|
||||
if q is not in H // new state
|
||||
H[q] = H.size + 1
|
||||
SCC.push(<H[q], {}>)
|
||||
forall <a, s> : <q, _, a, s> in delta
|
||||
ACC.push(a)
|
||||
res = check(<Sigma, Q, delta, s, F>, H, SCC, ACC)
|
||||
if res
|
||||
return res
|
||||
<n, _> = SCC.top()
|
||||
if n = H[q]
|
||||
SCC.pop()
|
||||
mark_reachable_states_as_dead(<Sigma, Q, delta, q, F>, H$)
|
||||
return 0
|
||||
else
|
||||
if H[q] = 0 // dead state
|
||||
ACC.pop()
|
||||
return true
|
||||
else // state in stack: merge SCC
|
||||
all = {}
|
||||
do
|
||||
<n, a> = SCC.pop()
|
||||
all = all union a union { ACC.pop() }
|
||||
until n <= H[q]
|
||||
SCC.push(<n, all>)
|
||||
if all != F
|
||||
return 0
|
||||
return new emptiness_check_result(necessary data)
|
||||
\endverbatim */
|
||||
///
|
||||
/// check() returns 0 iff the automaton's language is empty. It
|
||||
/// returns an instance of emptiness_check_result. If the automaton
|
||||
|
|
|
|||
|
|
@ -34,23 +34,23 @@ namespace spot
|
|||
///
|
||||
/// The original algorithm, coming from the following paper, has only
|
||||
/// been slightly modified to work on transition-based automata.
|
||||
/// \verbatim
|
||||
/// @InProceedings{geldenhuys.04.tacas,
|
||||
/// author = {Jaco Geldenhuys and Antti Valmari},
|
||||
/// title = {Tarjan's Algorithm Makes On-the-Fly {LTL} Verification
|
||||
/// More Efficient},
|
||||
/// booktitle = {Proceedings of the 10th International Conference on Tools
|
||||
/// and Algorithms for the Construction and Analysis of Systems
|
||||
/// (TACAS'04)},
|
||||
/// editor = {Kurt Jensen and Andreas Podelski},
|
||||
/// pages = {205--219},
|
||||
/// year = {2004},
|
||||
/// publisher = {Springer-Verlag},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// volume = {2988},
|
||||
/// isbn = {3-540-21299-X}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{geldenhuys.04.tacas,
|
||||
author = {Jaco Geldenhuys and Antti Valmari},
|
||||
title = {Tarjan's Algorithm Makes On-the-Fly {LTL} Verification
|
||||
More Efficient},
|
||||
booktitle = {Proceedings of the 10th International Conference on Tools
|
||||
and Algorithms for the Construction and Analysis of Systems
|
||||
(TACAS'04)},
|
||||
editor = {Kurt Jensen and Andreas Podelski},
|
||||
pages = {205--219},
|
||||
year = {2004},
|
||||
publisher = {Springer-Verlag},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
volume = {2988},
|
||||
isbn = {3-540-21299-X}
|
||||
}
|
||||
\endverbatim */
|
||||
emptiness_check* explicit_gv04_check(const tgba* a,
|
||||
option_map o = option_map());
|
||||
}
|
||||
|
|
|
|||
|
|
@ -28,20 +28,20 @@ namespace spot
|
|||
/// \brief Build a spot::taa* from an LTL formula.
|
||||
///
|
||||
/// This is based on the following.
|
||||
/// \verbatim
|
||||
/// @techreport{HUT-TCS-A104,
|
||||
/// address = {Espoo, Finland},
|
||||
/// author = {Heikki Tauriainen},
|
||||
/// month = {September},
|
||||
/// note = {Doctoral dissertation},
|
||||
/// number = {A104},
|
||||
/// pages = {xii+229},
|
||||
/// title = {Automata and Linear Temporal Logic: Translations
|
||||
/// with Transition-Based Acceptance},
|
||||
/// type = {Research Report},
|
||||
/// year = {2006}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@techreport{HUT-TCS-A104,
|
||||
address = {Espoo, Finland},
|
||||
author = {Heikki Tauriainen},
|
||||
month = {September},
|
||||
note = {Doctoral dissertation},
|
||||
number = {A104},
|
||||
pages = {xii+229},
|
||||
title = {Automata and Linear Temporal Logic: Translations
|
||||
with Transition-Based Acceptance},
|
||||
type = {Research Report},
|
||||
year = {2006}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \param f The formula to translate into an automaton.
|
||||
/// \param dict The spot::bdd_dict the constructed automata should use.
|
||||
|
|
|
|||
|
|
@ -33,23 +33,23 @@ namespace spot
|
|||
/// \brief Build a spot::tgba_explicit* from an LTL formula.
|
||||
///
|
||||
/// This is based on the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{couvreur.99.fm,
|
||||
/// author = {Jean-Michel Couvreur},
|
||||
/// title = {On-the-fly Verification of Temporal Logic},
|
||||
/// pages = {253--271},
|
||||
/// editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
||||
/// booktitle = {Proceedings of the World Congress on Formal Methods in the
|
||||
/// Development of Computing Systems (FM'99)},
|
||||
/// publisher = {Springer-Verlag},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// volume = {1708},
|
||||
/// year = {1999},
|
||||
/// address = {Toulouse, France},
|
||||
/// month = {September},
|
||||
/// isbn = {3-540-66587-0}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{couvreur.99.fm,
|
||||
author = {Jean-Michel Couvreur},
|
||||
title = {On-the-fly Verification of Temporal Logic},
|
||||
pages = {253--271},
|
||||
editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
||||
booktitle = {Proceedings of the World Congress on Formal Methods in the
|
||||
Development of Computing Systems (FM'99)},
|
||||
publisher = {Springer-Verlag},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
volume = {1708},
|
||||
year = {1999},
|
||||
address = {Toulouse, France},
|
||||
month = {September},
|
||||
isbn = {3-540-66587-0}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \param f The formula to translate into an automaton.
|
||||
///
|
||||
|
|
@ -70,23 +70,23 @@ namespace spot
|
|||
/// from the same state with the same label (i.e., condition + acceptance
|
||||
/// conditions) will be merged. This correspond to an optimization
|
||||
/// described in the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{ sebastiani.03.charme,
|
||||
/// author = {Roberto Sebastiani and Stefano Tonetta},
|
||||
/// title = {"More Deterministic" vs. "Smaller" B{\"u}chi Automata for
|
||||
/// Efficient LTL Model Checking},
|
||||
/// booktitle = {Proceedings for the 12th Advanced Research Working
|
||||
/// Conference on Correct Hardware Design and Verification
|
||||
/// Methods (CHARME'03)},
|
||||
/// pages = {126--140},
|
||||
/// year = {2003},
|
||||
/// editor = {G. Goos and J. Hartmanis and J. van Leeuwen},
|
||||
/// volume = {2860},
|
||||
/// series = {Lectures Notes in Computer Science},
|
||||
/// month = {October},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ sebastiani.03.charme,
|
||||
author = {Roberto Sebastiani and Stefano Tonetta},
|
||||
title = {"More Deterministic" vs. "Smaller" B{\"u}chi Automata for
|
||||
Efficient LTL Model Checking},
|
||||
booktitle = {Proceedings for the 12th Advanced Research Working
|
||||
Conference on Correct Hardware Design and Verification
|
||||
Methods (CHARME'03)},
|
||||
pages = {126--140},
|
||||
year = {2003},
|
||||
editor = {G. Goos and J. Hartmanis and J. van Leeuwen},
|
||||
volume = {2860},
|
||||
series = {Lectures Notes in Computer Science},
|
||||
month = {October},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \param fair_loop_approx When set, a really simple characterization of
|
||||
/// unstable state is used to suppress all acceptance conditions from
|
||||
|
|
@ -103,22 +103,22 @@ namespace spot
|
|||
/// for the type of reduction you want, see
|
||||
/// spot::ltl::ltl_simplifier. This idea is taken from the
|
||||
/// following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{ thirioux.02.fmics,
|
||||
/// author = {Xavier Thirioux},
|
||||
/// title = {Simple and Efficient Translation from {LTL} Formulas to
|
||||
/// {B\"u}chi Automata},
|
||||
/// booktitle = {Proceedings of the 7th International ERCIM Workshop in
|
||||
/// Formal Methods for Industrial Critical Systems (FMICS'02)},
|
||||
/// series = {Electronic Notes in Theoretical Computer Science},
|
||||
/// volume = {66(2)},
|
||||
/// publisher = {Elsevier},
|
||||
/// editor = {Rance Cleaveland and Hubert Garavel},
|
||||
/// year = {2002},
|
||||
/// month = jul,
|
||||
/// address = {M{\'a}laga, Spain}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ thirioux.02.fmics,
|
||||
author = {Xavier Thirioux},
|
||||
title = {Simple and Efficient Translation from {LTL} Formulas to
|
||||
{B\"u}chi Automata},
|
||||
booktitle = {Proceedings of the 7th International ERCIM Workshop in
|
||||
Formal Methods for Industrial Critical Systems (FMICS'02)},
|
||||
series = {Electronic Notes in Theoretical Computer Science},
|
||||
volume = {66(2)},
|
||||
publisher = {Elsevier},
|
||||
editor = {Rance Cleaveland and Hubert Garavel},
|
||||
year = {2002},
|
||||
month = jul,
|
||||
address = {M{\'a}laga, Spain}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \return A spot::tgba_explicit that recognizes the language of \a f.
|
||||
tgba_explicit_formula*
|
||||
|
|
|
|||
|
|
@ -29,21 +29,21 @@ namespace spot
|
|||
/// \brief Build a spot::tgba_bdd_concrete from an LTL formula.
|
||||
///
|
||||
/// This is based on the following paper.
|
||||
/// \verbatim
|
||||
/// @InProceedings{ couvreur.00.lacim,
|
||||
/// author = {Jean-Michel Couvreur},
|
||||
/// title = {Un point de vue symbolique sur la logique temporelle
|
||||
/// lin{\'e}aire},
|
||||
/// booktitle = {Actes du Colloque LaCIM 2000},
|
||||
/// month = {August},
|
||||
/// year = {2000},
|
||||
/// pages = {131--140},
|
||||
/// volume = {27},
|
||||
/// series = {Publications du LaCIM},
|
||||
/// publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
||||
/// editor = {Pierre Leroux}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ couvreur.00.lacim,
|
||||
author = {Jean-Michel Couvreur},
|
||||
title = {Un point de vue symbolique sur la logique temporelle
|
||||
lin{\'e}aire},
|
||||
booktitle = {Actes du Colloque LaCIM 2000},
|
||||
month = {August},
|
||||
year = {2000},
|
||||
pages = {131--140},
|
||||
volume = {27},
|
||||
series = {Publications du LaCIM},
|
||||
publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
||||
editor = {Pierre Leroux}
|
||||
}
|
||||
\endverbatim */
|
||||
/// \param f The formula to translate into an automaton.
|
||||
/// \param dict The spot::bdd_dict the constructed automata should use.
|
||||
/// \return A spot::tgba_bdd_concrete that recognizes the language of \a f.
|
||||
|
|
|
|||
|
|
@ -42,55 +42,55 @@ namespace spot
|
|||
/// (until it returns a null pointer) to enumerate all the visited acceptance
|
||||
/// paths. The implemented algorithm is the following:
|
||||
///
|
||||
/// \verbatim
|
||||
/// procedure check ()
|
||||
/// begin
|
||||
/// call dfs_blue(s0);
|
||||
/// end;
|
||||
///
|
||||
/// procedure dfs_blue (s)
|
||||
/// begin
|
||||
/// s.color = blue;
|
||||
/// for all t in post(s) do
|
||||
/// if t.color == white then
|
||||
/// call dfs_blue(t);
|
||||
/// end if;
|
||||
/// if (the edge (s,t) is accepting) then
|
||||
/// target = s;
|
||||
/// call dfs_red(t);
|
||||
/// end if;
|
||||
/// end for;
|
||||
/// end;
|
||||
///
|
||||
/// procedure dfs_red(s)
|
||||
/// begin
|
||||
/// s.color = red;
|
||||
/// if s == target then
|
||||
/// report cycle
|
||||
/// end if;
|
||||
/// for all t in post(s) do
|
||||
/// if t.color == blue then
|
||||
/// call dfs_red(t);
|
||||
/// end if;
|
||||
/// end for;
|
||||
/// end;
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
procedure check ()
|
||||
begin
|
||||
call dfs_blue(s0);
|
||||
end;
|
||||
|
||||
procedure dfs_blue (s)
|
||||
begin
|
||||
s.color = blue;
|
||||
for all t in post(s) do
|
||||
if t.color == white then
|
||||
call dfs_blue(t);
|
||||
end if;
|
||||
if (the edge (s,t) is accepting) then
|
||||
target = s;
|
||||
call dfs_red(t);
|
||||
end if;
|
||||
end for;
|
||||
end;
|
||||
|
||||
procedure dfs_red(s)
|
||||
begin
|
||||
s.color = red;
|
||||
if s == target then
|
||||
report cycle
|
||||
end if;
|
||||
for all t in post(s) do
|
||||
if t.color == blue then
|
||||
call dfs_red(t);
|
||||
end if;
|
||||
end for;
|
||||
end;
|
||||
\endverbatim */
|
||||
///
|
||||
/// This algorithm is an adaptation to TBA of the one
|
||||
/// (which deals with accepting states) presented in
|
||||
///
|
||||
/// \verbatim
|
||||
/// Article{ courcoubetis.92.fmsd,
|
||||
/// author = {Costas Courcoubetis and Moshe Y. Vardi and Pierre
|
||||
/// Wolper and Mihalis Yannakakis},
|
||||
/// title = {Memory-Efficient Algorithm for the Verification of
|
||||
/// Temporal Properties},
|
||||
/// journal = {Formal Methods in System Design},
|
||||
/// pages = {275--288},
|
||||
/// year = {1992},
|
||||
/// volume = {1}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
Article{ courcoubetis.92.fmsd,
|
||||
author = {Costas Courcoubetis and Moshe Y. Vardi and Pierre
|
||||
Wolper and Mihalis Yannakakis},
|
||||
title = {Memory-Efficient Algorithm for the Verification of
|
||||
Temporal Properties},
|
||||
journal = {Formal Methods in System Design},
|
||||
pages = {275--288},
|
||||
year = {1992},
|
||||
volume = {1}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \bug The name is misleading. Magic-search is the algorithm
|
||||
/// from \c godefroid.93.pstv, not \c courcoubetis.92.fmsd.
|
||||
|
|
@ -105,15 +105,15 @@ namespace spot
|
|||
/// During the visit of \a a, the returned checker does not store explicitely
|
||||
/// the traversed states but uses the bit-state hashing technic presented in:
|
||||
///
|
||||
/// \verbatim
|
||||
/// @book{Holzmann91,
|
||||
/// author = {G.J. Holzmann},
|
||||
/// title = {Design and Validation of Computer Protocols},
|
||||
/// publisher = {Prentice-Hall},
|
||||
/// address = {Englewood Cliffs, New Jersey},
|
||||
/// year = {1991}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@book{Holzmann91,
|
||||
author = {G.J. Holzmann},
|
||||
title = {Design and Validation of Computer Protocols},
|
||||
publisher = {Prentice-Hall},
|
||||
address = {Englewood Cliffs, New Jersey},
|
||||
year = {1991}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// Consequently, the detection of an acceptence cycle is not ensured.
|
||||
///
|
||||
|
|
|
|||
|
|
@ -37,20 +37,20 @@ namespace spot
|
|||
/// as if all states were accepting states.
|
||||
///
|
||||
/// For more detail about monitors, see the following paper:
|
||||
/// \verbatim
|
||||
/// @InProceedings{ tabakov.10.rv,
|
||||
/// author = {Deian Tabakov and Moshe Y. Vardi},
|
||||
/// title = {Optimized Temporal Monitors for SystemC{$^*$}},
|
||||
/// booktitle = {Proceedings of the 10th International Conferance
|
||||
/// on Runtime Verification},
|
||||
/// pages = {436--451},
|
||||
/// year = 2010,
|
||||
/// volume = {6418},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// month = nov,
|
||||
/// publisher = {Spring-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ tabakov.10.rv,
|
||||
author = {Deian Tabakov and Moshe Y. Vardi},
|
||||
title = {Optimized Temporal Monitors for SystemC{$^*$}},
|
||||
booktitle = {Proceedings of the 10th International Conferance
|
||||
on Runtime Verification},
|
||||
pages = {436--451},
|
||||
year = 2010,
|
||||
volume = {6418},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
month = nov,
|
||||
publisher = {Spring-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
/// (Note: although the above paper uses Spot, this function did not
|
||||
/// exist in Spot at that time.)
|
||||
///
|
||||
|
|
@ -76,23 +76,23 @@ namespace spot
|
|||
/// Please see the following paper for a discussion of this
|
||||
/// technique.
|
||||
///
|
||||
/// \verbatim
|
||||
/// @InProceedings{ dax.07.atva,
|
||||
/// author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
||||
/// title = {Mechanizing the Powerset Construction for Restricted
|
||||
/// Classes of {$\omega$}-Automata},
|
||||
/// year = 2007,
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer-Verlag},
|
||||
/// volume = 4762,
|
||||
/// booktitle = {Proceedings of the 5th International Symposium on
|
||||
/// Automated Technology for Verification and Analysis
|
||||
/// (ATVA'07)},
|
||||
/// editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
||||
/// and Yoshio Okamura},
|
||||
/// month = oct
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ dax.07.atva,
|
||||
author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
||||
title = {Mechanizing the Powerset Construction for Restricted
|
||||
Classes of {$\omega$}-Automata},
|
||||
year = 2007,
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer-Verlag},
|
||||
volume = 4762,
|
||||
booktitle = {Proceedings of the 5th International Symposium on
|
||||
Automated Technology for Verification and Analysis
|
||||
(ATVA'07)},
|
||||
editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
||||
and Yoshio Okamura},
|
||||
month = oct
|
||||
}
|
||||
\endverbatim */
|
||||
sba_explicit_number* minimize_wdba(const tgba* a);
|
||||
|
||||
/// \brief Minimize an automaton if it represents an obligation property.
|
||||
|
|
@ -101,23 +101,23 @@ namespace spot
|
|||
/// algorithm implemented in the minimize_wdba() function, and presented
|
||||
/// by the following paper:
|
||||
///
|
||||
/// \verbatim
|
||||
/// @InProceedings{ dax.07.atva,
|
||||
/// author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
||||
/// title = {Mechanizing the Powerset Construction for Restricted
|
||||
/// Classes of {$\omega$}-Automata},
|
||||
/// year = 2007,
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// publisher = {Springer-Verlag},
|
||||
/// volume = 4762,
|
||||
/// booktitle = {Proceedings of the 5th International Symposium on
|
||||
/// Automated Technology for Verification and Analysis
|
||||
/// (ATVA'07)},
|
||||
/// editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
||||
/// and Yoshio Okamura},
|
||||
/// month = oct
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ dax.07.atva,
|
||||
author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
||||
title = {Mechanizing the Powerset Construction for Restricted
|
||||
Classes of {$\omega$}-Automata},
|
||||
year = 2007,
|
||||
series = {Lecture Notes in Computer Science},
|
||||
publisher = {Springer-Verlag},
|
||||
volume = 4762,
|
||||
booktitle = {Proceedings of the 5th International Symposium on
|
||||
Automated Technology for Verification and Analysis
|
||||
(ATVA'07)},
|
||||
editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
||||
and Yoshio Okamura},
|
||||
month = oct
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// Because it is hard to determine if an automaton corresponds
|
||||
/// to an obligation property, you should supply either the formula
|
||||
|
|
|
|||
|
|
@ -53,21 +53,21 @@ namespace spot
|
|||
/// \param env The environment in which to declare the acceptance conditions.
|
||||
///
|
||||
/// This algorithms is adapted from the one in Fig 6.2 page 48 of
|
||||
/// \verbatim
|
||||
/// @TechReport{ tauriainen.00.a66,
|
||||
/// author = {Heikki Tauriainen},
|
||||
/// title = {Automated Testing of {B\"u}chi Automata Translators for
|
||||
/// {L}inear {T}emporal {L}ogic},
|
||||
/// address = {Espoo, Finland},
|
||||
/// institution = {Helsinki University of Technology, Laboratory for
|
||||
/// Theoretical Computer Science},
|
||||
/// number = {A66},
|
||||
/// year = {2000},
|
||||
/// url = {http://citeseer.nj.nec.com/tauriainen00automated.html},
|
||||
/// type = {Research Report},
|
||||
/// note = {Reprint of Master's thesis}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@TechReport{ tauriainen.00.a66,
|
||||
author = {Heikki Tauriainen},
|
||||
title = {Automated Testing of {B\"u}chi Automata Translators for
|
||||
{L}inear {T}emporal {L}ogic},
|
||||
address = {Espoo, Finland},
|
||||
institution = {Helsinki University of Technology, Laboratory for
|
||||
Theoretical Computer Science},
|
||||
number = {A66},
|
||||
year = {2000},
|
||||
url = {http://citeseer.nj.nec.com/tauriainen00automated.html},
|
||||
type = {Research Report},
|
||||
note = {Reprint of Master's thesis}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// Although the intent is similar, there are some differences with
|
||||
/// between the above published algorithm and this implementation .
|
||||
|
|
|
|||
|
|
@ -43,59 +43,59 @@ namespace spot
|
|||
/// paths. The implemented algorithm is an optimization of
|
||||
/// spot::explicit_magic_search and is the following:
|
||||
///
|
||||
/// \verbatim
|
||||
/// procedure check ()
|
||||
/// begin
|
||||
/// call dfs_blue(s0);
|
||||
/// end;
|
||||
///
|
||||
/// procedure dfs_blue (s)
|
||||
/// begin
|
||||
/// s.color = cyan;
|
||||
/// for all t in post(s) do
|
||||
/// if t.color == white then
|
||||
/// call dfs_blue(t);
|
||||
/// else if t.color == cyan and
|
||||
/// (the edge (s,t) is accepting or
|
||||
/// (it exists a predecessor p of s in st_blue and s != t and
|
||||
/// the arc between p and s is accepting)) then
|
||||
/// report cycle;
|
||||
/// end if;
|
||||
/// if the edge (s,t) is accepting then
|
||||
/// call dfs_red(t);
|
||||
/// end if;
|
||||
/// end for;
|
||||
/// s.color = blue;
|
||||
/// end;
|
||||
///
|
||||
/// procedure dfs_red(s)
|
||||
/// begin
|
||||
/// if s.color == cyan then
|
||||
/// report cycle;
|
||||
/// end if;
|
||||
/// s.color = red;
|
||||
/// for all t in post(s) do
|
||||
/// if t.color == blue then
|
||||
/// call dfs_red(t);
|
||||
/// end if;
|
||||
/// end for;
|
||||
/// end;
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
procedure check ()
|
||||
begin
|
||||
call dfs_blue(s0);
|
||||
end;
|
||||
|
||||
procedure dfs_blue (s)
|
||||
begin
|
||||
s.color = cyan;
|
||||
for all t in post(s) do
|
||||
if t.color == white then
|
||||
call dfs_blue(t);
|
||||
else if t.color == cyan and
|
||||
(the edge (s,t) is accepting or
|
||||
(it exists a predecessor p of s in st_blue and s != t and
|
||||
the arc between p and s is accepting)) then
|
||||
report cycle;
|
||||
end if;
|
||||
if the edge (s,t) is accepting then
|
||||
call dfs_red(t);
|
||||
end if;
|
||||
end for;
|
||||
s.color = blue;
|
||||
end;
|
||||
|
||||
procedure dfs_red(s)
|
||||
begin
|
||||
if s.color == cyan then
|
||||
report cycle;
|
||||
end if;
|
||||
s.color = red;
|
||||
for all t in post(s) do
|
||||
if t.color == blue then
|
||||
call dfs_red(t);
|
||||
end if;
|
||||
end for;
|
||||
end;
|
||||
\endverbatim */
|
||||
///
|
||||
/// It is an adaptation to TBA of the one presented in
|
||||
/// \verbatim
|
||||
/// @techreport{SE04,
|
||||
/// author = {Stefan Schwoon and Javier Esparza},
|
||||
/// institution = {Universit{\"a}t Stuttgart, Fakult\"at Informatik,
|
||||
/// Elektrotechnik und Informationstechnik},
|
||||
/// month = {November},
|
||||
/// number = {2004/06},
|
||||
/// title = {A Note on On-The-Fly Verification Algorithms},
|
||||
/// year = {2004},
|
||||
/// url =
|
||||
///{http://www.fmi.uni-stuttgart.de/szs/publications/info/schwoosn.SE04.shtml}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@techreport{SE04,
|
||||
author = {Stefan Schwoon and Javier Esparza},
|
||||
institution = {Universit{\"a}t Stuttgart, Fakult\"at Informatik,
|
||||
Elektrotechnik und Informationstechnik},
|
||||
month = {November},
|
||||
number = {2004/06},
|
||||
title = {A Note on On-The-Fly Verification Algorithms},
|
||||
year = {2004},
|
||||
url =
|
||||
{http://www.fmi.uni-stuttgart.de/szs/publications/info/schwoosn.SE04.shtml}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// \sa spot::explicit_magic_search
|
||||
///
|
||||
|
|
@ -109,15 +109,15 @@ namespace spot
|
|||
/// During the visit of \a a, the returned checker does not store explicitely
|
||||
/// the traversed states but uses the bit-state hashing technic presented in:
|
||||
///
|
||||
/// \verbatim
|
||||
/// @book{Holzmann91,
|
||||
/// author = {G.J. Holzmann},
|
||||
/// title = {Design and Validation of Computer Protocols},
|
||||
/// publisher = {Prentice-Hall},
|
||||
/// address = {Englewood Cliffs, New Jersey},
|
||||
/// year = {1991}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@book{Holzmann91,
|
||||
author = {G.J. Holzmann},
|
||||
title = {Design and Validation of Computer Protocols},
|
||||
publisher = {Prentice-Hall},
|
||||
address = {Englewood Cliffs, New Jersey},
|
||||
year = {1991}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// Consequently, the detection of an acceptence cycle is not ensured.
|
||||
///
|
||||
|
|
|
|||
|
|
@ -36,21 +36,21 @@ namespace spot
|
|||
/// based on the following paper, but generalized to handle TGBA
|
||||
/// directly.
|
||||
///
|
||||
/// \verbatim
|
||||
/// @InProceedings{ etessami.00.concur,
|
||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
/// title = {Optimizing {B\"u}chi Automata},
|
||||
/// booktitle = {Proceedings of the 11th International Conference on
|
||||
/// Concurrency Theory (Concur'00)},
|
||||
/// pages = {153--167},
|
||||
/// year = {2000},
|
||||
/// editor = {C. Palamidessi},
|
||||
/// volume = {1877},
|
||||
/// series = {Lecture Notes in Computer Science},
|
||||
/// address = {Pennsylvania, USA},
|
||||
/// publisher = {Springer-Verlag}
|
||||
/// }
|
||||
/// \endverbatim
|
||||
/** \verbatim
|
||||
@InProceedings{ etessami.00.concur,
|
||||
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||
title = {Optimizing {B\"u}chi Automata},
|
||||
booktitle = {Proceedings of the 11th International Conference on
|
||||
Concurrency Theory (Concur'00)},
|
||||
pages = {153--167},
|
||||
year = {2000},
|
||||
editor = {C. Palamidessi},
|
||||
volume = {1877},
|
||||
series = {Lecture Notes in Computer Science},
|
||||
address = {Pennsylvania, USA},
|
||||
publisher = {Springer-Verlag}
|
||||
}
|
||||
\endverbatim */
|
||||
///
|
||||
/// Our reconstruction of the quotient automaton based on this
|
||||
/// suffix-inclusion relation will also improve determinism.

@ -80,20 +80,20 @@ namespace spot
///
/// Reverse simulation is discussed in the following paper,
/// but generalized to handle TGBA directly.
/// \verbatim
/// @InProceedings{ somenzi.00.cav,
/// author = {Fabio Somenzi and Roderick Bloem},
/// title = {Efficient {B\"u}chi Automata for {LTL} Formul{\ae}},
/// booktitle = {Proceedings of the 12th International Conference on
/// Computer Aided Verification (CAV'00)},
/// pages = {247--263},
/// year = {2000},
/// volume = {1855},
/// series = {Lecture Notes in Computer Science},
/// address = {Chicago, Illinois, USA},
/// publisher = {Springer-Verlag}
/// }
/// \endverbatim
/** \verbatim
@InProceedings{ somenzi.00.cav,
author = {Fabio Somenzi and Roderick Bloem},
title = {Efficient {B\"u}chi Automata for {LTL} Formul{\ae}},
booktitle = {Proceedings of the 12th International Conference on
Computer Aided Verification (CAV'00)},
pages = {247--263},
year = {2000},
volume = {1855},
series = {Lecture Notes in Computer Science},
address = {Chicago, Illinois, USA},
publisher = {Springer-Verlag}
}
\endverbatim */
///
/// Our reconstruction of the quotient automaton based on this
/// prefix-inclusion relation will also improve codeterminism.
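Reverse simulation can be pictured as the same quotient construction run on the transposed automaton (all transitions reversed), which is why the comment speaks of codeterminism. A minimal sketch of the transposition step, reusing the same hypothetical edge type as the previous sketch:

// Hedged sketch: transpose the automaton before computing simulation to
// obtain the "reverse" variant.  `edge` is an illustrative type, not a
// Spot type.
#include <string>
#include <vector>

struct edge { int src, dst; std::string label; };

std::vector<edge> transpose(const std::vector<edge>& aut)
{
  std::vector<edge> rev;
  rev.reserve(aut.size());
  for (const edge& e : aut)
    rev.push_back({e.dst, e.src, e.label});   // swap source and destination
  return rev;
}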

@ -37,61 +37,61 @@ namespace spot
/// During the visit of \a a, the returned checker stores explicitly all
/// the traversed states. The implemented algorithm is the following:
///
/// \verbatim
/// procedure check ()
/// begin
/// call dfs_blue(s0);
/// end;
///
/// procedure dfs_blue (s)
/// begin
/// s.color = blue;
/// s.acc = emptyset;
/// for all t in post(s) do
/// if t.color == white then
/// call dfs_blue(t);
/// end if;
/// end for;
/// for all t in post(s) do
/// let (s, l, a, t) be the edge from s to t;
/// if s.acc U a not included in t.acc then
/// call dfs_red(t, a U s.acc);
/// end if;
/// end for;
/// if s.acc == all_acc then
/// report a cycle;
/// end if;
/// end;
///
/// procedure dfs_red(s, A)
/// begin
/// s.acc = s.acc U A;
/// for all t in post(s) do
/// if t.color != white and A not included in t.acc then
/// call dfs_red(t, A);
/// end if;
/// end for;
/// end;
/// \endverbatim
/** \verbatim
procedure check ()
begin
call dfs_blue(s0);
end;

procedure dfs_blue (s)
begin
s.color = blue;
s.acc = emptyset;
for all t in post(s) do
if t.color == white then
call dfs_blue(t);
end if;
end for;
for all t in post(s) do
let (s, l, a, t) be the edge from s to t;
if s.acc U a not included in t.acc then
call dfs_red(t, a U s.acc);
end if;
end for;
if s.acc == all_acc then
report a cycle;
end if;
end;

procedure dfs_red(s, A)
begin
s.acc = s.acc U A;
for all t in post(s) do
if t.color != white and A not included in t.acc then
call dfs_red(t, A);
end if;
end for;
end;
\endverbatim */
///
/// This algorithm is the one presented in
///
/// \verbatim
/// @techreport{HUT-TCS-A83,
/// address = {Espoo, Finland},
/// author = {Heikki Tauriainen},
/// institution = {Helsinki University of Technology, Laboratory for
/// Theoretical Computer Science},
/// month = {December},
/// number = {A83},
/// pages = {132},
/// title = {On Translating Linear Temporal Logic into Alternating and
/// Nondeterministic Automata},
/// type = {Research Report},
/// year = {2003},
/// url = {http://www.tcs.hut.fi/Publications/info/bibdb.HUT-TCS-A83.shtml}
/// }
/// \endverbatim
/** \verbatim
@techreport{HUT-TCS-A83,
address = {Espoo, Finland},
author = {Heikki Tauriainen},
institution = {Helsinki University of Technology, Laboratory for
Theoretical Computer Science},
month = {December},
number = {A83},
pages = {132},
title = {On Translating Linear Temporal Logic into Alternating and
Nondeterministic Automata},
type = {Research Report},
year = {2003},
url = {http://www.tcs.hut.fi/Publications/info/bibdb.HUT-TCS-A83.shtml}
}
\endverbatim */
///
emptiness_check* explicit_tau03_search(const tgba *a,
option_map o = option_map());
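The pseudocode above translates almost mechanically into C++. The sketch below does so on a toy explicit graph, with acceptance sets encoded as bit masks; it only illustrates the algorithm and is not Spot's emptiness_check interface (the state, transition, and automaton types are placeholders).

// Hedged sketch: the dfs_blue/dfs_red procedures above, on a toy graph.
#include <cstdint>
#include <iostream>
#include <map>
#include <set>
#include <vector>

using acc_set = std::uint32_t;                  // one bit per acceptance condition

struct transition { int dst; acc_set acc; };    // labels are irrelevant here

struct toy_tgba
{
  std::map<int, std::vector<transition>> succ;  // post(s)
  acc_set all_acc;                              // union of all acceptance conditions
};

struct tau03_check
{
  const toy_tgba& aut;
  std::set<int> blue;                           // states that are no longer white
  std::map<int, acc_set> acc;                   // acceptance sets propagated so far
  bool cycle_found = false;

  explicit tau03_check(const toy_tgba& a) : aut(a) {}

  bool check(int s0) { dfs_blue(s0); return cycle_found; }

  void dfs_blue(int s)
  {
    blue.insert(s);
    acc[s] = 0;
    for (const transition& t : aut.succ.at(s))
      if (!blue.count(t.dst))                   // t.color == white
        dfs_blue(t.dst);
    for (const transition& t : aut.succ.at(s))
      {
        acc_set a = acc[s] | t.acc;             // s.acc U a
        if ((a & ~acc[t.dst]) != 0)             // not included in t.acc
          dfs_red(t.dst, a);
      }
    if (acc[s] == aut.all_acc)
      cycle_found = true;                       // report an accepting cycle
  }

  void dfs_red(int s, acc_set a)
  {
    acc[s] |= a;
    for (const transition& t : aut.succ.at(s))
      if (blue.count(t.dst)                     // t.color != white
          && (a & ~acc[t.dst]) != 0)            // A not included in t.acc
        dfs_red(t.dst, a);
  }
};

int main()
{
  // Two states, with a self-loop on state 1 carrying the only acceptance
  // condition: the language is nonempty, so a cycle must be reported.
  toy_tgba a;
  a.all_acc = 0x1;
  a.succ[0] = { {1, 0} };
  a.succ[1] = { {1, 0x1} };
  std::cout << (tau03_check(a).check(0) ? "accepting cycle\n" : "empty\n");
}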

@ -37,55 +37,55 @@ namespace spot
/// During the visit of \a a, the returned checker stores explicitly all
/// the traversed states. The implemented algorithm is the following:
///
/// \verbatim
/// procedure check ()
/// begin
/// weight = 0; // the null vector
/// call dfs_blue(s0);
/// end;
///
/// procedure dfs_blue (s)
/// begin
/// s.color = cyan;
/// s.acc = emptyset;
/// s.weight = weight;
/// for all t in post(s) do
/// let (s, l, a, t) be the edge from s to t;
/// if t.color == white then
/// for all b in a do
/// weight[b] = weight[b] + 1;
/// end for;
/// call dfs_blue(t);
/// for all b in a do
/// weight[b] = weight[b] - 1;
/// end for;
/// end if;
/// Acc = s.acc U a;
/// if t.color == cyan &&
/// (Acc U support(weight - t.weight) U t.acc) == all_acc then
/// report a cycle;
/// else if Acc not included in t.acc then
/// t.acc := t.acc U Acc;
/// call dfs_red(t, Acc);
/// end if;
/// end for;
/// s.color = blue;
/// end;
///
/// procedure dfs_red(s, Acc)
/// begin
/// for all t in post(s) do
/// let (s, l, a, t) be the edge from s to t;
/// if t.color == cyan &&
/// (Acc U support(weight - t.weight) U t.acc) == all_acc then
/// report a cycle;
/// else if t.color != white and Acc not included in t.acc then
/// t.acc := t.acc U Acc;
/// call dfs_red(t, Acc);
/// end if;
/// end for;
/// end;
/// \endverbatim
/** \verbatim
procedure check ()
begin
weight = 0; // the null vector
call dfs_blue(s0);
end;

procedure dfs_blue (s)
begin
s.color = cyan;
s.acc = emptyset;
s.weight = weight;
for all t in post(s) do
let (s, l, a, t) be the edge from s to t;
if t.color == white then
for all b in a do
weight[b] = weight[b] + 1;
end for;
call dfs_blue(t);
for all b in a do
weight[b] = weight[b] - 1;
end for;
end if;
Acc = s.acc U a;
if t.color == cyan &&
(Acc U support(weight - t.weight) U t.acc) == all_acc then
report a cycle;
else if Acc not included in t.acc then
t.acc := t.acc U Acc;
call dfs_red(t, Acc);
end if;
end for;
s.color = blue;
end;

procedure dfs_red(s, Acc)
begin
for all t in post(s) do
let (s, l, a, t) be the edge from s to t;
if t.color == cyan &&
(Acc U support(weight - t.weight) U t.acc) == all_acc then
report a cycle;
else if t.color != white and Acc not included in t.acc then
t.acc := t.acc U Acc;
call dfs_red(t, Acc);
end if;
end for;
end;
\endverbatim */
///
/// This algorithm is a generalisation to TGBA of the one implemented in
/// spot::explicit_se05_search. It is based on the acceptance set labelling
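The weight-vector bookkeeping that distinguishes this variant can be illustrated in isolation. In the sketch below (illustrative types only, not Spot's), weight holds one counter per acceptance condition, incremented on the way down the blue DFS and decremented on the way back, and support(weight - t.weight) is recovered by comparing the current vector with the snapshot stored when the cyan state t was entered.

// Hedged sketch of the weight-vector bookkeeping used by the pseudocode
// above.  support_of_difference returns the acceptance conditions whose
// counter changed between the snapshot taken at t and the current weight,
// i.e. the conditions seen on the DFS path from t to the current state.
#include <cstddef>
#include <vector>

using weight_vector = std::vector<int>;        // weight[b] for each condition b

std::vector<std::size_t>
support_of_difference(const weight_vector& now, const weight_vector& at_t)
{
  std::vector<std::size_t> support;
  for (std::size_t b = 0; b < now.size(); ++b)
    if (now[b] != at_t[b])
      support.push_back(b);
  return support;
}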