Fix verbatim blocks of Doxygen comments.
* src/ltlast/formula.hh, src/ltlvisit/contain.hh, src/ltlvisit/randomltl.hh, src/ltlvisit/reduce.hh, src/ltlvisit/remove_x.hh, src/ltlvisit/simplify.hh, src/ltlvisit/snf.hh, src/misc/minato.hh, src/misc/optionmap.hh, src/saba/sabacomplementtgba.hh, src/taalgos/emptinessta.hh, src/taalgos/minimize.hh, src/taalgos/tgba2ta.hh, src/tgba/tgbakvcomplement.hh, src/tgbaalgos/cycles.hh, src/tgbaalgos/eltl2tgba_lacim.hh, src/tgbaalgos/gtec/gtec.hh, src/tgbaalgos/gv04.hh, src/tgbaalgos/ltl2taa.hh, src/tgbaalgos/ltl2tgba_fm.hh, src/tgbaalgos/ltl2tgba_lacim.hh, src/tgbaalgos/magic.hh, src/tgbaalgos/minimize.hh, src/tgbaalgos/randomgraph.hh, src/tgbaalgos/se05.hh, src/tgbaalgos/simulation.hh, src/tgbaalgos/tau03.hh, src/tgbaalgos/tau03opt.hh: Surround verbatim blocks with /** ... */ instead of using /// on each line. Otherwise Doxygen will output the leading "///" tokens -- apparently this is a feature. * src/sanity/style.test: Strip multi-line comments before checking code style.
This commit is contained in:
parent
1ec9cebe58
commit
1cd9b204ed
29 changed files with 758 additions and 745 deletions
|
|
@ -186,20 +186,20 @@ namespace spot
|
||||||
/// \brief Whether the formula is purely eventual.
|
/// \brief Whether the formula is purely eventual.
|
||||||
///
|
///
|
||||||
/// Pure eventuality formulae are defined in
|
/// Pure eventuality formulae are defined in
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ etessami.00.concur,
|
@InProceedings{ etessami.00.concur,
|
||||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||||
/// title = {Optimizing {B\"u}chi Automata},
|
title = {Optimizing {B\"u}chi Automata},
|
||||||
/// booktitle = {Proceedings of the 11th International Conference on
|
booktitle = {Proceedings of the 11th International Conference on
|
||||||
/// Concurrency Theory (Concur'2000)},
|
Concurrency Theory (Concur'2000)},
|
||||||
/// pages = {153--167},
|
pages = {153--167},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// editor = {C. Palamidessi},
|
editor = {C. Palamidessi},
|
||||||
/// volume = {1877},
|
volume = {1877},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// A word that satisfies a pure eventuality can be prefixed by
|
/// A word that satisfies a pure eventuality can be prefixed by
|
||||||
/// anything and still satisfies the formula.
|
/// anything and still satisfies the formula.
|
||||||
|
|
@ -211,20 +211,20 @@ namespace spot
|
||||||
/// \brief Whether a formula is purely universal.
|
/// \brief Whether a formula is purely universal.
|
||||||
///
|
///
|
||||||
/// Purely universal formulae are defined in
|
/// Purely universal formulae are defined in
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ etessami.00.concur,
|
@InProceedings{ etessami.00.concur,
|
||||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||||
/// title = {Optimizing {B\"u}chi Automata},
|
title = {Optimizing {B\"u}chi Automata},
|
||||||
/// booktitle = {Proceedings of the 11th International Conference on
|
booktitle = {Proceedings of the 11th International Conference on
|
||||||
/// Concurrency Theory (Concur'2000)},
|
Concurrency Theory (Concur'2000)},
|
||||||
/// pages = {153--167},
|
pages = {153--167},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// editor = {C. Palamidessi},
|
editor = {C. Palamidessi},
|
||||||
/// volume = {1877},
|
volume = {1877},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Any (non-empty) suffix of a word that satisfies a purely
|
/// Any (non-empty) suffix of a word that satisfies a purely
|
||||||
/// universal formula also satisfies the formula.
|
/// universal formula also satisfies the formula.
|
||||||
|
|
|
||||||
|
|
@ -83,22 +83,22 @@ namespace spot
|
||||||
/// \brief Reduce a formula using language containment relationships.
|
/// \brief Reduce a formula using language containment relationships.
|
||||||
///
|
///
|
||||||
/// The method is taken from table 4.1 in
|
/// The method is taken from table 4.1 in
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
///@TechReport{ tauriainen.03.a83,
|
@TechReport{ tauriainen.03.a83,
|
||||||
/// author = {Heikki Tauriainen},
|
author = {Heikki Tauriainen},
|
||||||
/// title = {On Translating Linear Temporal Logic into Alternating and
|
title = {On Translating Linear Temporal Logic into Alternating and
|
||||||
/// Nondeterministic Automata},
|
Nondeterministic Automata},
|
||||||
/// institution = {Helsinki University of Technology, Laboratory for
|
institution = {Helsinki University of Technology, Laboratory for
|
||||||
/// Theoretical Computer Science},
|
Theoretical Computer Science},
|
||||||
/// address = {Espoo, Finland},
|
address = {Espoo, Finland},
|
||||||
/// month = dec,
|
month = dec,
|
||||||
/// number = {A83},
|
number = {A83},
|
||||||
/// pages = {132},
|
pages = {132},
|
||||||
/// type = {Research Report},
|
type = {Research Report},
|
||||||
/// year = {2003},
|
year = {2003},
|
||||||
/// note = {Reprint of Licentiate's thesis}
|
note = {Reprint of Licentiate's thesis}
|
||||||
///}
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// (The "dagged" cells in the tables are not handled here.)
|
/// (The "dagged" cells in the tables are not handled here.)
|
||||||
///
|
///
|
||||||
|
|
|
||||||
|
|
@ -117,24 +117,24 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// The default priorities are defined as follows:
|
/// The default priorities are defined as follows:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// ap n
|
ap n
|
||||||
/// false 1
|
false 1
|
||||||
/// true 1
|
true 1
|
||||||
/// not 1
|
not 1
|
||||||
/// F 1
|
F 1
|
||||||
/// G 1
|
G 1
|
||||||
/// X 1
|
X 1
|
||||||
/// equiv 1
|
equiv 1
|
||||||
/// implies 1
|
implies 1
|
||||||
/// xor 1
|
xor 1
|
||||||
/// R 1
|
R 1
|
||||||
/// U 1
|
U 1
|
||||||
/// W 1
|
W 1
|
||||||
/// M 1
|
M 1
|
||||||
/// and 1
|
and 1
|
||||||
/// or 1
|
or 1
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Where \c n is the number of atomic propositions in the
|
/// Where \c n is the number of atomic propositions in the
|
||||||
/// set passed to the constructor.
|
/// set passed to the constructor.
|
||||||
|
|
@ -168,17 +168,17 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// The default priorities are defined as follows:
|
/// The default priorities are defined as follows:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// ap n
|
ap n
|
||||||
/// false 1
|
false 1
|
||||||
/// true 1
|
true 1
|
||||||
/// not 1
|
not 1
|
||||||
/// equiv 1
|
equiv 1
|
||||||
/// implies 1
|
implies 1
|
||||||
/// xor 1
|
xor 1
|
||||||
/// and 1
|
and 1
|
||||||
/// or 1
|
or 1
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Where \c n is the number of atomic propositions in the
|
/// Where \c n is the number of atomic propositions in the
|
||||||
/// set passed to the constructor.
|
/// set passed to the constructor.
|
||||||
|
|
@ -207,19 +207,19 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// The default priorities are defined as follows:
|
/// The default priorities are defined as follows:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// eword 1
|
eword 1
|
||||||
/// boolform 1
|
boolform 1
|
||||||
/// star 1
|
star 1
|
||||||
/// star_b 1
|
star_b 1
|
||||||
/// equal_b 1
|
equal_b 1
|
||||||
/// goto_b 1
|
goto_b 1
|
||||||
/// and 1
|
and 1
|
||||||
/// andNLM 1
|
andNLM 1
|
||||||
/// or 1
|
or 1
|
||||||
/// concat 1
|
concat 1
|
||||||
/// fusion 1
|
fusion 1
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Where "boolfrom" designates a Boolean formula generated
|
/// Where "boolfrom" designates a Boolean formula generated
|
||||||
/// by random_boolean.
|
/// by random_boolean.
|
||||||
|
|
@ -252,27 +252,27 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// The default priorities are defined as follows:
|
/// The default priorities are defined as follows:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// ap n
|
ap n
|
||||||
/// false 1
|
false 1
|
||||||
/// true 1
|
true 1
|
||||||
/// not 1
|
not 1
|
||||||
/// F 1
|
F 1
|
||||||
/// G 1
|
G 1
|
||||||
/// X 1
|
X 1
|
||||||
/// Closure 1
|
Closure 1
|
||||||
/// equiv 1
|
equiv 1
|
||||||
/// implies 1
|
implies 1
|
||||||
/// xor 1
|
xor 1
|
||||||
/// R 1
|
R 1
|
||||||
/// U 1
|
U 1
|
||||||
/// W 1
|
W 1
|
||||||
/// M 1
|
M 1
|
||||||
/// and 1
|
and 1
|
||||||
/// or 1
|
or 1
|
||||||
/// EConcat 1
|
EConcat 1
|
||||||
/// UConcat 1
|
UConcat 1
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Where \c n is the number of atomic propositions in the
|
/// Where \c n is the number of atomic propositions in the
|
||||||
/// set passed to the constructor.
|
/// set passed to the constructor.
|
||||||
|
|
|
||||||
|
|
@ -80,20 +80,20 @@ namespace spot
|
||||||
/// \brief Check whether a formula is a pure eventuality.
|
/// \brief Check whether a formula is a pure eventuality.
|
||||||
///
|
///
|
||||||
/// Pure eventuality formulae are defined in
|
/// Pure eventuality formulae are defined in
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ etessami.00.concur,
|
@InProceedings{ etessami.00.concur,
|
||||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||||
/// title = {Optimizing {B\"u}chi Automata},
|
title = {Optimizing {B\"u}chi Automata},
|
||||||
/// booktitle = {Proceedings of the 11th International Conference on
|
booktitle = {Proceedings of the 11th International Conference on
|
||||||
/// Concurrency Theory (Concur'2000)},
|
Concurrency Theory (Concur'2000)},
|
||||||
/// pages = {153--167},
|
pages = {153--167},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// editor = {C. Palamidessi},
|
editor = {C. Palamidessi},
|
||||||
/// volume = {1877},
|
volume = {1877},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// A word that satisfies a pure eventuality can be prefixed by
|
/// A word that satisfies a pure eventuality can be prefixed by
|
||||||
/// anything and still satisfies the formula.
|
/// anything and still satisfies the formula.
|
||||||
|
|
@ -109,20 +109,20 @@ namespace spot
|
||||||
/// \brief Check whether a formula is purely universal.
|
/// \brief Check whether a formula is purely universal.
|
||||||
///
|
///
|
||||||
/// Purely universal formulae are defined in
|
/// Purely universal formulae are defined in
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ etessami.00.concur,
|
@InProceedings{ etessami.00.concur,
|
||||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||||
/// title = {Optimizing {B\"u}chi Automata},
|
title = {Optimizing {B\"u}chi Automata},
|
||||||
/// booktitle = {Proceedings of the 11th International Conference on
|
booktitle = {Proceedings of the 11th International Conference on
|
||||||
/// Concurrency Theory (Concur'2000)},
|
Concurrency Theory (Concur'2000)},
|
||||||
/// pages = {153--167},
|
pages = {153--167},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// editor = {C. Palamidessi},
|
editor = {C. Palamidessi},
|
||||||
/// volume = {1877},
|
volume = {1877},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Any (non-empty) suffix of a word that satisfies if purely
|
/// Any (non-empty) suffix of a word that satisfies if purely
|
||||||
/// universal formula also satisfies the formula.
|
/// universal formula also satisfies the formula.
|
||||||
|
|
|
||||||
|
|
@ -31,18 +31,18 @@ namespace spot
|
||||||
/// This function may also be applied to stutter-sensitive formulas,
|
/// This function may also be applied to stutter-sensitive formulas,
|
||||||
/// but in that case the resulting formula is not equivalent.
|
/// but in that case the resulting formula is not equivalent.
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @Article{ etessami.00.ipl,
|
@Article{ etessami.00.ipl,
|
||||||
/// author = {Kousha Etessami},
|
author = {Kousha Etessami},
|
||||||
/// title = {A note on a question of {P}eled and {W}ilke regarding
|
title = {A note on a question of {P}eled and {W}ilke regarding
|
||||||
/// stutter-invariant {LTL}},
|
stutter-invariant {LTL}},
|
||||||
/// journal = {Information Processing Letters},
|
journal = {Information Processing Letters},
|
||||||
/// volume = {75},
|
volume = {75},
|
||||||
/// number = {6},
|
number = {6},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// pages = {261--263}
|
pages = {261--263}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
const formula* remove_x(const formula* f);
|
const formula* remove_x(const formula* f);
|
||||||
|
|
||||||
/// \brief Whether an LTL formula \a f is stutter-insensitive.
|
/// \brief Whether an LTL formula \a f is stutter-insensitive.
|
||||||
|
|
@ -51,18 +51,18 @@ namespace spot
|
||||||
/// <code>remove_x(f)</code> is equivalent to \a f. This only
|
/// <code>remove_x(f)</code> is equivalent to \a f. This only
|
||||||
/// works for LTL formulas, not PSL formulas.
|
/// works for LTL formulas, not PSL formulas.
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @Article{ etessami.00.ipl,
|
@Article{ etessami.00.ipl,
|
||||||
/// author = {Kousha Etessami},
|
author = {Kousha Etessami},
|
||||||
/// title = {A note on a question of {P}eled and {W}ilke regarding
|
title = {A note on a question of {P}eled and {W}ilke regarding
|
||||||
/// stutter-invariant {LTL}},
|
stutter-invariant {LTL}},
|
||||||
/// journal = {Information Processing Letters},
|
journal = {Information Processing Letters},
|
||||||
/// volume = {75},
|
volume = {75},
|
||||||
/// number = {6},
|
number = {6},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// pages = {261--263}
|
pages = {261--263}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
bool is_stutter_insensitive(const formula* f);
|
bool is_stutter_insensitive(const formula* f);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -102,19 +102,19 @@ namespace spot
|
||||||
/// Returns whether \a f syntactically implies \a g.
|
/// Returns whether \a f syntactically implies \a g.
|
||||||
///
|
///
|
||||||
/// This is adapted from
|
/// This is adapted from
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ somenzi.00.cav,
|
@InProceedings{ somenzi.00.cav,
|
||||||
/// author = {Fabio Somenzi and Roderick Bloem},
|
author = {Fabio Somenzi and Roderick Bloem},
|
||||||
/// title = {Efficient {B\"u}chi Automata for {LTL} Formulae},
|
title = {Efficient {B\"u}chi Automata for {LTL} Formulae},
|
||||||
/// booktitle = {Proceedings of the 12th International Conference on
|
booktitle = {Proceedings of the 12th International Conference on
|
||||||
/// Computer Aided Verification (CAV'00)},
|
Computer Aided Verification (CAV'00)},
|
||||||
/// pages = {247--263},
|
pages = {247--263},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// volume = {1855},
|
volume = {1855},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
bool syntactic_implication(const formula* f, const formula* g);
|
bool syntactic_implication(const formula* f, const formula* g);
|
||||||
/// \brief Syntactic implication with one negated argument.
|
/// \brief Syntactic implication with one negated argument.
|
||||||
|
|
|
||||||
|
|
@ -37,16 +37,16 @@ namespace spot
|
||||||
/// corresponds to the E° operation defined in the following
|
/// corresponds to the E° operation defined in the following
|
||||||
/// paper.
|
/// paper.
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @Article{ bruggeman.96.tcs,
|
@Article{ bruggeman.96.tcs,
|
||||||
/// author = {Anne Br{\"u}ggemann-Klein},
|
author = {Anne Br{\"u}ggemann-Klein},
|
||||||
/// title = {Regular Expressions into Finite Automata},
|
title = {Regular Expressions into Finite Automata},
|
||||||
/// journal = {Theoretical Computer Science},
|
journal = {Theoretical Computer Science},
|
||||||
/// year = {1996},
|
year = {1996},
|
||||||
/// volume = {120},
|
volume = {120},
|
||||||
/// pages = {87--98}
|
pages = {87--98}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \param sere the SERE to rewrite
|
/// \param sere the SERE to rewrite
|
||||||
/// \param cache an optional cache
|
/// \param cache an optional cache
|
||||||
|
|
|
||||||
|
|
@ -33,20 +33,20 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// This algorithm implements a derecursived version the Minato-Morreale
|
/// This algorithm implements a derecursived version the Minato-Morreale
|
||||||
/// algorithm presented in the following paper.
|
/// algorithm presented in the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ minato.92.sasimi,
|
@InProceedings{ minato.92.sasimi,
|
||||||
/// author = {Shin-ichi Minato},
|
author = {Shin-ichi Minato},
|
||||||
/// title = {Fast Generation of Irredundant Sum-of-Products Forms
|
title = {Fast Generation of Irredundant Sum-of-Products Forms
|
||||||
/// from Binary Decision Diagrams},
|
from Binary Decision Diagrams},
|
||||||
/// booktitle = {Proceedings of the third Synthesis and Simulation
|
booktitle = {Proceedings of the third Synthesis and Simulation
|
||||||
/// and Meeting International Interchange workshop
|
and Meeting International Interchange workshop
|
||||||
/// (SASIMI'92)},
|
(SASIMI'92)},
|
||||||
/// pages = {64--73},
|
pages = {64--73},
|
||||||
/// year = {1992},
|
year = {1992},
|
||||||
/// address = {Kobe, Japan},
|
address = {Kobe, Japan},
|
||||||
/// month = {April}
|
month = {April}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
class minato_isop
|
class minato_isop
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
|
|
|
||||||
|
|
@ -40,11 +40,11 @@ namespace spot
|
||||||
/// sign). If not specified, the default value is 1.
|
/// sign). If not specified, the default value is 1.
|
||||||
///
|
///
|
||||||
/// The following three lines are equivalent.
|
/// The following three lines are equivalent.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// optA !optB optC=4194304
|
optA !optB optC=4194304
|
||||||
/// optA=1, optB=0, optC=4096K
|
optA=1, optB=0, optC=4096K
|
||||||
/// optC = 4M; optA !optB
|
optC = 4M; optA !optB
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \return A non-null pointer to the option for which an expected integer
|
/// \return A non-null pointer to the option for which an expected integer
|
||||||
/// value cannot be parsed.
|
/// value cannot be parsed.
|
||||||
|
|
|
||||||
|
|
@ -36,17 +36,17 @@ namespace spot
|
||||||
/// are expected to reduce the size of the automaton.
|
/// are expected to reduce the size of the automaton.
|
||||||
///
|
///
|
||||||
/// This algorithm comes from:
|
/// This algorithm comes from:
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @Article{ gurumurthy.03.lncs,
|
@Article{ gurumurthy.03.lncs,
|
||||||
/// title = {On complementing nondeterministic {B\"uchi} automata},
|
title = {On complementing nondeterministic {B\"uchi} automata},
|
||||||
/// author = {Gurumurthy, S. and Kupferman, O. and Somenzi, F. and
|
author = {Gurumurthy, S. and Kupferman, O. and Somenzi, F. and
|
||||||
/// Vardi, M.Y.},
|
Vardi, M.Y.},
|
||||||
/// journal = {Lecture Notes in Computer Science},
|
journal = {Lecture Notes in Computer Science},
|
||||||
/// pages = {96--110},
|
pages = {96--110},
|
||||||
/// year = {2003},
|
year = {2003},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// The construction is done on-the-fly, by the
|
/// The construction is done on-the-fly, by the
|
||||||
/// \c saba_complement_succ_iterator class.
|
/// \c saba_complement_succ_iterator class.
|
||||||
|
|
|
||||||
|
|
@ -76,7 +76,20 @@ for dir in "${INCDIR-..}" "${INCDIR-..}"/../iface; do
|
||||||
-0777 $file || diag "always put 'ingroup' before 'brief'"
|
-0777 $file || diag "always put 'ingroup' before 'brief'"
|
||||||
|
|
||||||
# Strip comments and strings.
|
# Strip comments and strings.
|
||||||
sed 's,[ ]*//.*,,;s,"[^"]*","",g' < $file > $tmp
|
#
|
||||||
|
# Multi-line comments of the form
|
||||||
|
# /* Line 1
|
||||||
|
# Line 2
|
||||||
|
# Line 3 */
|
||||||
|
# are replaced by
|
||||||
|
# //
|
||||||
|
# //
|
||||||
|
# //
|
||||||
|
# to keep the line numbers correct in the diagnostics.
|
||||||
|
perl -pe 'sub f {my $a = shift; $a =~ s:[^\n]*://:g; return "$a"}
|
||||||
|
s,/\*(.*?)\*/,f($1),sge;
|
||||||
|
s,//.*?\n,//\n,g;
|
||||||
|
s,"[^"\n]*","",g' -0777 <$file >$tmp
|
||||||
|
|
||||||
grep '[ ]$' $tmp &&
|
grep '[ ]$' $tmp &&
|
||||||
diag 'Trailing whitespace.'
|
diag 'Trailing whitespace.'
|
||||||
|
|
|
||||||
|
|
@ -50,19 +50,19 @@ namespace spot
|
||||||
/// was found empty. Otherwise the automaton accepts some run.
|
/// was found empty. Otherwise the automaton accepts some run.
|
||||||
///
|
///
|
||||||
/// This is based on the following paper.
|
/// This is based on the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ geldenhuys.06.spin,
|
@InProceedings{ geldenhuys.06.spin,
|
||||||
/// author = {Jaco Geldenhuys and Henri Hansen},
|
author = {Jaco Geldenhuys and Henri Hansen},
|
||||||
/// title = {Larger Automata and Less Work for {LTL} Model Checking},
|
title = {Larger Automata and Less Work for {LTL} Model Checking},
|
||||||
/// booktitle = {Proceedings of the 13th International SPIN Workshop
|
booktitle = {Proceedings of the 13th International SPIN Workshop
|
||||||
/// (SPIN'06)},
|
(SPIN'06)},
|
||||||
/// year = {2006},
|
year = {2006},
|
||||||
/// pages = {53--70},
|
pages = {53--70},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// volume = {3925},
|
volume = {3925},
|
||||||
/// publisher = {Springer}
|
publisher = {Springer}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// the implementation of spot::ta_check::check() is inspired from the
|
/// the implementation of spot::ta_check::check() is inspired from the
|
||||||
/// two-pass algorithm of the paper above:
|
/// two-pass algorithm of the paper above:
|
||||||
|
|
|
||||||
|
|
@ -38,22 +38,22 @@ namespace spot
|
||||||
/// achieved using any algorithm based on partition refinement
|
/// achieved using any algorithm based on partition refinement
|
||||||
///
|
///
|
||||||
/// For more detail about this type of algorithm, see the following paper:
|
/// For more detail about this type of algorithm, see the following paper:
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{valmari.09.icatpn,
|
@InProceedings{valmari.09.icatpn,
|
||||||
/// author = {Antti Valmari},
|
author = {Antti Valmari},
|
||||||
/// title = {Bisimilarity Minimization in in O(m logn) Time},
|
title = {Bisimilarity Minimization in in O(m logn) Time},
|
||||||
/// booktitle = {Proceedings of the 30th International Conference on
|
booktitle = {Proceedings of the 30th International Conference on
|
||||||
/// the Applications and Theory of Petri Nets
|
the Applications and Theory of Petri Nets
|
||||||
/// (ICATPN'09)},
|
(ICATPN'09)},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer},
|
publisher = {Springer},
|
||||||
/// isbn = {978-3-642-02423-8},
|
isbn = {978-3-642-02423-8},
|
||||||
/// pages = {123--142},
|
pages = {123--142},
|
||||||
/// volume = 5606,
|
volume = 5606,
|
||||||
/// url = {http://dx.doi.org/10.1007/978-3-642-02424-5_9},
|
url = {http://dx.doi.org/10.1007/978-3-642-02424-5_9},
|
||||||
/// year = {2009}
|
year = {2009}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \param ta_ the TA automaton to convert into a simplified TA
|
/// \param ta_ the TA automaton to convert into a simplified TA
|
||||||
ta*
|
ta*
|
||||||
|
|
|
||||||
|
|
@ -30,19 +30,19 @@ namespace spot
|
||||||
/// \brief Build a spot::ta_explicit* (TA) from an LTL formula.
|
/// \brief Build a spot::ta_explicit* (TA) from an LTL formula.
|
||||||
///
|
///
|
||||||
/// This is based on the following paper.
|
/// This is based on the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ geldenhuys.06.spin,
|
@InProceedings{ geldenhuys.06.spin,
|
||||||
/// author = {Jaco Geldenhuys and Henri Hansen},
|
author = {Jaco Geldenhuys and Henri Hansen},
|
||||||
/// title = {Larger Automata and Less Work for {LTL} Model Checking},
|
title = {Larger Automata and Less Work for {LTL} Model Checking},
|
||||||
/// booktitle = {Proceedings of the 13th International SPIN Workshop
|
booktitle = {Proceedings of the 13th International SPIN Workshop
|
||||||
/// (SPIN'06)},
|
(SPIN'06)},
|
||||||
/// year = {2006},
|
year = {2006},
|
||||||
/// pages = {53--70},
|
pages = {53--70},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// volume = {3925},
|
volume = {3925},
|
||||||
/// publisher = {Springer}
|
publisher = {Springer}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \param tgba_to_convert The TGBA automaton to convert into a TA automaton
|
/// \param tgba_to_convert The TGBA automaton to convert into a TA automaton
|
||||||
///
|
///
|
||||||
|
|
|
||||||
|
|
@ -64,18 +64,18 @@ namespace spot
|
||||||
/// \brief Build a complemented automaton.
|
/// \brief Build a complemented automaton.
|
||||||
///
|
///
|
||||||
/// The construction comes from:
|
/// The construction comes from:
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @Article{ kupferman.05.tcs,
|
@Article{ kupferman.05.tcs,
|
||||||
/// title = {From complementation to certification},
|
title = {From complementation to certification},
|
||||||
/// author = {Kupferman, O. and Vardi, M.Y.},
|
author = {Kupferman, O. and Vardi, M.Y.},
|
||||||
/// journal = {Theoretical Computer Science},
|
journal = {Theoretical Computer Science},
|
||||||
/// volume = {345},
|
volume = {345},
|
||||||
/// number = {1},
|
number = {1},
|
||||||
/// pages = {83--100},
|
pages = {83--100},
|
||||||
/// year = {2005},
|
year = {2005},
|
||||||
/// publisher = {Elsevier}
|
publisher = {Elsevier}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// The original automaton is used as a States-based Generalized
|
/// The original automaton is used as a States-based Generalized
|
||||||
/// Büchi Automaton.
|
/// Büchi Automaton.
|
||||||
|
|
|
||||||
|
|
@ -30,19 +30,19 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// This class implements a non-recursive version of the algorithm
|
/// This class implements a non-recursive version of the algorithm
|
||||||
/// on page 170 of:
|
/// on page 170 of:
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @Article{loizou.82.is,
|
@Article{loizou.82.is,
|
||||||
/// author = {George Loizou and Peter Thanisch},
|
author = {George Loizou and Peter Thanisch},
|
||||||
/// title = {Enumerating the Cycles of a Digraph: A New
|
title = {Enumerating the Cycles of a Digraph: A New
|
||||||
/// Preprocessing Strategy},
|
Preprocessing Strategy},
|
||||||
/// journal = {Information Sciences},
|
journal = {Information Sciences},
|
||||||
/// year = {1982},
|
year = {1982},
|
||||||
/// volume = {27},
|
volume = {27},
|
||||||
/// number = {3},
|
number = {3},
|
||||||
/// pages = {163--182},
|
pages = {163--182},
|
||||||
/// month = aug
|
month = aug
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
/// (the additional preprocessings described later in that paper are
|
/// (the additional preprocessings described later in that paper are
|
||||||
/// not implemented).
|
/// not implemented).
|
||||||
///
|
///
|
||||||
|
|
|
||||||
|
|
@ -28,21 +28,21 @@ namespace spot
|
||||||
/// \brief Build a spot::tgba_bdd_concrete from an ELTL formula.
|
/// \brief Build a spot::tgba_bdd_concrete from an ELTL formula.
|
||||||
///
|
///
|
||||||
/// This is based on the following paper.
|
/// This is based on the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ couvreur.00.lacim,
|
@InProceedings{ couvreur.00.lacim,
|
||||||
/// author = {Jean-Michel Couvreur},
|
author = {Jean-Michel Couvreur},
|
||||||
/// title = {Un point de vue symbolique sur la logique temporelle
|
title = {Un point de vue symbolique sur la logique temporelle
|
||||||
/// lin{\'e}aire},
|
lin{\'e}aire},
|
||||||
/// booktitle = {Actes du Colloque LaCIM 2000},
|
booktitle = {Actes du Colloque LaCIM 2000},
|
||||||
/// month = {August},
|
month = {August},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// pages = {131--140},
|
pages = {131--140},
|
||||||
/// volume = {27},
|
volume = {27},
|
||||||
/// series = {Publications du LaCIM},
|
series = {Publications du LaCIM},
|
||||||
/// publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
||||||
/// editor = {Pierre Leroux}
|
editor = {Pierre Leroux}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
/// \param f The formula to translate into an automaton.
|
/// \param f The formula to translate into an automaton.
|
||||||
/// \param dict The spot::bdd_dict the constructed automata should use.
|
/// \param dict The spot::bdd_dict the constructed automata should use.
|
||||||
/// \return A spot::tgba_bdd_concrete that recognizes the language of \a f.
|
/// \return A spot::tgba_bdd_concrete that recognizes the language of \a f.
|
||||||
|
|
|
||||||
|
|
@ -35,23 +35,23 @@ namespace spot
|
||||||
/// \brief Check whether the language of an automate is empty.
|
/// \brief Check whether the language of an automate is empty.
|
||||||
///
|
///
|
||||||
/// This is based on the following paper.
|
/// This is based on the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{couvreur.99.fm,
|
@InProceedings{couvreur.99.fm,
|
||||||
/// author = {Jean-Michel Couvreur},
|
author = {Jean-Michel Couvreur},
|
||||||
/// title = {On-the-fly Verification of Temporal Logic},
|
title = {On-the-fly Verification of Temporal Logic},
|
||||||
/// pages = {253--271},
|
pages = {253--271},
|
||||||
/// editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
||||||
/// booktitle = {Proceedings of the World Congress on Formal Methods in
|
booktitle = {Proceedings of the World Congress on Formal Methods in
|
||||||
/// the Development of Computing Systems (FM'99)},
|
the Development of Computing Systems (FM'99)},
|
||||||
/// publisher = {Springer-Verlag},
|
publisher = {Springer-Verlag},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// volume = {1708},
|
volume = {1708},
|
||||||
/// year = {1999},
|
year = {1999},
|
||||||
/// address = {Toulouse, France},
|
address = {Toulouse, France},
|
||||||
/// month = {September},
|
month = {September},
|
||||||
/// isbn = {3-540-66587-0}
|
isbn = {3-540-66587-0}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// A recursive definition of the algorithm would look as follows,
|
/// A recursive definition of the algorithm would look as follows,
|
||||||
/// but the implementation is of course not recursive.
|
/// but the implementation is of course not recursive.
|
||||||
|
|
@ -60,36 +60,36 @@ namespace spot
|
||||||
/// positive DFS order or 0 if it is dead, SCC is and ACC are two
|
/// positive DFS order or 0 if it is dead, SCC is and ACC are two
|
||||||
/// stacks.)
|
/// stacks.)
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// check(<Sigma, Q, delta, q, F>, H, SCC, ACC)
|
check(<Sigma, Q, delta, q, F>, H, SCC, ACC)
|
||||||
/// if q is not in H // new state
|
if q is not in H // new state
|
||||||
/// H[q] = H.size + 1
|
H[q] = H.size + 1
|
||||||
/// SCC.push(<H[q], {}>)
|
SCC.push(<H[q], {}>)
|
||||||
/// forall <a, s> : <q, _, a, s> in delta
|
forall <a, s> : <q, _, a, s> in delta
|
||||||
/// ACC.push(a)
|
ACC.push(a)
|
||||||
/// res = check(<Sigma, Q, delta, s, F>, H, SCC, ACC)
|
res = check(<Sigma, Q, delta, s, F>, H, SCC, ACC)
|
||||||
/// if res
|
if res
|
||||||
/// return res
|
return res
|
||||||
/// <n, _> = SCC.top()
|
<n, _> = SCC.top()
|
||||||
/// if n = H[q]
|
if n = H[q]
|
||||||
/// SCC.pop()
|
SCC.pop()
|
||||||
/// mark_reachable_states_as_dead(<Sigma, Q, delta, q, F>, H$)
|
mark_reachable_states_as_dead(<Sigma, Q, delta, q, F>, H$)
|
||||||
/// return 0
|
return 0
|
||||||
/// else
|
else
|
||||||
/// if H[q] = 0 // dead state
|
if H[q] = 0 // dead state
|
||||||
/// ACC.pop()
|
ACC.pop()
|
||||||
/// return true
|
return true
|
||||||
/// else // state in stack: merge SCC
|
else // state in stack: merge SCC
|
||||||
/// all = {}
|
all = {}
|
||||||
/// do
|
do
|
||||||
/// <n, a> = SCC.pop()
|
<n, a> = SCC.pop()
|
||||||
/// all = all union a union { ACC.pop() }
|
all = all union a union { ACC.pop() }
|
||||||
/// until n <= H[q]
|
until n <= H[q]
|
||||||
/// SCC.push(<n, all>)
|
SCC.push(<n, all>)
|
||||||
/// if all != F
|
if all != F
|
||||||
/// return 0
|
return 0
|
||||||
/// return new emptiness_check_result(necessary data)
|
return new emptiness_check_result(necessary data)
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// check() returns 0 iff the automaton's language is empty. It
|
/// check() returns 0 iff the automaton's language is empty. It
|
||||||
/// returns an instance of emptiness_check_result. If the automaton
|
/// returns an instance of emptiness_check_result. If the automaton
|
||||||
|
|
|
||||||
|
|
@ -34,23 +34,23 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// The original algorithm, coming from the following paper, has only
|
/// The original algorithm, coming from the following paper, has only
|
||||||
/// been slightly modified to work on transition-based automata.
|
/// been slightly modified to work on transition-based automata.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{geldenhuys.04.tacas,
|
@InProceedings{geldenhuys.04.tacas,
|
||||||
/// author = {Jaco Geldenhuys and Antti Valmari},
|
author = {Jaco Geldenhuys and Antti Valmari},
|
||||||
/// title = {Tarjan's Algorithm Makes On-the-Fly {LTL} Verification
|
title = {Tarjan's Algorithm Makes On-the-Fly {LTL} Verification
|
||||||
/// More Efficient},
|
More Efficient},
|
||||||
/// booktitle = {Proceedings of the 10th International Conference on Tools
|
booktitle = {Proceedings of the 10th International Conference on Tools
|
||||||
/// and Algorithms for the Construction and Analysis of Systems
|
and Algorithms for the Construction and Analysis of Systems
|
||||||
/// (TACAS'04)},
|
(TACAS'04)},
|
||||||
/// editor = {Kurt Jensen and Andreas Podelski},
|
editor = {Kurt Jensen and Andreas Podelski},
|
||||||
/// pages = {205--219},
|
pages = {205--219},
|
||||||
/// year = {2004},
|
year = {2004},
|
||||||
/// publisher = {Springer-Verlag},
|
publisher = {Springer-Verlag},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// volume = {2988},
|
volume = {2988},
|
||||||
/// isbn = {3-540-21299-X}
|
isbn = {3-540-21299-X}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
emptiness_check* explicit_gv04_check(const tgba* a,
|
emptiness_check* explicit_gv04_check(const tgba* a,
|
||||||
option_map o = option_map());
|
option_map o = option_map());
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -28,20 +28,20 @@ namespace spot
|
||||||
/// \brief Build a spot::taa* from an LTL formula.
|
/// \brief Build a spot::taa* from an LTL formula.
|
||||||
///
|
///
|
||||||
/// This is based on the following.
|
/// This is based on the following.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @techreport{HUT-TCS-A104,
|
@techreport{HUT-TCS-A104,
|
||||||
/// address = {Espoo, Finland},
|
address = {Espoo, Finland},
|
||||||
/// author = {Heikki Tauriainen},
|
author = {Heikki Tauriainen},
|
||||||
/// month = {September},
|
month = {September},
|
||||||
/// note = {Doctoral dissertation},
|
note = {Doctoral dissertation},
|
||||||
/// number = {A104},
|
number = {A104},
|
||||||
/// pages = {xii+229},
|
pages = {xii+229},
|
||||||
/// title = {Automata and Linear Temporal Logic: Translations
|
title = {Automata and Linear Temporal Logic: Translations
|
||||||
/// with Transition-Based Acceptance},
|
with Transition-Based Acceptance},
|
||||||
/// type = {Research Report},
|
type = {Research Report},
|
||||||
/// year = {2006}
|
year = {2006}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \param f The formula to translate into an automaton.
|
/// \param f The formula to translate into an automaton.
|
||||||
/// \param dict The spot::bdd_dict the constructed automata should use.
|
/// \param dict The spot::bdd_dict the constructed automata should use.
|
||||||
|
|
|
||||||
|
|
@ -33,23 +33,23 @@ namespace spot
|
||||||
/// \brief Build a spot::tgba_explicit* from an LTL formula.
|
/// \brief Build a spot::tgba_explicit* from an LTL formula.
|
||||||
///
|
///
|
||||||
/// This is based on the following paper.
|
/// This is based on the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{couvreur.99.fm,
|
@InProceedings{couvreur.99.fm,
|
||||||
/// author = {Jean-Michel Couvreur},
|
author = {Jean-Michel Couvreur},
|
||||||
/// title = {On-the-fly Verification of Temporal Logic},
|
title = {On-the-fly Verification of Temporal Logic},
|
||||||
/// pages = {253--271},
|
pages = {253--271},
|
||||||
/// editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
editor = {Jeannette M. Wing and Jim Woodcock and Jim Davies},
|
||||||
/// booktitle = {Proceedings of the World Congress on Formal Methods in the
|
booktitle = {Proceedings of the World Congress on Formal Methods in the
|
||||||
/// Development of Computing Systems (FM'99)},
|
Development of Computing Systems (FM'99)},
|
||||||
/// publisher = {Springer-Verlag},
|
publisher = {Springer-Verlag},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// volume = {1708},
|
volume = {1708},
|
||||||
/// year = {1999},
|
year = {1999},
|
||||||
/// address = {Toulouse, France},
|
address = {Toulouse, France},
|
||||||
/// month = {September},
|
month = {September},
|
||||||
/// isbn = {3-540-66587-0}
|
isbn = {3-540-66587-0}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \param f The formula to translate into an automaton.
|
/// \param f The formula to translate into an automaton.
|
||||||
///
|
///
|
||||||
|
|
@ -70,23 +70,23 @@ namespace spot
|
||||||
/// from the same state with the same label (i.e., condition + acceptance
|
/// from the same state with the same label (i.e., condition + acceptance
|
||||||
/// conditions) will be merged. This correspond to an optimization
|
/// conditions) will be merged. This correspond to an optimization
|
||||||
/// described in the following paper.
|
/// described in the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ sebastiani.03.charme,
|
@InProceedings{ sebastiani.03.charme,
|
||||||
/// author = {Roberto Sebastiani and Stefano Tonetta},
|
author = {Roberto Sebastiani and Stefano Tonetta},
|
||||||
/// title = {"More Deterministic" vs. "Smaller" B{\"u}chi Automata for
|
title = {"More Deterministic" vs. "Smaller" B{\"u}chi Automata for
|
||||||
/// Efficient LTL Model Checking},
|
Efficient LTL Model Checking},
|
||||||
/// booktitle = {Proceedings for the 12th Advanced Research Working
|
booktitle = {Proceedings for the 12th Advanced Research Working
|
||||||
/// Conference on Correct Hardware Design and Verification
|
Conference on Correct Hardware Design and Verification
|
||||||
/// Methods (CHARME'03)},
|
Methods (CHARME'03)},
|
||||||
/// pages = {126--140},
|
pages = {126--140},
|
||||||
/// year = {2003},
|
year = {2003},
|
||||||
/// editor = {G. Goos and J. Hartmanis and J. van Leeuwen},
|
editor = {G. Goos and J. Hartmanis and J. van Leeuwen},
|
||||||
/// volume = {2860},
|
volume = {2860},
|
||||||
/// series = {Lectures Notes in Computer Science},
|
series = {Lectures Notes in Computer Science},
|
||||||
/// month = {October},
|
month = {October},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \param fair_loop_approx When set, a really simple characterization of
|
/// \param fair_loop_approx When set, a really simple characterization of
|
||||||
/// unstable state is used to suppress all acceptance conditions from
|
/// unstable state is used to suppress all acceptance conditions from
|
||||||
|
|
@ -103,22 +103,22 @@ namespace spot
|
||||||
/// for the type of reduction you want, see
|
/// for the type of reduction you want, see
|
||||||
/// spot::ltl::ltl_simplifier. This idea is taken from the
|
/// spot::ltl::ltl_simplifier. This idea is taken from the
|
||||||
/// following paper.
|
/// following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ thirioux.02.fmics,
|
@InProceedings{ thirioux.02.fmics,
|
||||||
/// author = {Xavier Thirioux},
|
author = {Xavier Thirioux},
|
||||||
/// title = {Simple and Efficient Translation from {LTL} Formulas to
|
title = {Simple and Efficient Translation from {LTL} Formulas to
|
||||||
/// {B\"u}chi Automata},
|
{B\"u}chi Automata},
|
||||||
/// booktitle = {Proceedings of the 7th International ERCIM Workshop in
|
booktitle = {Proceedings of the 7th International ERCIM Workshop in
|
||||||
/// Formal Methods for Industrial Critical Systems (FMICS'02)},
|
Formal Methods for Industrial Critical Systems (FMICS'02)},
|
||||||
/// series = {Electronic Notes in Theoretical Computer Science},
|
series = {Electronic Notes in Theoretical Computer Science},
|
||||||
/// volume = {66(2)},
|
volume = {66(2)},
|
||||||
/// publisher = {Elsevier},
|
publisher = {Elsevier},
|
||||||
/// editor = {Rance Cleaveland and Hubert Garavel},
|
editor = {Rance Cleaveland and Hubert Garavel},
|
||||||
/// year = {2002},
|
year = {2002},
|
||||||
/// month = jul,
|
month = jul,
|
||||||
/// address = {M{\'a}laga, Spain}
|
address = {M{\'a}laga, Spain}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \return A spot::tgba_explicit that recognizes the language of \a f.
|
/// \return A spot::tgba_explicit that recognizes the language of \a f.
|
||||||
tgba_explicit_formula*
|
tgba_explicit_formula*
|
||||||
|
|
|
||||||
|
|
@ -29,21 +29,21 @@ namespace spot
|
||||||
/// \brief Build a spot::tgba_bdd_concrete from an LTL formula.
|
/// \brief Build a spot::tgba_bdd_concrete from an LTL formula.
|
||||||
///
|
///
|
||||||
/// This is based on the following paper.
|
/// This is based on the following paper.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ couvreur.00.lacim,
|
@InProceedings{ couvreur.00.lacim,
|
||||||
/// author = {Jean-Michel Couvreur},
|
author = {Jean-Michel Couvreur},
|
||||||
/// title = {Un point de vue symbolique sur la logique temporelle
|
title = {Un point de vue symbolique sur la logique temporelle
|
||||||
/// lin{\'e}aire},
|
lin{\'e}aire},
|
||||||
/// booktitle = {Actes du Colloque LaCIM 2000},
|
booktitle = {Actes du Colloque LaCIM 2000},
|
||||||
/// month = {August},
|
month = {August},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// pages = {131--140},
|
pages = {131--140},
|
||||||
/// volume = {27},
|
volume = {27},
|
||||||
/// series = {Publications du LaCIM},
|
series = {Publications du LaCIM},
|
||||||
/// publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
publisher = {Universit{\'e} du Qu{\'e}bec {\`a} Montr{\'e}al},
|
||||||
/// editor = {Pierre Leroux}
|
editor = {Pierre Leroux}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
/// \param f The formula to translate into an automaton.
|
/// \param f The formula to translate into an automaton.
|
||||||
/// \param dict The spot::bdd_dict the constructed automata should use.
|
/// \param dict The spot::bdd_dict the constructed automata should use.
|
||||||
/// \return A spot::tgba_bdd_concrete that recognizes the language of \a f.
|
/// \return A spot::tgba_bdd_concrete that recognizes the language of \a f.
|
||||||
|
|
|
||||||
|
|
@ -42,55 +42,55 @@ namespace spot
|
||||||
/// (until it returns a null pointer) to enumerate all the visited acceptance
|
/// (until it returns a null pointer) to enumerate all the visited acceptance
|
||||||
/// paths. The implemented algorithm is the following:
|
/// paths. The implemented algorithm is the following:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// procedure check ()
|
procedure check ()
|
||||||
/// begin
|
begin
|
||||||
/// call dfs_blue(s0);
|
call dfs_blue(s0);
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_blue (s)
|
procedure dfs_blue (s)
|
||||||
/// begin
|
begin
|
||||||
/// s.color = blue;
|
s.color = blue;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// if t.color == white then
|
if t.color == white then
|
||||||
/// call dfs_blue(t);
|
call dfs_blue(t);
|
||||||
/// end if;
|
end if;
|
||||||
/// if (the edge (s,t) is accepting) then
|
if (the edge (s,t) is accepting) then
|
||||||
/// target = s;
|
target = s;
|
||||||
/// call dfs_red(t);
|
call dfs_red(t);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_red(s)
|
procedure dfs_red(s)
|
||||||
/// begin
|
begin
|
||||||
/// s.color = red;
|
s.color = red;
|
||||||
/// if s == target then
|
if s == target then
|
||||||
/// report cycle
|
report cycle
|
||||||
/// end if;
|
end if;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// if t.color == blue then
|
if t.color == blue then
|
||||||
/// call dfs_red(t);
|
call dfs_red(t);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// end;
|
end;
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// This algorithm is an adaptation to TBA of the one
|
/// This algorithm is an adaptation to TBA of the one
|
||||||
/// (which deals with accepting states) presented in
|
/// (which deals with accepting states) presented in
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// Article{ courcoubetis.92.fmsd,
|
Article{ courcoubetis.92.fmsd,
|
||||||
/// author = {Costas Courcoubetis and Moshe Y. Vardi and Pierre
|
author = {Costas Courcoubetis and Moshe Y. Vardi and Pierre
|
||||||
/// Wolper and Mihalis Yannakakis},
|
Wolper and Mihalis Yannakakis},
|
||||||
/// title = {Memory-Efficient Algorithm for the Verification of
|
title = {Memory-Efficient Algorithm for the Verification of
|
||||||
/// Temporal Properties},
|
Temporal Properties},
|
||||||
/// journal = {Formal Methods in System Design},
|
journal = {Formal Methods in System Design},
|
||||||
/// pages = {275--288},
|
pages = {275--288},
|
||||||
/// year = {1992},
|
year = {1992},
|
||||||
/// volume = {1}
|
volume = {1}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \bug The name is misleading. Magic-search is the algorithm
|
/// \bug The name is misleading. Magic-search is the algorithm
|
||||||
/// from \c godefroid.93.pstv, not \c courcoubetis.92.fmsd.
|
/// from \c godefroid.93.pstv, not \c courcoubetis.92.fmsd.
|
||||||
|
|
@ -105,15 +105,15 @@ namespace spot
|
||||||
/// During the visit of \a a, the returned checker does not store explicitely
|
/// During the visit of \a a, the returned checker does not store explicitely
|
||||||
/// the traversed states but uses the bit-state hashing technic presented in:
|
/// the traversed states but uses the bit-state hashing technic presented in:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @book{Holzmann91,
|
@book{Holzmann91,
|
||||||
/// author = {G.J. Holzmann},
|
author = {G.J. Holzmann},
|
||||||
/// title = {Design and Validation of Computer Protocols},
|
title = {Design and Validation of Computer Protocols},
|
||||||
/// publisher = {Prentice-Hall},
|
publisher = {Prentice-Hall},
|
||||||
/// address = {Englewood Cliffs, New Jersey},
|
address = {Englewood Cliffs, New Jersey},
|
||||||
/// year = {1991}
|
year = {1991}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Consequently, the detection of an acceptence cycle is not ensured.
|
/// Consequently, the detection of an acceptence cycle is not ensured.
|
||||||
///
|
///
|
||||||
|
|
|
||||||
|
|
@ -37,20 +37,20 @@ namespace spot
|
||||||
/// as if all states were accepting states.
|
/// as if all states were accepting states.
|
||||||
///
|
///
|
||||||
/// For more detail about monitors, see the following paper:
|
/// For more detail about monitors, see the following paper:
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ tabakov.10.rv,
|
@InProceedings{ tabakov.10.rv,
|
||||||
/// author = {Deian Tabakov and Moshe Y. Vardi},
|
author = {Deian Tabakov and Moshe Y. Vardi},
|
||||||
/// title = {Optimized Temporal Monitors for SystemC{$^*$}},
|
title = {Optimized Temporal Monitors for SystemC{$^*$}},
|
||||||
/// booktitle = {Proceedings of the 10th International Conferance
|
booktitle = {Proceedings of the 10th International Conferance
|
||||||
/// on Runtime Verification},
|
on Runtime Verification},
|
||||||
/// pages = {436--451},
|
pages = {436--451},
|
||||||
/// year = 2010,
|
year = 2010,
|
||||||
/// volume = {6418},
|
volume = {6418},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// month = nov,
|
month = nov,
|
||||||
/// publisher = {Spring-Verlag}
|
publisher = {Spring-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
/// (Note: although the above paper uses Spot, this function did not
|
/// (Note: although the above paper uses Spot, this function did not
|
||||||
/// exist in Spot at that time.)
|
/// exist in Spot at that time.)
|
||||||
///
|
///
|
||||||
|
|
@ -76,23 +76,23 @@ namespace spot
|
||||||
/// Please see the following paper for a discussion of this
|
/// Please see the following paper for a discussion of this
|
||||||
/// technique.
|
/// technique.
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ dax.07.atva,
|
@InProceedings{ dax.07.atva,
|
||||||
/// author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
||||||
/// title = {Mechanizing the Powerset Construction for Restricted
|
title = {Mechanizing the Powerset Construction for Restricted
|
||||||
/// Classes of {$\omega$}-Automata},
|
Classes of {$\omega$}-Automata},
|
||||||
/// year = 2007,
|
year = 2007,
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer-Verlag},
|
publisher = {Springer-Verlag},
|
||||||
/// volume = 4762,
|
volume = 4762,
|
||||||
/// booktitle = {Proceedings of the 5th International Symposium on
|
booktitle = {Proceedings of the 5th International Symposium on
|
||||||
/// Automated Technology for Verification and Analysis
|
Automated Technology for Verification and Analysis
|
||||||
/// (ATVA'07)},
|
(ATVA'07)},
|
||||||
/// editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
||||||
/// and Yoshio Okamura},
|
and Yoshio Okamura},
|
||||||
/// month = oct
|
month = oct
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
sba_explicit_number* minimize_wdba(const tgba* a);
|
sba_explicit_number* minimize_wdba(const tgba* a);
|
||||||
|
|
||||||
/// \brief Minimize an automaton if it represents an obligation property.
|
/// \brief Minimize an automaton if it represents an obligation property.
|
||||||
|
|
@ -101,23 +101,23 @@ namespace spot
|
||||||
/// algorithm implemented in the minimize_wdba() function, and presented
|
/// algorithm implemented in the minimize_wdba() function, and presented
|
||||||
/// by the following paper:
|
/// by the following paper:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ dax.07.atva,
|
@InProceedings{ dax.07.atva,
|
||||||
/// author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
author = {Christian Dax and Jochen Eisinger and Felix Klaedtke},
|
||||||
/// title = {Mechanizing the Powerset Construction for Restricted
|
title = {Mechanizing the Powerset Construction for Restricted
|
||||||
/// Classes of {$\omega$}-Automata},
|
Classes of {$\omega$}-Automata},
|
||||||
/// year = 2007,
|
year = 2007,
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// publisher = {Springer-Verlag},
|
publisher = {Springer-Verlag},
|
||||||
/// volume = 4762,
|
volume = 4762,
|
||||||
/// booktitle = {Proceedings of the 5th International Symposium on
|
booktitle = {Proceedings of the 5th International Symposium on
|
||||||
/// Automated Technology for Verification and Analysis
|
Automated Technology for Verification and Analysis
|
||||||
/// (ATVA'07)},
|
(ATVA'07)},
|
||||||
/// editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
editor = {Kedar S. Namjoshi and Tomohiro Yoneda and Teruo Higashino
|
||||||
/// and Yoshio Okamura},
|
and Yoshio Okamura},
|
||||||
/// month = oct
|
month = oct
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Because it is hard to determine if an automaton corresponds
|
/// Because it is hard to determine if an automaton corresponds
|
||||||
/// to an obligation property, you should supply either the formula
|
/// to an obligation property, you should supply either the formula
|
||||||
|
|
|
||||||
|
|
@ -53,21 +53,21 @@ namespace spot
|
||||||
/// \param env The environment in which to declare the acceptance conditions.
|
/// \param env The environment in which to declare the acceptance conditions.
|
||||||
///
|
///
|
||||||
/// This algorithms is adapted from the one in Fig 6.2 page 48 of
|
/// This algorithms is adapted from the one in Fig 6.2 page 48 of
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @TechReport{ tauriainen.00.a66,
|
@TechReport{ tauriainen.00.a66,
|
||||||
/// author = {Heikki Tauriainen},
|
author = {Heikki Tauriainen},
|
||||||
/// title = {Automated Testing of {B\"u}chi Automata Translators for
|
title = {Automated Testing of {B\"u}chi Automata Translators for
|
||||||
/// {L}inear {T}emporal {L}ogic},
|
{L}inear {T}emporal {L}ogic},
|
||||||
/// address = {Espoo, Finland},
|
address = {Espoo, Finland},
|
||||||
/// institution = {Helsinki University of Technology, Laboratory for
|
institution = {Helsinki University of Technology, Laboratory for
|
||||||
/// Theoretical Computer Science},
|
Theoretical Computer Science},
|
||||||
/// number = {A66},
|
number = {A66},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// url = {http://citeseer.nj.nec.com/tauriainen00automated.html},
|
url = {http://citeseer.nj.nec.com/tauriainen00automated.html},
|
||||||
/// type = {Research Report},
|
type = {Research Report},
|
||||||
/// note = {Reprint of Master's thesis}
|
note = {Reprint of Master's thesis}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Although the intent is similar, there are some differences
|
/// Although the intent is similar, there are some differences
|
||||||
/// between the above published algorithm and this implementation.
|
/// between the above published algorithm and this implementation.
|
||||||
|
|
|
||||||
|
|
@ -43,59 +43,59 @@ namespace spot
|
||||||
/// paths. The implemented algorithm is an optimization of
|
/// paths. The implemented algorithm is an optimization of
|
||||||
/// spot::explicit_magic_search and is the following:
|
/// spot::explicit_magic_search and is the following:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// procedure check ()
|
procedure check ()
|
||||||
/// begin
|
begin
|
||||||
/// call dfs_blue(s0);
|
call dfs_blue(s0);
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_blue (s)
|
procedure dfs_blue (s)
|
||||||
/// begin
|
begin
|
||||||
/// s.color = cyan;
|
s.color = cyan;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// if t.color == white then
|
if t.color == white then
|
||||||
/// call dfs_blue(t);
|
call dfs_blue(t);
|
||||||
/// else if t.color == cyan and
|
else if t.color == cyan and
|
||||||
/// (the edge (s,t) is accepting or
|
(the edge (s,t) is accepting or
|
||||||
/// (it exists a predecessor p of s in st_blue and s != t and
|
(it exists a predecessor p of s in st_blue and s != t and
|
||||||
/// the arc between p and s is accepting)) then
|
the arc between p and s is accepting)) then
|
||||||
/// report cycle;
|
report cycle;
|
||||||
/// end if;
|
end if;
|
||||||
/// if the edge (s,t) is accepting then
|
if the edge (s,t) is accepting then
|
||||||
/// call dfs_red(t);
|
call dfs_red(t);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// s.color = blue;
|
s.color = blue;
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_red(s)
|
procedure dfs_red(s)
|
||||||
/// begin
|
begin
|
||||||
/// if s.color == cyan then
|
if s.color == cyan then
|
||||||
/// report cycle;
|
report cycle;
|
||||||
/// end if;
|
end if;
|
||||||
/// s.color = red;
|
s.color = red;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// if t.color == blue then
|
if t.color == blue then
|
||||||
/// call dfs_red(t);
|
call dfs_red(t);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// end;
|
end;
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// It is an adaptation to TBA of the one presented in
|
/// It is an adaptation to TBA of the one presented in
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @techreport{SE04,
|
@techreport{SE04,
|
||||||
/// author = {Stefan Schwoon and Javier Esparza},
|
author = {Stefan Schwoon and Javier Esparza},
|
||||||
/// institution = {Universit{\"a}t Stuttgart, Fakult\"at Informatik,
|
institution = {Universit{\"a}t Stuttgart, Fakult\"at Informatik,
|
||||||
/// Elektrotechnik und Informationstechnik},
|
Elektrotechnik und Informationstechnik},
|
||||||
/// month = {November},
|
month = {November},
|
||||||
/// number = {2004/06},
|
number = {2004/06},
|
||||||
/// title = {A Note on On-The-Fly Verification Algorithms},
|
title = {A Note on On-The-Fly Verification Algorithms},
|
||||||
/// year = {2004},
|
year = {2004},
|
||||||
/// url =
|
url =
|
||||||
///{http://www.fmi.uni-stuttgart.de/szs/publications/info/schwoosn.SE04.shtml}
|
{http://www.fmi.uni-stuttgart.de/szs/publications/info/schwoosn.SE04.shtml}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// \sa spot::explicit_magic_search
|
/// \sa spot::explicit_magic_search
|
||||||
///
|
///
|
||||||
|
|
@ -109,15 +109,15 @@ namespace spot
|
||||||
/// During the visit of \a a, the returned checker does not store explicitly
|
/// During the visit of \a a, the returned checker does not store explicitly
|
||||||
/// the traversed states but uses the bit-state hashing technique presented in:
|
/// the traversed states but uses the bit-state hashing technique presented in:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @book{Holzmann91,
|
@book{Holzmann91,
|
||||||
/// author = {G.J. Holzmann},
|
author = {G.J. Holzmann},
|
||||||
/// title = {Design and Validation of Computer Protocols},
|
title = {Design and Validation of Computer Protocols},
|
||||||
/// publisher = {Prentice-Hall},
|
publisher = {Prentice-Hall},
|
||||||
/// address = {Englewood Cliffs, New Jersey},
|
address = {Englewood Cliffs, New Jersey},
|
||||||
/// year = {1991}
|
year = {1991}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Consequently, the detection of an acceptance cycle is not ensured.
|
/// Consequently, the detection of an acceptance cycle is not ensured.
|
||||||
///
|
///
|
||||||
|
|
|
||||||
|
|
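The cyan/blue/red pseudocode in the hunk above is small enough to transcribe. The stand-alone sketch below runs that search on a toy two-state graph; the edge_t/graph_t types, the acc_into_s flag (standing for "the blue-stack edge leading into s is accepting"), and reporting through a boolean are illustrative choices, not Spot's actual data structures.

// Stand-alone transcription of the dfs_blue/dfs_red pseudocode above.
#include <cstdio>
#include <vector>

enum color_t { WHITE, CYAN, BLUE, RED };

struct edge_t { int dst; bool accepting; };

struct graph_t
{
  std::vector<std::vector<edge_t> > succ; // succ[s] == post(s)
  std::vector<color_t> color;             // all states start WHITE
};

static bool cycle_found = false;

void dfs_red(graph_t& g, int s)
{
  if (g.color[s] == CYAN)                 // closed an accepting cycle
    {
      cycle_found = true;
      return;
    }
  g.color[s] = RED;
  for (unsigned i = 0; i < g.succ[s].size(); ++i)
    if (g.color[g.succ[s][i].dst] == BLUE)
      dfs_red(g, g.succ[s][i].dst);
}

// acc_into_s: whether the blue-stack edge leading into s is accepting.
void dfs_blue(graph_t& g, int s, bool acc_into_s)
{
  g.color[s] = CYAN;
  for (unsigned i = 0; i < g.succ[s].size(); ++i)
    {
      const edge_t& e = g.succ[s][i];
      if (g.color[e.dst] == WHITE)
        dfs_blue(g, e.dst, e.accepting);
      else if (g.color[e.dst] == CYAN
               && (e.accepting || (acc_into_s && s != e.dst)))
        cycle_found = true;               // report cycle
      if (e.accepting)
        dfs_red(g, e.dst);
    }
  g.color[s] = BLUE;
}

int main()
{
  // Toy graph: 0 --> 1 --(accepting)--> 0, i.e. one accepting cycle.
  graph_t g;
  g.succ.resize(2);
  g.color.assign(2, WHITE);
  edge_t e01 = { 1, false };
  edge_t e10 = { 0, true };
  g.succ[0].push_back(e01);
  g.succ[1].push_back(e10);
  dfs_blue(g, 0, false);
  std::puts(cycle_found ? "accepting cycle found" : "no accepting cycle");
  return 0;
}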
@ -36,21 +36,21 @@ namespace spot
|
||||||
/// based on the following paper, but generalized to handle TGBA
|
/// based on the following paper, but generalized to handle TGBA
|
||||||
/// directly.
|
/// directly.
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ etessami.00.concur,
|
@InProceedings{ etessami.00.concur,
|
||||||
/// author = {Kousha Etessami and Gerard J. Holzmann},
|
author = {Kousha Etessami and Gerard J. Holzmann},
|
||||||
/// title = {Optimizing {B\"u}chi Automata},
|
title = {Optimizing {B\"u}chi Automata},
|
||||||
/// booktitle = {Proceedings of the 11th International Conference on
|
booktitle = {Proceedings of the 11th International Conference on
|
||||||
/// Concurrency Theory (Concur'00)},
|
Concurrency Theory (Concur'00)},
|
||||||
/// pages = {153--167},
|
pages = {153--167},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// editor = {C. Palamidessi},
|
editor = {C. Palamidessi},
|
||||||
/// volume = {1877},
|
volume = {1877},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// address = {Pennsylvania, USA},
|
address = {Pennsylvania, USA},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Our reconstruction of the quotient automaton based on this
|
/// Our reconstruction of the quotient automaton based on this
|
||||||
/// suffix-inclusion relation will also improve determinism.
|
/// suffix-inclusion relation will also improve determinism.
|
||||||
|
|
@ -80,20 +80,20 @@ namespace spot
|
||||||
///
|
///
|
||||||
/// Reverse simulation is discussed in the
|
/// Reverse simulation is discussed in the
|
||||||
/// following paper, but generalized to handle TGBA directly.
|
/// following paper, but generalized to handle TGBA directly.
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @InProceedings{ somenzi.00.cav,
|
@InProceedings{ somenzi.00.cav,
|
||||||
/// author = {Fabio Somenzi and Roderick Bloem},
|
author = {Fabio Somenzi and Roderick Bloem},
|
||||||
/// title = {Efficient {B\"u}chi Automata for {LTL} Formul{\ae}},
|
title = {Efficient {B\"u}chi Automata for {LTL} Formul{\ae}},
|
||||||
/// booktitle = {Proceedings of the 12th International Conference on
|
booktitle = {Proceedings of the 12th International Conference on
|
||||||
/// Computer Aided Verification (CAV'00)},
|
Computer Aided Verification (CAV'00)},
|
||||||
/// pages = {247--263},
|
pages = {247--263},
|
||||||
/// year = {2000},
|
year = {2000},
|
||||||
/// volume = {1855},
|
volume = {1855},
|
||||||
/// series = {Lecture Notes in Computer Science},
|
series = {Lecture Notes in Computer Science},
|
||||||
/// address = {Chicago, Illinois, USA},
|
address = {Chicago, Illinois, USA},
|
||||||
/// publisher = {Springer-Verlag}
|
publisher = {Springer-Verlag}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// Our reconstruction of the quotient automaton based on this
|
/// Our reconstruction of the quotient automaton based on this
|
||||||
/// prefix-inclusion relation will also improve codeterminism.
|
/// prefix-inclusion relation will also improve codeterminism.
|
||||||
|
|
|
||||||
|
|
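The two hunks above only describe the direct- and reverse-simulation reductions; the declarations themselves are not part of this diff. The sketch below shows how the two quotients would typically be chained; the names spot::simulation() and spot::cosimulation(), their exact signatures, and caller ownership of the results are assumptions about simulation.hh rather than facts taken from this commit.

// Hypothetical chaining of the two reductions documented above.
// The signatures used here are assumptions, not taken from this diff.
#include "tgbaalgos/simulation.hh"

const spot::tgba*
reduce_by_simulations(const spot::tgba* aut)
{
  // Quotient by direct simulation (suffix inclusion), then by reverse
  // simulation (prefix inclusion), as discussed in the hunks above.
  spot::tgba* direct = spot::simulation(aut);
  spot::tgba* both = spot::cosimulation(direct);
  delete direct;
  return both;
}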
@ -37,61 +37,61 @@ namespace spot
|
||||||
/// During the visit of \a a, the returned checker stores explicitly all
|
/// During the visit of \a a, the returned checker stores explicitly all
|
||||||
/// the traversed states. The implemented algorithm is the following:
|
/// the traversed states. The implemented algorithm is the following:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// procedure check ()
|
procedure check ()
|
||||||
/// begin
|
begin
|
||||||
/// call dfs_blue(s0);
|
call dfs_blue(s0);
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_blue (s)
|
procedure dfs_blue (s)
|
||||||
/// begin
|
begin
|
||||||
/// s.color = blue;
|
s.color = blue;
|
||||||
/// s.acc = emptyset;
|
s.acc = emptyset;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// if t.color == white then
|
if t.color == white then
|
||||||
/// call dfs_blue(t);
|
call dfs_blue(t);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// let (s, l, a, t) be the edge from s to t;
|
let (s, l, a, t) be the edge from s to t;
|
||||||
/// if s.acc U a not included in t.acc then
|
if s.acc U a not included in t.acc then
|
||||||
/// call dfs_red(t, a U s.acc);
|
call dfs_red(t, a U s.acc);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// if s.acc == all_acc then
|
if s.acc == all_acc then
|
||||||
/// report a cycle;
|
report a cycle;
|
||||||
/// end if;
|
end if;
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_red(s, A)
|
procedure dfs_red(s, A)
|
||||||
/// begin
|
begin
|
||||||
/// s.acc = s.acc U A;
|
s.acc = s.acc U A;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// if t.color != white and A not included in t.acc then
|
if t.color != white and A not included in t.acc then
|
||||||
/// call dfs_red(t, A);
|
call dfs_red(t, A);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// end;
|
end;
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// This algorithm is the one presented in
|
/// This algorithm is the one presented in
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// @techreport{HUT-TCS-A83,
|
@techreport{HUT-TCS-A83,
|
||||||
/// address = {Espoo, Finland},
|
address = {Espoo, Finland},
|
||||||
/// author = {Heikki Tauriainen},
|
author = {Heikki Tauriainen},
|
||||||
/// institution = {Helsinki University of Technology, Laboratory for
|
institution = {Helsinki University of Technology, Laboratory for
|
||||||
/// Theoretical Computer Science},
|
Theoretical Computer Science},
|
||||||
/// month = {December},
|
month = {December},
|
||||||
/// number = {A83},
|
number = {A83},
|
||||||
/// pages = {132},
|
pages = {132},
|
||||||
/// title = {On Translating Linear Temporal Logic into Alternating and
|
title = {On Translating Linear Temporal Logic into Alternating and
|
||||||
/// Nondeterministic Automata},
|
Nondeterministic Automata},
|
||||||
/// type = {Research Report},
|
type = {Research Report},
|
||||||
/// year = {2003},
|
year = {2003},
|
||||||
/// url = {http://www.tcs.hut.fi/Publications/info/bibdb.HUT-TCS-A83.shtml}
|
url = {http://www.tcs.hut.fi/Publications/info/bibdb.HUT-TCS-A83.shtml}
|
||||||
/// }
|
}
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
emptiness_check* explicit_tau03_search(const tgba *a,
|
emptiness_check* explicit_tau03_search(const tgba *a,
|
||||||
option_map o = option_map());
|
option_map o = option_map());
|
||||||
|
|
|
||||||
|
|
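The declaration of explicit_tau03_search() is the one piece of code visible in the hunk above. A minimal emptiness-check sketch built on it follows; the assumption that spot::emptiness_check::check() returns a null emptiness_check_result pointer exactly when the language is empty comes from the rest of the library, not from this diff, and `aut` stands for any TGBA obtained elsewhere.

// Usage sketch for the declaration shown above; check()'s behaviour is an
// assumption about spot::emptiness_check, not part of this hunk.
#include "tgbaalgos/tau03.hh"

bool
language_is_empty_tau03(const spot::tgba* aut)
{
  spot::emptiness_check* ec = spot::explicit_tau03_search(aut);
  spot::emptiness_check_result* res = ec->check();
  bool empty = (res == 0);      // no accepting run was found
  delete res;                   // deleting a null pointer is a no-op
  delete ec;
  return empty;
}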
@ -37,55 +37,55 @@ namespace spot
|
||||||
/// During the visit of \a a, the returned checker stores explicitly all
|
/// During the visit of \a a, the returned checker stores explicitly all
|
||||||
/// the traversed states. The implemented algorithm is the following:
|
/// the traversed states. The implemented algorithm is the following:
|
||||||
///
|
///
|
||||||
/// \verbatim
|
/** \verbatim
|
||||||
/// procedure check ()
|
procedure check ()
|
||||||
/// begin
|
begin
|
||||||
/// weight = 0; // the null vector
|
weight = 0; // the null vector
|
||||||
/// call dfs_blue(s0);
|
call dfs_blue(s0);
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_blue (s)
|
procedure dfs_blue (s)
|
||||||
/// begin
|
begin
|
||||||
/// s.color = cyan;
|
s.color = cyan;
|
||||||
/// s.acc = emptyset;
|
s.acc = emptyset;
|
||||||
/// s.weight = weight;
|
s.weight = weight;
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// let (s, l, a, t) be the edge from s to t;
|
let (s, l, a, t) be the edge from s to t;
|
||||||
/// if t.color == white then
|
if t.color == white then
|
||||||
/// for all b in a do
|
for all b in a do
|
||||||
/// weight[b] = weight[b] + 1;
|
weight[b] = weight[b] + 1;
|
||||||
/// end for;
|
end for;
|
||||||
/// call dfs_blue(t);
|
call dfs_blue(t);
|
||||||
/// for all b in a do
|
for all b in a do
|
||||||
/// weight[b] = weight[b] - 1;
|
weight[b] = weight[b] - 1;
|
||||||
/// end for;
|
end for;
|
||||||
/// end if;
|
end if;
|
||||||
/// Acc = s.acc U a;
|
Acc = s.acc U a;
|
||||||
/// if t.color == cyan &&
|
if t.color == cyan &&
|
||||||
/// (Acc U support(weight - t.weight) U t.acc) == all_acc then
|
(Acc U support(weight - t.weight) U t.acc) == all_acc then
|
||||||
/// report a cycle;
|
report a cycle;
|
||||||
/// else if Acc not included in t.acc then
|
else if Acc not included in t.acc then
|
||||||
/// t.acc := t.acc U Acc;
|
t.acc := t.acc U Acc;
|
||||||
/// call dfs_red(t, Acc);
|
call dfs_red(t, Acc);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// s.color = blue;
|
s.color = blue;
|
||||||
/// end;
|
end;
|
||||||
///
|
|
||||||
/// procedure dfs_red(s, Acc)
|
procedure dfs_red(s, Acc)
|
||||||
/// begin
|
begin
|
||||||
/// for all t in post(s) do
|
for all t in post(s) do
|
||||||
/// let (s, l, a, t) be the edge from s to t;
|
let (s, l, a, t) be the edge from s to t;
|
||||||
/// if t.color == cyan &&
|
if t.color == cyan &&
|
||||||
/// (Acc U support(weight - t.weight) U t.acc) == all_acc then
|
(Acc U support(weight - t.weight) U t.acc) == all_acc then
|
||||||
/// report a cycle;
|
report a cycle;
|
||||||
/// else if t.color != white and Acc not included in t.acc then
|
else if t.color != white and Acc not included in t.acc then
|
||||||
/// t.acc := t.acc U Acc;
|
t.acc := t.acc U Acc;
|
||||||
/// call dfs_red(t, Acc);
|
call dfs_red(t, Acc);
|
||||||
/// end if;
|
end if;
|
||||||
/// end for;
|
end for;
|
||||||
/// end;
|
end;
|
||||||
/// \endverbatim
|
\endverbatim */
|
||||||
///
|
///
|
||||||
/// This algorithm is a generalisation to TGBA of the one implemented in
|
/// This algorithm is a generalisation to TGBA of the one implemented in
|
||||||
/// spot::explicit_se05_search. It is based on the acceptance set labelling
|
/// spot::explicit_se05_search. It is based on the acceptance set labelling
|
||||||
|
|
|
||||||
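What this last pseudocode adds over the previous search is the per-acceptance-condition weight vector and the test on support(weight - t.weight). The stand-alone snippet below illustrates just that bookkeeping; representing acceptance conditions as plain integer indices is an illustrative simplification of how the library actually encodes them.

// Illustration of the weight bookkeeping from the pseudocode above:
// weight[b] counts how many edges labelled with acceptance condition b
// are on the current blue-DFS path, and support(weight - t.weight) is the
// set of conditions seen strictly since t was pushed.
#include <cstdio>
#include <set>
#include <vector>

typedef std::vector<int> weight_vec;    // one counter per acceptance condition

// support(a - b): indices where the two weight vectors differ.
std::set<unsigned> support_diff(const weight_vec& a, const weight_vec& b)
{
  std::set<unsigned> s;
  for (unsigned i = 0; i < a.size(); ++i)
    if (a[i] != b[i])
      s.insert(i);
  return s;
}

int main()
{
  const unsigned nb_acc = 3;            // three acceptance conditions: 0, 1, 2
  weight_vec weight(nb_acc, 0);         // global weight, as in check()
  weight_vec t_weight(nb_acc, 0);       // copy recorded when t was visited

  // Following an edge labelled with acceptance conditions {0, 2}
  // increments the corresponding counters, as in dfs_blue above.
  weight[0] += 1;
  weight[2] += 1;

  std::set<unsigned> seen = support_diff(weight, t_weight);
  std::printf("conditions seen on the path back to t:");
  for (std::set<unsigned>::const_iterator i = seen.begin();
       i != seen.end(); ++i)
    std::printf(" %u", *i);
  std::printf("\n");                    // prints: 0 2
  return 0;
}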