Compare commits

...

8 commits

SHA1 Message Date

e4022da76f translate_aa: fix construction for transition based acc
* spot/twaalgos/translate_aa.cc: Here.
2025-10-15 16:32:03 +02:00

fbb5675986 translate_aa: expose lf-trans option
* spot/twaalgos/translate_aa.cc:
2025-10-15 14:25:51 +02:00

514ef110f2 translate_aa: Factorize sere translation choice
* spot/twaalgos/translate_aa.cc: Here.
2025-10-15 13:20:58 +02:00

e53bd32631 translate_aa: rename expansion option to lf
* spot/twaalgos/translate_aa.cc: Here.
2025-10-15 13:02:26 +02:00

fface984ca sonf: fix recursion of rewriting, was only called on operand
* spot/tl/sonf.cc: Here.
2025-10-15 12:52:55 +02:00

8efea81c75 merge
2025-03-17 16:11:36 +01:00

059c5072df nix: provide package in release tarballs
2025-03-17 16:11:36 +01:00

42221100dd nix: setup Nix Flake file
* flake.nix, flake.lock: here
2025-03-17 16:11:36 +01:00
9 changed files with 424 additions and 62 deletions

.gitignore vendored (1 changed line)

@@ -82,3 +82,4 @@ GTAGS
*.dsc
*.gcov
spot.spec
default.nix

Makefile.am

@@ -65,7 +65,8 @@ EXTRA_DIST = HACKING ChangeLog.1 tools/gitlog-to-changelog \
tools/help2man tools/man2html.pl \
tools/test-driver-teamcity $(UTF8) $(DEBIAN) \
m4/gnulib-cache.m4 .dir-locals.el \
spot.spec spot.spec.in README.ltsmin
spot.spec spot.spec.in README.ltsmin \
default.nix default.nix.in
dist-hook: gen-ChangeLog
@@ -111,3 +112,6 @@ deb: dist
spot.spec: configure.ac spot.spec.in
sed 's/[@]VERSION[@]/$(VERSION)/;s/[@]GITPATCH[@]/@@@$(GITPATCH)/;s/@@@\.//' spot.spec.in > $@.tmp && mv $@.tmp $@
default.nix: configure.ac default.nix.in
sed 's/[@]VERSION[@]/$(VERSION)/' default.nix.in > $@.tmp && mv $@.tmp $@

default.nix.in (new file, 35 lines)

@@ -0,0 +1,35 @@
# -*- mode: nix; coding: utf-8 -*-
# Copyright (C) 2022 Laboratoire de Recherche et Développement de l'Epita
# (LRDE).
#
# This file is part of Spot, a model checking library.
#
# Spot is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Spot is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{ pkgs ? import <nixpkgs> {} }:
let
version = "@VERSION@";
in
pkgs.stdenv.mkDerivation {
inherit version;
pname = "spot";
buildInputs = [
pkgs.python3
];
src = ./.;
enableParallelBuilding = true;
}

flake.lock (generated, new file, 61 lines)

@@ -0,0 +1,61 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1741196730,
"narHash": "sha256-0Sj6ZKjCpQMfWnN0NURqRCQn2ob7YtXTAOTwCuz7fkA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "48913d8f9127ea6530a2a2f1bd4daa1b8685d8a3",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-24.11",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

flake.nix (new file, 214 lines)

@@ -0,0 +1,214 @@
{
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
flake-utils.url = "github:numtide/flake-utils";
};
outputs = { self, nixpkgs, flake-utils, ... }:
flake-utils.lib.eachSystem
[
"x86_64-linux"
]
(system:
let
pkgs = import nixpkgs { inherit system; };
lib = pkgs.lib;
mkSpotApps = appNames:
pkgs.lib.genAttrs appNames
(name: flake-utils.lib.mkApp {
drv = self.packages.${system}.spot;
name = name;
});
spotPackage =
let
inherit (builtins)
filter
head
isString
match
readFile
split
;
# NOTE: Maintaining the version separately would be a pain, and we
# can't have a flake.nix.in with a @VERSION@ because it would make
# the flake unusable without running autoconf first, defeating some
# of its purpose.
#
# So let's get it the hard way instead :)
extractVersionRegex = ''^AC_INIT\(\[spot], \[([^]]+)], \[spot@lrde\.epita\.fr]\)$'';
getLines = (fileContent:
filter isString (split "\n" fileContent)
);
findVersionLine = (lines:
lib.lists.findFirst
(l: lib.strings.hasPrefix "AC_INIT(" l)
null
lines
);
getVersion = (file:
let
lines = getLines (readFile file);
versionLine = findVersionLine lines;
version = head (match extractVersionRegex versionLine);
in
version
);
in
{
lib,
pkgs,
stdenv,
# FIXME: do we want this flag?
buildOrgDoc ? false,
# Whether to enable Spot's Python 3 bindings
enablePython ? false
}:
stdenv.mkDerivation {
pname = "spot";
version = getVersion ./configure.ac;
src = self;
enableParallelBuilding = true;
# NOTE: Nix enables a lot of hardening flags by default, some of
# these probably harm performance so I've disabled everything
# (haven't benchmarked with vs without these, though).
hardeningDisable = [ "all" ];
# NOTE: mktexpk fails without a HOME set
preBuild = ''
export HOME=$TMPDIR
patchShebangs tools
'' + (if buildOrgDoc then ''
ln -s ${pkgs.plantuml}/lib/plantuml.jar doc/org/plantuml.jar
'' else ''
touch doc/org-stamp
'');
configureFlags = [
"--disable-devel"
"--enable-optimizations"
] ++ lib.optional (!enablePython) [
"--disable-python"
];
nativeBuildInputs = with pkgs; [
autoreconfHook
autoconf
automake
bison
flex
libtool
perl
] ++ lib.optional buildOrgDoc [
graphviz
groff
plantuml
pdf2svg
R
] ++ lib.optional enablePython [
python3
swig4
];
buildInputs = with pkgs; [
# should provide the minimum amount of packages necessary for
# building tl.pdf
(texlive.combine {
inherit (texlive)
scheme-basic
latexmk
booktabs
cm-super
doi
doublestroke
etoolbox
koma-script
mathabx-type1
mathpazo
metafont
microtype
nag
pgf
standalone
stmaryrd
tabulary
todonotes
wasy-type1
wasysym
;
})
];
};
in
{
defaultPackage = self.packages.${system}.spot;
packages = {
# binaries + library only
spot = pkgs.callPackage spotPackage {};
# NOTE: clang build is broken on Nix when linking to stdlib++, using
# libcxx instead. See:
# https://github.com/NixOS/nixpkgs/issues/91285
spotClang = pkgs.callPackage spotPackage {
stdenv = pkgs.llvmPackages.libcxxStdenv;
};
spotWithOrgDoc = pkgs.callPackage spotPackage {
buildOrgDoc = true;
};
spotWithPython = pkgs.python3Packages.toPythonModule (
pkgs.callPackage spotPackage {
enablePython = true;
}
);
spotFull = pkgs.python3Packages.toPythonModule (
pkgs.callPackage spotPackage {
buildOrgDoc = true; enablePython = true;
}
);
};
apps = mkSpotApps [
"autcross"
"autfilt"
"dstar2tgba"
"genaut"
"genltl"
"ltl2tgba"
"ltl2tgta"
"ltlcross"
"ltldo"
"ltlfilt"
"ltlgrind"
"ltlmix"
"ltlsynt"
"randaut"
"randltl"
];
devShell = pkgs.mkShell {
name = "spot-dev";
inputsFrom = [ self.packages.${system}.spotFull ];
buildInputs = [
pkgs.gdb
pkgs.clang-tools # for clangd
pkgs.bear
(pkgs.python3.withPackages (p: [
p.jupyter
p.ipython # otherwise ipython module isn't found when running ipynb tests
]))
];
};
});
}

spot/tl/sonf.cc

@@ -131,7 +131,7 @@ namespace spot
{
// recurse into rhs first (_ []-> rhs)
formula rhs =
f[1].map(extractor, extracted, extractor, false, false);
extractor(f[1], extracted, extractor, false, false);
f = formula::binop(kind, f[0], rhs);
formula ap = formula::ap(new_ap_name());
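
The fix above swaps formula::map, which applies its callback to the operands
of f[1], for a direct call on f[1] itself. A small stand-alone illustration of
that difference; the formula and the callback are made up for the example and
are not part of the patch:

    #include <spot/tl/parse.hh>
    #include <iostream>

    int main()
    {
      spot::formula f = spot::parse_formula("{a;b}[]-> (c U d)");
      auto dump = [](spot::formula g) { std::cout << g << '\n'; return g; };
      f[1].map(dump);   // callback sees the operands of f[1]: "c" and "d"
      dump(f[1]);       // callback sees f[1] itself: "c U d", as sonf needs
      return 0;
    }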


@@ -39,6 +39,7 @@ namespace spot
}
bdd outedge_combiner::operator()(unsigned st, const std::vector<unsigned>& dst_filter,
const std::vector<std::tuple<unsigned, bdd, unsigned>>& edge_filter,
bool remove_original_edges)
{
const auto& dict = aut_->get_dict();
@@ -49,7 +50,23 @@ namespace spot
for (auto& e: aut_->out(d1))
{
// handle edge filtering
if (!dst_filter.empty())
if (!edge_filter.empty())
{
// Trying all univ dests for e, find if there was at least one
// compatible edge that was accepting in the original TFA
auto univ_dests = aut_->univ_dests(e.dst);
if (std::all_of(univ_dests.begin(), univ_dests.end(),
[&](unsigned dst) {
for (const auto& acc_e : edge_filter)
if(std::get<0>(acc_e) == e.src
&& std::get<2>(acc_e) == dst
&& bdd_implies(e.cond, std::get<1>(acc_e)))
return false; // false because we don't want to skip it
return true;
}))
continue;
}
else if (!dst_filter.empty()) // same for state-based acc
{
// if any edge destination is an accepting state in the SERE
// automaton, handle the edge, otherwise skip it
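
The compatibility test added above relies on bdd_implies(e.cond, cond): the
edge label has to imply the filter entry's condition. A tiny stand-alone check
of what that accepts and rejects (illustrative only; bddx.h is the BuDDy
header shipped with Spot):

    #include <bddx.h>
    #include <cassert>

    int main()
    {
      bdd_init(1000, 1000);
      bdd_setvarnum(2);
      bdd a = bdd_ithvar(0), b = bdd_ithvar(1);
      assert(bdd_implies(a & b, a));   // a&b implies a: the entry matches
      assert(!bdd_implies(a, a & b));  // a does not imply a&b: no match
      bdd_done();
      return 0;
    }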


@@ -54,6 +54,7 @@ namespace spot
outedge_combiner(const twa_graph_ptr& aut, unsigned sink = -1u);
~outedge_combiner();
bdd operator()(unsigned st, const std::vector<unsigned>& dst_filter = std::vector<unsigned>(),
const std::vector<std::tuple<unsigned, bdd, unsigned>>& edge_filter = std::vector<std::tuple<unsigned, bdd, unsigned>>(),
bool remove_original_edges = false);
void new_dests(unsigned st, bdd out) const;
};
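
A hypothetical call of the extended operator(), just to show where the new
edge_filter argument slots in. The automaton and the filter triple are
invented for the sketch; only the (st, dst_filter, edge_filter,
remove_original_edges) signature comes from the declaration above, and the
header declaring outedge_combiner is not named in this extract:

    #include <spot/twa/twagraph.hh>
    #include <tuple>
    #include <vector>

    int main()
    {
      auto dict = spot::make_bdd_dict();
      auto aut = spot::make_twa_graph(dict);
      bdd p = bdd_ithvar(aut->register_ap("p"));
      aut->new_states(3);
      aut->new_edge(0, 1, p);
      aut->new_edge(0, 2, p);

      spot::outedge_combiner oe(aut);
      // (src, cond, dst) triples for edges that carried acceptance marks in
      // the original transition-based automaton (made-up values):
      std::vector<std::tuple<unsigned, bdd, unsigned>> acc_edges;
      acc_edges.emplace_back(0u, p, 2u);
      std::vector<unsigned> acc_states;  // state-based filter left empty
      bdd comb = oe(0, acc_states, acc_edges, /*remove_original_edges=*/true);
      oe.new_dests(0, comb);
      return 0;
    }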

spot/twaalgos/translate_aa.cc

@@ -26,6 +26,7 @@
#include <spot/twaalgos/translate_aa.hh>
#include <spot/tl/derive.hh>
#include <spot/tl/expansions.hh>
#include <spot/tl/expansions2.hh>
#include <spot/tl/nenoform.hh>
#include <iostream>
@@ -34,6 +35,24 @@ namespace spot
{
namespace
{
twa_graph_ptr sere_aa_translate(formula f, const bdd_dict_ptr& dict)
{
// old bdd method
if (sere_aa_translation_options() == 0)
return sere_to_tgba(f, dict, true);
// derivation
if (sere_aa_translation_options() == 1)
return derive_finite_automaton_with_first(f, dict);
// linear form
if (sere_aa_translation_options() == 2)
return expand_finite_automaton(f, dict, exp_opts::expand_opt::None);
// linear form - trans-based
return expand_finite_automaton2(f, dict, exp_opts::expand_opt::None);
}
struct ltl_to_aa_builder
{
ltl_to_aa_builder(twa_graph_ptr aut, unsigned accepting_sink)
@@ -78,10 +97,11 @@ namespace spot
unsigned copy_sere_aut_to_res(twa_graph_ptr sere_aut,
std::map<unsigned, unsigned>& old_to_new,
std::vector<unsigned>* acc_states = nullptr,
std::vector<unsigned>* acc_edges = nullptr,
bool use_accepting_sink = true)
{
unsigned ns = sere_aut->num_states();
bool trans_based = sere_aut->prop_state_acc().is_false();
// TODO: create all new states at once, keeping an initial offset (the
// number of states already present in aut_)
@@ -92,8 +112,6 @@
{
unsigned new_st = aut_->new_state();
p.first->second = new_st;
if (acc_states != nullptr && sere_aut->state_is_accepting(st))
acc_states->push_back(new_st);
}
return p.first->second;
};
@@ -103,10 +121,22 @@
unsigned new_st = register_state(st);
for (const auto& e : sere_aut->out(st))
{
if (use_accepting_sink && sere_aut->state_is_accepting(e.dst))
aut_->new_edge(new_st, accepting_sink_, e.cond);
else
aut_->new_edge(new_st, register_state(e.dst), e.cond);
bool edge_is_acc = ((trans_based && e.acc)
|| (!trans_based && sere_aut->state_is_accepting(e.dst)));
if (edge_is_acc)
{
// point to accepting sink instead of original dst if asked
if (use_accepting_sink)
aut_->new_edge(new_st, accepting_sink_, e.cond);
else
{
unsigned new_e = aut_->new_edge(new_st, register_state(e.dst), e.cond);
// remember if old edges were accepting
if (acc_edges != nullptr)
acc_edges->push_back(new_e);
}
}
}
}
@@ -289,43 +319,22 @@ namespace spot
{
unsigned rhs_init = recurse(f[1]);
const auto& dict = aut_->get_dict();
twa_graph_ptr sere_aut;
if (sere_aa_translation_options() == 0)
{
// old bdd method
sere_aut = sere_to_tgba(f[0], dict, true);
}
else if (sere_aa_translation_options() == 1)
{
// derivation
sere_aut = derive_finite_automaton_with_first(f[0], dict);
}
else
{
// linear form
sere_aut = expand_finite_automaton(f[0], dict, exp_opts::expand_opt::None);
}
twa_graph_ptr sere_aut = sere_aa_translate(f[0], dict);
// TODO: this should be a std::vector<bool> !
std::vector<unsigned> acc_states;
std::map<unsigned, unsigned> old_to_new;
copy_sere_aut_to_res(sere_aut, old_to_new, &acc_states, false);
std::vector<unsigned> acc_edges;
std::map<unsigned, unsigned> old_to_new;
copy_sere_aut_to_res(sere_aut, old_to_new, &acc_edges, false);
// mark all edges from NFA in new automaton
unsigned ns = sere_aut->num_states();
for (unsigned st = 0; st < ns; ++st)
{
auto it = old_to_new.find(st);
assert(it != old_to_new.end());
unsigned new_st = it->second;
for (auto& e : aut_->out(new_st))
{
e.acc = acc_cond::mark_t{0};
if (std::find(acc_states.begin(), acc_states.end(), e.dst)
!= acc_states.end())
acc_edges.push_back(aut_->edge_number(e));
}
e.acc = acc_cond::mark_t{0};
}
for (unsigned i : acc_edges)
@@ -345,29 +354,27 @@ namespace spot
{
unsigned rhs_init = recurse(f[1]);
const auto& dict = aut_->get_dict();
twa_graph_ptr sere_aut;
if (sere_aa_translation_options() == 0)
{
// old bdd method
sere_aut = sere_to_tgba(f[0], dict, true);
}
else if (sere_aa_translation_options() == 1)
{
// derivation
sere_aut = derive_finite_automaton_with_first(f[0], dict);
}
else
{
// linear form
sere_aut = expand_finite_automaton(f[0], dict, exp_opts::expand_opt::None);
}
twa_graph_ptr sere_aut = sere_aa_translate(f[0], dict);
bool trans_based = sere_aut->prop_state_acc().is_false();
// DFA recognizes the empty language, so {0} []-> rhs is always true
unsigned ns = sere_aut->num_states();
bool has_accepting_state = false;
for (unsigned st = 0; st < ns && !has_accepting_state; ++st)
has_accepting_state = sere_aut->state_is_accepting(st);
if (!has_accepting_state)
bool accepts = false;
for (unsigned st = 0; st < ns && !accepts; ++st)
{
if (trans_based)
{
for (const auto& e : sere_aut->out(st))
if (e.acc)
{
accepts = true;
break;
}
}
else
accepts = sere_aut->state_is_accepting(st);
}
if (!accepts)
return accepting_sink_;
std::map<unsigned, unsigned> old_to_new;
@@ -378,6 +385,8 @@
std::vector<unsigned> univ_dest;
// TODO: this should be a std::vector<bool> !
std::vector<unsigned> acc_states;
// any edge compatible with that should be considered accepting
std::vector<std::tuple<unsigned, bdd, unsigned>> acc_edges;
// registers a state in various maps and returns the index of the
// anonymous bdd var representing that state
@@ -392,7 +401,7 @@
old_to_new.emplace(st, new_st);
var_to_state.emplace(v, new_st);
if (sere_aut->state_is_accepting(st))
if (!trans_based && sere_aut->state_is_accepting(st))
acc_states.push_back(new_st);
vars &= bdd_ithvar(v);
@@ -401,6 +410,15 @@
return p.first->second;
};
// FIXME: this code handles dualization, but we cannot dualize if
// this situation arises:
//
// State: 0
// [a] 1
// [a] 2 {0}
//
// The quick fix is to simply determinize the NFA before dualizing,
// which removes any existentialism.
aut_->copy_ap_of(sere_aut);
for (unsigned st = 0; st < ns; ++st)
{
@@ -411,6 +429,15 @@
{
int st_bddi = register_state(e.dst);
sig |= e.cond & bdd_ithvar(st_bddi);
// register edge that was accepting in TFA
if (trans_based && e.acc)
{
unsigned new_src = old_to_new[e.src];
unsigned new_dst = old_to_new[e.dst];
acc_edges.push_back({new_src, e.cond, new_dst});
}
}
for (bdd cond : minterms_of(bddtrue, aps))
@@ -446,7 +473,7 @@
unsigned new_st = it->second;
bdd comb = bddtrue;
comb &= oe_(new_st, acc_states, true);
comb &= oe_(new_st, acc_states, acc_edges, true);
if (comb != bddtrue)
{
comb &= oe_(rhs_init);
@@ -529,14 +556,16 @@
pref = 0;
else if (!strcasecmp(version, "derive"))
pref = 1;
else if (!strcasecmp(version, "expansion"))
else if (!strcasecmp(version, "lf"))
pref = 2;
else if (!strcasecmp(version, "lft"))
pref = 3;
else
{
const char* err = ("sere_aa_translation_options(): argument"
" should be one of {bdd,derive,expansion}");
" should be one of {bdd,derive,lf,lft}");
if (env)
err = "SPOT_SERE_AA_TRANSLATE_OPT should be one of {bdd,derive,expansion}";
err = "SPOT_SERE_AA_TRANSLATE_OPT should be one of {bdd,derive,lf,lft}";
throw std::runtime_error(err);
}
}
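
Putting the renamed options together with the dispatch in sere_aa_translate()
earlier in this file: the accepted names map to bdd=0 (sere_to_tgba),
derive=1 (derive_finite_automaton_with_first), lf=2 (expand_finite_automaton)
and lft=3 (expand_finite_automaton2). A minimal sketch of selecting the
transition-based linear form through the environment variable this parser
handles; how the translation itself is then invoked is not shown in this
diff:

    #include <cstdlib>

    int main()
    {
      // accepted values: "bdd", "derive", "lf", "lft"
      setenv("SPOT_SERE_AA_TRANSLATE_OPT", "lft", 1);
      // ... run the LTL-to-alternating-automaton translation as usual ...
      return 0;
    }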