Speedup reduc.test by not spawning one process per formula.

* src/ltltest/reduc.cc: Add an option -f to read a list of
formulae from a file.  Running a process for each formula was
too slow.  Also add an option -h to hide reduced formulae.
* src/ltltest/reduc.test: Simplify accordingly.
This commit is contained in:
Alexandre Duret-Lutz 2010-01-21 14:54:36 +01:00
parent 7262dff0d9
commit 062045eb45
3 changed files with 182 additions and 106 deletions

View file

@@ -1,7 +1,9 @@
#! /bin/sh
# Copyright (C) 2004, 2005, 2006, 2009 Laboratoire d'Informatique de
# Paris 6 (LIP6), département Systèmes Répartis Coopératifs (SRC),
# Université Pierre et Marie Curie.
# Copyright (C) 2009, 2010 Laboratoire de Recherche et Développement de
# l'Epita (LRDE).
# Copyright (C) 2004, 2005, 2006 Laboratoire d'Informatique de Paris 6
# (LIP6), département Systèmes Répartis Coopératifs (SRC), Université
# Pierre et Marie Curie.
#
# This file is part of Spot, a model checking library.
#
@@ -34,24 +36,11 @@ for i in 10 12 14 16 18 20; do
run 0 ../randltl -u -s 100 -f $i a b c d e f -F 100 >> $FILE
done
for opt in 0 1 2 3 7 8 9; do
rm -f result.data
cat $FILE |
while read f; do
../reduc $opt "$f" >> result.data
done
test $? = 0 || exit 1
perl -ne 'BEGIN { $sum1 = 0; $sum2 = 0; }
/^(\d+)\s+(\d+)/;
$sum1 += $1;
$sum2 += $2;
END { print 100 - ($sum2 * 100 / $sum1); print "\n"; }
' < result.data
for opt in 0 1 2 3 7; do
run 0 ../reduc -f -h $opt "$FILE"
done
# Running the above through valgrind is quite slow already.
# Don't use valgrind for the next reductions (even slower).
for opt in 8 9; do
../reduc -f -h $opt "$FILE"
done
rm -f result.data
rm -f $FILE