From 9938c15da04f2bac9d50954740463307e12c9c93 Mon Sep 17 00:00:00 2001 From: koniksedy Date: Wed, 7 Feb 2024 17:20:53 +0100 Subject: [PATCH 01/24] lvlfa as a duplicate of nfa --- include/mata/lvlfa/algorithms.hh | 125 ++ include/mata/lvlfa/builder.hh | 97 + include/mata/lvlfa/delta.hh | 65 + include/mata/lvlfa/lvlfa.hh | 434 ++++ include/mata/lvlfa/plumbing.hh | 99 + include/mata/lvlfa/strings.hh | 9 + include/mata/lvlfa/types.hh | 49 + include/mata/nfa/nfa.hh | 14 +- include/mata/parser/inter-aut.hh | 4 +- src/CMakeLists.txt | 9 + src/inter-aut.cc | 19 + src/lvlfa/builder.cc | 310 +++ src/lvlfa/complement.cc | 75 + src/lvlfa/concatenation.cc | 134 ++ src/lvlfa/inclusion.cc | 289 +++ src/lvlfa/intersection.cc | 240 +++ src/lvlfa/lvlfa.cc | 210 ++ src/lvlfa/operations.cc | 652 ++++++ src/lvlfa/universal.cc | 156 ++ tests/CMakeLists.txt | 21 +- tests/lvlfa/builder.cc | 107 + tests/lvlfa/delta.cc | 469 +++++ tests/lvlfa/lvlfa-concatenation.cc | 1082 ++++++++++ tests/lvlfa/lvlfa-intersection.cc | 327 +++ tests/lvlfa/lvlfa-plumbing.cc | 120 ++ tests/lvlfa/lvlfa-profiling.cc | 125 ++ tests/lvlfa/lvlfa.cc | 2986 ++++++++++++++++++++++++++++ tests/lvlfa/utils.hh | 98 + 28 files changed, 8310 insertions(+), 15 deletions(-) create mode 100644 include/mata/lvlfa/algorithms.hh create mode 100644 include/mata/lvlfa/builder.hh create mode 100644 include/mata/lvlfa/delta.hh create mode 100644 include/mata/lvlfa/lvlfa.hh create mode 100644 include/mata/lvlfa/plumbing.hh create mode 100644 include/mata/lvlfa/strings.hh create mode 100644 include/mata/lvlfa/types.hh create mode 100644 src/lvlfa/builder.cc create mode 100644 src/lvlfa/complement.cc create mode 100644 src/lvlfa/concatenation.cc create mode 100644 src/lvlfa/inclusion.cc create mode 100644 src/lvlfa/intersection.cc create mode 100644 src/lvlfa/lvlfa.cc create mode 100644 src/lvlfa/operations.cc create mode 100644 src/lvlfa/universal.cc create mode 100644 tests/lvlfa/builder.cc create mode 100644 tests/lvlfa/delta.cc create mode 100644 tests/lvlfa/lvlfa-concatenation.cc create mode 100644 tests/lvlfa/lvlfa-intersection.cc create mode 100644 tests/lvlfa/lvlfa-plumbing.cc create mode 100644 tests/lvlfa/lvlfa-profiling.cc create mode 100644 tests/lvlfa/lvlfa.cc create mode 100644 tests/lvlfa/utils.hh diff --git a/include/mata/lvlfa/algorithms.hh b/include/mata/lvlfa/algorithms.hh new file mode 100644 index 00000000..56f8067a --- /dev/null +++ b/include/mata/lvlfa/algorithms.hh @@ -0,0 +1,125 @@ +/* algorithms.hh -- Wrapping up algorithms for Lvlfa manipulation which would be otherwise in anonymous namespaces. + */ + +#ifndef MATA_LVLFA_INTERNALS_HH_ +#define MATA_LVLFA_INTERNALS_HH_ + +#include "lvlfa.hh" +#include "mata/simlib/util/binary_relation.hh" + +/** + * Concrete LVLFA implementations of algorithms, such as complement, inclusion, or universality checking. + * + * This is a separation of the implementation from the interface defined in mata::lvlfa. + * Note, that in mata::lvlfa interface, there are particular dispatch functions calling + * these function according to parameters provided by a user. + * E.g. we can call the following function: `is_universal(aut, alph, {{'algorithm', 'antichains'}})` + * to check for universality based on antichain-based algorithm. + * + * In particular, this includes algorithms for: + * 1. Complementation, + * 2. Inclusion, + * 3. Universality checking, + * 4. Intersection/concatenation with epsilon transitions, or, + * 5. Computing relation. 
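+ *
+ * Illustrative dispatch sketch (not prescribed by this patch): a call such as
+ * `is_included(smaller, bigger, &alphabet, {{ "algorithm", "antichains" }})` in mata::lvlfa
+ * is dispatched to `algorithms::is_included_antichains()` declared below.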
+ */ +namespace mata::lvlfa::algorithms { + +/** + * Brzozowski minimization of automata (revert -> determinize -> revert -> determinize). + * @param[in] aut Automaton to be minimized. + * @return Minimized automaton. + */ +Lvlfa minimize_brzozowski(const Lvlfa& aut); + +/** + * Complement implemented by determization, adding sink state and making automaton complete. Then it adds final states + * which were non final in the original automaton. + * @param[in] aut Automaton to be complemented. + * @param[in] symbols Symbols needed to make the automaton complete. + * @param[in] minimize_during_determinization Whether the determinized automaton is computed by (brzozowski) + * minimization. + * @return Complemented automaton. + */ +Lvlfa complement_classical(const Lvlfa& aut, const mata::utils::OrdVector& symbols, + bool minimize_during_determinization = false); + +/** + * Inclusion implemented by complementation of bigger automaton, intersecting it with smaller and then it checks + * emptiness of intersection. + * @param[in] smaller Automaton which language should be included in the bigger one. + * @param[in] bigger Automaton which language should include the smaller one. + * @param[in] alphabet Alphabet of both automata (it is computed automatically, but it is more efficient to set it if + * you have it). + * @param[out] cex A potential counterexample word which breaks inclusion + * @return True if smaller language is included, + * i.e., if the final intersection of smaller complement of bigger is empty. + */ +bool is_included_naive(const Lvlfa& smaller, const Lvlfa& bigger, const Alphabet* alphabet = nullptr, Run* cex = nullptr); + +/** + * Inclusion implemented by antichain algorithms. + * @param[in] smaller Automaton which language should be included in the bigger one + * @param[in] bigger Automaton which language should include the smaller one + * @param[in] alphabet Alphabet of both automata (not needed for antichain algorithm) + * @param[out] cex A potential counterexample word which breaks inclusion + * @return True if smaller language is included, + * i.e., if the final intersection of smaller complement of bigger is empty. + */ +bool is_included_antichains(const Lvlfa& smaller, const Lvlfa& bigger, const Alphabet* alphabet = nullptr, Run* cex = nullptr); + +/** + * Universality check implemented by checking emptiness of complemented automaton + * @param[in] aut Automaton which universality is checked + * @param[in] alphabet Alphabet of the automaton + * @param[out] cex Counterexample word which eventually breaks the universality + * @return True if the complemented automaton has non empty language, i.e., the original one is not universal + */ +bool is_universal_naive(const Lvlfa& aut, const Alphabet& alphabet, Run* cex); + +/** + * Universality checking based on subset construction with antichain. + * @param[in] aut Automaton which universality is checked + * @param[in] alphabet Alphabet of the automaton + * @param[out] cex Counterexample word which eventually breaks the universality + * @return True if the automaton is universal, otherwise false. + */ +bool is_universal_antichains(const Lvlfa& aut, const Alphabet& alphabet, Run* cex); + +Simlib::Util::BinaryRelation compute_relation( + const Lvlfa& aut, + const ParameterMap& params = {{ "relation", "simulation"}, { "direction", "forward"}}); + +/** + * @brief Compute product of two LVLFAs, final condition is to be specified, with a possibility of using multiple epsilons. 
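+ *
+ * A typical @p final_condition for plain intersection (an illustrative sketch, not prescribed by this
+ * patch) is the conjunction of both final predicates, e.g.
+ * `[&lhs, &rhs](State s, State t) { return lhs.final[s] && rhs.final[t]; }`.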
+ * + * @param[in] lhs First LVLFA to compute intersection for. + * @param[in] rhs Second LVLFA to compute intersection for. + * @param[in] first_epsilons The smallest epsilon. + * @param[in] final_condition The predicate that tells whether a pair of states is final (conjunction for intersection). + * @param[out] prod_map Can be used to get the mapping of the pairs of the original states to product states. + * Mostly useless, it is only filled in and returned if !=nullptr, but the algorithm internally uses another data structures, + * because this one is too slow. + * @return LVLFA as a product of LVLFAs @p lhs and @p rhs with ε-transitions preserved. + */ +Lvlfa product(const Lvlfa& lhs, const Lvlfa& rhs, const std::function && final_condition, + const Symbol first_epsilon = EPSILON, std::unordered_map, State> *prod_map = nullptr); + +/** + * @brief Concatenate two LVLFAs. + * + * Supports epsilon symbols when @p use_epsilon is set to true. + * @param[in] lhs First automaton to concatenate. + * @param[in] rhs Second automaton to concatenate. + * @param[in] epsilon Epsilon to be used co concatenation (provided @p use_epsilon is true) + * @param[in] use_epsilon Whether to concatenate over epsilon symbol. + * @param[out] lhs_state_renaming Map mapping lhs states to result states. + * @param[out] rhs_state_renaming Map mapping rhs states to result states. + * @return Concatenated automaton. + */ +Lvlfa concatenate_eps(const Lvlfa& lhs, const Lvlfa& rhs, const Symbol& epsilon, bool use_epsilon = false, + StateRenaming* lhs_state_renaming = nullptr, StateRenaming* rhs_state_renaming = nullptr); + +} // Namespace mata::lvlfa::algorithms. + +#endif // MATA_LVLFA_INTERNALS_HH_ diff --git a/include/mata/lvlfa/builder.hh b/include/mata/lvlfa/builder.hh new file mode 100644 index 00000000..08d23bbf --- /dev/null +++ b/include/mata/lvlfa/builder.hh @@ -0,0 +1,97 @@ +// TODO: Insert file header. + +#ifndef LIBMATA_LVLFA_BUILDER_HH +#define LIBMATA_LVLFA_BUILDER_HH + +#include "lvlfa.hh" +#include + +#include + + +/** + * Namespace providing options to build NFAs. + */ +namespace mata::lvlfa::builder { + +using namespace mata::lvlfa; + +using NameStateMap = std::unordered_map; + +/** + * Create an automaton accepting only a single @p word. + */ +Lvlfa create_single_word_lvlfa(const std::vector& word); + +/** + * Create an automaton accepting only a single @p word. + * + * @param word Word to accept. + * @param alphabet Alphabet to use in NFA for translating word into symbols. If specified, the alphabet has to contain + * translations for all of the word symbols. If left empty, a new alphabet with only the symbols of the word will be + * created. + */ +Lvlfa create_single_word_lvlfa(const std::vector& word, Alphabet* alphabet = nullptr); + +/** + * Create automaton accepting only epsilon string. + */ +Lvlfa create_empty_string_lvlfa(); + +/** + * Create automaton accepting sigma star over the passed alphabet. + * + * @param[in] alphabet Alphabet to construct sigma star automaton with. When alphabet is left empty, the default empty + * alphabet is used, creating an automaton accepting only the empty string. 
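+ *
+ * Illustrative usage, assuming `alph` is an already populated OnTheFlyAlphabet:
+ * `Lvlfa sigma_star = create_sigma_star_lvlfa(&alph);` yields a single state that is both initial
+ * and final, with one self-loop per symbol of `alph`.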
+ */ +Lvlfa create_sigma_star_lvlfa(Alphabet* alphabet = new OnTheFlyAlphabet{}); + +/** Loads an automaton from Parsed object */ +// TODO this function should the same thing as the one taking IntermediateAut or be deleted +Lvlfa construct(const mata::parser::ParsedSection& parsec, Alphabet* alphabet, NameStateMap* state_map = nullptr); + +/** Loads an automaton from Parsed object */ +Lvlfa construct(const mata::IntermediateAut& inter_aut, Alphabet* alphabet, NameStateMap* state_map = nullptr); +/** Loads an automaton from Parsed object; version for python binding */ +void construct( + Lvlfa* result, const mata::IntermediateAut& inter_aut, Alphabet* alphabet, NameStateMap* state_map = nullptr +); + +template +Lvlfa construct(const ParsedObject& parsed, Alphabet* alphabet = nullptr, + NameStateMap* state_map = nullptr) { + OnTheFlyAlphabet tmp_alphabet{}; + if (!alphabet) { + alphabet = &tmp_alphabet; + } + return construct(parsed, alphabet, state_map); +} // construct(). + +/** + * Parse NFA from the mata format in an input stream. + * + * @param lvlfa_stream Input stream containing NFA in mata format. + * @throws std::runtime_error Parsing of NFA fails. + */ +Lvlfa parse_from_mata(std::istream& lvlfa_stream); + +/** + * Parse NFA from the mata format in a string. + * + * @param lvlfa_stream String containing NFA in mata format. + * @throws std::runtime_error Parsing of NFA fails. + */ +Lvlfa parse_from_mata(const std::string& lvlfa_in_mata); + +/** + * Parse NFA from the mata format in a file. + * + * @param lvlfa_stream Path to the file containing NFA in mata format. + * @throws std::runtime_error @p lvlfa_file does not exist. + * @throws std::runtime_error Parsing of NFA fails. + */ +Lvlfa parse_from_mata(const std::filesystem::path& lvlfa_file); + +} // namespace mata::lvlfa::builder. + +#endif //LIBMATA_LVLFA_BUILDER_HH diff --git a/include/mata/lvlfa/delta.hh b/include/mata/lvlfa/delta.hh new file mode 100644 index 00000000..1a5d15a3 --- /dev/null +++ b/include/mata/lvlfa/delta.hh @@ -0,0 +1,65 @@ +// TODO: Insert file header. + +#ifndef MATA_LVLFA_DELTA_HH +#define MATA_LVLFA_DELTA_HH + +#include "mata/utils/sparse-set.hh" +#include "mata/utils/synchronized-iterator.hh" +#include "mata/alphabet.hh" +#include "mata/lvlfa/types.hh" + +#include "mata/nfa/delta.hh" + +#include + +namespace mata::lvlfa { + +/// A single transition in Delta represented as a triple(source, symbol, target). +using Transition = mata::nfa::Transition; + +/** + * Move from a @c StatePost for a single source state, represented as a pair of @c symbol and target state @c target. + */ +using Move = mata::nfa::Move; + +/** + * Structure represents a post of a single @c symbol: a set of target states in transitions. + * + * A set of @c SymbolPost, called @c StatePost, is describing the automata transitions from a single source state. + */ +using SymbolPost = mata::nfa::SymbolPost; + +/** + * @brief A data structure representing possible transitions over different symbols from a source state. + * + * It is an ordered vector containing possible @c SymbolPost (i.e., pair of symbol and target states). + * @c SymbolPosts in the vector are ordered by symbols in @c SymbolPosts. + */ +using StatePost = mata::nfa::StatePost; + + +/** + * @brief Specialization of utils::SynchronizedExistentialIterator for iterating over SymbolPosts. + */ +using SynchronizedExistentialSymbolPostIterator = mata::nfa::SynchronizedExistentialSymbolPostIterator; + +/** + * @brief Delta is a data structure for representing transition relation. 
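+ *
+ * Illustrative sketch (not part of this patch): after `delta.add(1, 'a', 2); delta.add(1, 'a', 3);`,
+ * the StatePost of state 1 holds a single SymbolPost for 'a' whose targets are {2, 3}.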
+ * + * Transition is represented as a triple Trans(source state, symbol, target state). Move is the part (symbol, target + * state), specified for a single source state. + * Its underlying data structure is vector of StatePost classes. Each index to the vector corresponds to one source + * state, that is, a number for a certain state is an index to the vector of state posts. + * Transition relation (delta) in Mata stores a set of transitions in a four-level hierarchical structure: + * Delta, StatePost, SymbolPost, and a set of target states. + * A vector of 'StatePost's indexed by a source states on top, where the StatePost for a state 'q' (whose number is + * 'q' and it is the index to the vector of 'StatePost's) stores a set of 'Move's from the source state 'q'. + * Namely, 'StatePost' has a vector of 'SymbolPost's, where each 'SymbolPost' stores a symbol 'a' and a vector of + * target states of 'a'-moves from state 'q'. 'SymbolPost's are ordered by the symbol, target states are ordered by + * the state number. + */ +using Delta = mata::nfa::Delta; + +} // namespace mata::lvlfa. + +#endif //MATA_DELTA_HH diff --git a/include/mata/lvlfa/lvlfa.hh b/include/mata/lvlfa/lvlfa.hh new file mode 100644 index 00000000..28b632a8 --- /dev/null +++ b/include/mata/lvlfa/lvlfa.hh @@ -0,0 +1,434 @@ +/* lvlfa.hh -- Nondeterministic finite automaton (over finite words). + */ + +#ifndef MATA_LVLFA_HH_ +#define MATA_LVLFA_HH_ + +// Static data structures, such as search stack, in algorithms. Might have some effect on some algorithms (like +// fragile_revert). +//#define _STATIC_STRUCTURES_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "mata/alphabet.hh" +#include "mata/parser/parser.hh" +#include "mata/utils/utils.hh" +#include "mata/utils/ord-vector.hh" +#include "mata/parser/inter-aut.hh" +#include "mata/utils/synchronized-iterator.hh" +#include "mata/utils/sparse-set.hh" +#include "types.hh" +#include "delta.hh" + +#include "mata/nfa/nfa.hh" + +/** + * Nondeterministic Finite Automata including structures, transitions and algorithms. + * + * In particular, this includes: + * 1. Structures (Automaton, Transitions, Results, Delta), + * 2. Algorithms (operations, checks, tests), + * 3. Constructions. + * + * Other algorithms are included in mata::lvlfa::Plumbing (simplified API for, e.g., binding) + * and mata::lvlfa::algorithms (concrete implementations of algorithms, such as for complement). + */ +namespace mata::lvlfa { + +/** + * A struct representing an LVLFA. + */ +struct Lvlfa : public mata::nfa::Nfa { +public: + /** + * @brief For state q, delta[q] keeps the list of transitions ordered by symbols. + * + * The set of states of this automaton are the numbers from 0 to the number of states minus one. + */ + std::vector levels{}; + Level max_level = 0; + /// Key value store for additional attributes for the LVLFA. Keys are attribute names as strings and the value types + /// are up to the user. + /// For example, we can set up attributes such as "state_dict" for state dictionary attribute mapping states to their + /// respective names, or "transition_dict" for transition dictionary adding a human-readable meaning to each + /// transition. + // TODO: When there is a need for state dictionary, consider creating default library implementation of state + // dictionary in the attributes. 
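+    // Illustrative example of the level-related members above (not part of the original patch):
+    // a four-state LVLFA with levels == {0, 1, 0, 1} and max_level == 1 places states 0 and 2 on
+    // level 0 and states 1 and 3 on level 1.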
+ +public: + explicit Lvlfa(Delta delta = {}, utils::SparseSet initial_states = {}, + utils::SparseSet final_states = {}, std::vector levels = {}, Level max_level = 0, Alphabet* alphabet = nullptr) + : mata::nfa::Nfa(delta, initial_states, final_states, alphabet), levels(std::move(levels)), max_level(max_level) {} + + /** + * @brief Construct a new explicit LVLFA with num_of_states states and optionally set initial and final states. + * + * @param[in] num_of_states Number of states for which to preallocate Delta. + */ + explicit Lvlfa(const unsigned long num_of_states, StateSet initial_states = {}, + StateSet final_states = {}, std::vector levels = {}, Level max_level = 0, Alphabet* alphabet = nullptr) + : mata::nfa::Nfa(num_of_states, initial_states, final_states, alphabet), levels(levels), max_level(max_level) {} + + // Lvlfa(const mata::nfa::Nfa& other) + // : delta(std::move(other.delta)), initial(std::move(other.initial)), final(std::move(other.final)), + // levels(std::move(std::vector(other.num_of_states(), 0))), max_level(0), alphabet(other.alphabet) {} + + /** + * @brief Construct a new explicit LVLFA from other LVLFA. + */ + Lvlfa(const Lvlfa& other) = default; + + Lvlfa(Lvlfa&& other) noexcept + : levels { std::move(other.levels) }, max_level{ other.max_level } { + delta = std::move(other.delta); + initial = std::move(other.initial); + final = std::move(other.final); + attributes = std::move(other.attributes); + alphabet = other.alphabet; + other.alphabet = nullptr; + } + + Lvlfa& operator=(const Lvlfa& other) = default; + Lvlfa& operator=(Lvlfa&& other) noexcept; + + /** + * Add a new (fresh) state to the automaton. + * @return The newly created state. + */ + State add_state(); + + /** + * Add state @p state to @c delta if @p state is not in @c delta yet. + * @return The requested @p state. + */ + State add_state(State state); + + /** + * @brief Clear the underlying LVLFA to a blank LVLFA. + * + * The whole LVLFA is cleared, each member is set to its zero value. + */ + void clear(); + + /** + * @brief Check if @c this is exactly identical to @p aut. + * + * This is exact equality of automata, including state numbering (so even stronger than isomorphism), + * essentially only useful for testing purposes. + * @return True if automata are exactly identical, false otherwise. + */ + bool is_identical(const Lvlfa& aut) const; + + /** + * @brief Remove inaccessible (unreachable) and not co-accessible (non-terminating) states in-place. + * + * Remove states which are not accessible (unreachable; state is accessible when the state is the endpoint of a path + * starting from an initial state) or not co-accessible (non-terminating; state is co-accessible when the state is + * the starting point of a path ending in a final state). + * + * @param[out] state_renaming Mapping of trimmed states to new states. + * @return @c this after trimming. + */ + Lvlfa& trim(StateRenaming* state_renaming = nullptr); + + /** + * @brief In-place concatenation. + */ + Lvlfa& concatenate(const Lvlfa& aut); + + /** + * @brief In-place union + */ + Lvlfa& uni(const Lvlfa &aut); + + /** + * Unify transitions to create a directed graph with at most a single transition between two states. + * @param[in] abstract_symbol Abstract symbol to use for transitions in digraph. + * @return An automaton representing a directed graph. + */ + Lvlfa get_one_letter_aut(Symbol abstract_symbol = 'x') const; + + /** + * Unify transitions to create a directed graph with at most a single transition between two states. 
+ * + * @param[out] result An automaton representing a directed graph. + */ + void get_one_letter_aut(Lvlfa& result) const; + + /** + * @brief Prints the automaton in DOT format + * + * @return automaton in DOT format + */ + std::string print_to_DOT() const; + /** + * @brief Prints the automaton to the output stream in DOT format + */ + void print_to_DOT(std::ostream &output) const; + /** + * @brief Prints the automaton in mata format + * + * If you need to parse the automaton again, use IntAlphabet in construct() + * + * @return automaton in mata format + * TODO handle alphabet of the automaton, currently we print the exact value of the symbols + */ + std::string print_to_mata() const; + /** + * @brief Prints the automaton to the output stream in mata format + * + * If you need to parse the automaton again, use IntAlphabet in construct() + * + * TODO handle alphabet of the automaton, currently we print the exact value of the symbols + */ + void print_to_mata(std::ostream &output) const; + + /** + * Fill @p alphabet with symbols from @p lvlfa. + * @param[in] lvlfa LVLFA with symbols to fill @p alphabet with. + * @param[out] alphabet Alphabet to be filled with symbols from @p lvlfa. + */ + void fill_alphabet(mata::OnTheFlyAlphabet& alphabet) const; + + /// Is the language of the automaton universal? + bool is_universal(const Alphabet& alphabet, Run* cex = nullptr, + const ParameterMap& params = {{ "algorithm", "antichains" }}) const; + /// Is the language of the automaton universal? + bool is_universal(const Alphabet& alphabet, const ParameterMap& params) const; + + /// Checks whether a word is in the language of an automaton. + bool is_in_lang(const Run& word) const; + /// Checks whether a word is in the language of an automaton. + bool is_in_lang(const Word& word) { return is_in_lang(Run{ word, {} }); } + + /// Checks whether the prefix of a string is in the language of an automaton + bool is_prfx_in_lang(const Run& word) const; + + std::pair get_word_for_path(const Run& run) const; + + /** + * @brief Get the set of all words in the language of the automaton whose length is <= @p max_length + * + * If you have an automaton with finite language (can be checked using @ref is_acyclic), + * you can get all words by calling + * get_words(aut.num_of_states()) + */ + std::set get_words(unsigned max_length); + +}; // struct Lvlfa. + +// Allow variadic number of arguments of the same type. +// +// Using parameter pack and variadic arguments. +// Adapted from: https://www.fluentcpp.com/2019/01/25/variadic-number-function-parameters-type/. +/// Pack of bools for reasoning about a sequence of parameters. +template struct bool_pack{}; +/// Check that for all values in a pack @p Ts are 'true'. +template using conjunction = std::is_same, bool_pack>; +/// Check that all types in a sequence of parameters @p Ts are of type @p T. +template using AreAllOfType = typename conjunction...>::type; + +Lvlfa uni(const Lvlfa &lhs, const Lvlfa &rhs); + +/** + * @brief Compute intersection of two LVLFAs. + * + * Both automata can contain ε-transitions. The product preserves the ε-transitions, i.e., + * for each each product state `(s, t)` with`s -ε-> p`, `(s, t) -ε-> (p, t)` is created, and vice versa. + * + * Automata must share alphabets. //TODO: this is not implemented yet. + * + * @param[in] lhs First LVLFA to compute intersection for. + * @param[in] rhs Second LVLFA to compute intersection for. + * @param[in] first_epsilon smallest epsilon. 
//TODO: this should eventually be taken from the alphabet as anything larger than the largest symbol? + * @param[out] prod_map Mapping of pairs of the original states (lhs_state, rhs_state) to new product states (not used internally, allocated only when !=nullptr, expensive). + * @return LVLFA as a product of LVLFAs @p lhs and @p rhs with ε-transitions preserved. + */ +Lvlfa intersection(const Lvlfa& lhs, const Lvlfa& rhs, + const Symbol first_epsilon = EPSILON, std::unordered_map, State> *prod_map = nullptr); + +/** + * @brief Concatenate two LVLFAs. + * + * Supports epsilon symbols when @p use_epsilon is set to true. + * @param[in] lhs First automaton to concatenate. + * @param[in] rhs Second automaton to concatenate. + * @param[in] use_epsilon Whether to concatenate over epsilon symbol. + * @param[out] lhs_state_renaming Map mapping lhs states to result states. + * @param[out] rhs_state_renaming Map mapping rhs states to result states. + * @return Concatenated automaton. + */ +// TODO: check how fast is using just concatenate over epsilon and then call remove_epsilon(). +Lvlfa concatenate(const Lvlfa& lhs, const Lvlfa& rhs, bool use_epsilon = false, + StateRenaming* lhs_state_renaming = nullptr, StateRenaming* rhs_state_renaming = nullptr); + +/** + * @brief Compute automaton accepting complement of @p aut. + * + * @param[in] aut Automaton whose complement to compute. + * @param[in] alphabet Alphabet used for complementation. + * @param[in] params Optional parameters to control the complementation algorithm: + * - "algorithm": "classical" (classical algorithm determinizes the automaton, makes it complete and swaps final and non-final states); + * - "minimize": "true"/"false" (whether to compute minimal deterministic automaton for classical algorithm); + * @return Complemented automaton. + */ +Lvlfa complement(const Lvlfa& aut, const Alphabet& alphabet, + const ParameterMap& params = {{ "algorithm", "classical" }, { "minimize", "false" }}); + +/** + * @brief Compute automaton accepting complement of @p aut. + * + * This overloaded version complements over an already created ordered set of @p symbols instead of an alphabet. + * This is a more efficient solution in case you already have @p symbols precomputed or want to complement multiple + * automata over the same set of @c symbols: the function does not need to compute the ordered set of symbols from + * the alphabet again (and for each automaton). + * + * @param[in] aut Automaton whose complement to compute. + * @param[in] symbols Symbols to complement over. + * @param[in] params Optional parameters to control the complementation algorithm: + * - "algorithm": "classical" (classical algorithm determinizes the automaton, makes it complete and swaps final and non-final states); + * - "minimize": "true"/"false" (whether to compute minimal deterministic automaton for classical algorithm); + * @return Complemented automaton. + */ +Lvlfa complement(const Lvlfa& aut, const utils::OrdVector& symbols, + const ParameterMap& params = {{ "algorithm", "classical" }, { "minimize", "false" }}); + +/** + * @brief Compute minimal deterministic automaton. + * + * @param[in] aut Automaton whose minimal version to compute. + * @param[in] params Optional parameters to control the minimization algorithm: + * - "algorithm": "brzozowski" + * @return Minimal deterministic automaton. + */ +Lvlfa minimize(const Lvlfa &aut, const ParameterMap& params = {{ "algorithm", "brzozowski" }}); + +/** + * @brief Determinize automaton. 
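+ *
+ * Illustrative usage (a sketch, not mandated by this patch):
+ * `std::unordered_map<StateSet, State> subset_map; Lvlfa det = determinize(aut, &subset_map);`
+ * afterwards, each reached subset of original states maps to its determinized state in `subset_map`.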
+ * + * @param[in] aut Automaton to determinize. + * @param[out] subset_map Map that maps sets of states of input automaton to states of determinized automaton. + * @return Determinized automaton. + */ +Lvlfa determinize(const Lvlfa& aut, std::unordered_map *subset_map = nullptr); + +/** + * @brief Reduce the size of the automaton. + * + * @param[in] aut Automaton to reduce. + * @param[out] state_renaming Mapping of original states to reduced states. + * @param[in] params Optional parameters to control the reduction algorithm: + * - "algorithm": "simulation". + * @return Reduced automaton. + */ +Lvlfa reduce(const Lvlfa &aut, StateRenaming *state_renaming = nullptr, + const ParameterMap& params = {{ "algorithm", "simulation" } }); + +/** + * @brief Checks inclusion of languages of two LVLFAs: @p smaller and @p bigger (smaller <= bigger). + * + * @param[in] smaller First automaton to concatenate. + * @param[in] bigger Second automaton to concatenate. + * @param[out] cex Counterexample for the inclusion. + * @param[in] alphabet Alphabet of both LVLFAs to compute with. + * @param[in] params Optional parameters to control the equivalence check algorithm: + * - "algorithm": "naive", "antichains" (Default: "antichains") + * @return True if @p smaller is included in @p bigger, false otherwise. + */ +bool is_included(const Lvlfa& smaller, const Lvlfa& bigger, Run* cex, const Alphabet* alphabet = nullptr, + const ParameterMap& params = {{ "algorithm", "antichains" }}); + +/** + * @brief Checks inclusion of languages of two LVLFAs: @p smaller and @p bigger (smaller <= bigger). + * + * @param[in] smaller First automaton to concatenate. + * @param[in] bigger Second automaton to concatenate. + * @param[in] alphabet Alphabet of both LVLFAs to compute with. + * @param[in] params Optional parameters to control the equivalence check algorithm: + * - "algorithm": "naive", "antichains" (Default: "antichains") + * @return True if @p smaller is included in @p bigger, false otherwise. + */ +inline bool is_included(const Lvlfa& smaller, const Lvlfa& bigger, const Alphabet* const alphabet = nullptr, + const ParameterMap& params = {{ "algorithm", "antichains" }}) { + return is_included(smaller, bigger, nullptr, alphabet, params); +} + +/** + * @brief Perform equivalence check of two LVLFAs: @p lhs and @p rhs. + * + * @param[in] lhs First automaton to concatenate. + * @param[in] rhs Second automaton to concatenate. + * @param[in] alphabet Alphabet of both LVLFAs to compute with. + * @param[in] params[ Optional parameters to control the equivalence check algorithm: + * - "algorithm": "naive", "antichains" (Default: "antichains") + * @return True if @p lhs and @p rhs are equivalent, false otherwise. + */ +bool are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const Alphabet* alphabet, + const ParameterMap& params = {{ "algorithm", "antichains"}}); + +/** + * @brief Perform equivalence check of two LVLFAs: @p lhs and @p rhs. + * + * The current implementation of @c Lvlfa does not accept input alphabet. For this reason, an alphabet + * has to be created from all transitions each time an operation on alphabet is called. When calling this function, + * the alphabet has to be computed first. + * + * Hence, this function is less efficient than its alternative taking already defined alphabet as its parameter. + * That way, alphabet has to be computed only once, as opposed to the current ad-hoc construction of the alphabet. + * The use of the alternative with defined alphabet should be preferred. 
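+ *
+ * Illustrative comparison (names are placeholders): prefer `are_equivalent(lhs, rhs, &alphabet)` over
+ * `are_equivalent(lhs, rhs)`, since the latter has to rebuild the alphabet from the transitions of
+ * both automata on every call.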
+ * + * @param[in] lhs First automaton to concatenate. + * @param[in] rhs Second automaton to concatenate. + * @param[in] params Optional parameters to control the equivalence check algorithm: + * - "algorithm": "naive", "antichains" (Default: "antichains") + * @return True if @p lhs and @p rhs are equivalent, false otherwise. + */ +bool are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const ParameterMap& params = {{ "algorithm", "antichains"}}); + +// Reverting the automaton by one of the three functions below, +// currently simple_revert seems best (however, not tested enough). +Lvlfa revert(const Lvlfa& aut); + +// This revert algorithm is fragile, uses low level accesses to Lvlfa and static data structures, +// and it is potentially dangerous when there are used symbols with large numbers (allocates an array indexed by symbols) +// It is faster asymptotically and for somewhat dense automata, +// the same or a little bit slower than simple_revert otherwise. +// Not affected by pre-reserving vectors. +Lvlfa fragile_revert(const Lvlfa& aut); + +// Reverting the automaton by a simple algorithm, which does a lot of random access addition to Post and Move. +// Much affected by pre-reserving vectors. +Lvlfa simple_revert(const Lvlfa& aut); + +// Reverting the automaton by a modification of the simple algorithm. +// It replaces random access addition to SymbolPost by push_back and sorting later, so far seems the slowest of all, except on +// dense automata, where it is almost as slow as simple_revert. Candidate for removal. +Lvlfa somewhat_simple_revert(const Lvlfa& aut); + +// Removing epsilon transitions +Lvlfa remove_epsilon(const Lvlfa& aut, Symbol epsilon = EPSILON); + +/** Encodes a vector of strings (each corresponding to one symbol) into a + * @c Word instance + */ + // TODO: rename to something, but no idea to what. + // Maybe we need some terminology - Symbols and Words are made of numbers. + // What are the symbol names and their sequences? +Run encode_word(const Alphabet* alphabet, const std::vector& input); + +} // namespace mata::lvlfa. + +namespace std { +std::ostream& operator<<(std::ostream& os, const mata::lvlfa::Lvlfa& lvlfa); +} // namespace std. + +#endif /* MATA_LVLFA_HH_ */ diff --git a/include/mata/lvlfa/plumbing.hh b/include/mata/lvlfa/plumbing.hh new file mode 100644 index 00000000..2514e68c --- /dev/null +++ b/include/mata/lvlfa/plumbing.hh @@ -0,0 +1,99 @@ +/* nfa-plumbings.hh -- Wrapping up different supporting functions. + */ + +#ifndef MATA_LVLFA_PLUMBING_HH_ +#define MATA_LVLFA_PLUMBING_HH_ + + +#include "lvlfa.hh" +#include "builder.hh" + + +using namespace mata::lvlfa::builder; + +/** + * Simplified NFA API, used in binding to call NFA algorithms. + * + * In particular, this mostly includes operations and checks, that do not return Automaton, + * but instead take resulting automaton as pointer (e.g. `void f(Lvlfa* result, const Lvlfa& lhs, const Lvlfa& rhs)`). 
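+ *
+ * Illustrative usage of the out-parameter style (a sketch, not part of this patch):
+ * `Lvlfa res; mata::lvlfa::plumbing::intersection(&res, lhs, rhs);` instead of
+ * `Lvlfa res = mata::lvlfa::intersection(lhs, rhs);`.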
+ */ +namespace mata::lvlfa::plumbing { + + +inline void get_elements(StateSet* element_set, const BoolVector& bool_vec) { + element_set->clear(); + element_set->reserve(bool_vec.count()); + for (size_t i{ 0 }; i < bool_vec.size(); ++i) { + if (bool_vec[i] == 1) { + element_set->push_back(i); + } + } +} + +inline void complement( + Lvlfa* result, + const Lvlfa& aut, + const Alphabet& alphabet, + const ParameterMap& params = {{ "algorithm", "classical"}, + { "minimize", "false"}}) { *result = complement(aut, alphabet, params); +} + +inline void minimize(Lvlfa* res, const Lvlfa &aut) { *res = minimize(aut); } + +inline void determinize(Lvlfa* result, const Lvlfa& aut, std::unordered_map *subset_map = nullptr) { + *result = determinize(aut, subset_map); +} + +inline void reduce(Lvlfa* result, const Lvlfa &aut, StateRenaming *state_renaming = nullptr, + const ParameterMap& params = {{ "algorithm", "simulation"}}) { + *result = reduce(aut, state_renaming, params); +} + +inline void revert(Lvlfa* result, const Lvlfa& aut) { *result = revert(aut); } + +inline void remove_epsilon(Lvlfa* result, const Lvlfa& aut, Symbol epsilon = EPSILON) { *result = remove_epsilon(aut, epsilon); } + +/** Loads an automaton from Parsed object */ +template +void construct(Lvlfa* result, const ParsedObject& parsed, Alphabet* alphabet = nullptr, + NameStateMap* state_map = nullptr) { + OnTheFlyAlphabet tmp_alphabet{}; + if (!alphabet) { alphabet = &tmp_alphabet; } + *result = builder::construct(parsed, alphabet, state_map); +} + +inline void uni(Lvlfa *unionAutomaton, const Lvlfa &lhs, const Lvlfa &rhs) { *unionAutomaton = uni(lhs, rhs); } + +/** + * @brief Compute intersection of two NFAs. + * + * Both automata can contain ε-transitions. The product preserves the ε-transitions, i.e., + * for each each product state `(s, t)` with`s -ε-> p`, `(s, t) -ε-> (p, t)` is created, and vice versa. + * + * Automata must share alphabets. + * + * @param[out] res The resulting intersection NFA. + * @param[in] lhs Input NFA. + * @param[in] rhs Input NFA. + * @param[in] first_epsilon smallest epsilon. + * @param[out] prod_map Mapping of pairs of the original states (lhs_state, rhs_state) to new product states (not used internally, allocated only when !=nullptr, expensive). + * @return NFA as a product of NFAs @p lhs and @p rhs with ε-transitions preserved. + */ +inline void intersection(Lvlfa* res, const Lvlfa& lhs, const Lvlfa& rhs, Symbol first_epsilon = EPSILON, + std::unordered_map, State> *prod_map = nullptr) { + *res = intersection(lhs, rhs, first_epsilon, prod_map); +} + +/** + * @brief Concatenate two NFAs. + * @param[out] lhs_result_state_renaming Map mapping lhs states to result states. + * @param[out] rhs_result_state_renaming Map mapping rhs states to result states. + */ +inline void concatenate(Lvlfa* res, const Lvlfa& lhs, const Lvlfa& rhs, bool use_epsilon = false, + StateRenaming* lhs_result_state_renaming = nullptr, StateRenaming* rhs_result_state_renaming = nullptr) { + *res = concatenate(lhs, rhs, use_epsilon, lhs_result_state_renaming, rhs_result_state_renaming); +} + +} // namespace mata::nfa::Plumbing. + +#endif // MATA_NFA_PLUMBING_HH_ diff --git a/include/mata/lvlfa/strings.hh b/include/mata/lvlfa/strings.hh new file mode 100644 index 00000000..e06a5c8c --- /dev/null +++ b/include/mata/lvlfa/strings.hh @@ -0,0 +1,9 @@ +/* nfa-strings.hh -- Operations on NFAs for string solving. 
+ */ + +#ifndef MATA_LVLFA_STRING_SOLVING_HH_ +#define MATA_LVLFA_STRING_SOLVING_HH_ + +#include "mata/nfa/strings.hh" + +#endif // MATA_NFA_STRING_SOLVING_HH_. diff --git a/include/mata/lvlfa/types.hh b/include/mata/lvlfa/types.hh new file mode 100644 index 00000000..eb2eb889 --- /dev/null +++ b/include/mata/lvlfa/types.hh @@ -0,0 +1,49 @@ +// TODO: Insert file header. + +#ifndef MATA_LVLFA_TYPES_HH +#define MATA_LVLFA_TYPES_HH + +#include "mata/alphabet.hh" +#include "mata/parser/parser.hh" + +#include "mata/nfa/types.hh" + +#include + + +namespace mata::lvlfa { + +extern const std::string TYPE_NFA; + +using Level = unsigned; +using State = mata::nfa::State; +using StateSet = mata::nfa::StateSet; + +using Run = mata::nfa::Run; + +using StateRenaming = mata::nfa::StateRenaming; + +/** + * @brief Map of additional parameter name and value pairs. + * + * Used by certain functions for specifying some additional parameters in the following format: + * ```cpp + * ParameterMap { + * { "algorithm", "classical" }, + * { "minimize", "true" } + * } + * ``` + */ +using ParameterMap = mata::nfa::ParameterMap; + +using Limits = mata::nfa::Limits; + +struct Lvlfa; ///< A non-deterministic finite automaton. + +/// An epsilon symbol which is now defined as the maximal value of data type used for symbols. +constexpr Symbol EPSILON = mata::nfa::EPSILON; +constexpr Symbol DONT_CARE = EPSILON - 1; + +} // namespace mata::nfa. + +#endif //MATA_TYPES_HH diff --git a/include/mata/nfa/nfa.hh b/include/mata/nfa/nfa.hh index 26ade1d9..9cae57b0 100644 --- a/include/mata/nfa/nfa.hh +++ b/include/mata/nfa/nfa.hh @@ -166,7 +166,7 @@ public: BoolVector get_useful_states() const; /** - * @brief Structure for storing callback functions (event handlers) utilizing + * @brief Structure for storing callback functions (event handlers) utilizing * Tarjan's SCC discover algorithm. */ struct TarjanDiscoverCallback { @@ -182,7 +182,7 @@ public: /** * @brief Tarjan's SCC discover algorihm. - * + * * @param callback Callbacks class to instantiate callbacks for the Tarjan's algorithm. */ void tarjan_scc_discover(const TarjanDiscoverCallback& callback) const; @@ -275,8 +275,8 @@ public: StateSet post(const StateSet& states, const Symbol& symbol) const; /** - * Check whether the language of NFA is empty. - * Currently calls is_lang_empty_scc if cex is null + * Check whether the language of NFA is empty. + * Currently calls is_lang_empty_scc if cex is null * @param[out] cex Counter-example path for a case the language is not empty. * @return True if the language is empty, false otherwise. */ @@ -284,7 +284,7 @@ public: /** * @brief Check if the language is empty using Tarjan's SCC discover algorithm. - * + * * @return Language empty <-> True */ bool is_lang_empty_scc() const; @@ -307,7 +307,7 @@ public: /** * @brief Is the automaton graph acyclic? Used for checking language finiteness. - * + * * @return true <-> Automaton graph is acyclic. 
*/ bool is_acyclic() const; @@ -337,7 +337,7 @@ public: /** * @brief Get the set of all words in the language of the automaton whose length is <= @p max_length - * + * * If you have an automaton with finite language (can be checked using @ref is_acyclic), * you can get all words by calling * get_words(aut.num_of_states()) diff --git a/include/mata/parser/inter-aut.hh b/include/mata/parser/inter-aut.hh index f5b49dd7..634b55f8 100644 --- a/include/mata/parser/inter-aut.hh +++ b/include/mata/parser/inter-aut.hh @@ -147,7 +147,8 @@ public: */ enum class AutomatonType { NFA, - AFA + AFA, + LVLFA }; /** @@ -229,6 +230,7 @@ public: bool are_nodes_enum_type() const {return node_naming == Naming::ENUM;} bool is_bitvector() const {return alphabet_type == AlphabetType::BITVECTOR;} + bool is_lvlfa() const {return automaton_type == AutomatonType::LVLFA;} bool is_nfa() const {return automaton_type == AutomatonType::NFA;} bool is_afa() const {return automaton_type == AutomatonType::AFA;} diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index c3b5bbea..afc04cc2 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -21,6 +21,15 @@ add_library(libmata STATIC nfa/delta.cc nfa/operations.cc nfa/builder.cc + + lvlfa/lvlfa.cc + lvlfa/inclusion.cc + lvlfa/universal.cc + lvlfa/complement.cc + lvlfa/intersection.cc + lvlfa/concatenation.cc + lvlfa/operations.cc + lvlfa/builder.cc ) # libmata needs at least c++20 diff --git a/src/inter-aut.cc b/src/inter-aut.cc index 4511dd4e..4e5f0bf2 100644 --- a/src/inter-aut.cc +++ b/src/inter-aut.cc @@ -344,6 +344,8 @@ bool has_atmost_one_auto_naming(const mata::IntermediateAut& aut) { aut.automaton_type = mata::IntermediateAut::AutomatonType::NFA; } else if (section.type.find("AFA") != std::string::npos) { aut.automaton_type = mata::IntermediateAut::AutomatonType::AFA; + } else if (section.type.find("LVLFA") != std::string::npos) { + aut.automaton_type = mata::IntermediateAut::AutomatonType::LVLFA; } aut.alphabet_type = get_alphabet_type(section.type); @@ -454,6 +456,23 @@ void mata::IntermediateAut::parse_transition(mata::IntermediateAut &aut, const s } else assert(false && "Unknown NFA type"); + postfix.emplace_back(mata::FormulaNode::Type::OPERATOR, "&", "&", mata::FormulaNode::OperatorType::AND); + } else if (aut.automaton_type == mata::IntermediateAut::AutomatonType::LVLFA && tokens[tokens.size() - 2] != "&") { + // we need to take care about this case manually since user does not need to determine + // symbol and state naming and put conjunction to transition + if (aut.alphabet_type != mata::IntermediateAut::AlphabetType::BITVECTOR) { + assert(rhs.size() == 2); + postfix.emplace_back(mata::FormulaNode::Type::OPERAND, rhs[0], rhs[0], mata::FormulaNode::OperandType::SYMBOL); + postfix.emplace_back(create_node(aut, rhs[1])); + } else if (aut.alphabet_type == mata::IntermediateAut::AlphabetType::BITVECTOR) { + // This is a case where rhs state is not separated by a conjunction from the rest of the transitions. + std::string last_token{ rhs.back() }; + rhs.pop_back(); + postfix = infix_to_postfix(aut, rhs); + postfix.emplace_back(create_node(aut, last_token)); + } else + assert(false && "Unknown LVLFA type"); + postfix.emplace_back(mata::FormulaNode::Type::OPERATOR, "&", "&", mata::FormulaNode::OperatorType::AND); } else postfix = infix_to_postfix(aut, rhs); diff --git a/src/lvlfa/builder.cc b/src/lvlfa/builder.cc new file mode 100644 index 00000000..7513d0dd --- /dev/null +++ b/src/lvlfa/builder.cc @@ -0,0 +1,310 @@ +// TODO: Insert header file. 
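+
+// Illustrative input sketch for the level-related keys parsed below (assuming the `%Key value ...`
+// convention used for %Initial/%Final in the mata format; the exact surface syntax is an assumption):
+//   %Levels q0:0 q1:1 q2:0
+//   %MaxLevel 1
+// Each %Levels entry has the form `state:level`; %MaxLevel takes a single non-negative integer.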
+ +#include "mata/lvlfa/builder.hh" +#include "mata/parser/mintermization.hh" + +#include + +using namespace mata::lvlfa; +using mata::lvlfa::Lvlfa; +using mata::Symbol; + +Lvlfa builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphabet* alphabet, NameStateMap* state_map) { + Lvlfa aut; + assert(nullptr != alphabet); + + if (parsec.type != TYPE_NFA) { + throw std::runtime_error(std::string(__FUNCTION__) + ": expecting type \"" + + TYPE_NFA + "\""); + } + + bool remove_state_map = false; + if (nullptr == state_map) { + state_map = new NameStateMap(); + remove_state_map = true; + } + + // a lambda for translating state names to identifiers + auto get_state_name = [&state_map, &aut](const std::string& str) { + if (!state_map->count(str)) { + State state = aut.add_state(); + state_map->insert({str, state}); + return state; + } else { + return (*state_map)[str]; + } + }; + + // a lambda for cleanup + auto clean_up = [&]() { + if (remove_state_map) { delete state_map; } + }; + + + auto it = parsec.dict.find("Initial"); + if (parsec.dict.end() != it) + { + for (const auto& str : it->second) + { + State state = get_state_name(str); + aut.initial.insert(state); + } + } + + + it = parsec.dict.find("Final"); + if (parsec.dict.end() != it) + { + for (const auto& str : it->second) + { + State state = get_state_name(str); + aut.final.insert(state); + } + } + + aut.levels.clear(); + it = parsec.dict.find("Levels"); + if (parsec.dict.end() != it) + { + aut.levels.resize(it->second.size(), 0); + for (const auto &str : it->second) + { + std::stringstream ss(str); + std::string state_name, level_str; + try { + std::getline(ss, state_name, ':'); + std::getline(ss, level_str, ':'); + if (!ss.eof()) { + throw std::runtime_error("Bad format of levels: too many colons in " + str); + } + + State state = get_state_name(state_name); + long level = std::stol(level_str); + if (level < 0) { + throw std::runtime_error("Bad format of levels: level " + level_str + " is out of range."); + } + aut.levels[state] = static_cast(level); + + } catch (const std::invalid_argument &ex) { + throw std::runtime_error("Bad format of levels: unsupported level " + level_str); + } catch (const std::out_of_range &ex) { + throw std::runtime_error("Bad format of levels: level " + level_str + " is out of range."); + } catch (...) 
{ + throw std::runtime_error("Bad format of levels."); + } + } + } + + it = parsec.dict.find("MaxLevel"); + if (parsec.dict.end() != it) { + if (it->second.size() == 0) { + throw std::runtime_error("MaxLevel has to be specified."); + } + if (it->second.size() > 1) { + throw std::runtime_error("Only one MexLevel can be specified."); + } + try { + long level = std::stol(it->second[0]); + if (level < 0) { + throw std::runtime_error("Bad format of levels: level " + it->second[0] + " is out of range."); + } + aut.max_level = static_cast(level); + } catch (const std::invalid_argument &ex) { + throw std::runtime_error("Bad format of levels: unsupported level " + it->second[0]); + } catch (const std::out_of_range &ex) { + throw std::runtime_error("Bad format of levels: level " + it->second[0] + " is out of range."); + } + } + + for (const auto& body_line : parsec.body) + { + if (body_line.size() != 3) + { + // clean up + clean_up(); + + if (body_line.size() == 2) + { + throw std::runtime_error("Epsilon transitions not supported: " + + std::to_string(body_line)); + } + else + { + throw std::runtime_error("Invalid transition: " + + std::to_string(body_line)); + } + } + + State src_state = get_state_name(body_line[0]); + Symbol symbol = alphabet->translate_symb(body_line[1]); + State tgt_state = get_state_name(body_line[2]); + + aut.delta.add(src_state, symbol, tgt_state); + } + + // do the dishes and take out garbage + clean_up(); + + return aut; +} // construct(). + +Lvlfa builder::construct(const mata::IntermediateAut& inter_aut, mata::Alphabet* alphabet, NameStateMap* state_map) { + // throw std::runtime_error("Constructor via IntermediateAut is not implemented for LVLFA."); + Lvlfa aut; + assert(nullptr != alphabet); + + if (!inter_aut.is_lvlfa()) { + throw std::runtime_error(std::string(__FUNCTION__) + ": expecting type \"" + + TYPE_NFA + "\""); + } + + NameStateMap tmp_state_map; + if (nullptr == state_map) { + state_map = &tmp_state_map; + } + + // a lambda for translating state names to identifiers + auto get_state_name = [&state_map, &aut](const std::string& str) { + if (!state_map->count(str)) { + State state = aut.add_state(); + state_map->insert({str, state}); + return state; + } else { + return (*state_map)[str]; + } + }; + + for (const auto& str : inter_aut.initial_formula.collect_node_names()) + { + State state = get_state_name(str); + aut.initial.insert(state); + } + + for (const auto& trans : inter_aut.transitions) + { + if (trans.second.children.size() != 2) + { + if (trans.second.children.size() == 1) + { + throw std::runtime_error("Epsilon transitions not supported"); + } + else + { + throw std::runtime_error("Invalid transition"); + } + } + + State src_state = get_state_name(trans.first.name); + Symbol symbol = alphabet->translate_symb(trans.second.children[0].node.name); + State tgt_state = get_state_name(trans.second.children[1].node.name); + + aut.delta.add(src_state, symbol, tgt_state); + } + + std::unordered_set final_formula_nodes; + if (!(inter_aut.final_formula.node.is_constant())) { + // we do not want to parse true/false (constant) as a state so we do not collect it + final_formula_nodes = inter_aut.final_formula.collect_node_names(); + } + // for constant true, we will pretend that final nodes are negated with empty final_formula_nodes + bool final_nodes_are_negated = (inter_aut.final_formula.node.is_true() || inter_aut.are_final_states_conjunction_of_negation()); + + if (final_nodes_are_negated) { + // we add all states NOT in final_formula_nodes to final states + 
for (const auto &state_name_and_id : *state_map) { + if (!final_formula_nodes.count(state_name_and_id.first)) { + aut.final.insert(state_name_and_id.second); + } + } + } else { + // we add all states in final_formula_nodes to final states + for (const auto& str : final_formula_nodes) + { + State state = get_state_name(str); + aut.final.insert(state); + } + } + + return aut; +} // construct(). + +void builder::construct( + mata::lvlfa::Lvlfa *result, + const mata::IntermediateAut &inter_aut, + mata::Alphabet *alphabet, + mata::lvlfa::builder::NameStateMap *state_map +) { + *result = construct(inter_aut, alphabet, state_map); +} + +Lvlfa builder::create_single_word_lvlfa(const std::vector& word) { + const size_t word_size{ word.size() }; + Lvlfa lvlfa{ word_size + 1, { 0 }, { word_size } }; + + for (State state{ 0 }; state < word_size; ++state) { + lvlfa.delta.add(state, word[state], state + 1); + } + return lvlfa; +} + +Lvlfa builder::create_single_word_lvlfa(const std::vector& word, mata::Alphabet *alphabet) { + if (!alphabet) { + alphabet = new OnTheFlyAlphabet{ word }; + } + const size_t word_size{ word.size() }; + Lvlfa lvlfa{ word_size + 1, { 0 }, { word_size }, std::vector(word_size + 1, 0), 0, alphabet }; + + for (State state{ 0 }; state < word_size; ++state) { + lvlfa.delta.add(state, alphabet->translate_symb(word[state]), state + 1); + } + return lvlfa; +} + +Lvlfa builder::create_empty_string_lvlfa() { + return Lvlfa{ 1, StateSet{ 0 }, StateSet{ 0 } }; +} + +Lvlfa builder::create_sigma_star_lvlfa(mata::Alphabet* alphabet) { + Lvlfa lvlfa{ 1, StateSet{ 0 }, StateSet{ 0 }, { 0 }, 0, alphabet }; + for (const mata::Symbol& symbol : alphabet->get_alphabet_symbols()) { + lvlfa.delta.add(0, symbol, 0); + } + return lvlfa; +} + +Lvlfa builder::parse_from_mata(std::istream& lvlfa_stream) { + const std::string lvlfa_str = "LVLFA"; + parser::Parsed parsed{ parser::parse_mf(lvlfa_stream) }; + if (parsed.size() != 1) { + throw std::runtime_error("The number of sections in the input file is '" + std::to_string(parsed.size()) + + "'. Required is '1'.\n"); + } + const std::string automaton_type{ parsed[0].type }; + if (automaton_type.compare(0, lvlfa_str.length(), lvlfa_str) != 0) { + throw std::runtime_error("The type of input automaton is '" + automaton_type + "'. 
Required is 'LVLFA'\n"); + } + IntAlphabet alphabet; + return construct(IntermediateAut::parse_from_mf(parsed)[0], &alphabet); + // return construct(parsed, &alphabet); +} + +Lvlfa builder::parse_from_mata(const std::filesystem::path& lvlfa_file) { + std::ifstream file_stream{ lvlfa_file }; + if (!file_stream) { + throw std::runtime_error("Could not open file \'" + lvlfa_file.string() + "'\n"); + } + + Lvlfa lvlfa; + try { + lvlfa = parse_from_mata(file_stream); + } catch (const std::exception& ex) { + file_stream.close(); + throw; + } + return lvlfa; +} + +Lvlfa builder::parse_from_mata(const std::string& lvlfa_in_mata) { + std::istringstream lvlfa_stream(lvlfa_in_mata); + return parse_from_mata(lvlfa_stream); +} diff --git a/src/lvlfa/complement.cc b/src/lvlfa/complement.cc new file mode 100644 index 00000000..814ae405 --- /dev/null +++ b/src/lvlfa/complement.cc @@ -0,0 +1,75 @@ +/* lvlfa-complement.cc -- LVLFA complement + */ + +// MATA headers +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/algorithms.hh" + +using namespace mata::lvlfa; +using namespace mata::utils; + +Lvlfa mata::lvlfa::algorithms::complement_classical(const Lvlfa& aut, const OrdVector& symbols, + bool minimize_during_determinization) { + Lvlfa result; + State sink_state; + if (minimize_during_determinization) { + result = minimize_brzozowski(aut); // brzozowski minimization makes it deterministic + if (result.final.empty() && !result.initial.empty()) { + assert(result.initial.size() == 1); + // if automaton does not accept anything, then there is only one (initial) state + // which can be the sink state (so we do not create unnecessary one) + sink_state = *result.initial.begin(); + } else { + sink_state = result.num_of_states(); + } + } else { + std::unordered_map subset_map; + result = determinize(aut, &subset_map); + // check if a sink state was not created during determinization + auto sink_state_iter = subset_map.find({}); + if (sink_state_iter != subset_map.end()) { + sink_state = sink_state_iter->second; + } else { + sink_state = result.num_of_states(); + } + } + + result.make_complete(symbols, sink_state); + result.final.complement(result.num_of_states()); + return result; +} + +Lvlfa mata::lvlfa::complement(const Lvlfa& aut, const Alphabet& alphabet, const ParameterMap& params) { + return mata::lvlfa::complement(aut, alphabet.get_alphabet_symbols(), params); +} + +Lvlfa mata::lvlfa::complement(const Lvlfa& aut, const mata::utils::OrdVector& symbols, const ParameterMap& params) { + Lvlfa result; + // Setting the requested algorithm. 
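+    // The checks below accept e.g. {{ "algorithm", "classical" }, { "minimize", "true" }}:
+    // "classical" is currently the only supported algorithm, and "minimize" switches on Brzozowski
+    // minimization during determinization.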
+ decltype(algorithms::complement_classical)* algo = algorithms::complement_classical; + if (!haskey(params, "algorithm")) { + throw std::runtime_error(std::to_string(__func__) + + " requires setting the \"algo\" key in the \"params\" argument; " + "received: " + std::to_string(params)); + } + + const std::string& str_algo = params.at("algorithm"); + if ("classical" == str_algo) { /* default */ } + else { + throw std::runtime_error(std::to_string(__func__) + + " received an unknown value of the \"algo\" key: " + str_algo); + } + + bool minimize_during_determinization = false; + if (params.find("minimize") != params.end()) { + const std::string& minimize_arg = params.at("minimize"); + if ("true" == minimize_arg) { minimize_during_determinization = true; } + else if ("false" == minimize_arg) { minimize_during_determinization = false; } + else { + throw std::runtime_error(std::to_string(__func__) + + " received an unknown value of the \"minimize\" key: " + str_algo); + } + } + + return algo(aut, symbols, minimize_during_determinization); +} diff --git a/src/lvlfa/concatenation.cc b/src/lvlfa/concatenation.cc new file mode 100644 index 00000000..4c1ef80e --- /dev/null +++ b/src/lvlfa/concatenation.cc @@ -0,0 +1,134 @@ +/* lvlfa-concatenation.cc -- Concatenation of LVLFAs + */ + +// MATA headers +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/algorithms.hh" + +using namespace mata::lvlfa; + +namespace mata::lvlfa { + +Lvlfa concatenate(const Lvlfa& lhs, const Lvlfa& rhs, bool use_epsilon, + StateRenaming* lhs_state_renaming, StateRenaming* rhs_state_renaming) { + return algorithms::concatenate_eps(lhs, rhs, EPSILON, use_epsilon, lhs_state_renaming, rhs_state_renaming); +} + +Lvlfa& Lvlfa::concatenate(const Lvlfa& aut) { + size_t n = this->num_of_states(); + auto upd_fnc = [&](State st) { + return st + n; + }; + + // copy the information about aut to save the case when this is the same object as aut. + utils::SparseSet aut_initial = aut.initial; + utils::SparseSet aut_final = aut.final; + size_t aut_n = aut.num_of_states(); + + this->delta.allocate(n); + this->delta.append(aut.delta.renumber_targets(upd_fnc)); + + // set accepting states + utils::SparseSet new_fin{}; + new_fin.reserve(n+aut_n); + for(const State& aut_fin : aut_final) { + new_fin.insert(upd_fnc(aut_fin)); + } + + // connect both parts + for(const State& ini : aut_initial) { + const StatePost& ini_post = this->delta[upd_fnc(ini)]; + // is ini state also final? + bool is_final = aut_final[ini]; + for(const State& fin : this->final) { + if(is_final) { + new_fin.insert(fin); + } + for(const SymbolPost& ini_mv : ini_post) { + // TODO: this should be done efficiently in a delta method + // TODO: in fact it is not efficient for now + for(const State& dest : ini_mv.targets) { + this->delta.add(fin, ini_mv.symbol, dest); + } + } + } + } + this->final = new_fin; + return *this; +} + +Lvlfa algorithms::concatenate_eps(const Lvlfa& lhs, const Lvlfa& rhs, const Symbol& epsilon, bool use_epsilon, + StateRenaming* lhs_state_renaming, StateRenaming* rhs_state_renaming) { + // Compute concatenation of given automata. + // Concatenation will proceed in the order of the passed automata: Result is 'lhs . rhs'. 
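+    // Construction sketch (describing the code below): lhs is kept as-is, rhs states are appended with
+    // their numbers shifted past the lhs states, an epsilon-transition is added from every lhs final
+    // state to every renamed rhs initial state, and remove_epsilon() is called when use_epsilon is false.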
+ + if (lhs.num_of_states() == 0 || rhs.num_of_states() == 0 || lhs.initial.empty() || lhs.final.empty() || + rhs.initial.empty() || rhs.final.empty()) { + return Lvlfa{}; + } + + const unsigned long lhs_states_num{lhs.num_of_states() }; + const unsigned long rhs_states_num{rhs.num_of_states() }; + Lvlfa result{}; // Concatenated automaton. + StateRenaming _lhs_states_renaming{}; // Map mapping rhs states to result states. + StateRenaming _rhs_states_renaming{}; // Map mapping rhs states to result states. + + const size_t result_num_of_states{lhs_states_num + rhs_states_num}; + if (result_num_of_states == 0) { return Lvlfa{}; } + + // Map lhs states to result states. + _lhs_states_renaming.reserve(lhs_states_num); + Symbol result_state_index{ 0 }; + for (State lhs_state{ 0 }; lhs_state < lhs_states_num; ++lhs_state) { + _lhs_states_renaming.insert(std::make_pair(lhs_state, result_state_index)); + ++result_state_index; + } + // Map rhs states to result states. + _rhs_states_renaming.reserve(rhs_states_num); + for (State rhs_state{ 0 }; rhs_state < rhs_states_num; ++rhs_state) { + _rhs_states_renaming.insert(std::make_pair(rhs_state, result_state_index)); + ++result_state_index; + } + + result = Lvlfa(); + result.delta = lhs.delta; + result.initial = lhs.initial; + result.add_state(result_num_of_states-1); + + // Add epsilon transitions connecting lhs and rhs automata. + // The epsilon transitions lead from lhs original final states to rhs original initial states. + for (const auto& lhs_final_state: lhs.final) { + for (const auto& rhs_initial_state: rhs.initial) { + result.delta.add(lhs_final_state, epsilon, + _rhs_states_renaming[rhs_initial_state]); + } + } + + // Make result final states. + for (const auto& rhs_final_state: rhs.final) + { + result.final.insert(_rhs_states_renaming[rhs_final_state]); + } + + // Add rhs transitions to the result. + for (State rhs_state{ 0 }; rhs_state < rhs_states_num; ++rhs_state) + { + for (const SymbolPost& rhs_move: rhs.delta.state_post(rhs_state)) + { + for (const State& rhs_state_to: rhs_move.targets) + { + result.delta.add(_rhs_states_renaming[rhs_state], + rhs_move.symbol, + _rhs_states_renaming[rhs_state_to]); + } + } + } + + if (!use_epsilon) { + result.remove_epsilon(); + } + if (lhs_state_renaming != nullptr) { *lhs_state_renaming = _lhs_states_renaming; } + if (rhs_state_renaming != nullptr) { *rhs_state_renaming = _rhs_states_renaming; } + return result; +} // concatenate_eps(). +} // Namespace mata::lvlfa. 
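+// Usage sketch (illustration only, not part of the library): concatenating two small LVLFAs.
+// The automata, states, and symbols below are made up for the example.
+//
+//     Lvlfa lhs, rhs;
+//     lhs.initial.insert(0); lhs.final.insert(1); lhs.delta.add(0, 'a', 1);
+//     rhs.initial.insert(0); rhs.final.insert(1); rhs.delta.add(0, 'b', 1);
+//     StateRenaming lhs_map, rhs_map;
+//     Lvlfa result{ concatenate(lhs, rhs, /*use_epsilon=*/false, &lhs_map, &rhs_map) };
+//     // 'result' accepts exactly the word "ab"; rhs_map records where each rhs state ended up.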
diff --git a/src/lvlfa/inclusion.cc b/src/lvlfa/inclusion.cc new file mode 100644 index 00000000..45857a8d --- /dev/null +++ b/src/lvlfa/inclusion.cc @@ -0,0 +1,289 @@ +/* lvlfa-incl.cc -- LVLFA language inclusion + */ + +// MATA headers +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/algorithms.hh" +#include "mata/utils/sparse-set.hh" + +using namespace mata::lvlfa; +using namespace mata::utils; + +/// naive language inclusion check (complementation + intersection + emptiness) +bool mata::lvlfa::algorithms::is_included_naive( + const Lvlfa &smaller, + const Lvlfa &bigger, + const Alphabet *const alphabet,//TODO: this should not be needed, likewise for equivalence + Run *cex) { // {{{ + Lvlfa bigger_cmpl; + if (alphabet == nullptr) { + bigger_cmpl = complement(bigger, create_alphabet(smaller, bigger)); + } else { + bigger_cmpl = complement(bigger, *alphabet); + } + Lvlfa lvlfa_isect = intersection(smaller, bigger_cmpl); + + return lvlfa_isect.is_lang_empty(cex); +} // is_included_naive }}} + + +/// language inclusion check using Antichains +// TODO, what about to construct the separator from this? +bool mata::lvlfa::algorithms::is_included_antichains( + const Lvlfa& smaller, + const Lvlfa& bigger, + const Alphabet* const alphabet, //TODO: this parameter is not used + Run* cex) +{ // {{{ + (void)alphabet; + + // TODO: Decide what is the best optimization for inclusion. + + using ProdStateType = std::tuple; + using ProdStatesType = std::vector; + // ProcessedType is indexed by states of the smaller lvlfa + // tailored for pure antichain approach ... the simulation-based antichain will not work (without changes). + using ProcessedType = std::vector; + + auto subsumes = [](const ProdStateType& lhs, const ProdStateType& rhs) { + if (std::get<0>(lhs) != std::get<0>(rhs)) { + return false; + } + + const StateSet& lhs_bigger = std::get<1>(lhs); + const StateSet& rhs_bigger = std::get<1>(rhs); + + //TODO: Can this be done faster using more heuristics? E.g., compare the last elements first ... + //TODO: Try BDDs! What about some abstractions? + return lhs_bigger.IsSubsetOf(rhs_bigger); + }; + + + // initialize + ProdStatesType worklist{};//Pairs (q,S) to be processed. It sometimes gives a huge speed-up when they are kept sorted by the size of S, + // worklist.reserve(32); + // so those with smaller popped for processing first. + ProcessedType processed(smaller.num_of_states()); // Allocate to the number of states of the smaller lvlfa. + // The pairs of each state are also kept sorted. It allows slightly faster antichain pruning - no need to test inclusion in sets that have less elements. + + //Is |S| < |S'| for the inut pairs (q,S) and (q',S')? 
+ // auto smaller_set = [](const ProdStateType & a, const ProdStateType & b) { return std::get<1>(a).size() < std::get<1>(b).size(); }; + + std::vector distances_smaller = revert(smaller).distances_from_initial(); + std::vector distances_bigger = revert(bigger).distances_from_initial(); + + // auto closer_dist = [&](const ProdStateType & a, const ProdStateType & b) { + // return distances_smaller[a.first] < distances_smaller[b.first]; + // }; + + // auto closer_smaller = [&](const ProdStateType & a, const ProdStateType & b) { + // if (distances_smaller[a.first] != distances_smaller[b.first]) + // return distances_smaller[a.first] < distances_smaller[b.first]; + // else + // return a.second.size() < b.second.size(); + // }; + + // auto smaller_closer = [&](const ProdStateType & a, const ProdStateType & b) { + // if (a.second.size() != b.second.size()) + // return a.second.size() < b.second.size(); + // else + // return distances_smaller[a.first] < distances_smaller[b.first]; + // }; + + auto min_dst = [&](const StateSet& set) { + if (set.empty()) return Limits::max_state; + return distances_bigger[*std::min_element(set.begin(), set.end(), [&](const State a,const State b){return distances_bigger[a] < distances_bigger[b];})]; + }; + + auto lengths_incompatible = [&](const ProdStateType& pair) { + return distances_smaller[std::get<0>(pair)] < std::get<2>(pair); + }; + + auto insert_to_pairs = [&](ProdStatesType & pairs,const ProdStateType & pair) { + // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, smaller_set); + // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, closer_dist); + // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, smaller_closer); + // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, closer_smaller); + // pairs.insert(it,pair); + pairs.push_back(pair); + // std::sort(pairs.begin(), pairs.end(), smaller_closer); + }; + + // 'paths[s] == t' denotes that state 's' was accessed from state 't', + // 'paths[s] == s' means that 's' is an initial state + std::map> paths; + + // check initial states first // TODO: this would be done in the main loop as the first thing anyway? 
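+    // If some initial state of the smaller automaton is final while no initial state of the bigger
+    // automaton is final, the empty word already witnesses non-inclusion; this is what the loop
+    // below detects before the main worklist processing starts.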
+ for (const auto& state : smaller.initial) { + if (smaller.final[state] && + are_disjoint(bigger.initial, bigger.final)) + { + if (cex != nullptr) { cex->word.clear(); } + return false; + } + + StateSet bigger_state_set{ bigger.initial }; + const ProdStateType st = std::tuple(state, bigger_state_set, min_dst(bigger_state_set)); + insert_to_pairs(worklist, st); + insert_to_pairs(processed[state],st); + + if (cex != nullptr) + paths.insert({ st, {st, 0}}); + } + + //For synchronised iteration over the set of states + SynchronizedExistentialSymbolPostIterator sync_iterator; + + // We use DFS strategy for the worklist processing + while (!worklist.empty()) { + // get a next product state + ProdStateType prod_state = *worklist.rbegin(); + worklist.pop_back(); + + const State& smaller_state = std::get<0>(prod_state); + const StateSet& bigger_set = std::get<1>(prod_state); + + sync_iterator.reset(); + for (State q: bigger_set) { + mata::utils::push_back(sync_iterator, bigger.delta[q]); + } + + // process transitions leaving smaller_state + for (const auto& smaller_move : smaller.delta[smaller_state]) { + const Symbol& smaller_symbol = smaller_move.symbol; + + StateSet bigger_succ = {}; + if(sync_iterator.synchronize_with(smaller_move)) { + bigger_succ = sync_iterator.unify_targets(); + } + + for (const State& smaller_succ : smaller_move.targets) { + const ProdStateType succ = {smaller_succ, bigger_succ, min_dst(bigger_succ)}; + + if (lengths_incompatible(succ) || (smaller.final[smaller_succ] && + !bigger.final.intersects_with(bigger_succ))) + { + if (cex != nullptr) { + cex->word.clear(); + cex->word.push_back(smaller_symbol); + ProdStateType trav = prod_state; + while (paths[trav].first != trav) + { // go back until initial state + cex->word.push_back(paths[trav].second); + trav = paths[trav].first; + } + + std::reverse(cex->word.begin(), cex->word.end()); + } + + return false; + } + + bool is_subsumed = false; + for (const auto& anti_state : processed[smaller_succ]) + { // trying to find in processed a smaller state than the newly created succ + // if (smaller_set(succ,anti_state)) { + // break; + // } + if (subsumes(anti_state, succ)) { + is_subsumed = true; + break; + } + } + + if (is_subsumed) { + continue; + } + + for (ProdStatesType* ds: {&processed[smaller_succ], &worklist}) { + //Pruning of processed and the worklist. + //Since they are ordered by the size of the sets, we can iterate from back, + //and as soon as we get to sets larger than succ, we can stop (larger sets cannot be subsets). + std::erase_if(*ds, [&](const auto& d){ return subsumes(succ, d); }); + // for (long it = static_cast(ds->size()-1);it>=0;--it) { + // // if (smaller_set((*ds)[static_cast(it)],succ)) + // // break; + // if (subsumes(succ, (*ds)[static_cast(it)])) { + // //Using index it instead of an iterator since erase could invalidate it (?) 
+ // ds->erase(ds->begin() + it); + // } + // } + insert_to_pairs(*ds, succ); + } + + if(cex != nullptr) { + // also set that succ was accessed from state + paths[succ] = {prod_state, smaller_symbol}; + } + } + } + } + return true; +} // }}} + +namespace { + using AlgoType = decltype(algorithms::is_included_naive)*; + + bool compute_equivalence(const Lvlfa &lhs, const Lvlfa &rhs, const mata::Alphabet *const alphabet, const AlgoType &algo) { + //alphabet should not be needed as input parameter + if (algo(lhs, rhs, alphabet, nullptr)) { + if (algo(rhs, lhs, alphabet, nullptr)) { + return true; + } + } + + return false; + } + + AlgoType set_algorithm(const std::string &function_name, const ParameterMap ¶ms) { + if (!haskey(params, "algorithm")) { + throw std::runtime_error(function_name + + " requires setting the \"algo\" key in the \"params\" argument; " + "received: " + std::to_string(params)); + } + + decltype(algorithms::is_included_naive) *algo; + const std::string &str_algo = params.at("algorithm"); + if ("naive" == str_algo) { + algo = algorithms::is_included_naive; + } else if ("antichains" == str_algo) { + algo = algorithms::is_included_antichains; + } else { + throw std::runtime_error(std::to_string(__func__) + + " received an unknown value of the \"algo\" key: " + str_algo); + } + + return algo; + } + +} + +// The dispatching method that calls the correct one based on parameters +bool mata::lvlfa::is_included( + const Lvlfa &smaller, + const Lvlfa &bigger, + Run *cex, + const Alphabet *const alphabet, + const ParameterMap ¶ms) { // {{{ + AlgoType algo{set_algorithm(std::to_string(__func__), params)}; + return algo(smaller, bigger, alphabet, cex); +} // is_included }}} + +bool mata::lvlfa::are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const Alphabet *alphabet, const ParameterMap& params) +{ + //TODO: add comment on what this is doing, what is __func__ ... + AlgoType algo{ set_algorithm(std::to_string(__func__), params) }; + + if (params.at("algorithm") == "naive") { + if (alphabet == nullptr) { + const auto computed_alphabet{create_alphabet(lhs, rhs) }; + return compute_equivalence(lhs, rhs, &computed_alphabet, algo); + } + } + + return compute_equivalence(lhs, rhs, alphabet, algo); +} + +bool mata::lvlfa::are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const ParameterMap& params) { + return are_equivalent(lhs, rhs, nullptr, params); +} diff --git a/src/lvlfa/intersection.cc b/src/lvlfa/intersection.cc new file mode 100644 index 00000000..51d610e6 --- /dev/null +++ b/src/lvlfa/intersection.cc @@ -0,0 +1,240 @@ +/* lvlfa-intersection.cc -- Intersection of LVLFAs + */ + +// MATA headers +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/algorithms.hh" +#include +#include + + +using namespace mata::lvlfa; + +namespace { + +using ProductMap = std::unordered_map,State>; +using MatrixProductStorage = std::vector>; +using VecMapProductStorage = std::vector>; +using InvertedProductStorage = std::vector; +//Unordered map seems to be faster than ordered map here, but still very much slower than matrix. + +} // Anonymous namespace. 
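+// Usage sketch (illustration only, not part of the library): intersecting two LVLFAs while
+// retrieving the mapping from pairs of original states to product states. The automata below are
+// made up for the example; the map type is spelled out to match the ProductMap alias above.
+//
+//     Lvlfa lhs, rhs;
+//     lhs.initial.insert(0); lhs.final.insert(1); lhs.delta.add(0, 'a', 1);
+//     rhs.initial.insert(0); rhs.final.insert(1); rhs.delta.add(0, 'a', 1);
+//     std::unordered_map<std::pair<State, State>, State> prod_map;
+//     Lvlfa product{ intersection(lhs, rhs, EPSILON, &prod_map) };
+//     // prod_map[{0, 0}] is the initial product state; prod_map[{1, 1}] is the final one.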
+ +namespace mata::lvlfa { + +Lvlfa intersection(const Lvlfa& lhs, const Lvlfa& rhs, const Symbol first_epsilon, ProductMap *prod_map) { + + auto both_final = [&](const State lhs_state,const State rhs_state) { + return lhs.final.contains(lhs_state) && rhs.final.contains(rhs_state); + }; + + if (lhs.final.empty() || lhs.initial.empty() || rhs.initial.empty() || rhs.final.empty()) + return Lvlfa{}; + + return algorithms::product(lhs, rhs, both_final, first_epsilon, prod_map); +} + +//TODO: move this method to lvlfa.hh? It is something one might want to use (e.g. for union, inclusion, equivalence of DFAs). +Lvlfa mata::lvlfa::algorithms::product( + const Lvlfa& lhs, const Lvlfa& rhs, const std::function&& final_condition, + const Symbol first_epsilon, ProductMap *product_map) { + + Lvlfa product{}; // The product automaton. + + // Set of product states to process. + std::deque worklist{}; + + //The largest matrix (product_matrix) of pairs of states we are brave enough to allocate. + // Let's say we are fine with allocating large_product * (about 8 Bytes) space. + // So ten million cells is close to 100 MB. + // If the number is larger, then we do not allocate a matrix, but use a vector of unordered maps (product_vec_map). + // The unordered_map seems to be about twice slower. + constexpr size_t MAX_PRODUCT_MATRIX_SIZE = 50'000'000; + //constexpr size_t MAX_PRODUCT_MATRIX_SIZE = 0; + const bool large_product = lhs.num_of_states() * rhs.num_of_states() > MAX_PRODUCT_MATRIX_SIZE; + assert(lhs.num_of_states() < Limits::max_state); + assert(rhs.num_of_states() < Limits::max_state); + + //Two variants of storage for the mapping from pairs of lhs and rhs states to product state, for large and non-large products. + MatrixProductStorage matrix_product_storage; + VecMapProductStorage vec_map_product_storage; + InvertedProductStorage product_to_lhs(lhs.num_of_states()+rhs.num_of_states()); + InvertedProductStorage product_to_rhs(lhs.num_of_states()+rhs.num_of_states()); + + + //Initialize the storage, according to the number of possible state pairs. + if (!large_product) + matrix_product_storage = MatrixProductStorage(lhs.num_of_states(), std::vector(rhs.num_of_states(), Limits::max_state)); + else + vec_map_product_storage = VecMapProductStorage(lhs.num_of_states()); + + /// Give me the product state for the pair of lhs and rhs states. + /// Returns Limits::max_state if not found. + auto get_state_from_product_storage = [&](State lhs_state, State rhs_state) { + if (!large_product) + return matrix_product_storage[lhs_state][rhs_state]; + else { + auto it = vec_map_product_storage[lhs_state].find(rhs_state); + if (it == vec_map_product_storage[lhs_state].end()) + return Limits::max_state; + else + return it->second; + } + }; + + /// Insert new mapping lhs rhs state pair to product state. + auto insert_to_product_storage = [&](State lhs_state, State rhs_state, State product_state) { + if (!large_product) + matrix_product_storage[lhs_state][rhs_state] = product_state; + else + vec_map_product_storage[lhs_state][rhs_state] = product_state; + + product_to_lhs.resize(product_state+1); + product_to_rhs.resize(product_state+1); + product_to_lhs[product_state] = lhs_state; + product_to_rhs[product_state] = rhs_state; + + //this thing is not used internally. It is only used if we want to return the mapping. But it is expensive. 
+ if (product_map != nullptr) + (*product_map)[std::pair(lhs_state,rhs_state)] = product_state; + }; + +/** + * Add symbol_post for the product state (lhs,rhs) to the product, used for epsilons only (it is simpler for normal symbols). + * @param[in] pair_to_process Currently processed pair of original states. + * @param[in] new_product_symbol_post State transitions to add to the product. + */ + auto add_product_e_post = [&](const State lhs_source, const State rhs_source, SymbolPost& new_product_symbol_post) + { + if (new_product_symbol_post.empty()) { return; } + + State product_source = get_state_from_product_storage(lhs_source, rhs_source); + + StatePost &product_state_post{product.delta.mutable_state_post(product_source)}; + + if (product_state_post.empty() || new_product_symbol_post.symbol > product_state_post.back().symbol) { + product_state_post.push_back(std::move(new_product_symbol_post)); + } + else { + auto symbol_post_it = product_state_post.find(new_product_symbol_post.symbol); + if (symbol_post_it == product_state_post.end()) { + product_state_post.insert(std::move(new_product_symbol_post)); + } + //Epsilons are not inserted in order, we insert all lhs epsilons and then all rhs epsilons. + // It can happen that we insert an e-transition from lhs and then another with the same e from rhs. + else { + symbol_post_it->insert(new_product_symbol_post.targets); + } + } + }; + +/** + * Create product state if it does not exist in storage yet and fill in its symbol_post from lhs and rhs targets. + * @param[in] lhs_target Target state in LVLFA @c lhs. + * @param[in] rhs_target Target state in LVLFA @c rhs. + * @param[out] product_symbol_post New SymbolPost of the product state. + */ + auto create_product_state_and_symbol_post = [&](const State lhs_target, const State rhs_target, SymbolPost& product_symbol_post) + { + State product_target = get_state_from_product_storage(lhs_target, rhs_target ); + + if ( product_target == Limits::max_state ) + { + product_target = product.add_state(); + assert(product_target < Limits::max_state); + + insert_to_product_storage(lhs_target,rhs_target, product_target); + + worklist.push_back(product_target); + + if (final_condition(lhs_target,rhs_target)) { + product.final.insert(product_target); + } + } + //TODO: Push_back all of them and sort at the could be faster. + product_symbol_post.insert(product_target); + }; + + // Initialize pairs to process with initial state pairs. + for (const State lhs_initial_state : lhs.initial) { + for (const State rhs_initial_state : rhs.initial) { + // Update product with initial state pairs. + const State product_initial_state = product.add_state(); + insert_to_product_storage(lhs_initial_state,rhs_initial_state,product_initial_state); + worklist.push_back(product_initial_state); + product.initial.insert(product_initial_state); + if (final_condition(lhs_initial_state,rhs_initial_state)) { + product.final.insert(product_initial_state); + } + } + } + + while (!worklist.empty()) { + State product_source = worklist.back();; + worklist.pop_back(); + State lhs_source = product_to_lhs[product_source]; + State rhs_source = product_to_rhs[product_source]; + // Compute classic product for current state pair. 
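+        // The synchronized universal iterator below walks the state posts of lhs_source and
+        // rhs_source in lockstep: each advance() yields, in increasing symbol order, the pair of
+        // SymbolPosts for a symbol that has transitions from both states, so only such symbols
+        // are expanded into product transitions.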
+ + mata::utils::SynchronizedUniversalIterator::const_iterator> sync_iterator(2); + mata::utils::push_back(sync_iterator, lhs.delta[lhs_source]); + mata::utils::push_back(sync_iterator, rhs.delta[rhs_source]); + + while (sync_iterator.advance()) { + const std::vector& same_symbol_posts{ sync_iterator.get_current() }; + assert(same_symbol_posts.size() == 2); // One move per state in the pair. + + // Compute product for state transitions with same symbols. + // Find all transitions that have the same symbol for first and the second state in the pair_to_process. + // Create transition from the pair_to_process to all pairs between states to which first transition goes + // and states to which second one goes. + Symbol symbol = same_symbol_posts[0]->symbol; + if (symbol < first_epsilon) { + SymbolPost product_symbol_post{ symbol }; + for (const State lhs_target: same_symbol_posts[0]->targets) { + for (const State rhs_target: same_symbol_posts[1]->targets) { + create_product_state_and_symbol_post(lhs_target, rhs_target, product_symbol_post); + } + } + StatePost &product_state_post{product.delta.mutable_state_post(product_source)}; + //Here we are sure that we are working with the largest symbol so far, since we iterate through + //the symbol posts of the lhs and rhs in order. So we can just push_back (not insert). + product_state_post.push_back(std::move(product_symbol_post)); + } + else + break; + } + + // Add epsilon transitions, from lhs e-transitions. + const StatePost& lhs_state_post{lhs.delta[lhs_source] }; + + //TODO: handling of epsilons might not be ideal, don't know, it would need some brain cycles to improve. + // (handling of normal symbols is ok though) + auto lhs_first_epsilon_it = lhs_state_post.first_epsilon_it(first_epsilon); + if (lhs_first_epsilon_it != lhs_state_post.end()) { + for (auto lhs_symbol_post = lhs_first_epsilon_it; lhs_symbol_post < lhs_state_post.end(); ++lhs_symbol_post) { + SymbolPost prod_symbol_post{lhs_symbol_post->symbol }; + for (const State lhs_target: lhs_symbol_post->targets) { + create_product_state_and_symbol_post(lhs_target, rhs_source, prod_symbol_post); + } + add_product_e_post(lhs_source, rhs_source, prod_symbol_post); + } + } + + // Add epsilon transitions, from rhs e-transitions. + const StatePost& rhs_state_post{rhs.delta[rhs_source] }; + auto rhs_first_epsilon_it = rhs_state_post.first_epsilon_it(first_epsilon); + if (rhs_first_epsilon_it != rhs_state_post.end()) { + for (auto rhs_symbol_post = rhs_first_epsilon_it; rhs_symbol_post < rhs_state_post.end(); ++rhs_symbol_post) { + SymbolPost prod_symbol_post{rhs_symbol_post->symbol }; + for (const State rhs_target: rhs_symbol_post->targets) { + create_product_state_and_symbol_post(lhs_source, rhs_target, prod_symbol_post); + } + add_product_e_post(lhs_source, rhs_source, prod_symbol_post); + } + } + } + return product; +} // intersection(). + +} // namespace mata::lvlfa. diff --git a/src/lvlfa/lvlfa.cc b/src/lvlfa/lvlfa.cc new file mode 100644 index 00000000..e4734cb5 --- /dev/null +++ b/src/lvlfa/lvlfa.cc @@ -0,0 +1,210 @@ +/* lvlfa.cc -- operations for NFA + */ + +#include +#include +#include +#include + +// MATA headers +#include "mata/utils/sparse-set.hh" +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/algorithms.hh" +#include + +using namespace mata::utils; +using namespace mata::lvlfa; +using mata::Symbol; +using mata::Word; +using mata::BoolVector; + +using StateBoolArray = std::vector; ///< Bool array for states in the automaton. 
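+// Note on levels: `levels` is meant to hold one level per state of the automaton. States created
+// via add_state() default to level 0, clear() drops the levels together with the states, trim()
+// erases the level entries of removed useless states, and is_identical() additionally compares
+// levels and max_level (see the respective methods below).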
+ +const std::string mata::lvlfa::TYPE_NFA = "LVLFA"; + + +// const State Limits::min_state; +// const State Limits::max_state; +// const Symbol Limits::min_symbol; +// const Symbol Limits::max_symbol; + +Lvlfa& Lvlfa::trim(StateRenaming* state_renaming) { + + +#ifdef _STATIC_STRUCTURES_ + BoolVector useful_states{ useful_states() }; + useful_states.clear(); + useful_states = useful_states(); +#else + BoolVector useful_states{ get_useful_states() }; +#endif + const size_t useful_states_size{ useful_states.size() }; + std::vector renaming(useful_states_size); + for(State new_state{ 0 }, orig_state{ 0 }; orig_state < useful_states_size; ++orig_state) { + if (useful_states[orig_state]) { + renaming[orig_state] = new_state; + ++new_state; + } + } + + delta.defragment(useful_states, renaming); + + auto is_state_useful = [&](State q){return q < useful_states.size() && useful_states[q];}; + initial.filter(is_state_useful); + final.filter(is_state_useful); + + // Specific for levels + ////////////////////// + State move_index{ 0 }; + std::erase_if(levels, + [&](Level&) -> bool { + State prev{ move_index }; + ++move_index; + return !useful_states[prev]; + } + ); + ////////////////////// + + auto rename_state = [&](State q){return renaming[q];}; + initial.rename(rename_state); + final.rename(rename_state); + initial.truncate(); + final.truncate(); + if (state_renaming != nullptr) { + state_renaming->clear(); + state_renaming->reserve(useful_states_size); + for (State q{ 0 }; q < useful_states_size; ++q) { + if (useful_states[q]) { + (*state_renaming)[q] = renaming[q]; + } + } + } + return *this; +} + +std::string Lvlfa::print_to_DOT() const { + std::stringstream output; + print_to_DOT(output); + return output.str(); +} + +void Lvlfa::print_to_DOT(std::ostream &output) const { + output << "digraph finiteAutomaton {" << std::endl + << "node [shape=circle];" << std::endl; + + for (State final_state: final) { + output << final_state << " [shape=doublecircle];" << std::endl; + } + + const size_t delta_size = delta.num_of_states(); + for (State source = 0; source != delta_size; ++source) { + for (const SymbolPost &move: delta[source]) { + output << source << " -> {"; + for (State target: move.targets) { + output << target << " "; + } + output << "} [label=" << move.symbol << "];" << std::endl; + } + } + + output << "node [shape=none, label=\"\"];" << std::endl; + output << "forcelabels=true;" << std::endl; + for (State s{ 0 }; s < levels.size(); s++) { + output << s << " [label=\"" << s << ":" << levels[s] << "\"];" << std::endl; + } + for (State init_state: initial) { + output << "i" << init_state << " -> " << init_state << ";" << std::endl; + } + + output << "}" << std::endl; +} + +std::string Lvlfa::print_to_mata() const { + std::stringstream output; + print_to_mata(output); + return output.str(); +} + +void Lvlfa::print_to_mata(std::ostream &output) const { + output << "@LVLFA-explicit" << std::endl + << "%Alphabet-auto" << std::endl; + // TODO should be this, but we cannot parse %Alphabet-numbers yet + //<< "%Alphabet-numbers" << std::endl; + + if (!initial.empty()) { + output << "%Initial"; + for (State init_state : initial) { + output << " q" << init_state; + } + output << std::endl; + } + + if (!final.empty()) { + output << "%Final"; + for (State final_state : final) { + output << " q" << final_state; + } + output << std::endl; + } + + // if (!levels.empty()) { + // output << "%Levels"; + // for (State s{ 0 }; s < num_of_states(); s++) { + // output << " " << "q" << s << ":" << levels[s]; + // } + 
// output << std::endl; + // output << "MaxLevel " << max_level << std::endl; + // } + + for (const Transition& trans: delta.transitions()) { + output << "q" << trans.source << " " << trans.symbol << " q" << trans.target << std::endl; + } +} + +Lvlfa Lvlfa::get_one_letter_aut(Symbol abstract_symbol) const { + Lvlfa digraph{num_of_states(), StateSet(initial), StateSet(final), std::vector(num_of_states(), 0), 0 }; + // Add directed transitions for digraph. + for (const Transition& transition: delta.transitions()) { + // Directly try to add the transition. Finding out whether the transition is already in the digraph + // only iterates through transition relation again. + digraph.delta.add(transition.source, abstract_symbol, transition.target); + } + return digraph; +} + +void Lvlfa::get_one_letter_aut(Lvlfa& result) const { + result = get_one_letter_aut(); +} + +Lvlfa& Lvlfa::operator=(Lvlfa&& other) noexcept { + if (this != &other) { + delta = std::move(other.delta); + initial = std::move(other.initial); + final = std::move(other.final); + levels = std::move(other.levels); + max_level = other.max_level; + alphabet = other.alphabet; + attributes = std::move(other.attributes); + other.alphabet = nullptr; + } + return *this; +} + +State Lvlfa::add_state() { + levels.push_back(0); + return mata::nfa::Nfa::add_state(); +} + +State Lvlfa::add_state(State state) { + levels.push_back(0); + return mata::nfa::Nfa::add_state(state); +} + +void Lvlfa::clear() { + mata::nfa::Nfa::clear(); + levels.clear(); +} + +bool Lvlfa::is_identical(const Lvlfa& aut) const { + return max_level == aut.max_level && levels == aut.levels && mata::nfa::Nfa::is_identical(aut); +} diff --git a/src/lvlfa/operations.cc b/src/lvlfa/operations.cc new file mode 100644 index 00000000..1c79aa0d --- /dev/null +++ b/src/lvlfa/operations.cc @@ -0,0 +1,652 @@ +/* lvlfa.cc -- operations for LVLFA + */ + +#include +#include +#include +#include + +// MATA headers +#include "mata/lvlfa/delta.hh" +#include "mata/utils/sparse-set.hh" +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/algorithms.hh" +#include "mata/lvlfa/builder.hh" +#include + +using std::tie; + +using namespace mata::utils; +using namespace mata::lvlfa; +using mata::Symbol; + +using StateBoolArray = std::vector; ///< Bool array for states in the automaton. 
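+// Usage sketch (illustration only, not part of the library): the dispatch functions defined in
+// this file choose their algorithm through a ParameterMap. Here `aut` stands for an arbitrary
+// Lvlfa; the subset_map type is the one determinize() fills in.
+//
+//     StateRenaming renaming;
+//     Lvlfa reduced{ reduce(aut, &renaming, {{ "algorithm", "simulation" }}) };
+//     Lvlfa minimal{ minimize(aut, {{ "algorithm", "brzozowski" }}) };
+//     std::unordered_map<StateSet, State> subset_map;
+//     Lvlfa det{ determinize(aut, &subset_map) };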
+ +namespace { + Simlib::Util::BinaryRelation compute_fw_direct_simulation(const Lvlfa& aut) { + Symbol maxSymbol{ aut.delta.get_max_symbol() }; + const size_t state_num{ aut.num_of_states() }; + Simlib::ExplicitLTS LTSforSimulation(state_num); + + for (const Transition& transition : aut.delta.transitions()) { + LTSforSimulation.add_transition(transition.source, transition.symbol, transition.target); + } + + // final states cannot be simulated by nonfinal -> we add new selfloops over final states with new symbol in LTS + for (State finalState : aut.final) { + LTSforSimulation.add_transition(finalState, maxSymbol + 1, finalState); + } + + LTSforSimulation.init(); + return LTSforSimulation.compute_simulation(); + } + + Lvlfa reduce_size_by_simulation(const Lvlfa& aut, StateRenaming &state_renaming) { + Lvlfa result; + const auto sim_relation = algorithms::compute_relation( + aut, ParameterMap{{ "relation", "simulation"}, { "direction", "forward"}}); + + auto sim_relation_symmetric = sim_relation; + sim_relation_symmetric.restrict_to_symmetric(); + + // for State q, quot_proj[q] should be the representative state representing the symmetric class of states in simulation + std::vector quot_proj; + sim_relation_symmetric.get_quotient_projection(quot_proj); + + const size_t num_of_states = aut.num_of_states(); + + // map each state q of aut to the state of the reduced automaton representing the simulation class of q + for (State q = 0; q < num_of_states; ++q) { + const State qReprState = quot_proj[q]; + if (state_renaming.count(qReprState) == 0) { // we need to map q's class to a new state in reducedAut + const State qClass = result.add_state(); + state_renaming[qReprState] = qClass; + state_renaming[q] = qClass; + } else { + state_renaming[q] = state_renaming[qReprState]; + } + } + + for (State q = 0; q < num_of_states; ++q) { + const State q_class_state = state_renaming.at(q); + + if (aut.initial[q]) { // if a symmetric class contains initial state, then the whole class should be initial + result.initial.insert(q_class_state); + } + + if (quot_proj[q] == q) { // we process only transitions starting from the representative state, this is enough for simulation + for (const auto &q_trans : aut.delta.state_post(q)) { + const StateSet representatives_of_states_to = [&]{ + StateSet state_set; + for (auto s : q_trans.targets) { + state_set.insert(quot_proj[s]); + } + return state_set; + }(); + + // get the class states of those representatives that are not simulated by another representative in representatives_of_states_to + StateSet representatives_class_states; + for (const State s : representatives_of_states_to) { + bool is_state_important = true; // if true, we need to keep the transition from q to s + for (const State p : representatives_of_states_to) { + if (s != p && sim_relation.get(s, p)) { // if p (different from s) simulates s + is_state_important = false; // as p simulates s, the transition from q to s is not important to keep, as it is subsumed in transition from q to p + break; + } + } + if (is_state_important) { + representatives_class_states.insert(state_renaming.at(s)); + } + } + + // add the transition 'q_class_state-q_trans.symbol->representatives_class_states' at the end of transition list of transitions starting from q_class_state + // as the q_trans.symbol should be the largest symbol we saw (as we iterate trough getTransitionsFromState(q) which is ordered) + result.delta.mutable_state_post(q_class_state).insert(SymbolPost(q_trans.symbol, representatives_class_states)); + } + + 
if (aut.final[q]) { // if q is final, then all states in its class are final => we make q_class_state final + result.final.insert(q_class_state); + } + } + } + + return result; + } +} + +//TODO: based on the comments inside, this function needs to be rewritten in a more optimal way. +Lvlfa mata::lvlfa::remove_epsilon(const Lvlfa& aut, Symbol epsilon) { + // cannot use multimap, because it can contain multiple occurrences of (a -> a), (a -> a) + std::unordered_map eps_closure; + + // TODO: grossly inefficient + // first we compute the epsilon closure + const size_t num_of_states{aut.num_of_states() }; + for (size_t i{ 0 }; i < num_of_states; ++i) + { + for (const auto& trans: aut.delta[i]) + { // initialize + const auto it_ins_pair = eps_closure.insert({i, {i}}); + if (trans.symbol == epsilon) + { + StateSet& closure = it_ins_pair.first->second; + // TODO: Fix possibly insert to OrdVector. Create list already ordered, then merge (do not need to resize each time); + closure.insert(trans.targets); + } + } + } + + bool changed = true; + while (changed) { // Compute the fixpoint. + changed = false; + for (size_t i = 0; i < num_of_states; ++i) { + const StatePost& post{ aut.delta[i] }; + const auto eps_move_it { post.find(epsilon) };//TODO: make faster if default epsilon + if (eps_move_it != post.end()) { + StateSet& src_eps_cl = eps_closure[i]; + for (const State tgt: eps_move_it->targets) { + const StateSet& tgt_eps_cl = eps_closure[tgt]; + for (const State st: tgt_eps_cl) { + if (src_eps_cl.count(st) == 0) { + changed = true; + break; + } + } + src_eps_cl.insert(tgt_eps_cl); + } + } + } + } + + // Construct the automaton without epsilon transitions. + Lvlfa result{ Delta{}, aut.initial, aut.final, aut.levels, aut.max_level, aut.alphabet }; + for (const auto& state_closure_pair : eps_closure) { // For every state. + State src_state = state_closure_pair.first; + for (State eps_cl_state : state_closure_pair.second) { // For every state in its epsilon closure. + if (aut.final[eps_cl_state]) result.final.insert(src_state); + for (const SymbolPost& move : aut.delta[eps_cl_state]) { + if (move.symbol == epsilon) continue; + // TODO: this could be done more efficiently if we had a better add method + for (State tgt_state : move.targets) { + result.delta.add(src_state, move.symbol, tgt_state); + } + } + } + } + return result; +} + +Lvlfa mata::lvlfa::fragile_revert(const Lvlfa& aut) { + const size_t num_of_states{ aut.num_of_states() }; + + Lvlfa result(num_of_states); + + result.initial = aut.final; + result.final = aut.initial; + + // Compute non-epsilon symbols. + OrdVector symbols = aut.delta.get_used_symbols(); + if (symbols.empty()) { return result; } + if (symbols.back() == EPSILON) { symbols.pop_back(); } + // size of the "used alphabet", i.e. max symbol+1 or 0 + Symbol alphasize = (symbols.empty()) ? 0 : (symbols.back()+1); + +#ifdef _STATIC_STRUCTURES_ + //STATIC DATA STRUCTURES: + // Not sure that it works ideally, whether the space for the inner vectors stays there. 
+ static std::vector> sources; + static std::vector> targets; + static std::vector e_sources; + static std::vector e_targets; + if (alphasize>sources.size()) { + sources.resize(alphasize); + targets.resize(alphasize); + } + + e_sources.clear(); + e_targets.clear(); + + //WHEN ONLY MAX SYMBOL IS COMPUTED + // for (int i = 0;i> sources (alphasize); + std::vector> targets (alphasize); + std::vector e_sources; + std::vector e_targets; +#endif + + //Copy all transition with non-e symbols to the arrays of sources and targets indexed by symbols. + //Targets and sources of e-transitions go to the special place. + //Important: since we are going through delta in order of sources, the sources arrays are all ordered. + for (State sourceState{ 0 }; sourceState < num_of_states; ++sourceState) { + for (const SymbolPost &move: aut.delta[sourceState]) { + if (move.symbol == EPSILON) { + for (const State targetState: move.targets) { + //reserve_on_insert(e_sources); + e_sources.push_back(sourceState); + //reserve_on_insert(e_targets); + e_targets.push_back(targetState); + } + } + else { + for (const State targetState: move.targets) { + //reserve_on_insert(sources[move.symbol]); + sources[move.symbol].push_back(sourceState); + //reserve_on_insert(targets[move.symbol]); + targets[move.symbol].push_back(targetState); + } + } + } + } + + //Now make the delta of the reversed automaton. + //Important: since sources are ordered, when adding them as targets, we can just push them back. + result.delta.reserve(num_of_states); + + // adding non-e transitions + for (const Symbol symbol: symbols) { + for (size_t i{ 0 }; i < sources[symbol].size(); ++i) { + State tgt_state =sources[symbol][i]; + State src_state =targets[symbol][i]; + StatePost & src_post = result.delta.mutable_state_post(src_state); + if (src_post.empty() || src_post.back().symbol != symbol) { + src_post.push_back(SymbolPost(symbol)); + } + src_post.back().push_back(tgt_state); + } + } + + // adding e-transitions + for (size_t i{ 0 }; i < e_sources.size(); ++i) { + State tgt_state =e_sources[i]; + State src_state =e_targets[i]; + StatePost & src_post = result.delta.mutable_state_post(src_state); + if (src_post.empty() || src_post.back().symbol != EPSILON) { + src_post.push_back(SymbolPost(EPSILON)); + } + src_post.back().push_back(tgt_state); + } + + //sorting the targets + //Hm I don't know why I put this here, but it should not be needed ... 
+ //for (State q = 0, states_num = result.delta.post_size(); qtargets); + // } + //} + + return result; +} + +Lvlfa mata::lvlfa::simple_revert(const Lvlfa& aut) { + Lvlfa result; + result.clear(); + + const size_t num_of_states{ aut.num_of_states() }; + result.delta.allocate(num_of_states); + + for (State sourceState{ 0 }; sourceState < num_of_states; ++sourceState) { + for (const SymbolPost &transition: aut.delta[sourceState]) { + for (const State targetState: transition.targets) { + result.delta.add(targetState, transition.symbol, sourceState); + } + } + } + + result.initial = aut.final; + result.final = aut.initial; + + return result; +} + +//not so great, can be removed +Lvlfa mata::lvlfa::somewhat_simple_revert(const Lvlfa& aut) { + const size_t num_of_states{ aut.num_of_states() }; + + Lvlfa result(num_of_states); + + result.initial = aut.final; + result.final = aut.initial; + + for (State sourceState{ 0 }; sourceState < num_of_states; ++sourceState) { + for (const SymbolPost &transition: aut.delta[sourceState]) { + for (const State targetState: transition.targets) { + StatePost & post = result.delta.mutable_state_post(targetState); + //auto move = std::find(post.begin(),post.end(),Move(transition.symbol)); + auto move = post.find(SymbolPost(transition.symbol)); + if (move == post.end()) { + //post.push_back(Move(transition.symbol,sourceState)); + post.insert(SymbolPost(transition.symbol, sourceState)); + } + else + move->push_back(sourceState); + //move->insert(sourceState); + } + } + } + + //sorting the targets + for (State q = 0, states_num = result.delta.num_of_states(); q < states_num; ++q) { + //Post & post = result.delta.get_mutable_post(q); + //utils::sort_and_rmdupl(post); + for (SymbolPost& m: result.delta.mutable_state_post(q)) { sort_and_rmdupl(m.targets); } + } + + return result; +} + +Lvlfa mata::lvlfa::revert(const Lvlfa& aut) { + return simple_revert(aut); + //return fragile_revert(aut); + //return somewhat_simple_revert(aut); +} + +std::pair mata::lvlfa::Lvlfa::get_word_for_path(const Run& run) const { + if (run.path.empty()) { return {{}, true}; } + + Run word; + State cur = run.path[0]; + for (size_t i = 1; i < run.path.size(); ++i) { + State newSt = run.path[i]; + bool found = false; + if (!this->delta.empty()) { + for (const auto &symbolMap: this->delta[cur]) { + for (State st: symbolMap.targets) { + if (st == newSt) { + word.word.push_back(symbolMap.symbol); + found = true; + break; + } + } + if (found) { break; } + } + } + if (!found) { return {{}, false}; } + cur = newSt; // update current state + } + return {word, true}; +} + +//TODO: this is not efficient +bool mata::lvlfa::Lvlfa::is_in_lang(const Run& run) const { + StateSet current_post(this->initial); + for (const Symbol sym : run.word) { + current_post = this->post(current_post, sym); + if (current_post.empty()) { return false; } + } + return this->final.intersects_with(current_post); +} + +/// Checks whether the prefix of a string is in the language of an automaton +// TODO: slow and it should share code with is_in_lang +bool mata::lvlfa::Lvlfa::is_prfx_in_lang(const Run& run) const { + StateSet current_post{ this->initial }; + for (const Symbol sym : run.word) { + if (this->final.intersects_with(current_post)) { return true; } + current_post = this->post(current_post, sym); + if (current_post.empty()) { return false; } + } + return this->final.intersects_with(current_post); +} + +Lvlfa mata::lvlfa::algorithms::minimize_brzozowski(const Lvlfa& aut) { + //compute the minimal deterministic automaton, 
Brzozovski algorithm + return determinize(revert(determinize(revert(aut)))); +} + +Lvlfa mata::lvlfa::minimize( + const Lvlfa& aut, + const ParameterMap& params) +{ + Lvlfa result; + // setting the default algorithm + decltype(algorithms::minimize_brzozowski)* algo = algorithms::minimize_brzozowski; + if (!haskey(params, "algorithm")) { + throw std::runtime_error(std::to_string(__func__) + + " requires setting the \"algo\" key in the \"params\" argument; " + "received: " + std::to_string(params)); + } + + const std::string& str_algo = params.at("algorithm"); + if ("brzozowski" == str_algo) { /* default */ } + else { + throw std::runtime_error(std::to_string(__func__) + + " received an unknown value of the \"algo\" key: " + str_algo); + } + + return algo(aut); +} + +Lvlfa mata::lvlfa::uni(const Lvlfa &lhs, const Lvlfa &rhs) { + Lvlfa union_lvlfa{ lhs }; + return union_lvlfa.uni(rhs); +} + +Lvlfa& Lvlfa::uni(const Lvlfa& aut) { + size_t n = this->num_of_states(); + auto upd_fnc = [&](State st) { + return st + n; + }; + + // copy the information about aut to save the case when this is the same object as aut. + size_t aut_states = aut.num_of_states(); + SparseSet aut_final_copy = aut.final; + SparseSet aut_initial_copy = aut.initial; + + this->delta.allocate(n); + this->delta.append(aut.delta.renumber_targets(upd_fnc)); + + // set accepting states + this->final.reserve(n+aut_states); + for(const State& aut_fin : aut_final_copy) { + this->final.insert(upd_fnc(aut_fin)); + } + // set unitial states + this->initial.reserve(n+aut_states); + for(const State& aut_ini : aut_initial_copy) { + this->initial.insert(upd_fnc(aut_ini)); + } + + return *this; +} + +Simlib::Util::BinaryRelation mata::lvlfa::algorithms::compute_relation(const Lvlfa& aut, const ParameterMap& params) { + if (!haskey(params, "relation")) { + throw std::runtime_error(std::to_string(__func__) + + " requires setting the \"relation\" key in the \"params\" argument; " + "received: " + std::to_string(params)); + } + if (!haskey(params, "direction")) { + throw std::runtime_error(std::to_string(__func__) + + " requires setting the \"direction\" key in the \"params\" argument; " + "received: " + std::to_string(params)); + } + + const std::string& relation = params.at("relation"); + const std::string& direction = params.at("direction"); + if ("simulation" == relation && direction == "forward") { + return compute_fw_direct_simulation(aut); + } + else { + throw std::runtime_error(std::to_string(__func__) + + " received an unknown value of the \"relation\" key: " + relation); + } +} + +Lvlfa mata::lvlfa::reduce(const Lvlfa &aut, StateRenaming *state_renaming, const ParameterMap& params) { + if (!haskey(params, "algorithm")) { + throw std::runtime_error(std::to_string(__func__) + + " requires setting the \"algorithm\" key in the \"params\" argument; " + "received: " + std::to_string(params)); + } + + Lvlfa result; + std::unordered_map reduced_state_map; + const std::string& algorithm = params.at("algorithm"); + if ("simulation" == algorithm) { + result = reduce_size_by_simulation(aut, reduced_state_map); + } else { + throw std::runtime_error(std::to_string(__func__) + + " received an unknown value of the \"algorithm\" key: " + algorithm); + } + + if (state_renaming) { + state_renaming->clear(); + *state_renaming = reduced_state_map; + } + return result; +} + +Lvlfa mata::lvlfa::determinize( + const Lvlfa& aut, + std::unordered_map *subset_map) { + + Lvlfa result; + //assuming all sets targets are non-empty + std::vector> worklist; + bool 
deallocate_subset_map = false; + if (subset_map == nullptr) { + subset_map = new std::unordered_map(); + deallocate_subset_map = true; + } + + result.clear(); + + const StateSet S0 = StateSet(aut.initial); + const State S0id = result.add_state(); + result.initial.insert(S0id); + + if (aut.final.intersects_with(S0)) { + result.final.insert(S0id); + } + worklist.emplace_back(S0id, S0); + + (*subset_map)[mata::utils::OrdVector(S0)] = S0id; + + if (aut.delta.empty()) + return result; + + using Iterator = mata::utils::OrdVector::const_iterator; + SynchronizedExistentialSymbolPostIterator synchronized_iterator; + + while (!worklist.empty()) { + const auto Spair = worklist.back(); + worklist.pop_back(); + const StateSet S = Spair.second; + const State Sid = Spair.first; + if (S.empty()) { + // This should not happen assuming all sets targets are non-empty. + break; + } + + // add moves of S to the sync ex iterator + // TODO: shouldn't we also reset first? + for (State q: S) { + mata::utils::push_back(synchronized_iterator, aut.delta[q]); + } + + while (synchronized_iterator.advance()) { + + // extract post from the sychronized_iterator iterator + const std::vector& moves = synchronized_iterator.get_current(); + Symbol currentSymbol = (*moves.begin())->symbol; + StateSet T = synchronized_iterator.unify_targets(); + + const auto existingTitr = subset_map->find(T); + State Tid; + if (existingTitr != subset_map->end()) { + Tid = existingTitr->second; + } else { + Tid = result.add_state(); + (*subset_map)[mata::utils::OrdVector(T)] = Tid; + if (aut.final.intersects_with(T)) { + result.final.insert(Tid); + } + worklist.emplace_back(Tid, T); + } + result.delta.mutable_state_post(Sid).insert(SymbolPost(currentSymbol, Tid)); + } + } + + if (deallocate_subset_map) { delete subset_map; } + + return result; +} + +std::ostream& std::operator<<(std::ostream& os, const Lvlfa& lvlfa) { + lvlfa.print_to_mata(os); + return os; +} + +void mata::lvlfa::Lvlfa::fill_alphabet(OnTheFlyAlphabet& alphabet) const { + for (const StatePost& state_post: this->delta) { + for (const SymbolPost& symbol_post: state_post) { + alphabet.update_next_symbol_value(symbol_post.symbol); + alphabet.try_add_new_symbol(std::to_string(symbol_post.symbol), symbol_post.symbol); + } + } +} + +Run mata::lvlfa::encode_word(const Alphabet* alphabet, const std::vector& input) { + return { .word = alphabet->translate_word(input) }; +} + +std::set mata::lvlfa::Lvlfa::get_words(unsigned max_length) { + std::set result; + + // contains a pair: a state s and the word with which we got to the state s + std::vector> worklist; + // initializing worklist + for (State init_state : initial) { + worklist.push_back({init_state, {}}); + if (final.contains(init_state)) { + result.insert(mata::Word()); + } + } + + // will be used during the loop + std::vector> new_worklist; + + unsigned cur_length = 0; + while (!worklist.empty() && cur_length < max_length) { + new_worklist.clear(); + for (const auto& state_and_word : worklist) { + State s_from = state_and_word.first; + const mata::Word& word = state_and_word.second; + for (const SymbolPost& sp : delta[s_from]) { + mata::Word new_word = word; + new_word.push_back(sp.symbol); + for (State s_to : sp.targets) { + new_worklist.push_back({s_to, new_word}); + if (final.contains(s_to)) { + result.insert(new_word); + } + } + } + } + worklist.swap(new_worklist); + ++cur_length; + } + + return result; +} diff --git a/src/lvlfa/universal.cc b/src/lvlfa/universal.cc new file mode 100644 index 00000000..eee68a00 --- /dev/null 
+++ b/src/lvlfa/universal.cc @@ -0,0 +1,156 @@ +/* lvlfa-universal.cc -- LVLFA universality + */ + +// MATA headers +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/algorithms.hh" +#include "mata/utils/sparse-set.hh" + +using namespace mata::lvlfa; +using namespace mata::utils; + +//TODO: this could be merged with inclusion, or even removed, universality could be implemented using inclusion, +// it is not something needed in practice, so some little overhead is ok + + +/// naive universality check (complementation + emptiness) +bool mata::lvlfa::algorithms::is_universal_naive( + const Lvlfa& aut, + const Alphabet& alphabet, + Run* cex) +{ // {{{ + Lvlfa cmpl = complement(aut, alphabet); + + return cmpl.is_lang_empty(cex); +} // is_universal_naive }}} + + +/// universality check using Antichains +bool mata::lvlfa::algorithms::is_universal_antichains( + const Lvlfa& aut, + const Alphabet& alphabet, + Run* cex) +{ // {{{ + + using WorklistType = std::list; + using ProcessedType = std::list; + + auto subsumes = [](const StateSet& lhs, const StateSet& rhs) { + if (lhs.size() > rhs.size()) { // bigger set cannot be subset + return false; + } + + return std::includes(rhs.begin(), rhs.end(), lhs.begin(), lhs.end()); + }; + + // process parameters + // TODO: set correctly!!!! + bool is_dfs = true; + + // check the initial state + if (are_disjoint(aut.initial, aut.final)) { + if (nullptr != cex) { cex->word.clear(); } + return false; + } + + // initialize + WorklistType worklist = { StateSet(aut.initial) }; + ProcessedType processed = { StateSet(aut.initial) }; + mata::utils::OrdVector alph_symbols = alphabet.get_alphabet_symbols(); + + // 'paths[s] == t' denotes that state 's' was accessed from state 't', + // 'paths[s] == s' means that 's' is an initial state + std::map> paths = + { {StateSet(aut.initial), {StateSet(aut.initial), 0}} }; + + while (!worklist.empty()) { + // get a next state + StateSet state; + if (is_dfs) { + state = *worklist.rbegin(); + worklist.pop_back(); + } else { // BFS + state = *worklist.begin(); + worklist.pop_front(); + } + + // process it + for (Symbol symb : alph_symbols) { + StateSet succ = aut.post(state, symb); + if (!aut.final.intersects_with(succ)) { + if (nullptr != cex) { + cex->word.clear(); + cex->word.push_back(symb); + StateSet trav = state; + while (paths[trav].first != trav) + { // go back until initial state + cex->word.push_back(paths[trav].second); + trav = paths[trav].first; + } + + std::reverse(cex->word.begin(), cex->word.end()); + } + + return false; + } + + bool is_subsumed = false; + for (const auto& anti_state : processed) { + // trying to find a smaller state in processed + if (subsumes(anti_state, succ)) { + is_subsumed = true; + break; + } + } + + if (is_subsumed) { continue; } + + // prune data structures and insert succ inside + for (std::list* ds : {&processed, &worklist}) { + auto it = ds->begin(); + while (it != ds->end()) { + if (subsumes(succ, *it)) { + auto to_remove = it; + ++it; + ds->erase(to_remove); + } else { + ++it; + } + } + + // TODO: set pushing strategy + ds->push_back(succ); + } + + // also set that succ was accessed from state + paths[succ] = {state, symb}; + } + } + + return true; +} // }}} + +// The dispatching method that calls the correct one based on parameters. 
+bool mata::lvlfa::Lvlfa::is_universal(const Alphabet& alphabet, Run* cex, const ParameterMap& params) const { + // setting the default algorithm + decltype(algorithms::is_universal_naive)* algo = algorithms::is_universal_naive; + if (!haskey(params, "algorithm")) { + throw std::runtime_error(std::to_string(__func__) + + " requires setting the \"algo\" key in the \"params\" argument; " + "received: " + std::to_string(params)); + } + + const std::string& str_algo = params.at("algorithm"); + if ("naive" == str_algo) { /* default */ } + else if ("antichains" == str_algo) { + algo = algorithms::is_universal_antichains; + } else { + throw std::runtime_error(std::to_string(__func__) + + " received an unknown value of the \"algo\" key: " + str_algo); + } + return algo(*this, alphabet, cex); +} // is_universal() + +bool mata::lvlfa::Lvlfa::is_universal(const Alphabet& alphabet, const ParameterMap& params) const { + return this->is_universal(alphabet, nullptr, params); +} diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index c7a0a17e..d09407fa 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -7,13 +7,20 @@ add_executable(tests parser.cc re2parser.cc mintermization.cc - nfa/delta.cc - nfa/nfa.cc - nfa/builder.cc - nfa/nfa-concatenation.cc - nfa/nfa-intersection.cc - nfa/nfa-profiling.cc - nfa/nfa-plumbing.cc + # nfa/delta.cc + # nfa/nfa.cc + # nfa/builder.cc + # nfa/nfa-concatenation.cc + # nfa/nfa-intersection.cc + # nfa/nfa-profiling.cc + # nfa/nfa-plumbing.cc + lvlfa/delta.cc + lvlfa/lvlfa.cc + lvlfa/builder.cc + lvlfa/lvlfa-concatenation.cc + lvlfa/lvlfa-intersection.cc + lvlfa/lvlfa-profiling.cc + lvlfa/lvlfa-plumbing.cc strings/nfa-noodlification.cc strings/nfa-segmentation.cc strings/nfa-string-solving.cc diff --git a/tests/lvlfa/builder.cc b/tests/lvlfa/builder.cc new file mode 100644 index 00000000..f81a8e05 --- /dev/null +++ b/tests/lvlfa/builder.cc @@ -0,0 +1,107 @@ +// TODO: some header + +#include +#include +#include + +#include + +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/builder.hh" + +using namespace mata::lvlfa; +using Symbol = mata::Symbol; +using IntAlphabet = mata::IntAlphabet; +using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; + +using Word = std::vector; + +TEST_CASE("parse_from_mata()") { + Delta delta; + + SECTION("Simple automaton") { + delta.add(0, 0, 0); + delta.add(0, 1, 1); + delta.add(1, 2, 0); + Lvlfa lvlfa{ delta, { 0 }, { 1 }, {}, 0}; + + SECTION("from string") { + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; + CHECK(are_equivalent(parsed, lvlfa)); + } + + SECTION("from stream") { + std::stringstream lvlfa_stream; + lvlfa.print_to_mata(lvlfa_stream); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; + CHECK(are_equivalent(parsed, lvlfa)); + } + + SECTION("from file") { + std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-simple_lvlfa.mata" }; + std::fstream file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc}; + lvlfa.print_to_mata(file); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + file.close(); + std::filesystem::remove(lvlfa_file); + + CHECK(are_equivalent(parsed, lvlfa)); + } + + } + + SECTION("larger automaton") { + Lvlfa lvlfa; + lvlfa.initial = { 1, 2, 50 }; + lvlfa.delta.add(1, 'a', 2); + lvlfa.delta.add(1, 'a', 3); + lvlfa.delta.add(1, 'b', 4); + lvlfa.delta.add(2, 'a', 2); + lvlfa.delta.add(2, 'b', 2); + lvlfa.delta.add(2, 'a', 3); + lvlfa.delta.add(2, 'b', 4); + lvlfa.delta.add(3, 'b', 4); + 
lvlfa.delta.add(3, 'c', 7); + lvlfa.delta.add(3, 'b', 2); + lvlfa.delta.add(5, 'c', 3); + lvlfa.delta.add(7, 'a', 8); + lvlfa.delta.add(12, 'b', 15); + lvlfa.delta.add(1, 'b', 40); + lvlfa.delta.add(51, 'z', 42); + lvlfa.final = { 3, 103 }; + // lvlfa.levels = std::vector(lvlfa.num_of_states(), 0); + // lvlfa.max_level = 0; + + SECTION("from string") { + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; + parsed.final.contains(103); + parsed.initial.contains(50); + parsed.delta.contains(51, 'z', 42); + CHECK(are_equivalent(parsed, lvlfa)); + } + + SECTION("from stream") { + std::stringstream lvlfa_stream; + lvlfa.print_to_mata(lvlfa_stream); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; + parsed.final.contains(103); + parsed.initial.contains(50); + parsed.delta.contains(51, 'z', 42); + CHECK(are_equivalent(parsed, lvlfa)); + } + + SECTION("from file") { + std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-larger_lvlfa.mata" }; + std::fstream file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; + lvlfa.print_to_mata(file); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + file.close(); + std::filesystem::remove(lvlfa_file); + + parsed.final.contains(103); + parsed.initial.contains(50); + parsed.delta.contains(51, 'z', 42); + CHECK(are_equivalent(parsed, lvlfa)); + } + } +} diff --git a/tests/lvlfa/delta.cc b/tests/lvlfa/delta.cc new file mode 100644 index 00000000..de8b52a9 --- /dev/null +++ b/tests/lvlfa/delta.cc @@ -0,0 +1,469 @@ +// TODO: some header + +#include "utils.hh" + +#include "mata/alphabet.hh" +#include "mata/lvlfa/types.hh" +#include "mata/lvlfa/delta.hh" +#include "mata/lvlfa/lvlfa.hh" + +#include + +using namespace mata::lvlfa; + +using Symbol = mata::Symbol; + +TEST_CASE("mata::lvlfa::SymbolPost") { + CHECK(SymbolPost{ 0, StateSet{} } == SymbolPost{ 0, StateSet{ 0, 1 } }); + CHECK(SymbolPost{ 1, StateSet{} } != SymbolPost{ 0, StateSet{} }); + CHECK(SymbolPost{ 0, StateSet{ 1 } } < SymbolPost{ 1, StateSet{} }); + CHECK(SymbolPost{ 0, StateSet{ 1 } } <= SymbolPost{ 1, StateSet{} }); + CHECK(SymbolPost{ 0, StateSet{ 1 } } <= SymbolPost{ 0, StateSet{} }); + CHECK(SymbolPost{ 1, StateSet{ 0 } } > SymbolPost{ 0, StateSet{ 1 } }); + CHECK(SymbolPost{ 1, StateSet{ 0 } } >= SymbolPost{ 0, StateSet{ 1 } }); + CHECK(SymbolPost{ 1, StateSet{ 0 } } >= SymbolPost{ 0, StateSet{ 1 } }); +} + +TEST_CASE("mata::lvlfa::Delta::state_post()") { + Lvlfa aut{}; + + SECTION("Add new states within the limit") { + aut.add_state(19); + aut.initial.insert(0); + aut.initial.insert(1); + aut.initial.insert(2); + REQUIRE_NOTHROW(aut.delta.state_post(0)); + REQUIRE_NOTHROW(aut.delta.state_post(1)); + REQUIRE_NOTHROW(aut.delta.state_post(2)); + REQUIRE(aut.delta.state_post(0).empty()); + REQUIRE(aut.delta.state_post(1).empty()); + REQUIRE(aut.delta.state_post(2).empty()); + + CHECK(&aut.delta.state_post(4) == &aut.delta[4]); + } + + SECTION("Add new states over the limit") { + aut.add_state(1); + REQUIRE_NOTHROW(aut.initial.insert(0)); + REQUIRE_NOTHROW(aut.initial.insert(1)); + REQUIRE_NOTHROW(aut.delta.state_post(0)); + REQUIRE_NOTHROW(aut.delta.state_post(1)); + REQUIRE_NOTHROW(aut.delta.state_post(2)); + CHECK(aut.delta.state_post(0).empty()); + CHECK(aut.delta.state_post(1).empty()); + CHECK(aut.delta.state_post(2).empty()); + } + + SECTION("Add new states without specifying the number of states") { + CHECK_NOTHROW(aut.initial.insert(0)); + CHECK_NOTHROW(aut.delta.state_post(2)); + 
CHECK(aut.delta.state_post(0).empty()); + CHECK(aut.delta.state_post(2).empty()); + } + + SECTION("Add new initial without specifying the number of states with over +1 number") { + REQUIRE_NOTHROW(aut.initial.insert(25)); + CHECK_NOTHROW(aut.delta.state_post(25)); + CHECK_NOTHROW(aut.delta.state_post(26)); + CHECK(aut.delta.state_post(25).empty()); + CHECK(aut.delta.state_post(26).empty()); + } + + SECTION("Add multiple targets at once") { + CHECK_NOTHROW(aut.delta.add(0, 1, { 3, 4, 5, 6 })); + CHECK_NOTHROW(aut.delta.add(26, 1, StateSet{})); + CHECK_NOTHROW(aut.delta.add(42, 1, StateSet{ 43 })); + CHECK(aut.delta.num_of_transitions() == 5); + } +} + +TEST_CASE("mata::lvlfa::Delta::contains()") { + Lvlfa lvlfa; + CHECK(!lvlfa.delta.contains(0, 1, 0)); + CHECK(!lvlfa.delta.contains(Transition{ 0, 1, 0 })); + lvlfa.delta.add(0, 1, 0); + CHECK(lvlfa.delta.contains(0, 1, 0)); + CHECK(lvlfa.delta.contains(Transition{ 0, 1, 0 })); +} + +TEST_CASE("mata::lvlfa::Delta::remove()") { + Lvlfa lvlfa; + + SECTION("Simple remove") { + lvlfa.delta.add(0, 1, 0); + CHECK_NOTHROW(lvlfa.delta.remove(3, 5, 6)); + CHECK_NOTHROW(lvlfa.delta.remove(0, 1, 0)); + CHECK(lvlfa.delta.empty()); + lvlfa.delta.add(10, 1, 0); + CHECK_THROWS_AS(lvlfa.delta.remove(3, 5, 6), std::invalid_argument); + } +} + +TEST_CASE("mata::lvlfa::Delta::mutable_post()") { + Lvlfa lvlfa; + + SECTION("Default initialized") { + CHECK(lvlfa.delta.num_of_states() == 0); + CHECK(!lvlfa.delta.uses_state(0)); + CHECK(lvlfa.delta.mutable_state_post(0).empty()); + CHECK(lvlfa.delta.num_of_states() == 1); + CHECK(lvlfa.delta.uses_state(0)); + + CHECK(lvlfa.delta.mutable_state_post(9).empty()); + CHECK(lvlfa.delta.num_of_states() == 10); + CHECK(lvlfa.delta.uses_state(1)); + CHECK(lvlfa.delta.uses_state(2)); + CHECK(lvlfa.delta.uses_state(9)); + CHECK(!lvlfa.delta.uses_state(10)); + + CHECK(lvlfa.delta.mutable_state_post(9).empty()); + CHECK(lvlfa.delta.num_of_states() == 10); + CHECK(lvlfa.delta.uses_state(9)); + CHECK(!lvlfa.delta.uses_state(10)); + } +} + +TEST_CASE("mata::lvlfa::StatePost iteration over moves") { + Lvlfa lvlfa; + std::vector iterated_moves{}; + std::vector expected_moves{}; + StatePost state_post{}; + + SECTION("Simple LVLFA") { + lvlfa.initial.insert(0); + lvlfa.final.insert(3); + lvlfa.delta.add(0, 1, 1); + lvlfa.delta.add(0, 2, 1); + lvlfa.delta.add(0, 5, 1); + lvlfa.delta.add(1, 3, 2); + lvlfa.delta.add(2, 0, 1); + lvlfa.delta.add(2, 0, 3); + + state_post = lvlfa.delta.state_post(0); + expected_moves = std::vector{ { 1, 1 }, { 2, 1 }, { 5, 1 } }; + StatePost::Moves moves{ state_post.moves() }; + iterated_moves.clear(); + for (auto move_it{ moves.begin() }; move_it != moves.end(); ++move_it) { + iterated_moves.push_back(*move_it); + } + CHECK(iterated_moves == expected_moves); + + iterated_moves = { moves.begin(), moves.end() }; + CHECK(iterated_moves == expected_moves); + + iterated_moves.clear(); + for (const Move& move: state_post.moves()) { iterated_moves.push_back(move); } + CHECK(iterated_moves == expected_moves); + + StatePost::Moves epsilon_moves{ state_post.moves_epsilons() }; + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); + + state_post = lvlfa.delta.state_post(1); + moves = state_post.moves(); + StatePost::Moves moves_custom; + moves_custom = moves; + CHECK(std::vector{ moves.begin(), moves.end() } + == std::vector{ moves_custom.begin(), moves_custom.end() }); + moves_custom = state_post.moves(state_post.begin(), state_post.end()); + CHECK(std::vector{ moves.begin(), moves.end() } + == 
std::vector{ moves_custom.begin(), moves_custom.end() }); + iterated_moves.clear(); + for (auto move_it{ moves.begin() }; move_it != moves.end(); ++move_it) { + iterated_moves.push_back(*move_it); + } + expected_moves = std::vector{ { 3, 2 } }; + CHECK(iterated_moves == expected_moves); + iterated_moves = { moves.begin(), moves.end() }; + CHECK(iterated_moves == expected_moves); + iterated_moves.clear(); + for (const Move& move: state_post.moves()) { iterated_moves.push_back(move); } + CHECK(iterated_moves == expected_moves); + epsilon_moves = state_post.moves_epsilons(); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); + + state_post = lvlfa.delta.state_post(2); + moves = state_post.moves(); + iterated_moves.clear(); + for (auto move_it{ moves.begin() }; move_it != moves.end(); ++move_it) { + iterated_moves.push_back(*move_it); + } + expected_moves = std::vector{ { 0, 1 }, { 0, 3 } }; + CHECK(iterated_moves == expected_moves); + iterated_moves = { moves.begin(), moves.end() }; + CHECK(iterated_moves == expected_moves); + iterated_moves.clear(); + for (const Move& move: state_post.moves()) { iterated_moves.push_back(move); } + CHECK(iterated_moves == expected_moves); + epsilon_moves = state_post.moves_epsilons(); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); + + state_post = lvlfa.delta.state_post(3); + moves = state_post.moves(); + iterated_moves.clear(); + for (auto move_it{ moves.begin() }; move_it != moves.end(); ++move_it) { + iterated_moves.push_back(*move_it); + } + CHECK(iterated_moves.empty()); + CHECK(StatePost::Moves::const_iterator{ state_post } == moves.end()); + iterated_moves = { moves.begin(), moves.end() }; + CHECK(iterated_moves.empty()); + iterated_moves.clear(); + for (const Move& move: state_post.moves()) { iterated_moves.push_back(move); } + CHECK(iterated_moves.empty()); + epsilon_moves = state_post.moves_epsilons(); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); + + state_post = lvlfa.delta.state_post(4); + moves = state_post.moves(); + iterated_moves.clear(); + for (auto move_it{ moves.begin() }; move_it != moves.end(); ++move_it) { + iterated_moves.push_back(*move_it); + } + CHECK(iterated_moves.empty()); + iterated_moves = { moves.begin(), moves.end() }; + CHECK(iterated_moves.empty()); + iterated_moves.clear(); + for (const Move& move: state_post.moves()) { iterated_moves.push_back(move); } + CHECK(iterated_moves.empty()); + epsilon_moves = state_post.moves_epsilons(); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); + + lvlfa.delta.add(0, EPSILON, 2); + state_post = lvlfa.delta.state_post(0); + epsilon_moves = state_post.moves_epsilons(); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 2 } }); + lvlfa.delta.add(1, EPSILON, 3); + state_post = lvlfa.delta.state_post(1); + epsilon_moves = state_post.moves_epsilons(); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 3 } }); + lvlfa.delta.add(4, EPSILON, 4); + state_post = lvlfa.delta.state_post(4); + epsilon_moves = state_post.moves_epsilons(); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 4 } }); + + state_post = lvlfa.delta.state_post(0); + epsilon_moves = state_post.moves_epsilons(3); + iterated_moves.clear(); + for (const Move& move: epsilon_moves) { iterated_moves.push_back(move); } + CHECK(iterated_moves == std::vector{ { 5, 1 }, { EPSILON, 2 }}); + state_post = 
lvlfa.delta.state_post(1); + epsilon_moves = state_post.moves_epsilons(3); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { 3, 2 }, { EPSILON, 3 } }); + + state_post = lvlfa.delta.state_post(2); + epsilon_moves = state_post.moves_epsilons(3); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); + state_post = lvlfa.delta.state_post(4); + epsilon_moves = state_post.moves_epsilons(3); + CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 4 } }); + + state_post = lvlfa.delta.state_post(0); + StatePost::Moves symbol_moves = state_post.moves_symbols(3); + iterated_moves.clear(); + for (const Move& move: symbol_moves) { iterated_moves.push_back(move); } + CHECK(iterated_moves == std::vector{ { 1, 1 }, { 2, 1 } }); + symbol_moves = state_post.moves_symbols(0); + iterated_moves.clear(); + for (const Move& move: symbol_moves) { iterated_moves.push_back(move); } + CHECK(iterated_moves.empty()); + + state_post = lvlfa.delta.state_post(1); + symbol_moves = state_post.moves_symbols(3); + CHECK(std::vector{ symbol_moves.begin(), symbol_moves.end() } == std::vector{ { 3, 2 } }); + state_post = lvlfa.delta.state_post(2); + symbol_moves = state_post.moves_symbols(3); + CHECK(std::vector{ symbol_moves.begin(), symbol_moves.end() } == std::vector{ { 0, 1 }, { 0 , 3 } }); + state_post = lvlfa.delta.state_post(4); + symbol_moves = state_post.moves_symbols(3); + CHECK(std::vector{ symbol_moves.begin(), symbol_moves.end() }.empty()); + + // Create custom moves iterator. + state_post = lvlfa.delta[0]; + moves = { state_post, state_post.cbegin(), state_post.cbegin() + 2 }; + iterated_moves = { moves.begin(), moves.end() }; + CHECK(iterated_moves == std::vector{ { 1, 1 }, { 2, 1 } }); + + state_post = lvlfa.delta[20]; + moves = { state_post, state_post.cbegin(), state_post.cend() }; + iterated_moves = { moves.begin(), moves.end() }; + CHECK(iterated_moves.empty()); + } +} + +TEST_CASE("mata::lvlfa::Delta iteration over transitions") { + Lvlfa lvlfa; + std::vector iterated_transitions{}; + std::vector expected_transitions{}; + + SECTION("empty automaton") { + Delta::Transitions transitions{ lvlfa.delta.transitions() }; + CHECK(transitions.begin() == transitions.end()); + Delta::Transitions::const_iterator transition_it{ lvlfa.delta }; + CHECK(transition_it == transitions.end()); + transition_it = { lvlfa.delta, 0 }; + CHECK(transition_it == transitions.end()); + } + + SECTION("Simple LVLFA") { + lvlfa.initial.insert(0); + lvlfa.final.insert(3); + lvlfa.delta.add(0, 1, 1); + lvlfa.delta.add(0, 2, 1); + lvlfa.delta.add(0, 5, 1); + lvlfa.delta.add(1, 3, 2); + lvlfa.delta.add(2, 0, 1); + lvlfa.delta.add(2, 0, 3); + + Delta::Transitions transitions{ lvlfa.delta.transitions() }; + iterated_transitions.clear(); + for (auto transitions_it{ transitions.begin() }; + transitions_it != transitions.end(); ++transitions_it) { + iterated_transitions.push_back(*transitions_it); + } + expected_transitions = std::vector{ + { 0, 1, 1 }, { 0, 2, 1 }, { 0, 5, 1 }, { 1, 3, 2 }, { 2, 0, 1 }, { 2, 0, 3 } + }; + CHECK(iterated_transitions == expected_transitions); + + iterated_transitions = { transitions.begin(), transitions.end() }; + CHECK(iterated_transitions == expected_transitions); + + iterated_transitions.clear(); + for (const Transition& transition: lvlfa.delta.transitions()) { iterated_transitions.push_back(transition); } + CHECK(iterated_transitions == expected_transitions); + + Delta::Transitions::const_iterator transitions_it{ 
lvlfa.delta.transitions().begin() }; + CHECK(*transitions_it == Transition{ 0, 1, 1 }); + transitions_it++; + CHECK(*transitions_it == Transition{ 0, 2, 1 }); + transitions_it++; + transitions_it++; + CHECK(*transitions_it == Transition{ 1, 3, 2 }); + + Delta::Transitions::const_iterator transitions_from_1_to_end_it{ lvlfa.delta, 1 }; + iterated_transitions.clear(); + while (transitions_from_1_to_end_it != lvlfa.delta.transitions().end()) { + iterated_transitions.push_back(*transitions_from_1_to_end_it); + transitions_from_1_to_end_it++; + } + expected_transitions = std::vector{ { 1, 3, 2 }, { 2, 0, 1 }, { 2, 0, 3 } }; + CHECK(iterated_transitions == expected_transitions); + } + + SECTION("Sparse automaton") { + const size_t state_num = 'r'+1; + lvlfa.delta.reserve(state_num); + + lvlfa.delta.add('q', 'a', 'r'); + lvlfa.delta.add('q', 'b', 'r'); + const Delta::Transitions transitions{ lvlfa.delta.transitions() }; + Delta::Transitions::const_iterator it{ transitions.begin() }; + Delta::Transitions::const_iterator jt{ transitions.begin() }; + CHECK(it == jt); + ++it; + CHECK(it != jt); + CHECK((it != transitions.begin() && it != transitions.end())); + CHECK(jt == transitions.begin()); + + ++jt; + CHECK(it == jt); + CHECK((jt != transitions.begin() && jt != transitions.end())); + + jt = transitions.end(); + CHECK(it != jt); + CHECK((jt != transitions.begin() && jt == transitions.end())); + + it = transitions.end(); + CHECK(it == jt); + CHECK((it != transitions.begin() && it == transitions.end())); + } +} + +TEST_CASE("mata::lvlfa::Delta::operator=()") { + Lvlfa lvlfa{}; + lvlfa.initial.insert(0); + lvlfa.final.insert(1); + lvlfa.delta.add(0, 'a', 1); + + Lvlfa copied_lvlfa{ lvlfa }; + lvlfa.delta.add(1, 'b', 0); + CHECK(lvlfa.delta.num_of_transitions() == 2); + CHECK(copied_lvlfa.delta.num_of_transitions() == 1); +} + +TEST_CASE("mata::lvlfa::StatePost::Moves") { + Lvlfa lvlfa{}; + lvlfa.initial.insert(0); + lvlfa.final.insert(5); + lvlfa.delta.add(0, 'a', 1); + lvlfa.delta.add(1, 'b', 2); + lvlfa.delta.add(1, 'c', 2); + lvlfa.delta.add(1, 'd', 2); + lvlfa.delta.add(2, 'e', 3); + lvlfa.delta.add(3, 'e', 4); + lvlfa.delta.add(4, 'f', 5); + // TODO: rewrite in a check of moves. 
+ StatePost::Moves moves_from_source{ lvlfa.delta[0].moves() }; + + CHECK(std::vector{ moves_from_source.begin(), moves_from_source.end() } == std::vector{ { 'a', 1 }}); + moves_from_source = lvlfa.delta[1].moves(); + CHECK(std::vector{ moves_from_source.begin(), moves_from_source.end() } == + std::vector{ { 'b', 2 }, { 'c', 2 }, { 'd', 2 } }); + StatePost::Moves::const_iterator move_incremented_it{ moves_from_source.begin() }; + move_incremented_it++; + CHECK(*move_incremented_it == Move{ 'c', 2 }); + CHECK(*StatePost::Moves::const_iterator{ lvlfa.delta.state_post(1) } == Move{ 'b', 2 }); + CHECK(move_incremented_it != moves_from_source.begin()); + CHECK(move_incremented_it == ++moves_from_source.begin()); + StatePost::Moves moves_from_source_copy_constructed{ lvlfa.delta[12].moves() }; + CHECK( + std::vector{ moves_from_source_copy_constructed.begin(), moves_from_source_copy_constructed.end() } + .empty() + ); + +} + +TEST_CASE("mata::lvlfa::Delta::operator==()") { + Delta delta{}; + Delta delta2{}; + CHECK(delta == delta2); + delta.add(0, 0, 0); + CHECK(delta != delta2); + delta2.add(0, 0, 0); + CHECK(delta == delta2); + delta.add(0, 0, 1); + delta2.add(0, 0, 2); + CHECK(delta != delta2); + delta2.add(0, 0, 1); + CHECK(delta != delta2); + delta.add(0, 0, 2); + CHECK(delta == delta2); + delta2.add(0, 0, 3); + CHECK(delta != delta2); + delta.add(0, 0, 3); + CHECK(delta == delta2); +} + +TEST_CASE("mata::lvlfa::Delta::add_symbols_to()") { + mata::OnTheFlyAlphabet empty_alphabet{}; + mata::OnTheFlyAlphabet alphabet{}; + Delta delta{}; + delta.add_symbols_to(alphabet); + CHECK(alphabet.get_symbol_map().empty()); + delta.add(0, 0, 0); + delta.add_symbols_to(alphabet); + CHECK(alphabet.get_symbol_map().size() == 1); + delta.add(0, 0, 0); + delta.add_symbols_to(alphabet); + CHECK(alphabet.get_symbol_map().size() == 1); + delta.add(0, 1, 0); + delta.add_symbols_to(alphabet); + CHECK(alphabet.get_symbol_map().size() == 2); + delta.add(0, 2, 0); + delta.add(0, 3, 0); + delta.add_symbols_to(alphabet); + CHECK(alphabet.get_symbol_map().size() == 4); + CHECK(alphabet.get_symbol_map() == std::unordered_map{ + { "0", 0 }, { "1", 1 }, { "2", 2 }, { "3", 3 } + }); +} diff --git a/tests/lvlfa/lvlfa-concatenation.cc b/tests/lvlfa/lvlfa-concatenation.cc new file mode 100644 index 00000000..53c37a24 --- /dev/null +++ b/tests/lvlfa/lvlfa-concatenation.cc @@ -0,0 +1,1082 @@ +/* tests-lvlfa-concatenation.cc -- Tests for concatenation of NFAs + */ + + +#include + +#include + +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/strings.hh" +#include "mata/parser/re2parser.hh" + +using namespace mata::lvlfa; +using namespace mata::strings; +using namespace mata::utils; +using namespace mata::parser; + +using Symbol = mata::Symbol; + +// Some common automata {{{ + +// Automaton A +#define FILL_WITH_AUT_A(x) \ + x.initial = {1, 3}; \ + x.final = {5}; \ + x.delta.add(1, 'a', 3); \ + x.delta.add(1, 'a', 10); \ + x.delta.add(1, 'b', 7); \ + x.delta.add(3, 'a', 7); \ + x.delta.add(3, 'b', 9); \ + x.delta.add(9, 'a', 9); \ + x.delta.add(7, 'b', 1); \ + x.delta.add(7, 'a', 3); \ + x.delta.add(7, 'c', 3); \ + x.delta.add(10, 'a', 7); \ + x.delta.add(10, 'b', 7); \ + x.delta.add(10, 'c', 7); \ + x.delta.add(7, 'a', 5); \ + x.delta.add(5, 'a', 5); \ + x.delta.add(5, 'c', 9); \ + + +// Automaton B +#define FILL_WITH_AUT_B(x) \ + x.initial = {4}; \ + x.final = {2, 12}; \ + x.delta.add(4, 'c', 8); \ + x.delta.add(4, 'a', 8); \ + x.delta.add(8, 'b', 4); \ + x.delta.add(4, 'a', 6); \ + x.delta.add(4, 'b', 6); \ + x.delta.add(6, 
'a', 2); \ + x.delta.add(2, 'b', 2); \ + x.delta.add(2, 'a', 0); \ + x.delta.add(0, 'a', 2); \ + x.delta.add(2, 'c', 12); \ + x.delta.add(12, 'a', 14); \ + x.delta.add(14, 'b', 12); \ + +// }}} + +TEST_CASE("mata::lvlfa::concatenate()") { + Lvlfa lhs{}; + Lvlfa rhs{}; + Lvlfa result{}; + + SECTION("Empty automaton without states") { + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("One empty automaton without states") { + rhs.add_state(); + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("Other empty automaton without states") { + lhs.add_state(); + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("One empty automaton without states with other with initial states") { + lhs.add_state(); + lhs.initial.insert(0); + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("Other empty automaton without states with other with initial states") { + rhs.add_state(); + rhs.initial.insert(0); + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("One empty automaton without states with other non-empty automaton") { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("Other empty automaton without states with other non-empty automaton") { + rhs.add_state(); + rhs.initial.insert(0); + rhs.final.insert(0); + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("Empty automaton") { + lhs.add_state(); + rhs.add_state(); + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("Empty language") { + lhs.add_state(); + lhs.initial.insert(0); + rhs.add_state(); + rhs.initial.insert(0); + + result = concatenate(lhs, rhs); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + } + + SECTION("Empty language rhs automaton") { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(); + rhs.initial.insert(0); + + result = concatenate(lhs, rhs); + CHECK(result.is_lang_empty()); + } + + SECTION("Single state automata accepting an empty string") { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(); + rhs.initial.insert(0); + rhs.final.insert(0); + + result = concatenate(lhs, rhs); + + CHECK(!result.is_lang_empty()); + 
CHECK(result.is_in_lang(Run{ {}, {} })); + CHECK(result.delta.empty()); + } + + SECTION("Empty language rhs automaton") { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(1); + rhs.initial.insert(0); + rhs.final.insert(1); + + result = concatenate(lhs, rhs); + + CHECK(!result.initial.empty()); + CHECK(!result.final.empty()); + CHECK(result.delta.empty()); + } + + SECTION("Simple two state rhs automaton") { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(1); + rhs.initial.insert(0); + rhs.final.insert(1); + rhs.delta.add(0, 'a', 1); + + result = concatenate(lhs, rhs); + + CHECK(!result.initial.empty()); + CHECK(!result.final.empty()); + } + + SECTION("Simple two state automata") { + lhs.add_state(1); + lhs.initial.insert(0); + lhs.final.insert(1); + lhs.delta.add(0, 'b', 1); + rhs.add_state(1); + rhs.initial.insert(0); + rhs.final.insert(1); + rhs.delta.add(0, 'a', 1); + + result = concatenate(lhs, rhs); + + CHECK(!result.initial.empty()); + CHECK(!result.final.empty()); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 1); + CHECK(shortest_words.find(std::vector{ 'b', 'a' }) != shortest_words.end()); + } + + SECTION("Simple two state automata with higher state num for non-final state") { + lhs.add_state(1); + lhs.initial.insert(0); + lhs.final.insert(1); + lhs.delta.add(0, 'b', 1); + rhs.add_state(3); + rhs.initial.insert(0); + rhs.final.insert(1); + rhs.delta.add(0, 'a', 1); + rhs.delta.add(0, 'c', 3); + + result = concatenate(lhs, rhs); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 1); + CHECK(shortest_words.find(std::vector{ 'b', 'a' }) != shortest_words.end()); + } + + SECTION("Simple two state lhs automaton") { + lhs.add_state(1); + lhs.initial.insert(0); + lhs.final.insert(1); + lhs.delta.add(0, 'b', 1); + rhs.add_state(); + rhs.initial.insert(0); + rhs.final.insert(0); + rhs.delta.add(0, 'a', 0); + + result = concatenate(lhs, rhs); + CHECK(result.is_in_lang(Run{ { 'b' }, {} })); + CHECK(result.is_in_lang(Run{ { 'b', 'a' }, {} })); + CHECK(result.is_in_lang(Run{ { 'b', 'a', 'a' }, {} })); + CHECK(!result.is_in_lang(Run{ { 'a' }, {} })); + CHECK(!result.is_in_lang(Run{ { 'a', 'b' }, {} })); + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 1); + CHECK(shortest_words.find(std::vector{ 'b' }) != shortest_words.end()); + } + + SECTION("Automaton A concatenate automaton B") { + lhs.add_state(10); + FILL_WITH_AUT_A(lhs); + rhs.add_state(14); + FILL_WITH_AUT_B(rhs); + + result = concatenate(lhs, rhs); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 4); + CHECK(shortest_words.find(std::vector{ 'b', 'a', 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'b', 'a', 'b', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', 'b', 'a' }) != shortest_words.end()); + } + + SECTION("Automaton B concatenate automaton A") { + lhs.add_state(10); + FILL_WITH_AUT_A(lhs); + rhs.add_state(14); + FILL_WITH_AUT_B(rhs); + + result = concatenate(rhs, lhs); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 4); + CHECK(shortest_words.find(std::vector{ 'b', 'a', 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'b', 'a', 'b', 'a' }) != shortest_words.end()); + 
CHECK(shortest_words.find(std::vector{ 'a', 'a', 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', 'b', 'a' }) != shortest_words.end()); + } + + SECTION("Sample automata") { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + lhs.delta.add(0, 58, 0); + lhs.delta.add(0, 65, 0); + lhs.delta.add(0, 102, 0); + lhs.delta.add(0, 112, 0); + lhs.delta.add(0, 115, 0); + lhs.delta.add(0, 116, 0); + + rhs.add_state(5); + rhs.final.insert({0, 5}); + rhs.initial.insert(5); + rhs.delta.add(1, 112, 0); + rhs.delta.add(2, 116, 1); + rhs.delta.add(3, 102, 2); + rhs.delta.add(4, 115, 3); + rhs.delta.add(5, 102, 2); + rhs.delta.add(5, 112, 0); + rhs.delta.add(5, 115, 3); + rhs.delta.add(5, 116, 1); + + result = concatenate(lhs, rhs); + CHECK(!result.is_lang_empty()); + // TODO: Add more checks. + } +} + +TEST_CASE("mata::lvlfa::concatenate() over epsilon symbol") { + Lvlfa lhs{}; + Lvlfa rhs{}; + Lvlfa result{}; + + SECTION("Empty automaton") { + lhs.add_state(); + rhs.add_state(); + result = concatenate(lhs, rhs, true); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("Empty language") { + lhs.add_state(); + lhs.initial.insert(0); + rhs.add_state(); + rhs.initial.insert(0); + + result = concatenate(lhs, rhs, true); + + CHECK(result.num_of_states() == 0); + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + } + + SECTION("Empty language rhs automaton") + { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(); + rhs.initial.insert(0); + + result = concatenate(lhs, rhs, true); + CHECK(result.is_lang_empty()); + } + + SECTION("Single state automata accepting an empty string") + { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(); + rhs.initial.insert(0); + rhs.final.insert(0); + + result = concatenate(lhs, rhs, true); + + CHECK(result.initial[0]); + CHECK(result.final[1]); + CHECK(result.num_of_states() == 2); + CHECK(result.delta.num_of_transitions() == 1); + CHECK(result.delta.contains(0, EPSILON, 1)); + } + + SECTION("Empty language rhs automaton") + { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(1); + rhs.initial.insert(0); + rhs.final.insert(1); + + result = concatenate(lhs, rhs, true); + + CHECK(result.initial[0]); + CHECK(result.final[2]); + CHECK(result.num_of_states() == 3); + CHECK(result.delta.num_of_transitions() == 1); + CHECK(result.delta.contains(0, EPSILON, 1)); + } + + SECTION("Simple two state rhs automaton") + { + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + rhs.add_state(1); + rhs.initial.insert(0); + rhs.final.insert(1); + rhs.delta.add(0, 'a', 1); + + result = concatenate(lhs, rhs, true); + + CHECK(result.initial[0]); + CHECK(result.final[2]); + CHECK(result.num_of_states() == 3); + CHECK(result.delta.num_of_transitions() == 2); + CHECK(result.delta.contains(1, 'a', 2)); + CHECK(result.delta.contains(0, EPSILON, 1)); + } + + SECTION("Simple two state automata") + { + lhs.add_state(1); + lhs.initial.insert(0); + lhs.final.insert(1); + lhs.delta.add(0, 'b', 1); + rhs.add_state(1); + rhs.initial.insert(0); + rhs.final.insert(1); + rhs.delta.add(0, 'a', 1); + + result = concatenate(lhs, rhs, true); + + CHECK(result.initial[0]); + CHECK(result.final[3]); + CHECK(result.num_of_states() == 4); + CHECK(result.delta.num_of_transitions() == 3); 
+ CHECK(result.delta.contains(0, 'b', 1)); + CHECK(result.delta.contains(2, 'a', 3)); + CHECK(result.delta.contains(1, EPSILON, 2)); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 1); + CHECK(shortest_words.find(std::vector{ 'b', EPSILON, 'a' }) != shortest_words.end()); + } + + SECTION("Simple two state automata with higher state num for non-final state") + { + lhs.add_state(1); + lhs.initial.insert(0); + lhs.final.insert(1); + lhs.delta.add(0, 'b', 1); + rhs.add_state(3); + rhs.initial.insert(0); + rhs.final.insert(1); + rhs.delta.add(0, 'a', 1); + rhs.delta.add(0, 'c', 3); + + result = concatenate(lhs, rhs, true); + + CHECK(result.initial[0]); + CHECK(result.final[3]); + CHECK(result.num_of_states() == 6); + CHECK(result.delta.num_of_transitions() == 4); + CHECK(result.delta.contains(0, 'b', 1)); + CHECK(result.delta.contains(2, 'a', 3)); + CHECK(result.delta.contains(2, 'c', 5)); + CHECK(result.delta.contains(1, EPSILON, 2)); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 1); + CHECK(shortest_words.find(std::vector{ 'b', EPSILON, 'a' }) != shortest_words.end()); + } + + SECTION("Simple two state lhs automaton") + { + lhs.add_state(1); + lhs.initial.insert(0); + lhs.final.insert(1); + lhs.delta.add(0, 'b', 1); + rhs.add_state(); + rhs.initial.insert(0); + rhs.final.insert(0); + rhs.delta.add(0, 'a', 0); + + StateRenaming lhs_renaming{}; + StateRenaming rhs_renaming{}; + result = concatenate(lhs, rhs, true, &lhs_renaming, &rhs_renaming); + + CHECK(rhs_renaming == StateRenaming{{ 0, 2 } }); + + CHECK(result.initial[0]); + CHECK(result.final[2]); + CHECK(result.num_of_states() == 3); + CHECK(result.delta.num_of_transitions() == 3); + CHECK(result.delta.contains(0, 'b', 1)); + CHECK(result.delta.contains(2, 'a', 2)); + CHECK(result.delta.contains(1, EPSILON, 2)); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 1); + } + + SECTION("Automaton A concatenate automaton B") + { + lhs.add_state(10); + FILL_WITH_AUT_A(lhs); + rhs.add_state(14); + FILL_WITH_AUT_B(rhs); + + result = concatenate(lhs, rhs, true); + + CHECK(result.initial.size() == 2); + CHECK(result.initial[1]); + CHECK(result.initial[3]); + + CHECK(result.num_of_states() == 26); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 4); + CHECK(shortest_words.find(std::vector{ 'b', 'a', EPSILON, 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'b', 'a', EPSILON, 'b', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', EPSILON, 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', EPSILON, 'b', 'a' }) != shortest_words.end()); + } + + SECTION("Automaton B concatenate automaton A") + { + lhs.add_state(10); + FILL_WITH_AUT_A(lhs); + rhs.add_state(14); + FILL_WITH_AUT_B(rhs); + + result = concatenate(rhs, lhs, true); + + CHECK(result.num_of_states() == 26); + + CHECK(result.initial.size() == 1); + CHECK(result.initial[4]); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 4); + CHECK(shortest_words.find(std::vector{ 'b', 'a', EPSILON, 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'b', 'a', EPSILON, 'b', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', EPSILON, 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', EPSILON, 'b', 'a' }) != 
shortest_words.end()); + } +} + +TEST_CASE("(a|b)*") { + Lvlfa aut1; + mata::parser::create_nfa(&aut1, "a*"); + Lvlfa aut2; + mata::parser::create_nfa(&aut2, "b*"); + Lvlfa aut3; + mata::parser::create_nfa(&aut3, "a*b*"); + auto concatenated_aut{ concatenate(aut1, aut2) }; + CHECK(are_equivalent(concatenated_aut, aut3)); +} + +TEST_CASE("Bug with epsilon transitions") { + Lvlfa lvlfa1{}; + lvlfa1.initial.insert(0); + lvlfa1.final.insert(3); + lvlfa1.delta.add(0, 97, 0); + lvlfa1.delta.add(0, 98, 0); + lvlfa1.delta.add(0, 99, 0); + lvlfa1.delta.add(0, 100, 0); + lvlfa1.delta.add(0, EPSILON, 1); + lvlfa1.delta.add(1, 97, 2); + lvlfa1.delta.add(2, 98, 3); + + Lvlfa lvlfa2{}; + lvlfa2.initial.insert(0); + lvlfa2.final.insert(0); + lvlfa2.delta.add(0, 97, 0); + lvlfa2.delta.add(0, 98, 0); + lvlfa2.delta.add(0, 99, 0); + lvlfa2.delta.add(0, 100, 0); + + auto result{ concatenate(lvlfa1, lvlfa2, true) }; + + Lvlfa expected{ lvlfa1 }; + expected.delta.add(3, EPSILON, 4); + expected.delta.add(4, 97, 4); + expected.delta.add(4, 98, 4); + expected.delta.add(4, 99, 4); + expected.delta.add(4, 100, 4); + expected.final = { 4 }; + + CHECK(are_equivalent(result, expected)); +} + +TEST_CASE("mata::lvlfa::concatenate() inplace") { + + + SECTION("Empty automaton without states") { + Lvlfa lhs{}; + Lvlfa rhs{}; + Lvlfa result{}; + result = lhs.concatenate(rhs); + + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("One empty automaton without states") { + Lvlfa lhs{}; + Lvlfa rhs{}; + Lvlfa result{}; + rhs.add_state(); + result = lhs.concatenate(rhs); + + CHECK(result.initial.empty()); + CHECK(result.final.empty()); + CHECK(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("Automaton A concatenate automaton B") { + Lvlfa lhs{}; + Lvlfa rhs{}; + Lvlfa result{}; + lhs.add_state(10); + FILL_WITH_AUT_A(lhs); + rhs.add_state(14); + FILL_WITH_AUT_B(rhs); + + result = lhs.concatenate(rhs); + + auto shortest_words{ get_shortest_words(result) }; + CHECK(shortest_words.size() == 4); + CHECK(shortest_words.find(std::vector{ 'b', 'a', 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'b', 'a', 'b', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', 'a', 'a' }) != shortest_words.end()); + CHECK(shortest_words.find(std::vector{ 'a', 'a', 'b', 'a' }) != shortest_words.end()); + } + + SECTION("Sample automata") { + Lvlfa lhs{}; + Lvlfa rhs{}; + Lvlfa result{}; + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + lhs.delta.add(0, 58, 0); + lhs.delta.add(0, 65, 0); + lhs.delta.add(0, 102, 0); + lhs.delta.add(0, 112, 0); + lhs.delta.add(0, 115, 0); + lhs.delta.add(0, 116, 0); + + rhs.add_state(5); + rhs.final.insert({0, 5}); + rhs.initial.insert(5); + rhs.delta.add(1, 112, 0); + rhs.delta.add(2, 116, 1); + rhs.delta.add(3, 102, 2); + rhs.delta.add(4, 115, 3); + rhs.delta.add(5, 102, 2); + rhs.delta.add(5, 112, 0); + rhs.delta.add(5, 115, 3); + rhs.delta.add(5, 116, 1); + + result = lhs.concatenate(rhs); + CHECK(!result.is_lang_empty()); + // TODO: Add more checks. 
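+        // A few extra membership checks sketching the TODO above. They rely only on
+        // is_in_lang() as used earlier and on the observation that rhs accepts the
+        // empty word, so the concatenation accepts any word over lhs's own symbols.
+        CHECK(result.is_in_lang(Run{ {}, {} }));
+        CHECK(result.is_in_lang(Run{ { 112 }, {} }));
+        CHECK(result.is_in_lang(Run{ { 58, 65, 115, 102, 116, 112 }, {} }));
+        CHECK(!result.is_in_lang(Run{ { 113 }, {} }));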
+ } + + SECTION("Delta smaller than states") { + Lvlfa lhs{}; + Lvlfa rhs{}; + + lhs.delta.add(0, 65, 5); + lhs.initial.insert(0); + lhs.initial.insert(7); + lhs.final.insert(5); + lhs.final.insert(7); + + rhs.delta.add(0, 65, 7); + rhs.initial.insert(0); + rhs.final.insert(7); + + Lvlfa result = lhs.concatenate(rhs); + CHECK(!result.is_lang_empty()); + } + + SECTION("the same automata") { + Lvlfa lhs{}; + + lhs.add_state(); + lhs.initial.insert(0); + lhs.final.insert(0); + lhs.delta.add(0, 58, 0); + lhs.delta.add(0, 65, 0); + lhs.delta.add(0, 102, 0); + lhs.delta.add(0, 112, 0); + lhs.delta.add(0, 115, 0); + lhs.delta.add(0, 116, 0); + + size_t lhs_size = lhs.num_of_states(); + Lvlfa result = lhs.concatenate(lhs); + CHECK(result.num_of_states() == lhs_size * 2); + } + +} + +TEST_CASE("Concat_inplace performance", "[.profiling]") { + Lvlfa base; + base.initial.insert(0); + base.final.insert(4); + base.delta.add(0, 45, 1); + base.delta.add(0, 46, 1); + base.delta.add(0, 48, 1); + base.delta.add(0, 49, 1); + base.delta.add(0, 50, 1); + base.delta.add(0, 51, 1); + base.delta.add(0, 52, 1); + base.delta.add(0, 53, 1); + base.delta.add(0, 54, 1); + base.delta.add(0, 55, 1); + base.delta.add(0, 56, 1); + base.delta.add(0, 57, 1); + base.delta.add(0, 65, 1); + base.delta.add(0, 66, 1); + base.delta.add(0, 67, 1); + base.delta.add(0, 68, 1); + base.delta.add(0, 69, 1); + base.delta.add(0, 70, 1); + base.delta.add(0, 71, 1); + base.delta.add(0, 72, 1); + base.delta.add(0, 73, 1); + base.delta.add(0, 74, 1); + base.delta.add(0, 75, 1); + base.delta.add(0, 76, 1); + base.delta.add(0, 77, 1); + base.delta.add(0, 78, 1); + base.delta.add(0, 79, 1); + base.delta.add(0, 80, 1); + base.delta.add(0, 81, 1); + base.delta.add(0, 82, 1); + base.delta.add(0, 83, 1); + base.delta.add(0, 84, 1); + base.delta.add(0, 85, 1); + base.delta.add(0, 86, 1); + base.delta.add(0, 87, 1); + base.delta.add(0, 88, 1); + base.delta.add(0, 89, 1); + base.delta.add(0, 90, 1); + base.delta.add(0, 95, 1); + base.delta.add(0, 97, 1); + base.delta.add(0, 98, 1); + base.delta.add(0, 99, 1); + base.delta.add(0, 100, 1); + base.delta.add(0, 101, 1); + base.delta.add(0, 102, 1); + base.delta.add(0, 103, 1); + base.delta.add(0, 104, 1); + base.delta.add(0, 105, 1); + base.delta.add(0, 106, 1); + base.delta.add(0, 107, 1); + base.delta.add(0, 108, 1); + base.delta.add(0, 109, 1); + base.delta.add(0, 110, 1); + base.delta.add(0, 111, 1); + base.delta.add(0, 112, 1); + base.delta.add(0, 113, 1); + base.delta.add(0, 114, 1); + base.delta.add(0, 115, 1); + base.delta.add(0, 116, 1); + base.delta.add(0, 117, 1); + base.delta.add(0, 118, 1); + base.delta.add(0, 119, 1); + base.delta.add(0, 120, 1); + base.delta.add(0, 121, 1); + base.delta.add(0, 122, 1); + base.delta.add(0, 124, 1); + base.delta.add(1, 45, 2); + base.delta.add(1, 46, 2); + base.delta.add(1, 48, 2); + base.delta.add(1, 49, 2); + base.delta.add(1, 50, 2); + base.delta.add(1, 51, 2); + base.delta.add(1, 52, 2); + base.delta.add(1, 53, 2); + base.delta.add(1, 54, 2); + base.delta.add(1, 55, 2); + base.delta.add(1, 56, 2); + base.delta.add(1, 57, 2); + base.delta.add(1, 65, 2); + base.delta.add(1, 66, 2); + base.delta.add(1, 67, 2); + base.delta.add(1, 68, 2); + base.delta.add(1, 69, 2); + base.delta.add(1, 70, 2); + base.delta.add(1, 71, 2); + base.delta.add(1, 72, 2); + base.delta.add(1, 73, 2); + base.delta.add(1, 74, 2); + base.delta.add(1, 75, 2); + base.delta.add(1, 76, 2); + base.delta.add(1, 77, 2); + base.delta.add(1, 78, 2); + base.delta.add(1, 79, 2); + 
base.delta.add(1, 80, 2); + base.delta.add(1, 81, 2); + base.delta.add(1, 82, 2); + base.delta.add(1, 83, 2); + base.delta.add(1, 84, 2); + base.delta.add(1, 85, 2); + base.delta.add(1, 86, 2); + base.delta.add(1, 87, 2); + base.delta.add(1, 88, 2); + base.delta.add(1, 89, 2); + base.delta.add(1, 90, 2); + base.delta.add(1, 95, 2); + base.delta.add(1, 97, 2); + base.delta.add(1, 98, 2); + base.delta.add(1, 99, 2); + base.delta.add(1, 100, 2); + base.delta.add(1, 101, 2); + base.delta.add(1, 102, 2); + base.delta.add(1, 103, 2); + base.delta.add(1, 104, 2); + base.delta.add(1, 105, 2); + base.delta.add(1, 106, 2); + base.delta.add(1, 107, 2); + base.delta.add(1, 108, 2); + base.delta.add(1, 109, 2); + base.delta.add(1, 110, 2); + base.delta.add(1, 111, 2); + base.delta.add(1, 112, 2); + base.delta.add(1, 113, 2); + base.delta.add(1, 114, 2); + base.delta.add(1, 115, 2); + base.delta.add(1, 116, 2); + base.delta.add(1, 117, 2); + base.delta.add(1, 118, 2); + base.delta.add(1, 119, 2); + base.delta.add(1, 120, 2); + base.delta.add(1, 121, 2); + base.delta.add(1, 122, 2); + base.delta.add(1, 124, 2); + base.delta.add(2, 45, 3); + base.delta.add(2, 46, 3); + base.delta.add(2, 48, 3); + base.delta.add(2, 49, 3); + base.delta.add(2, 50, 3); + base.delta.add(2, 51, 3); + base.delta.add(2, 52, 3); + base.delta.add(2, 53, 3); + base.delta.add(2, 54, 3); + base.delta.add(2, 55, 3); + base.delta.add(2, 56, 3); + base.delta.add(2, 57, 3); + base.delta.add(2, 65, 3); + base.delta.add(2, 66, 3); + base.delta.add(2, 67, 3); + base.delta.add(2, 68, 3); + base.delta.add(2, 69, 3); + base.delta.add(2, 70, 3); + base.delta.add(2, 71, 3); + base.delta.add(2, 72, 3); + base.delta.add(2, 73, 3); + base.delta.add(2, 74, 3); + base.delta.add(2, 75, 3); + base.delta.add(2, 76, 3); + base.delta.add(2, 77, 3); + base.delta.add(2, 78, 3); + base.delta.add(2, 79, 3); + base.delta.add(2, 80, 3); + base.delta.add(2, 81, 3); + base.delta.add(2, 82, 3); + base.delta.add(2, 83, 3); + base.delta.add(2, 84, 3); + base.delta.add(2, 85, 3); + base.delta.add(2, 86, 3); + base.delta.add(2, 87, 3); + base.delta.add(2, 88, 3); + base.delta.add(2, 89, 3); + base.delta.add(2, 90, 3); + base.delta.add(2, 95, 3); + base.delta.add(2, 97, 3); + base.delta.add(2, 98, 3); + base.delta.add(2, 99, 3); + base.delta.add(2, 100, 3); + base.delta.add(2, 101, 3); + base.delta.add(2, 102, 3); + base.delta.add(2, 103, 3); + base.delta.add(2, 104, 3); + base.delta.add(2, 105, 3); + base.delta.add(2, 106, 3); + base.delta.add(2, 107, 3); + base.delta.add(2, 108, 3); + base.delta.add(2, 109, 3); + base.delta.add(2, 110, 3); + base.delta.add(2, 111, 3); + base.delta.add(2, 112, 3); + base.delta.add(2, 113, 3); + base.delta.add(2, 114, 3); + base.delta.add(2, 115, 3); + base.delta.add(2, 116, 3); + base.delta.add(2, 117, 3); + base.delta.add(2, 118, 3); + base.delta.add(2, 119, 3); + base.delta.add(2, 120, 3); + base.delta.add(2, 121, 3); + base.delta.add(2, 122, 3); + base.delta.add(2, 124, 3); + base.delta.add(3, 45, 4); + base.delta.add(3, 46, 4); + base.delta.add(3, 48, 4); + base.delta.add(3, 49, 4); + base.delta.add(3, 50, 4); + base.delta.add(3, 51, 4); + base.delta.add(3, 52, 4); + base.delta.add(3, 53, 4); + base.delta.add(3, 54, 4); + base.delta.add(3, 55, 4); + base.delta.add(3, 56, 4); + base.delta.add(3, 57, 4); + base.delta.add(3, 65, 4); + base.delta.add(3, 66, 4); + base.delta.add(3, 67, 4); + base.delta.add(3, 68, 4); + base.delta.add(3, 69, 4); + base.delta.add(3, 70, 4); + base.delta.add(3, 71, 4); + base.delta.add(3, 72, 4); + 
base.delta.add(3, 73, 4); + base.delta.add(3, 74, 4); + base.delta.add(3, 75, 4); + base.delta.add(3, 76, 4); + base.delta.add(3, 77, 4); + base.delta.add(3, 78, 4); + base.delta.add(3, 79, 4); + base.delta.add(3, 80, 4); + base.delta.add(3, 81, 4); + base.delta.add(3, 82, 4); + base.delta.add(3, 83, 4); + base.delta.add(3, 84, 4); + base.delta.add(3, 85, 4); + base.delta.add(3, 86, 4); + base.delta.add(3, 87, 4); + base.delta.add(3, 88, 4); + base.delta.add(3, 89, 4); + base.delta.add(3, 90, 4); + base.delta.add(3, 95, 4); + base.delta.add(3, 97, 4); + base.delta.add(3, 98, 4); + base.delta.add(3, 99, 4); + base.delta.add(3, 100, 4); + base.delta.add(3, 101, 4); + base.delta.add(3, 102, 4); + base.delta.add(3, 103, 4); + base.delta.add(3, 104, 4); + base.delta.add(3, 105, 4); + base.delta.add(3, 106, 4); + base.delta.add(3, 107, 4); + base.delta.add(3, 108, 4); + base.delta.add(3, 109, 4); + base.delta.add(3, 110, 4); + base.delta.add(3, 111, 4); + base.delta.add(3, 112, 4); + base.delta.add(3, 113, 4); + base.delta.add(3, 114, 4); + base.delta.add(3, 115, 4); + base.delta.add(3, 116, 4); + base.delta.add(3, 117, 4); + base.delta.add(3, 118, 4); + base.delta.add(3, 119, 4); + base.delta.add(3, 120, 4); + base.delta.add(3, 121, 4); + base.delta.add(3, 122, 4); + base.delta.add(3, 124, 4); + + Lvlfa concat; + concat.initial.insert(1); + concat.final.insert(0); + concat.final.insert(1); + concat.delta.add(1, 45, 0); + concat.delta.add(1, 46, 0); + concat.delta.add(1, 48, 0); + concat.delta.add(1, 49, 0); + concat.delta.add(1, 50, 0); + concat.delta.add(1, 51, 0); + concat.delta.add(1, 52, 0); + concat.delta.add(1, 53, 0); + concat.delta.add(1, 54, 0); + concat.delta.add(1, 55, 0); + concat.delta.add(1, 56, 0); + concat.delta.add(1, 57, 0); + concat.delta.add(1, 65, 0); + concat.delta.add(1, 66, 0); + concat.delta.add(1, 67, 0); + concat.delta.add(1, 68, 0); + concat.delta.add(1, 69, 0); + concat.delta.add(1, 70, 0); + concat.delta.add(1, 71, 0); + concat.delta.add(1, 72, 0); + concat.delta.add(1, 73, 0); + concat.delta.add(1, 74, 0); + concat.delta.add(1, 75, 0); + concat.delta.add(1, 76, 0); + concat.delta.add(1, 77, 0); + concat.delta.add(1, 78, 0); + concat.delta.add(1, 79, 0); + concat.delta.add(1, 80, 0); + concat.delta.add(1, 81, 0); + concat.delta.add(1, 82, 0); + concat.delta.add(1, 83, 0); + concat.delta.add(1, 84, 0); + concat.delta.add(1, 85, 0); + concat.delta.add(1, 86, 0); + concat.delta.add(1, 87, 0); + concat.delta.add(1, 88, 0); + concat.delta.add(1, 89, 0); + concat.delta.add(1, 90, 0); + concat.delta.add(1, 95, 0); + concat.delta.add(1, 97, 0); + concat.delta.add(1, 98, 0); + concat.delta.add(1, 99, 0); + concat.delta.add(1, 100, 0); + concat.delta.add(1, 101, 0); + concat.delta.add(1, 102, 0); + concat.delta.add(1, 103, 0); + concat.delta.add(1, 104, 0); + concat.delta.add(1, 105, 0); + concat.delta.add(1, 106, 0); + concat.delta.add(1, 107, 0); + concat.delta.add(1, 108, 0); + concat.delta.add(1, 109, 0); + concat.delta.add(1, 110, 0); + concat.delta.add(1, 111, 0); + concat.delta.add(1, 112, 0); + concat.delta.add(1, 113, 0); + concat.delta.add(1, 114, 0); + concat.delta.add(1, 115, 0); + concat.delta.add(1, 116, 0); + concat.delta.add(1, 117, 0); + concat.delta.add(1, 118, 0); + concat.delta.add(1, 119, 0); + concat.delta.add(1, 120, 0); + concat.delta.add(1, 121, 0); + concat.delta.add(1, 122, 0); + concat.delta.add(1, 124, 0); + + for (auto i=0;i<1000;i++) { + base.concatenate(concat); + } +} diff --git a/tests/lvlfa/lvlfa-intersection.cc 
b/tests/lvlfa/lvlfa-intersection.cc new file mode 100644 index 00000000..dc586d03 --- /dev/null +++ b/tests/lvlfa/lvlfa-intersection.cc @@ -0,0 +1,327 @@ +/* tests-lvlfa-intersection.cc -- Tests for intersection of LVLFAs + */ + + +#include + +#include + +#include "mata/lvlfa/lvlfa.hh" + +using namespace mata::lvlfa; +using namespace mata::utils; +using namespace mata::parser; + +// Some common automata {{{ + +// Automaton A +#define FILL_WITH_AUT_A(x) \ + x.initial = {1, 3}; \ + x.final = {5}; \ + x.delta.add(1, 'a', 3); \ + x.delta.add(1, 'a', 10); \ + x.delta.add(1, 'b', 7); \ + x.delta.add(3, 'a', 7); \ + x.delta.add(3, 'b', 9); \ + x.delta.add(9, 'a', 9); \ + x.delta.add(7, 'b', 1); \ + x.delta.add(7, 'a', 3); \ + x.delta.add(7, 'c', 3); \ + x.delta.add(10, 'a', 7); \ + x.delta.add(10, 'b', 7); \ + x.delta.add(10, 'c', 7); \ + x.delta.add(7, 'a', 5); \ + x.delta.add(5, 'a', 5); \ + x.delta.add(5, 'c', 9); \ + + +// Automaton B +#define FILL_WITH_AUT_B(x) \ + x.initial = {4}; \ + x.final = {2, 12}; \ + x.delta.add(4, 'c', 8); \ + x.delta.add(4, 'a', 8); \ + x.delta.add(8, 'b', 4); \ + x.delta.add(4, 'a', 6); \ + x.delta.add(4, 'b', 6); \ + x.delta.add(6, 'a', 2); \ + x.delta.add(2, 'b', 2); \ + x.delta.add(2, 'a', 0); \ + x.delta.add(0, 'a', 2); \ + x.delta.add(2, 'c', 12); \ + x.delta.add(12, 'a', 14); \ + x.delta.add(14, 'b', 12); \ + +// }}} + +TEST_CASE("mata::lvlfa::intersection()") +{ // {{{ + Lvlfa a, b, res; + std::unordered_map, State> prod_map; + + SECTION("Intersection of empty automata") + { + res = intersection(a, b, EPSILON, &prod_map); + + REQUIRE(res.initial.empty()); + REQUIRE(res.final.empty()); + REQUIRE(res.delta.empty()); + REQUIRE(prod_map.empty()); + } + + SECTION("Intersection of empty automata 2") + { + res = intersection(a, b); + + REQUIRE(res.initial.empty()); + REQUIRE(res.final.empty()); + REQUIRE(res.delta.empty()); + } + + a.add_state(5); + b.add_state(6); + + SECTION("Intersection of automata with no transitions") + { + a.initial = {1, 3}; + a.final = {3, 5}; + + b.initial = {4, 6}; + b.final = {4, 2}; + + REQUIRE(!a.initial.empty()); + REQUIRE(!b.initial.empty()); + REQUIRE(!a.final.empty()); + REQUIRE(!b.final.empty()); + + res = intersection(a, b, EPSILON, &prod_map); + + REQUIRE(!res.initial.empty()); + REQUIRE(!res.final.empty()); + + State init_fin_st = prod_map[{3, 4}]; + + REQUIRE(res.initial[init_fin_st]); + REQUIRE(res.final[init_fin_st]); + } + + a.add_state(10); + b.add_state(14); + + SECTION("Intersection of automata with some transitions") + { + FILL_WITH_AUT_A(a); + FILL_WITH_AUT_B(b); + + res = intersection(a, b, EPSILON, &prod_map); + + REQUIRE(res.initial[prod_map[{1, 4}]]); + REQUIRE(res.initial[prod_map[{3, 4}]]); + REQUIRE(res.final[prod_map[{5, 2}]]); + + //for (const auto& c : prod_map) std::cout << c.first.first << "," << c.first.second << " -> " << c.second << "\n"; + //std::cout << prod_map[{7, 2}] << " " << prod_map[{1, 2}] << '\n'; + REQUIRE(res.delta.contains(prod_map[{1, 4}], 'a', prod_map[{3, 6}])); + REQUIRE(res.delta.contains(prod_map[{1, 4}], 'a', prod_map[{10, 8}])); + REQUIRE(res.delta.contains(prod_map[{1, 4}], 'a', prod_map[{10, 6}])); + REQUIRE(res.delta.contains(prod_map[{1, 4}], 'b', prod_map[{7, 6}])); + REQUIRE(res.delta.contains(prod_map[{3, 6}], 'a', prod_map[{7, 2}])); + REQUIRE(res.delta.contains(prod_map[{7, 2}], 'a', prod_map[{3, 0}])); + REQUIRE(res.delta.contains(prod_map[{7, 2}], 'a', prod_map[{5, 0}])); + // REQUIRE(res.delta.contains(prod_map[{7, 2}], 'b', prod_map[{1, 2}])); + 
REQUIRE(res.delta.contains(prod_map[{3, 0}], 'a', prod_map[{7, 2}])); + REQUIRE(res.delta.contains(prod_map[{1, 2}], 'a', prod_map[{10, 0}])); + REQUIRE(res.delta.contains(prod_map[{1, 2}], 'a', prod_map[{3, 0}])); + // REQUIRE(res.delta.contains(prod_map[{1, 2}], 'b', prod_map[{7, 2}])); + REQUIRE(res.delta.contains(prod_map[{10, 0}], 'a', prod_map[{7, 2}])); + REQUIRE(res.delta.contains(prod_map[{5, 0}], 'a', prod_map[{5, 2}])); + REQUIRE(res.delta.contains(prod_map[{5, 2}], 'a', prod_map[{5, 0}])); + REQUIRE(res.delta.contains(prod_map[{10, 6}], 'a', prod_map[{7, 2}])); + REQUIRE(res.delta.contains(prod_map[{7, 6}], 'a', prod_map[{5, 2}])); + REQUIRE(res.delta.contains(prod_map[{7, 6}], 'a', prod_map[{3, 2}])); + REQUIRE(res.delta.contains(prod_map[{10, 8}], 'b', prod_map[{7, 4}])); + REQUIRE(res.delta.contains(prod_map[{7, 4}], 'a', prod_map[{3, 6}])); + REQUIRE(res.delta.contains(prod_map[{7, 4}], 'a', prod_map[{3, 8}])); + // REQUIRE(res.delta.contains(prod_map[{7, 4}], 'b', prod_map[{1, 6}])); + REQUIRE(res.delta.contains(prod_map[{7, 4}], 'a', prod_map[{5, 6}])); + // REQUIRE(res.delta.contains(prod_map[{7, 4}], 'b', prod_map[{1, 6}])); + REQUIRE(res.delta.contains(prod_map[{1, 6}], 'a', prod_map[{3, 2}])); + REQUIRE(res.delta.contains(prod_map[{1, 6}], 'a', prod_map[{10, 2}])); + // REQUIRE(res.delta.contains(prod_map[{10, 2}], 'b', prod_map[{7, 2}])); + REQUIRE(res.delta.contains(prod_map[{10, 2}], 'a', prod_map[{7, 0}])); + REQUIRE(res.delta.contains(prod_map[{7, 0}], 'a', prod_map[{5, 2}])); + REQUIRE(res.delta.contains(prod_map[{7, 0}], 'a', prod_map[{3, 2}])); + REQUIRE(res.delta.contains(prod_map[{3, 2}], 'a', prod_map[{7, 0}])); + REQUIRE(res.delta.contains(prod_map[{5, 6}], 'a', prod_map[{5, 2}])); + REQUIRE(res.delta.contains(prod_map[{3, 4}], 'a', prod_map[{7, 6}])); + REQUIRE(res.delta.contains(prod_map[{3, 4}], 'a', prod_map[{7, 8}])); + REQUIRE(res.delta.contains(prod_map[{7, 8}], 'b', prod_map[{1, 4}])); + } + + SECTION("Intersection of automata with some transitions but without a final state") + { + FILL_WITH_AUT_A(a); + FILL_WITH_AUT_B(b); + b.final = {12}; + + res = intersection(a, b, EPSILON, &prod_map); + + REQUIRE(res.initial[prod_map[{1, 4}]]); + REQUIRE(res.initial[prod_map[{3, 4}]]); + REQUIRE(res.is_lang_empty()); + } +} // }}} + +TEST_CASE("mata::lvlfa::intersection() with preserving epsilon transitions") +{ + std::unordered_map, State> prod_map; + + Lvlfa a{6}; + a.initial.insert(0); + a.final.insert({1, 4, 5}); + a.delta.add(0, EPSILON, 1); + a.delta.add(1, 'a', 1); + a.delta.add(1, 'b', 1); + a.delta.add(1, 'c', 2); + a.delta.add(2, 'b', 4); + a.delta.add(2, EPSILON, 3); + a.delta.add(3, 'a', 5); + + Lvlfa b{10}; + b.initial.insert(0); + b.final.insert({2, 4, 8, 7}); + b.delta.add(0, 'b', 1); + b.delta.add(0, 'a', 2); + b.delta.add(2, 'a', 4); + b.delta.add(2, EPSILON, 3); + b.delta.add(3, 'b', 4); + b.delta.add(0, 'c', 5); + b.delta.add(5, 'a', 8); + b.delta.add(5, EPSILON, 6); + b.delta.add(6, 'a', 9); + b.delta.add(6, 'b', 7); + + Lvlfa result{intersection(a, b, EPSILON, &prod_map) }; + + // Check states. 
+ CHECK(result.is_state(prod_map[{0, 0}])); + CHECK(result.is_state(prod_map[{1, 0}])); + CHECK(result.is_state(prod_map[{1, 1}])); + CHECK(result.is_state(prod_map[{1, 2}])); + CHECK(result.is_state(prod_map[{1, 3}])); + CHECK(result.is_state(prod_map[{1, 4}])); + CHECK(result.is_state(prod_map[{2, 5}])); + CHECK(result.is_state(prod_map[{3, 5}])); + CHECK(result.is_state(prod_map[{2, 6}])); + CHECK(result.is_state(prod_map[{3, 6}])); + CHECK(result.is_state(prod_map[{4, 7}])); + CHECK(result.is_state(prod_map[{5, 9}])); + CHECK(result.is_state(prod_map[{5, 8}])); + CHECK(result.num_of_states() == 13); + + CHECK(result.initial[prod_map[{0, 0}]]); + CHECK(result.initial.size() == 1); + + CHECK(result.final[prod_map[{1, 2}]]); + CHECK(result.final[prod_map[{1, 4}]]); + CHECK(result.final[prod_map[{4, 7}]]); + CHECK(result.final[prod_map[{5, 8}]]); + CHECK(result.final.size() == 4); + + // Check transitions. + CHECK(result.delta.num_of_transitions() == 14); + + CHECK(result.delta.contains(prod_map[{0, 0}], EPSILON, prod_map[{1, 0}])); + CHECK(result.delta.state_post(prod_map[{ 0, 0 }]).num_of_moves() == 1); + + CHECK(result.delta.contains(prod_map[{1, 0}], 'b', prod_map[{1, 1}])); + CHECK(result.delta.contains(prod_map[{1, 0}], 'a', prod_map[{1, 2}])); + CHECK(result.delta.contains(prod_map[{1, 0}], 'c', prod_map[{2, 5}])); + CHECK(result.delta.state_post(prod_map[{ 1, 0 }]).num_of_moves() == 3); + + CHECK(result.delta.state_post(prod_map[{ 1, 1 }]).empty()); + + CHECK(result.delta.contains(prod_map[{1, 2}], EPSILON, prod_map[{1, 3}])); + CHECK(result.delta.contains(prod_map[{1, 2}], 'a', prod_map[{1, 4}])); + CHECK(result.delta.state_post(prod_map[{ 1, 2 }]).num_of_moves() == 2); + + CHECK(result.delta.contains(prod_map[{1, 3}], 'b', prod_map[{1, 4}])); + CHECK(result.delta.state_post(prod_map[{ 1, 3 }]).num_of_moves() == 1); + + CHECK(result.delta.state_post(prod_map[{ 1, 4 }]).empty()); + + CHECK(result.delta.contains(prod_map[{2, 5}], EPSILON, prod_map[{3, 5}])); + CHECK(result.delta.contains(prod_map[{2, 5}], EPSILON, prod_map[{2, 6}])); + CHECK(result.delta.state_post(prod_map[{ 2, 5 }]).num_of_moves() == 2); + + CHECK(result.delta.contains(prod_map[{3, 5}], 'a', prod_map[{5, 8}])); + CHECK(result.delta.contains(prod_map[{3, 5}], EPSILON, prod_map[{3, 6}])); + CHECK(result.delta.state_post(prod_map[{ 3, 5 }]).num_of_moves() == 2); + + CHECK(result.delta.contains(prod_map[{2, 6}], 'b', prod_map[{4, 7}])); + CHECK(result.delta.contains(prod_map[{2, 6}], EPSILON, prod_map[{3, 6}])); + CHECK(result.delta.state_post(prod_map[{ 2, 6 }]).num_of_moves() == 2); + + CHECK(result.delta.contains(prod_map[{3, 6}], 'a', prod_map[{5, 9}])); + CHECK(result.delta.state_post(prod_map[{ 3, 6 }]).num_of_moves() == 1); + + CHECK(result.delta.state_post(prod_map[{ 4, 7 }]).empty()); + + CHECK(result.delta.state_post(prod_map[{ 5, 9 }]).empty()); + + CHECK(result.delta.state_post(prod_map[{ 5, 8 }]).empty()); +} + +TEST_CASE("mata::lvlfa::intersection() for profiling", "[.profiling],[intersection]") +{ + Lvlfa a{6}; + a.initial.insert(0); + a.final.insert({1, 4, 5}); + a.delta.add(0, EPSILON, 1); + a.delta.add(1, 'a', 1); + a.delta.add(1, 'b', 1); + a.delta.add(1, 'c', 2); + a.delta.add(2, 'b', 4); + a.delta.add(2, EPSILON, 3); + a.delta.add(3, 'a', 5); + + Lvlfa b{10}; + b.initial.insert(0); + b.final.insert({2, 4, 8, 7}); + b.delta.add(0, 'b', 1); + b.delta.add(0, 'a', 2); + b.delta.add(2, 'a', 4); + b.delta.add(2, EPSILON, 3); + b.delta.add(3, 'b', 4); + b.delta.add(0, 'c', 5); + b.delta.add(5, 'a', 8); + 
b.delta.add(5, EPSILON, 6); + b.delta.add(6, 'a', 9); + b.delta.add(6, 'b', 7); + + for (size_t i{ 0 }; i < 10000; ++i) { + Lvlfa result{intersection(a, b) }; + } +} + +TEST_CASE("Move semantics", "[.profiling][std::move]") { + Lvlfa b{10}; + b.initial.insert(0); + b.final.insert({2, 4, 8, 7}); + b.delta.add(0, 'b', 1); + b.delta.add(0, 'a', 2); + b.delta.add(2, 'a', 4); + b.delta.add(2, EPSILON, 3); + b.delta.add(3, 'b', 4); + b.delta.add(0, 'c', 5); + b.delta.add(5, 'a', 8); + b.delta.add(5, EPSILON, 6); + b.delta.add(6, 'a', 9); + b.delta.add(6, 'b', 7); + + for (size_t i{ 0 }; i < 1'000'000; ++i) { + Lvlfa a{ std::move(b) }; + a.initial.insert(1); + b = std::move(a); + } +} diff --git a/tests/lvlfa/lvlfa-plumbing.cc b/tests/lvlfa/lvlfa-plumbing.cc new file mode 100644 index 00000000..b5aec645 --- /dev/null +++ b/tests/lvlfa/lvlfa-plumbing.cc @@ -0,0 +1,120 @@ +/* tests-lvlfa-plumbing.cc -- Tests plumbing versions of functions + */ + + +#include + +#include + +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/plumbing.hh" + +using Symbol = mata::Symbol; +using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; + +// Some common automata {{{ + +// Automaton A +#define FILL_WITH_AUT_A(x) \ + x.initial = {1, 3}; \ + x.final = {5}; \ + x.delta.add(1, 'a', 3); \ + x.delta.add(1, 'a', 10); \ + x.delta.add(1, 'b', 7); \ + x.delta.add(3, 'a', 7); \ + x.delta.add(3, 'b', 9); \ + x.delta.add(9, 'a', 9); \ + x.delta.add(7, 'b', 1); \ + x.delta.add(7, 'a', 3); \ + x.delta.add(7, 'c', 3); \ + x.delta.add(10, 'a', 7); \ + x.delta.add(10, 'b', 7); \ + x.delta.add(10, 'c', 7); \ + x.delta.add(7, 'a', 5); \ + x.delta.add(5, 'a', 5); \ + x.delta.add(5, 'c', 9); \ + + +// Automaton B +#define FILL_WITH_AUT_B(x) \ + x.initial = {4}; \ + x.final = {2, 12}; \ + x.delta.add(4, 'c', 8); \ + x.delta.add(4, 'a', 8); \ + x.delta.add(8, 'b', 4); \ + x.delta.add(4, 'a', 6); \ + x.delta.add(4, 'b', 6); \ + x.delta.add(6, 'a', 2); \ + x.delta.add(2, 'b', 2); \ + x.delta.add(2, 'a', 0); \ + x.delta.add(0, 'a', 2); \ + x.delta.add(2, 'c', 12); \ + x.delta.add(12, 'a', 14); \ + x.delta.add(14, 'b', 12); \ + +// }}} + +TEST_CASE("Mata::lvlfa::Plumbing") { + mata::lvlfa::Lvlfa lhs{}; + mata::lvlfa::Lvlfa rhs{}; + mata::lvlfa::Lvlfa result{}; + OnTheFlyAlphabet alph{ std::vector{ "a", "b", "c" } }; + + SECTION("Mata::lvlfa::Plumbing::concatenate") { + FILL_WITH_AUT_A(lhs); + FILL_WITH_AUT_B(lhs); + mata::lvlfa::plumbing::concatenate(&result, lhs, rhs); + CHECK(result.is_lang_empty()); + } + + SECTION("Mata::lvlfa::Plumbing::intersection") { + FILL_WITH_AUT_A(lhs); + FILL_WITH_AUT_B(lhs); + mata::lvlfa::plumbing::intersection(&result, lhs, rhs); + CHECK(result.is_lang_empty()); + } + + SECTION("Mata::lvlfa::Plumbing::union") { + FILL_WITH_AUT_A(lhs); + FILL_WITH_AUT_B(lhs); + mata::lvlfa::plumbing::uni(&result, lhs, rhs); + CHECK(!result.is_lang_empty()); + } + + SECTION("Mata::lvlfa::Plumbing::remove_epsilon") { + FILL_WITH_AUT_A(lhs); + mata::lvlfa::plumbing::remove_epsilon(&result, lhs); + CHECK(!result.is_lang_empty()); + } + + SECTION("Mata::lvlfa::Plumbing::revert") { + FILL_WITH_AUT_A(lhs); + mata::lvlfa::plumbing::revert(&result, lhs); + CHECK(!result.is_lang_empty()); + } + + SECTION("Mata::lvlfa::Plumbing::reduce") { + FILL_WITH_AUT_A(lhs); + mata::lvlfa::plumbing::reduce(&result, lhs); + CHECK(!result.is_lang_empty()); + CHECK(result.num_of_states() <= lhs.num_of_states()); + } + + SECTION("Mata::lvlfa::Plumbing::determinize") { + FILL_WITH_AUT_A(lhs); + mata::lvlfa::plumbing::determinize(&result, lhs); + 
CHECK(!result.is_lang_empty()); + } + + SECTION("Mata::lvlfa::Plumbing::minimize") { + FILL_WITH_AUT_A(lhs); + mata::lvlfa::plumbing::minimize(&result, lhs); + CHECK(!result.is_lang_empty()); + } + + SECTION("Mata::lvlfa::Plumbing::complement") { + FILL_WITH_AUT_A(lhs); + mata::lvlfa::plumbing::complement(&result, lhs, alph); + CHECK(!result.is_lang_empty()); + } +} diff --git a/tests/lvlfa/lvlfa-profiling.cc b/tests/lvlfa/lvlfa-profiling.cc new file mode 100644 index 00000000..576c4f90 --- /dev/null +++ b/tests/lvlfa/lvlfa-profiling.cc @@ -0,0 +1,125 @@ +// TODO: some header + +#include +#include "utils.hh" + +#include "mata/lvlfa/lvlfa.hh" +#include "mata/parser/re2parser.hh" + +using namespace mata::lvlfa; +using namespace mata::parser; + +///////////////////////////// +// Profiling revert and trim +///////////////////////////// + +TEST_CASE("mata::lvlfa::fragile_revert() speed, simple ", "[.profiling]") { + Lvlfa B; + FILL_WITH_AUT_D(B); + for (int i = 0; i < 300000; i++) { + B = fragile_revert(B); + } +} + +TEST_CASE("mata::lvlfa::simple_revert() speed, simple ", "[.profiling]") { + Lvlfa B; + FILL_WITH_AUT_B(B); + for (int i = 0; i < 300000; i++) { + B = simple_revert(B); + } +} + +TEST_CASE("mata::lvlfa::simple_revert() speed, harder", "[.profiling]") { + Lvlfa B; +//this gives an interesting test case if the parser is not trimming and reducing + mata::parser::create_nfa(&B, "((.*){10})*"); + for (int i = 0; i < 200; i++) { + B = simple_revert(B); + } +} + +TEST_CASE("mata::lvlfa::fragile_revert() speed, harder", "[.profiling]") { + Lvlfa B; +//this gives an interesting test case if the parser is not trimming and reducing + create_nfa(&B, "((.*){10})*"); + for (int i = 0; i < 200; i++) { + B = fragile_revert(B); + } +} + +TEST_CASE("mata::lvlfa::somewhat_simple_revert() speed, harder", "[.profiling]") { + Lvlfa B; +//this gives an interesting test case if the parser is not trimming and reducing + create_nfa(&B, "((.*){10})*"); +//FILL_WITH_AUT_C(B); + for (int i = 0; i < 200; i++) { + B = somewhat_simple_revert(B); + } +} + +TEST_CASE("mata::lvlfa::trim_inplace() speed, simple", "[.profiling]") { + Lvlfa A, B; +//this gives an interesting test case if the parser is not trimming and reducing + FILL_WITH_AUT_B(B); + for (int i = 0; i < 300000; i++) { + A = B; + A.trim(); + } +} + +TEST_CASE("mata::lvlfa::trim_inplace() speed, harder", "[.profiling]") { + Lvlfa A, B; +//this gives an interesting test case if the parser is not trimming and reducing + create_nfa(&B, "((.*){10})*"); + for (int i = 0; i < 200; i++) { + A = B; + A.trim(); + } +} + +////////////////////////////// +// Profiling get_used_symbols +////////////////////////////// + +TEST_CASE("mata::lvlfa::get_used_symbols speed, harder", "[.profiling]") { + Lvlfa A; + create_nfa(&A, "((.*){10})*"); + for (int i = 0; i < 2000000; i++) { + A.delta.get_used_symbols(); + } +} + +TEST_CASE("mata::lvlfa::get_used_symbols_bv speed, harder", "[.profiling]") { + Lvlfa A; + create_nfa(&A, "((.*){10})*"); + for (int i = 0; i < 2000000; i++) { + A.delta.get_used_symbols_bv(); + } +} + +TEST_CASE("mata::lvlfa::get_used_symbols_vec speed, harder", "[.profiling]") { + Lvlfa A; + create_nfa(&A, "((.*){10})*"); + for (int i = 0; i < 2000000; i++) { + A.delta.get_used_symbols_vec(); + } +} + +TEST_CASE("mata::lvlfa::get_used_symbols_set speed, harder", "[.profiling]") { + Lvlfa A; + create_nfa(&A, "((.*){10})*"); + for (int i = 0; i < 2000000; i++) { + A.delta.get_used_symbols_set(); + } +} + +TEST_CASE("mata::lvlfa::get_used_symbols_sps speed, 
harder", "[.profiling]") { + Lvlfa A; + create_nfa(&A, "((.*){10})*"); + for (int i = 0; i < 2000000; i++) { + A.delta.get_used_symbols_sps(); + } +} + +///////////////////////////// +///////////////////////////// diff --git a/tests/lvlfa/lvlfa.cc b/tests/lvlfa/lvlfa.cc new file mode 100644 index 00000000..775f8653 --- /dev/null +++ b/tests/lvlfa/lvlfa.cc @@ -0,0 +1,2986 @@ +// TODO: some header + +#include + +#include + +#include "utils.hh" + +#include "mata/utils/sparse-set.hh" +#include "mata/lvlfa/delta.hh" +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/strings.hh" +#include "mata/lvlfa/builder.hh" +#include "mata/lvlfa/plumbing.hh" +#include "mata/lvlfa/algorithms.hh" +#include "mata/parser/re2parser.hh" + +using namespace mata; +using namespace mata::lvlfa::algorithms; +using namespace mata::lvlfa; +using namespace mata::strings; +using namespace mata::lvlfa::plumbing; +using namespace mata::utils; +using namespace mata::parser; +using Symbol = mata::Symbol; +using Word = mata::Word; +using IntAlphabet = mata::IntAlphabet; +using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; + +TEST_CASE("mata::lvlfa::size()") { + Lvlfa lvlfa{}; + CHECK(lvlfa.num_of_states() == 0); + + lvlfa.add_state(3); + CHECK(lvlfa.num_of_states() == 4); + + lvlfa.clear(); + lvlfa.add_state(); + CHECK(lvlfa.num_of_states() == 1); + + lvlfa.clear(); + FILL_WITH_AUT_A(lvlfa); + CHECK(lvlfa.num_of_states() == 11); + + lvlfa.clear(); + FILL_WITH_AUT_B(lvlfa); + CHECK(lvlfa.num_of_states() == 15); + + lvlfa = Lvlfa{ 0, {}, {} }; + CHECK(lvlfa.num_of_states() == 0); +} + +TEST_CASE("mata::lvlfa::Trans::operator<<") { + Transition trans(1, 2, 3); + REQUIRE(std::to_string(trans) == "(1, 2, 3)"); +} + +TEST_CASE("mata::lvlfa::create_alphabet()") { + Lvlfa a{1}; + a.delta.add(0, 'a', 0); + + Lvlfa b{1}; + b.delta.add(0, 'b', 0); + b.delta.add(0, 'a', 0); + Lvlfa c{1}; + b.delta.add(0, 'c', 0); + + auto alphabet{ create_alphabet(a, b, c) }; + + auto symbols{alphabet.get_alphabet_symbols() }; + CHECK(symbols == mata::utils::OrdVector{ 'c', 'b', 'a' }); + + // create_alphabet(1, 3, 4); // Will not compile: '1', '3', '4' are not of the required type. + // create_alphabet(a, b, 4); // Will not compile: '4' is not of the required type. 
+} + +TEST_CASE("mata::lvlfa::Lvlfa::delta.add()/delta.contains()") +{ // {{{ + Lvlfa a(3); + + SECTION("Empty automata have now transitions") + { + REQUIRE(!a.delta.contains(1, 'a', 1)); + } + + SECTION("If I add a transition, it is in the automaton") + { + a.delta.add(1, 'a', 1); + + REQUIRE(a.delta.contains(1, 'a', 1)); + } + + SECTION("If I add a transition, only it is added") + { + a.delta.add(1, 'a', 1); + + REQUIRE(a.delta.contains(1, 'a', 1)); + REQUIRE(!a.delta.contains(1, 'a', 2)); + REQUIRE(!a.delta.contains(1, 'b', 2)); + REQUIRE(!a.delta.contains(2, 'a', 1)); + } + + SECTION("Adding multiple transitions") + { + a.delta.add(2, 'b', {2,1,0}); + REQUIRE(a.delta.contains(2, 'b', 0)); + REQUIRE(a.delta.contains(2, 'b', 1)); + REQUIRE(a.delta.contains(2, 'b', 2)); + REQUIRE(!a.delta.contains(0, 'b', 0)); + + a.delta.add(0, 'b', StateSet({0})); + REQUIRE(a.delta.contains(0, 'b', 0)); + } + + SECTION("Iterating over transitions") { + Transition t1{ 0, 0, 0}; + Transition t2{ 0, 1, 0}; + Transition t3{ 1, 1, 1}; + Transition t4{ 2, 2, 2}; + a.delta.add(t1); + a.delta.add(t2); + a.delta.add(t3); + a.delta.add(t4); + a.delta.add(t3); + size_t transitions_cnt{ 0 }; + std::vector expected_transitions{ t1, t2, t3, t4 }; + std::vector iterated_transitions{}; + const Delta::Transitions transitions{ a.delta.transitions() }; + const Delta::Transitions::const_iterator transitions_end{ transitions.end() }; + for (Delta::Transitions::const_iterator trans_it{ transitions.begin()}; trans_it != transitions_end; ++trans_it) { + iterated_transitions.push_back(*trans_it); + ++transitions_cnt; + } + CHECK(transitions_cnt == 4); + CHECK(expected_transitions == iterated_transitions); + + transitions_cnt = 0; + iterated_transitions.clear(); + for (const Transition& trans: a.delta.transitions()) { + iterated_transitions.push_back(trans); + ++transitions_cnt; + } + CHECK(transitions_cnt == 4); + CHECK(expected_transitions == iterated_transitions); + } + +} // }}} + +TEST_CASE("mata::lvlfa::Delta.transform/append") +{ // {{{ + Lvlfa a(3); + a.delta.add(1, 'a', 1); + a.delta.add(2, 'b', {2,1,0}); + + SECTION("transform") + { + auto upd_fnc = [&](State st) { + return st + 5; + }; + std::vector state_posts = a.delta.renumber_targets(upd_fnc); + a.delta.append(state_posts); + + REQUIRE(a.delta.contains(4, 'a', 6)); + REQUIRE(a.delta.contains(5, 'b', 7)); + REQUIRE(a.delta.contains(5, 'b', 5)); + REQUIRE(a.delta.contains(5, 'b', 6)); + } + +} // }}} + +TEST_CASE("mata::lvlfa::is_lang_empty()") +{ // {{{ + Lvlfa aut(14); + Run cex; + + SECTION("An empty automaton has an empty language") + { + REQUIRE(aut.is_lang_empty()); + } + + SECTION("An automaton with a state that is both initial and final does not have an empty language") + { + aut.initial = {1, 2}; + aut.final = {2, 3}; + + bool is_empty = aut.is_lang_empty(&cex); + REQUIRE(!is_empty); + } + + SECTION("More complicated automaton") + { + aut.initial = {1, 2}; + aut.delta.add(1, 'a', 2); + aut.delta.add(1, 'a', 3); + aut.delta.add(1, 'b', 4); + aut.delta.add(2, 'a', 2); + aut.delta.add(2, 'a', 3); + aut.delta.add(2, 'b', 4); + aut.delta.add(3, 'b', 4); + aut.delta.add(3, 'c', 7); + aut.delta.add(3, 'b', 2); + aut.delta.add(7, 'a', 8); + + SECTION("with final states") + { + aut.final = {7}; + REQUIRE(!aut.is_lang_empty()); + } + + SECTION("without final states") + { + REQUIRE(aut.is_lang_empty()); + } + + SECTION("another complicated automaton") + { + FILL_WITH_AUT_A(aut); + + REQUIRE(!aut.is_lang_empty()); + } + + SECTION("a complicated automaton with 
unreachable final states") + { + FILL_WITH_AUT_A(aut); + aut.final = {13}; + + REQUIRE(aut.is_lang_empty()); + } + } + + SECTION("An automaton with a state that is both initial and final does not have an empty language") + { + aut.initial = {1, 2}; + aut.final = {2, 3}; + + bool is_empty = aut.is_lang_empty(&cex); + REQUIRE(!is_empty); + + // check the counterexample + REQUIRE(cex.path.size() == 1); + REQUIRE(cex.path[0] == 2); + } + + SECTION("Counterexample of an automaton with non-empty language") + { + aut.initial = {1, 2}; + aut.final = {8, 9}; + aut.delta.add(1, 'c', 2); + aut.delta.add(2, 'a', 4); + aut.delta.add(2, 'c', 1); + aut.delta.add(2, 'c', 3); + aut.delta.add(3, 'e', 5); + aut.delta.add(4, 'c', 8); + + bool is_empty = aut.is_lang_empty(&cex); + REQUIRE(!is_empty); + + // check the counterexample + REQUIRE(cex.path.size() == 3); + REQUIRE(cex.path[0] == 2); + REQUIRE(cex.path[1] == 4); + REQUIRE(cex.path[2] == 8); + } +} // }}} + +TEST_CASE("mata::lvlfa::is_acyclic") +{ // {{{ + Lvlfa aut(14); + + SECTION("An empty automaton is acyclic") + { + REQUIRE(aut.is_acyclic()); + } + + SECTION("An automaton with a state that is both initial and final is acyclic") + { + aut.initial = {1, 2}; + aut.final = {2, 3}; + REQUIRE(aut.is_acyclic()); + } + + SECTION("More complicated automaton") + { + aut.initial = {1, 2}; + aut.delta.add(1, 'a', 2); + aut.delta.add(1, 'a', 3); + aut.delta.add(1, 'b', 4); + aut.delta.add(2, 'a', 3); + aut.delta.add(2, 'b', 4); + aut.delta.add(3, 'b', 4); + aut.delta.add(3, 'c', 7); + aut.delta.add(7, 'a', 8); + + SECTION("without final states") + { + REQUIRE(aut.is_lang_empty()); + } + } + + SECTION("Cyclic automaton") + { + aut.initial = {1, 2}; + aut.final = {8, 9}; + aut.delta.add(1, 'c', 2); + aut.delta.add(2, 'a', 4); + aut.delta.add(2, 'c', 1); + aut.delta.add(2, 'c', 3); + aut.delta.add(3, 'e', 5); + aut.delta.add(4, 'c', 8); + REQUIRE(!aut.is_acyclic()); + } + + SECTION("Automaton with self-loops") + { + Lvlfa aut(2); + aut.initial = {0}; + aut.final = {1}; + aut.delta.add(0, 'c', 1); + aut.delta.add(1, 'a', 1); + REQUIRE(!aut.is_acyclic()); + } +} // }}} + +TEST_CASE("mata::lvlfa::get_word_for_path()") +{ // {{{ + Lvlfa aut(5); + Run path; + Word word; + + SECTION("empty word") + { + path = { }; + + auto word_bool_pair = aut.get_word_for_path(path); + REQUIRE(word_bool_pair.second); + REQUIRE(word_bool_pair.first.word.empty()); + } + + SECTION("empty word 2") + { + aut.initial = {1}; + path.path = {1}; + + auto word_bool_pair = aut.get_word_for_path(path); + REQUIRE(word_bool_pair.second); + REQUIRE(word_bool_pair.first.word.empty()); + } + + SECTION("nonempty word") + { + aut.initial = {1}; + aut.delta.add(1, 'c', 2); + aut.delta.add(2, 'a', 4); + aut.delta.add(2, 'c', 1); + aut.delta.add(2, 'b', 3); + + path.path = {1,2,3}; + + auto word_bool_pair = aut.get_word_for_path(path); + REQUIRE(word_bool_pair.second); + REQUIRE(word_bool_pair.first.word == Word({'c', 'b'})); + } + + SECTION("longer word") + { + aut.initial = {1}; + aut.delta.add(1, 'a', 2); + aut.delta.add(1, 'c', 2); + aut.delta.add(2, 'a', 4); + aut.delta.add(2, 'c', 1); + aut.delta.add(2, 'b', 3); + aut.delta.add(3, 'd', 2); + + path.path = {1,2,3,2,4}; + + auto word_bool_pair = aut.get_word_for_path(path); + std::set possible({ + Word({'c', 'b', 'd', 'a'}), + Word({'a', 'b', 'd', 'a'})}); + REQUIRE(word_bool_pair.second); + REQUIRE(haskey(possible, word_bool_pair.first.word)); + } + + SECTION("invalid path") + { + aut.initial = {1}; + aut.delta.add(1, 'a', 2); + aut.delta.add(1, 'c', 
2); + aut.delta.add(2, 'a', 4); + aut.delta.add(2, 'c', 1); + aut.delta.add(2, 'b', 3); + aut.delta.add(3, 'd', 2); + + path.path = {1,2,3,1,2}; + + auto word_bool_pair = aut.get_word_for_path(path); + REQUIRE(!word_bool_pair.second); + } +} + + +TEST_CASE("mata::lvlfa::is_lang_empty_cex()") +{ + Lvlfa aut(10); + Run cex; + + SECTION("Counterexample of an automaton with non-empty language") + { + aut.initial = {1, 2}; + aut.final = {8, 9}; + aut.delta.add(1, 'c', 2); + aut.delta.add(2, 'a', 4); + aut.delta.add(2, 'c', 1); + aut.delta.add(2, 'c', 3); + aut.delta.add(3, 'e', 5); + aut.delta.add(4, 'c', 8); + + bool is_empty = aut.is_lang_empty(&cex); + REQUIRE(!is_empty); + + // check the counterexample + REQUIRE(cex.word.size() == 2); + REQUIRE(cex.word[0] == 'a'); + REQUIRE(cex.word[1] == 'c'); + } +} + + +TEST_CASE("mata::lvlfa::determinize()") +{ + Lvlfa aut(3); + Lvlfa result; + std::unordered_map subset_map; + + SECTION("empty automaton") + { + result = determinize(aut); + + REQUIRE(result.final.empty()); + REQUIRE(result.delta.empty()); + CHECK(result.is_lang_empty()); + } + + SECTION("simple automaton 1") + { + aut.initial = {1 }; + aut.final = {1 }; + result = determinize(aut, &subset_map); + + REQUIRE(result.initial[subset_map[{1}]]); + REQUIRE(result.final[subset_map[{1}]]); + REQUIRE(result.delta.empty()); + } + + SECTION("simple automaton 2") + { + aut.initial = {1 }; + aut.final = {2 }; + aut.delta.add(1, 'a', 2); + result = determinize(aut, &subset_map); + + REQUIRE(result.initial[subset_map[{1}]]); + REQUIRE(result.final[subset_map[{2}]]); + REQUIRE(result.delta.contains(subset_map[{1}], 'a', subset_map[{2}])); + } + + SECTION("This broke Delta when delta[q] could cause re-allocation of post") + { + Lvlfa x{}; + x.initial.insert(0); + x.final.insert(4); + x.delta.add(0, 1, 3); + x.delta.add(3, 1, 3); + x.delta.add(3, 2, 3); + x.delta.add(3, 0, 1); + x.delta.add(1, 1, 1); + x.delta.add(1, 2, 1); + x.delta.add(1, 0, 2); + x.delta.add(2, 0, 2); + x.delta.add(2, 1, 2); + x.delta.add(2, 2, 2); + x.delta.add(2, 0, 4); + OnTheFlyAlphabet alphabet{}; + auto complement_result{determinize(x)}; + } +} // }}} + +TEST_CASE("mata::lvlfa::minimize() for profiling", "[.profiling],[minimize]") { + Lvlfa aut(4); + Lvlfa result; + std::unordered_map subset_map; + + aut.initial.insert(0); + aut.final.insert(3); + aut.delta.add(0, 46, 0); + aut.delta.add(0, 47, 0); + aut.delta.add(0, 58, 0); + aut.delta.add(0, 58, 1); + aut.delta.add(0, 64, 0); + aut.delta.add(0, 64, 0); + aut.delta.add(0, 82, 0); + aut.delta.add(0, 92, 0); + aut.delta.add(0, 98, 0); + aut.delta.add(0, 100, 0); + aut.delta.add(0, 103, 0); + aut.delta.add(0, 109, 0); + aut.delta.add(0, 110, 0); + aut.delta.add(0, 111, 0); + aut.delta.add(0, 114, 0); + aut.delta.add(1, 47, 2); + aut.delta.add(2, 47, 3); + aut.delta.add(3, 46, 3); + aut.delta.add(3, 47, 3); + aut.delta.add(3, 58, 3); + aut.delta.add(3, 64, 3); + aut.delta.add(3, 82, 3); + aut.delta.add(3, 92, 3); + aut.delta.add(3, 98, 3); + aut.delta.add(3, 100, 3); + aut.delta.add(3, 103, 3); + aut.delta.add(3, 109, 3); + aut.delta.add(3, 110, 3); + aut.delta.add(3, 111, 3); + aut.delta.add(3, 114, 3); + minimize(&result, aut); +} + +TEST_CASE("mata::lvlfa::construct() correct calls") +{ // {{{ + Lvlfa aut(10); + mata::parser::ParsedSection parsec; + OnTheFlyAlphabet alphabet; + + SECTION("construct an empty automaton") + { + parsec.type = lvlfa::TYPE_NFA; + + aut = builder::construct(parsec); + + REQUIRE(aut.is_lang_empty()); + } + + SECTION("construct a simple non-empty 
automaton accepting the empty word") + { + parsec.type = lvlfa::TYPE_NFA; + parsec.dict.insert({"Initial", {"q1"}}); + parsec.dict.insert({"Final", {"q1"}}); + + aut = builder::construct(parsec); + + REQUIRE(!aut.is_lang_empty()); + } + + SECTION("construct an automaton with more than one initial/final states") + { + parsec.type = lvlfa::TYPE_NFA; + parsec.dict.insert({"Initial", {"q1", "q2"}}); + parsec.dict.insert({"Final", {"q1", "q2", "q3"}}); + + aut = builder::construct(parsec); + + REQUIRE(aut.initial.size() == 2); + REQUIRE(aut.final.size() == 3); + } + + SECTION("construct a simple non-empty automaton accepting only the word 'a'") + { + parsec.type = lvlfa::TYPE_NFA; + parsec.dict.insert({"Initial", {"q1"}}); + parsec.dict.insert({"Final", {"q2"}}); + parsec.body = { {"q1", "a", "q2"} }; + + aut = builder::construct(parsec, &alphabet); + + Run cex; + REQUIRE(!aut.is_lang_empty(&cex)); + auto word_bool_pair = aut.get_word_for_path(cex); + REQUIRE(word_bool_pair.second); + REQUIRE(word_bool_pair.first.word == encode_word(&alphabet, { "a"}).word); + + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "a"}))); + } + + SECTION("construct a more complicated non-empty automaton") + { + parsec.type = lvlfa::TYPE_NFA; + parsec.dict.insert({"Initial", {"q1", "q3"}}); + parsec.dict.insert({"Final", {"q5"}}); + parsec.body.push_back({"q1", "a", "q3"}); + parsec.body.push_back({"q1", "a", "q10"}); + parsec.body.push_back({"q1", "b", "q7"}); + parsec.body.push_back({"q3", "a", "q7"}); + parsec.body.push_back({"q3", "b", "q9"}); + parsec.body.push_back({"q9", "a", "q9"}); + parsec.body.push_back({"q7", "b", "q1"}); + parsec.body.push_back({"q7", "a", "q3"}); + parsec.body.push_back({"q7", "c", "q3"}); + parsec.body.push_back({"q10", "a", "q7"}); + parsec.body.push_back({"q10", "b", "q7"}); + parsec.body.push_back({"q10", "c", "q7"}); + parsec.body.push_back({"q7", "a", "q5"}); + parsec.body.push_back({"q5", "a", "q5"}); + parsec.body.push_back({"q5", "c", "q9"}); + + aut = builder::construct(parsec, &alphabet); + + // some samples + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "b", "a"}))); + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "a", "c", "a", "a"}))); + REQUIRE(aut.is_in_lang(encode_word(&alphabet, + {"a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a"}))); + // some wrong samples + REQUIRE(!aut.is_in_lang(encode_word(&alphabet, { "b", "c"}))); + REQUIRE(!aut.is_in_lang(encode_word(&alphabet, { "a", "c", "c", "a"}))); + REQUIRE(!aut.is_in_lang(encode_word(&alphabet, { "b", "a", "c", "b"}))); + } +} // }}} + +TEST_CASE("mata::lvlfa::construct() invalid calls") +{ // {{{ + Lvlfa aut; + mata::parser::ParsedSection parsec; + + SECTION("construct() call with invalid ParsedSection object") + { + parsec.type = "FA"; + + CHECK_THROWS_WITH(builder::construct(parsec), + Catch::Contains("expecting type")); + } + + SECTION("construct() call with an epsilon transition") + { + parsec.type = lvlfa::TYPE_NFA; + parsec.body = { {"q1", "q2"} }; + + CHECK_THROWS_WITH(builder::construct(parsec), + Catch::Contains("Epsilon transition")); + } + + SECTION("construct() call with a nonsense transition") + { + parsec.type = lvlfa::TYPE_NFA; + parsec.body = { {"q1", "a", "q2", "q3"} }; + + CHECK_THROWS_WITH(plumbing::construct(&aut, parsec), + Catch::Contains("Invalid transition")); + } +} // }}} + +TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") +{ // {{{ + Lvlfa aut; + mata::IntermediateAut inter_aut; + OnTheFlyAlphabet alphabet; + + SECTION("construct an empty automaton") + 
{ + inter_aut.automaton_type = mata::IntermediateAut::AutomatonType::LVLFA; + REQUIRE(aut.is_lang_empty()); + aut = builder::construct(inter_aut); + REQUIRE(aut.is_lang_empty()); + } + + SECTION("construct a simple non-empty automaton accepting the empty word from intermediate automaton") + { + std::string file = + "@LVLFA-explicit\n" + "%States-enum p q r\n" + "%Alphabet-auto\n" + "%Initial p | q\n" + "%Final p | q\n"; + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + aut = builder::construct(inter_aut); + + REQUIRE(!aut.is_lang_empty()); + } + + SECTION("construct an automaton with more than one initial/final states from intermediate automaton") + { + std::string file = + "@LVLFA-explicit\n" + "%States-enum p q 3\n" + "%Alphabet-auto\n" + "%Initial p | q\n" + "%Final p | q | r\n"; + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + plumbing::construct(&aut, inter_aut); + + REQUIRE(aut.initial.size() == 2); + REQUIRE(aut.final.size() == 3); + } + + SECTION("construct an automaton with implicit operator completion one initial/final states from intermediate automaton") + { + std::string file = + "@LVLFA-explicit\n" + "%States-enum p q r\n" + "%Alphabet-auto\n" + "%Initial p q\n" + "%Final p q r\n"; + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + plumbing::construct(&aut, inter_aut); + + REQUIRE(aut.initial.size() == 2); + REQUIRE(aut.final.size() == 3); + } + + SECTION("construct an automaton with implicit operator completion one initial/final states from intermediate automaton") + { + std::string file = + "@LVLFA-explicit\n" + "%States-enum p q r m n\n" + "%Alphabet-auto\n" + "%Initial p q r\n" + "%Final p q m n\n"; + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + plumbing::construct(&aut, inter_aut); + + REQUIRE(aut.initial.size() == 3); + REQUIRE(aut.final.size() == 4); + } + + SECTION("construct a simple non-empty automaton accepting only the word 'a' from intermediate automaton") + { + std::string file = + "@LVLFA-explicit\n" + "%States-enum p q 3\n" + "%Alphabet-auto\n" + "%Initial q1\n" + "%Final q2\n" + "q1 a q2\n"; + + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + plumbing::construct(&aut, inter_aut, &alphabet); + + Run cex; + REQUIRE(!aut.is_lang_empty(&cex)); + auto word_bool_pair = aut.get_word_for_path(cex); + REQUIRE(word_bool_pair.second); + REQUIRE(word_bool_pair.first.word == encode_word(&alphabet, { "a" }).word); + + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "a" }))); + } + + SECTION("construct a more complicated non-empty automaton from intermediate automaton") + { + std::string file = + "@LVLFA-explicit\n" + "%States-enum p q 3\n" + "%Alphabet-auto\n" + "%Initial q1 | q3\n" + "%Final q5\n" + "q1 a q3\n" + "q1 a q10\n" + "q1 b q7\n" + "q3 a q7\n" + "q3 b q9\n" + "q9 a q9\n" + "q7 b q1\n" + "q7 a q3\n" + "q7 c q3\n" + "q10 a q7\n" + "q10 b q7\n" + "q10 c q7\n" + "q7 a q5\n" + "q5 c q9\n"; + + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + plumbing::construct(&aut, inter_aut, &alphabet); + + // some samples + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "b", "a"}))); + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "a", "c", "a", "a"}))); + REQUIRE(aut.is_in_lang(encode_word(&alphabet, + {"a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a"}))); + // some 
wrong samples + REQUIRE(!aut.is_in_lang(encode_word(&alphabet, { "b", "c"}))); + REQUIRE(!aut.is_in_lang(encode_word(&alphabet, { "a", "c", "c", "a"}))); + REQUIRE(!aut.is_in_lang(encode_word(&alphabet, { "b", "a", "c", "b"}))); + } + + SECTION("construct - final states from negation") + { + std::string file = + "@LVLFA-bits\n" + "%Alphabet-auto\n" + "%Initial q0 q8\n" + "%Final !q0 & !q1 & !q4 & !q5 & !q6\n" + "q0 a1 q1\n" + "q1 a2 q2\n" + "q2 a3 q3\n" + "q2 a4 q4\n" + "q3 a5 q5\n" + "q3 a6 q6\n" + "q5 a7 q7\n"; + + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + plumbing::construct(&aut, inter_aut, &alphabet); + REQUIRE(aut.final.size() == 4); + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "a1", "a2"}))); + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "a1", "a2", "a3"}))); + REQUIRE(!aut.is_in_lang(encode_word(&alphabet, { "a1", "a2", "a3", "a4"}))); + REQUIRE(aut.is_in_lang(encode_word(&alphabet, { "a1", "a2", "a3", "a5", "a7"}))); + } + + SECTION("construct - final states given as true") + { + std::string file = + "@LVLFA-bits\n" + "%Alphabet-auto\n" + "%Initial q0 q8\n" + "%Final \\true\n" + "q0 a1 q1\n" + "q1 a2 q2\n" + "q2 a3 q3\n" + "q2 a4 q4\n" + "q3 a5 q5\n" + "q3 a6 q6\n" + "q5 a7 q7\n"; + + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + lvlfa::builder::NameStateMap state_map; + plumbing::construct(&aut, inter_aut, &alphabet, &state_map); + CHECK(aut.final.size() == 9); + CHECK(aut.final[state_map.at("0")]); + CHECK(aut.final[state_map.at("1")]); + CHECK(aut.final[state_map.at("2")]); + CHECK(aut.final[state_map.at("3")]); + CHECK(aut.final[state_map.at("4")]); + CHECK(aut.final[state_map.at("5")]); + CHECK(aut.final[state_map.at("6")]); + CHECK(aut.final[state_map.at("7")]); + CHECK(aut.final[state_map.at("8")]); + } + + SECTION("construct - final states given as false") + { + std::string file = + "@LVLFA-bits\n" + "%Alphabet-auto\n" + "%Initial q0 q8\n" + "%Final \\false\n" + "q0 a1 q1\n" + "q1 a2 q2\n" + "q2 a3 q3\n" + "q2 a4 q4\n" + "q3 a5 q5\n" + "q3 a6 q6\n" + "q5 a7 q7\n"; + + const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); + inter_aut = auts[0]; + + lvlfa::builder::NameStateMap state_map; + plumbing::construct(&aut, inter_aut, &alphabet, &state_map); + CHECK(aut.final.empty()); + } +} // }}} + +TEST_CASE("mata::lvlfa::make_complete()") +{ // {{{ + Lvlfa aut(11); + + SECTION("empty automaton, empty alphabet") + { + OnTheFlyAlphabet alph{}; + + aut.make_complete(alph, 0); + + REQUIRE(aut.initial.empty()); + REQUIRE(aut.final.empty()); + REQUIRE(aut.delta.empty()); + } + + SECTION("empty automaton") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + + aut.make_complete(alph, 0); + + REQUIRE(aut.initial.empty()); + REQUIRE(aut.final.empty()); + REQUIRE(aut.delta.contains(0, alph["a"], 0)); + REQUIRE(aut.delta.contains(0, alph["b"], 0)); + } + + SECTION("non-empty automaton, empty alphabet") + { + OnTheFlyAlphabet alphabet{}; + + aut.initial = {1}; + + aut.make_complete(alphabet, 0); + + REQUIRE(aut.initial.size() == 1); + REQUIRE(*aut.initial.begin() == 1); + REQUIRE(aut.final.empty()); + REQUIRE(aut.delta.empty()); + } + + SECTION("one-state automaton") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + const State SINK = 10; + + aut.initial = {1}; + + aut.make_complete(alph, SINK); + + REQUIRE(aut.initial.size() == 1); + REQUIRE(*aut.initial.begin() == 1); + REQUIRE(aut.final.empty()); + REQUIRE(aut.delta.contains(1, alph["a"], 
SINK)); + REQUIRE(aut.delta.contains(1, alph["b"], SINK)); + REQUIRE(aut.delta.contains(SINK, alph["a"], SINK)); + REQUIRE(aut.delta.contains(SINK, alph["b"], SINK)); + } + + SECTION("bigger automaton") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b", "c" } }; + const State SINK = 9; + + aut.initial = {1, 2}; + aut.final = {8}; + aut.delta.add(1, alph["a"], 2); + aut.delta.add(2, alph["a"], 4); + aut.delta.add(2, alph["c"], 1); + aut.delta.add(2, alph["c"], 3); + aut.delta.add(3, alph["b"], 5); + aut.delta.add(4, alph["c"], 8); + + aut.make_complete(alph, SINK); + + REQUIRE(aut.delta.contains(1, alph["a"], 2)); + REQUIRE(aut.delta.contains(1, alph["b"], SINK)); + REQUIRE(aut.delta.contains(1, alph["c"], SINK)); + REQUIRE(aut.delta.contains(2, alph["a"], 4)); + REQUIRE(aut.delta.contains(2, alph["c"], 1)); + REQUIRE(aut.delta.contains(2, alph["c"], 3)); + REQUIRE(aut.delta.contains(2, alph["b"], SINK)); + REQUIRE(aut.delta.contains(3, alph["b"], 5)); + REQUIRE(aut.delta.contains(3, alph["a"], SINK)); + REQUIRE(aut.delta.contains(3, alph["c"], SINK)); + REQUIRE(aut.delta.contains(4, alph["c"], 8)); + REQUIRE(aut.delta.contains(4, alph["a"], SINK)); + REQUIRE(aut.delta.contains(4, alph["b"], SINK)); + REQUIRE(aut.delta.contains(5, alph["a"], SINK)); + REQUIRE(aut.delta.contains(5, alph["b"], SINK)); + REQUIRE(aut.delta.contains(5, alph["c"], SINK)); + REQUIRE(aut.delta.contains(8, alph["a"], SINK)); + REQUIRE(aut.delta.contains(8, alph["b"], SINK)); + REQUIRE(aut.delta.contains(8, alph["c"], SINK)); + REQUIRE(aut.delta.contains(SINK, alph["a"], SINK)); + REQUIRE(aut.delta.contains(SINK, alph["b"], SINK)); + REQUIRE(aut.delta.contains(SINK, alph["c"], SINK)); + } +} // }}} + +TEST_CASE("mata::lvlfa::complement()") +{ // {{{ + Lvlfa aut(3); + Lvlfa cmpl; + + SECTION("empty automaton, empty alphabet") + { + OnTheFlyAlphabet alph{}; + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "false"}}); + Lvlfa empty_string_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; + CHECK(are_equivalent(cmpl, empty_string_lvlfa)); + } + + SECTION("empty automaton") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "false"}}); + + REQUIRE(cmpl.is_in_lang({})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"] }, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["b"] }, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["a"]}, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["b"], alph["b"], alph["a"] }, {}})); + + Lvlfa sigma_star_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; + CHECK(are_equivalent(cmpl, sigma_star_lvlfa)); + } + + SECTION("empty automaton accepting epsilon, empty alphabet") + { + OnTheFlyAlphabet alph{}; + aut.initial = {1}; + aut.final = {1}; + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "false"}}); + + CHECK(cmpl.is_lang_empty()); + } + + SECTION("empty automaton accepting epsilon") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + aut.initial = {1}; + aut.final = {1}; + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "false"}}); + + REQUIRE(!cmpl.is_in_lang({})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"]}, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["b"]}, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["a"]}, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["b"], alph["b"], alph["a"]}, {}})); + REQUIRE(cmpl.initial.size() == 1); + REQUIRE(cmpl.final.size() == 1); + 
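+ // The classical complement first determinizes and completes the automaton over
+ // {a, b}, yielding two states (the epsilon-accepting original and a sink), each
+ // with one 'a'- and one 'b'-transition; flipping the final states then leaves
+ // only the sink final, which explains the single final state above and the four
+ // transitions checked below.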
REQUIRE(cmpl.delta.num_of_transitions() == 4); + } + + SECTION("non-empty automaton accepting a*b*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + aut.initial = {1, 2}; + aut.final = {1, 2}; + + aut.delta.add(1, alph["a"], 1); + aut.delta.add(1, alph["a"], 2); + aut.delta.add(2, alph["b"], 2); + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "false"}}); + + REQUIRE(!cmpl.is_in_lang(Word{})); + REQUIRE(!cmpl.is_in_lang(Word{ alph["a"] })); + REQUIRE(!cmpl.is_in_lang(Word{ alph["b"] })); + REQUIRE(!cmpl.is_in_lang(Word{ alph["a"], alph["a"] })); + REQUIRE(cmpl.is_in_lang(Word{ alph["a"], alph["b"], alph["b"], alph["a"] })); + REQUIRE(!cmpl.is_in_lang(Word{ alph["a"], alph["a"], alph["b"], alph["b"] })); + REQUIRE(cmpl.is_in_lang(Word{ alph["b"], alph["a"], alph["a"], alph["a"] })); + + REQUIRE(cmpl.initial.size() == 1); + REQUIRE(cmpl.final.size() == 1); + REQUIRE(cmpl.delta.num_of_transitions() == 6); + } + + SECTION("empty automaton, empty alphabet, minimization") + { + OnTheFlyAlphabet alph{}; + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "true"}}); + Lvlfa empty_string_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; + CHECK(are_equivalent(empty_string_lvlfa, cmpl)); + } + + SECTION("empty automaton, minimization") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "true"}}); + + REQUIRE(cmpl.is_in_lang({})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"] }, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["b"] }, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["a"]}, {}})); + REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["b"], alph["b"], alph["a"] }, {}})); + + Lvlfa sigma_star_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; + CHECK(are_equivalent(sigma_star_lvlfa, cmpl)); + } + + SECTION("minimization vs no minimization") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + aut.initial = {0, 1}; + aut.final = {1, 2}; + + aut.delta.add(1, alph["b"], 1); + aut.delta.add(1, alph["a"], 2); + aut.delta.add(2, alph["b"], 2); + aut.delta.add(0, alph["a"], 1); + aut.delta.add(0, alph["a"], 2); + + cmpl = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "false"}}); + + Lvlfa cmpl_min = complement(aut, alph, {{"algorithm", "classical"}, + {"minimize", "true"}}); + + CHECK(are_equivalent(cmpl, cmpl_min, &alph)); + CHECK(cmpl_min.num_of_states() == 4); + CHECK(cmpl.num_of_states() == 5); + } + +} // }}} + +TEST_CASE("mata::lvlfa::is_universal()") +{ // {{{ + Lvlfa aut(6); + Run cex; + ParameterMap params; + + const std::unordered_set ALGORITHMS = { + "naive", + "antichains", + }; + + SECTION("empty automaton, empty alphabet") + { + OnTheFlyAlphabet alph{}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, params); + + REQUIRE(!is_univ); + } + } + + SECTION("empty automaton accepting epsilon, empty alphabet") + { + OnTheFlyAlphabet alph{}; + aut.initial = {1}; + aut.final = {1}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, &cex, params); + + REQUIRE(is_univ); + REQUIRE(cex.word.empty()); + } + } + + SECTION("empty automaton accepting epsilon") + { + OnTheFlyAlphabet alph{ std::vector{ "a" } }; + aut.initial = {1}; + aut.final = {1}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, &cex, params); + + REQUIRE(!is_univ); + 
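+ // The automaton accepts only the empty word, so any single-symbol word witnesses
+ // non-universality; the check below therefore allows either candidate symbol
+ // instead of fixing a particular counterexample.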
REQUIRE(((cex.word == Word{alph["a"]}) || (cex.word == Word{alph["b"]}))); + } + } + + SECTION("automaton for a*b*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b" } }; + aut.initial = {1, 2}; + aut.final = {1, 2}; + + aut.delta.add(1, alph["a"], 1); + aut.delta.add(1, alph["a"], 2); + aut.delta.add(2, alph["b"], 2); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, params); + + REQUIRE(!is_univ); + } + } + + SECTION("automaton for a* + b*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + aut.initial = {1, 2}; + aut.final = {1, 2}; + + aut.delta.add(1, alph["a"], 1); + aut.delta.add(2, alph["b"], 2); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, params); + + REQUIRE(!is_univ); + } + } + + SECTION("automaton for (a + b)*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + aut.initial = {1}; + aut.final = {1}; + + aut.delta.add(1, alph["a"], 1); + aut.delta.add(1, alph["b"], 1); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, params); + + REQUIRE(is_univ); + } + } + + SECTION("automaton for eps + (a+b) + (a+b)(a+b)(a* + b*)") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + aut.initial = {1}; + aut.final = {1, 2, 3, 4, 5}; + + aut.delta.add(1, alph["a"], 2); + aut.delta.add(1, alph["b"], 2); + aut.delta.add(2, alph["a"], 3); + aut.delta.add(2, alph["b"], 3); + + aut.delta.add(3, alph["a"], 4); + aut.delta.add(4, alph["a"], 4); + + aut.delta.add(3, alph["b"], 5); + aut.delta.add(5, alph["b"], 5); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, &cex, params); + + REQUIRE(!is_univ); + + REQUIRE(cex.word.size() == 4); + REQUIRE((cex.word[0] == alph["a"] || cex.word[0] == alph["b"])); + REQUIRE((cex.word[1] == alph["a"] || cex.word[1] == alph["b"])); + REQUIRE((cex.word[2] == alph["a"] || cex.word[2] == alph["b"])); + REQUIRE((cex.word[3] == alph["a"] || cex.word[3] == alph["b"])); + REQUIRE(cex.word[2] != cex.word[3]); + } + } + + SECTION("automaton for epsilon + a(a + b)* + b(a + b)*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + aut.initial = {1, 3}; + aut.final = {1, 2, 4}; + + aut.delta.add(1, alph["a"], 2); + aut.delta.add(2, alph["a"], 2); + aut.delta.add(2, alph["b"], 2); + aut.delta.add(3, alph["b"], 4); + aut.delta.add(4, alph["a"], 4); + aut.delta.add(4, alph["b"], 4); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, &cex, params); + + REQUIRE(is_univ); + } + } + + SECTION("example from Abdulla et al. 
TACAS'10") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + aut.initial = {1, 2}; + aut.final = {1, 2, 3}; + + aut.delta.add(1, alph["b"], 1); + aut.delta.add(1, alph["a"], 2); + aut.delta.add(1, alph["b"], 4); + aut.delta.add(2, alph["b"], 2); + aut.delta.add(2, alph["a"], 3); + aut.delta.add(3, alph["b"], 3); + aut.delta.add(3, alph["a"], 1); + aut.delta.add(4, alph["b"], 2); + aut.delta.add(4, alph["b"], 3); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, &cex, params); + + REQUIRE(is_univ); + } + } + + SECTION("subsumption-pruning in processed") + { + OnTheFlyAlphabet alph{ std::vector{ "a" } }; + aut.initial = {1, 2}; + aut.final = {1}; + + aut.delta.add(1, alph["a"], 1); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_univ = aut.is_universal(alph, &cex, params); + + REQUIRE(is_univ); + } + } + + SECTION("wrong parameters 1") + { + OnTheFlyAlphabet alph{}; + + CHECK_THROWS_WITH(aut.is_universal(alph, params), + Catch::Contains("requires setting the \"algo\" key")); + } + + SECTION("wrong parameters 2") + { + OnTheFlyAlphabet alph{}; + params["algorithm"] = "foo"; + + CHECK_THROWS_WITH(aut.is_universal(alph, params), + Catch::Contains("received an unknown value")); + } +} // }}} + +TEST_CASE("mata::lvlfa::is_included()") +{ // {{{ + Lvlfa smaller(10); + Lvlfa bigger(16); + Run cex; + ParameterMap params; + + const std::unordered_set ALGORITHMS = { + "naive", + "antichains", + }; + + SECTION("{} <= {}, empty alphabet") + { + OnTheFlyAlphabet alph{}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_incl = is_included(smaller, bigger, &alph, params); + CHECK(is_incl); + + is_incl = is_included(bigger, smaller, &alph, params); + CHECK(is_incl); + } + } + + SECTION("{} <= {epsilon}, empty alphabet") + { + OnTheFlyAlphabet alph{}; + bigger.initial = {1}; + bigger.final = {1}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_incl = is_included(smaller, bigger, &cex, &alph, params); + CHECK(is_incl); + + is_incl = is_included(bigger, smaller, &cex, &alph, params); + CHECK(!is_incl); + } + } + + SECTION("{epsilon} <= {epsilon}, empty alphabet") + { + OnTheFlyAlphabet alph{}; + smaller.initial = {1}; + smaller.final = {1}; + bigger.initial = {11}; + bigger.final = {11}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_incl = is_included(smaller, bigger, &cex, &alph, params); + CHECK(is_incl); + + is_incl = is_included(bigger, smaller, &cex, &alph, params); + CHECK(is_incl); + } + } + + SECTION("{epsilon} !<= {}, empty alphabet") + { + OnTheFlyAlphabet alph{}; + smaller.initial = {1}; + smaller.final = {1}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_incl = is_included(smaller, bigger, &cex, &alph, params); + + REQUIRE(!is_incl); + REQUIRE(cex.word.empty()); + + is_incl = is_included(bigger, smaller, &cex, &alph, params); + REQUIRE(cex.word.empty()); + REQUIRE(is_incl); + } + } + + SECTION("a* + b* <= (a+b)*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + smaller.initial = {1, 2}; + smaller.final = {1, 2}; + smaller.delta.add(1, alph["a"], 1); + smaller.delta.add(2, alph["b"], 2); + + bigger.initial = {11}; + bigger.final = {11}; + bigger.delta.add(11, alph["a"], 11); + bigger.delta.add(11, alph["b"], 11); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_incl = is_included(smaller, bigger, &alph, params); + 
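+ // a* + b* is included in (a+b)*, but not the other way around: e.g. "ab" belongs
+ // to (a+b)* and to neither a* nor b*, so the reverse check below must fail.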
REQUIRE(is_incl); + + is_incl = is_included(bigger, smaller, &alph, params); + REQUIRE(!is_incl); + } + } + + SECTION("(a+b)* !<= a* + b*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + smaller.initial = {1}; + smaller.final = {1}; + smaller.delta.add(1, alph["a"], 1); + smaller.delta.add(1, alph["b"], 1); + + bigger.initial = {11, 12}; + bigger.final = {11, 12}; + bigger.delta.add(11, alph["a"], 11); + bigger.delta.add(12, alph["b"], 12); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + + bool is_incl = is_included(smaller, bigger, &cex, &alph, params); + + REQUIRE(!is_incl); + REQUIRE(( + cex.word == Word{alph["a"], alph["b"]} || + cex.word == Word{alph["b"], alph["a"]})); + + is_incl = is_included(bigger, smaller, &cex, &alph, params); + REQUIRE(is_incl); + REQUIRE(( + cex.word == Word{alph["a"], alph["b"]} || + cex.word == Word{alph["b"], alph["a"]})); + } + } + + SECTION("(a+b)* !<= eps + (a+b) + (a+b)(a+b)(a* + b*)") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + smaller.initial = {1}; + smaller.final = {1}; + smaller.delta.add(1, alph["a"], 1); + smaller.delta.add(1, alph["b"], 1); + + bigger.initial = {11}; + bigger.final = {11, 12, 13, 14, 15}; + + bigger.delta.add(11, alph["a"], 12); + bigger.delta.add(11, alph["b"], 12); + bigger.delta.add(12, alph["a"], 13); + bigger.delta.add(12, alph["b"], 13); + + bigger.delta.add(13, alph["a"], 14); + bigger.delta.add(14, alph["a"], 14); + + bigger.delta.add(13, alph["b"], 15); + bigger.delta.add(15, alph["b"], 15); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + bool is_incl = is_included(smaller, bigger, &cex, &alph, params); + REQUIRE(!is_incl); + + REQUIRE(cex.word.size() == 4); + REQUIRE((cex.word[0] == alph["a"] || cex.word[0] == alph["b"])); + REQUIRE((cex.word[1] == alph["a"] || cex.word[1] == alph["b"])); + REQUIRE((cex.word[2] == alph["a"] || cex.word[2] == alph["b"])); + REQUIRE((cex.word[3] == alph["a"] || cex.word[3] == alph["b"])); + REQUIRE(cex.word[2] != cex.word[3]); + + is_incl = is_included(bigger, smaller, &cex, &alph, params); + REQUIRE(is_incl); + + REQUIRE(cex.word.size() == 4); + REQUIRE((cex.word[0] == alph["a"] || cex.word[0] == alph["b"])); + REQUIRE((cex.word[1] == alph["a"] || cex.word[1] == alph["b"])); + REQUIRE((cex.word[2] == alph["a"] || cex.word[2] == alph["b"])); + REQUIRE((cex.word[3] == alph["a"] || cex.word[3] == alph["b"])); + REQUIRE(cex.word[2] != cex.word[3]); + } + } + + SECTION("wrong parameters 1") + { + OnTheFlyAlphabet alph{}; + + CHECK_THROWS_WITH(is_included(smaller, bigger, &alph, params), + Catch::Contains("requires setting the \"algo\" key")); + CHECK_NOTHROW(is_included(smaller, bigger, &alph)); + } + + SECTION("wrong parameters 2") + { + OnTheFlyAlphabet alph{}; + params["algorithm"] = "foo"; + + CHECK_THROWS_WITH(is_included(smaller, bigger, &alph, params), + Catch::Contains("received an unknown value")); + CHECK_NOTHROW(is_included(smaller, bigger, &alph)); + } +} // }}} + +TEST_CASE("mata::lvlfa::are_equivalent") +{ + Lvlfa smaller(10); + Lvlfa bigger(16); + Word cex; + ParameterMap params; + + const std::unordered_set ALGORITHMS = { + "naive", + "antichains", + }; + + SECTION("{} == {}, empty alphabet") + { + OnTheFlyAlphabet alph{}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + + CHECK(are_equivalent(smaller, bigger, &alph, params)); + CHECK(are_equivalent(smaller, bigger, params)); + CHECK(are_equivalent(smaller, bigger)); + + CHECK(are_equivalent(bigger, smaller, &alph, params)); + 
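+ // are_equivalent() is exercised in three forms: with an explicit alphabet and
+ // parameters, with parameters only, and with defaults; all of them should agree.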
CHECK(are_equivalent(bigger, smaller, params)); + CHECK(are_equivalent(bigger, smaller)); + } + } + + SECTION("{} == {epsilon}, empty alphabet") + { + OnTheFlyAlphabet alph{}; + bigger.initial = {1}; + bigger.final = {1}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + + CHECK(!are_equivalent(smaller, bigger, &alph, params)); + CHECK(!are_equivalent(smaller, bigger, params)); + CHECK(!are_equivalent(smaller, bigger)); + + CHECK(!are_equivalent(bigger, smaller, &alph, params)); + CHECK(!are_equivalent(bigger, smaller, params)); + CHECK(!are_equivalent(bigger, smaller)); + } + } + + SECTION("{epsilon} == {epsilon}, empty alphabet") + { + OnTheFlyAlphabet alph{}; + smaller.initial = {1}; + smaller.final = {1}; + bigger.initial = {11}; + bigger.final = {11}; + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + + CHECK(are_equivalent(smaller, bigger, &alph, params)); + CHECK(are_equivalent(smaller, bigger, params)); + CHECK(are_equivalent(smaller, bigger)); + + CHECK(are_equivalent(bigger, smaller, &alph, params)); + CHECK(are_equivalent(bigger, smaller, params)); + CHECK(are_equivalent(bigger, smaller)); + } + } + + SECTION("a* + b* == (a+b)*") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + smaller.initial = {1, 2}; + smaller.final = {1, 2}; + smaller.delta.add(1, alph["a"], 1); + smaller.delta.add(2, alph["b"], 2); + + bigger.initial = {11}; + bigger.final = {11}; + bigger.delta.add(11, alph["a"], 11); + bigger.delta.add(11, alph["b"], 11); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + + //TODO:what about we test the plumbing versions primarily? + // Debugging with the dispatcher is annoying. + + CHECK(!are_equivalent(smaller, bigger, &alph, params)); + CHECK(!are_equivalent(smaller, bigger, params)); + CHECK(!are_equivalent(smaller, bigger)); + + CHECK(!are_equivalent(bigger, smaller, &alph, params)); + CHECK(!are_equivalent(bigger, smaller, params)); + CHECK(!are_equivalent(bigger, smaller)); + } + } + + SECTION("a* != (a|b)*, was throwing exception") + { + Lvlfa aut; + mata::parser::create_nfa(&aut, "a*"); + Lvlfa aut2; + mata::parser::create_nfa(&aut2, "(a|b)*"); + CHECK(!are_equivalent(aut, aut2)); + } + + SECTION("(a+b)* !<= eps + (a+b) + (a+b)(a+b)(a* + b*)") + { + OnTheFlyAlphabet alph{ std::vector{ "a", "b"} }; + smaller.initial = {1}; + smaller.final = {1}; + smaller.delta.add(1, alph["a"], 1); + smaller.delta.add(1, alph["b"], 1); + + bigger.initial = {11}; + bigger.final = {11, 12, 13, 14, 15}; + + bigger.delta.add(11, alph["a"], 12); + bigger.delta.add(11, alph["b"], 12); + bigger.delta.add(12, alph["a"], 13); + bigger.delta.add(12, alph["b"], 13); + + bigger.delta.add(13, alph["a"], 14); + bigger.delta.add(14, alph["a"], 14); + + bigger.delta.add(13, alph["b"], 15); + bigger.delta.add(15, alph["b"], 15); + + for (const auto& algo : ALGORITHMS) { + params["algorithm"] = algo; + + CHECK(!are_equivalent(smaller, bigger, &alph, params)); + CHECK(!are_equivalent(smaller, bigger, params)); + CHECK(!are_equivalent(smaller, bigger)); + + CHECK(!are_equivalent(bigger, smaller, &alph, params)); + CHECK(!are_equivalent(bigger, smaller, params)); + CHECK(!are_equivalent(bigger, smaller)); + } + } + + SECTION("wrong parameters 1") + { + OnTheFlyAlphabet alph{}; + + CHECK_THROWS_WITH(are_equivalent(smaller, bigger, &alph, params), + Catch::Contains("requires setting the \"algo\" key")); + CHECK_THROWS_WITH(are_equivalent(smaller, bigger, params), + Catch::Contains("requires setting the \"algo\" key")); + 
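+ // Leaving out the parameter map altogether falls back to the default settings,
+ // so the parameter-less call below is expected not to throw.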
CHECK_NOTHROW(are_equivalent(smaller, bigger)); + } + + SECTION("wrong parameters 2") + { + OnTheFlyAlphabet alph{}; + params["algorithm"] = "foo"; + + CHECK_THROWS_WITH(are_equivalent(smaller, bigger, &alph, params), + Catch::Contains("received an unknown value")); + CHECK_THROWS_WITH(are_equivalent(smaller, bigger, params), + Catch::Contains("received an unknown value")); + CHECK_NOTHROW(are_equivalent(smaller, bigger)); + } +} + +TEST_CASE("mata::lvlfa::revert()") +{ // {{{ + Lvlfa aut(9); + + SECTION("empty automaton") + { + Lvlfa result = revert(aut); + + REQUIRE(result.delta.empty()); + REQUIRE(result.initial.empty()); + REQUIRE(result.final.empty()); + } + + SECTION("no-transition automaton") + { + aut.initial.insert(1); + aut.initial.insert(3); + + aut.final.insert(2); + aut.final.insert(5); + + Lvlfa result = revert(aut); + + REQUIRE(result.delta.empty()); + REQUIRE(result.initial[2]); + REQUIRE(result.initial[5]); + REQUIRE(result.final[1]); + REQUIRE(result.final[3]); + } + + SECTION("one-transition automaton") + { + aut.initial.insert(1); + aut.final.insert(2); + aut.delta.add(1, 'a', 2); + + Lvlfa result = revert(aut); + + REQUIRE(result.initial[2]); + REQUIRE(result.final[1]); + REQUIRE(result.delta.contains(2, 'a', 1)); + REQUIRE(result.delta.num_of_transitions() == aut.delta.num_of_transitions()); + } + + SECTION("bigger automaton") + { + aut.initial = {1, 2}; + aut.delta.add(1, 'a', 2); + aut.delta.add(1, 'a', 3); + aut.delta.add(1, 'b', 4); + aut.delta.add(2, 'a', 2); + aut.delta.add(2, 'a', 3); + aut.delta.add(2, 'b', 4); + aut.delta.add(3, 'b', 4); + aut.delta.add(3, 'c', 7); + aut.delta.add(3, 'b', 2); + aut.delta.add(7, 'a', 8); + aut.final = {3}; + + Lvlfa result = revert(aut); + //REQUIRE(result.final == StateSet({1, 2})); + REQUIRE(StateSet(result.final) == StateSet({1, 2})); + REQUIRE(result.delta.contains(2, 'a', 1)); + REQUIRE(result.delta.contains(3, 'a', 1)); + REQUIRE(result.delta.contains(4, 'b', 1)); + REQUIRE(result.delta.contains(2, 'a', 2)); + REQUIRE(result.delta.contains(3, 'a', 2)); + REQUIRE(result.delta.contains(4, 'b', 2)); + REQUIRE(result.delta.contains(4, 'b', 3)); + REQUIRE(result.delta.contains(7, 'c', 3)); + REQUIRE(result.delta.contains(2, 'b', 3)); + REQUIRE(result.delta.contains(8, 'a', 7)); + REQUIRE(StateSet(result.initial) == StateSet({3})); + } + + SECTION("Automaton A") { + Lvlfa lvlfa{ 11 }; + FILL_WITH_AUT_A(lvlfa); + Lvlfa res = revert(lvlfa); + CHECK(res.initial[5]); + CHECK(res.final[1]); + CHECK(res.final[3]); + CHECK(res.delta.num_of_transitions() == 15); + CHECK(res.delta.contains(5, 'a', 5)); + CHECK(res.delta.contains(5, 'a', 7)); + CHECK(res.delta.contains(9, 'a', 9)); + CHECK(res.delta.contains(9, 'c', 5)); + CHECK(res.delta.contains(9, 'b', 3)); + CHECK(res.delta.contains(7, 'a', 3)); + CHECK(res.delta.contains(7, 'a', 10)); + CHECK(res.delta.contains(7, 'b', 10)); + CHECK(res.delta.contains(7, 'c', 10)); + CHECK(res.delta.contains(7, 'b', 1)); + CHECK(res.delta.contains(3, 'a', 7)); + CHECK(res.delta.contains(3, 'c', 7)); + CHECK(res.delta.contains(3, 'a', 1)); + CHECK(res.delta.contains(1, 'b', 7)); + CHECK(res.delta.contains(10, 'a', 1)); + } + + SECTION("Automaton B") { + Lvlfa lvlfa{ 15 }; + FILL_WITH_AUT_B(lvlfa); + Lvlfa res = revert(lvlfa); + CHECK(res.initial[2]); + CHECK(res.initial[12]); + CHECK(res.final[4]); + CHECK(res.delta.num_of_transitions() == 12); + CHECK(res.delta.contains(8, 'a', 4)); + CHECK(res.delta.contains(8, 'c', 4)); + CHECK(res.delta.contains(4, 'b', 8)); + CHECK(res.delta.contains(6, 'b', 
4)); + CHECK(res.delta.contains(6, 'a', 4)); + CHECK(res.delta.contains(2, 'a', 6)); + CHECK(res.delta.contains(2, 'a', 0)); + CHECK(res.delta.contains(2, 'b', 2)); + CHECK(res.delta.contains(0, 'a', 2)); + CHECK(res.delta.contains(12, 'c', 2)); + CHECK(res.delta.contains(12, 'b', 14)); + CHECK(res.delta.contains(14, 'a', 12)); + } +} // }}} + + +TEST_CASE("mata::lvlfa::Lvlfa::is_deterministic()") +{ // {{{ + Lvlfa aut('s'+1); + + SECTION("(almost) empty automaton") { + // no initial states + REQUIRE(!aut.is_deterministic()); + + // add an initial state + aut.initial.insert('q'); + REQUIRE(aut.is_deterministic()); + + // add the same initial state + aut.initial.insert('q'); + REQUIRE(aut.is_deterministic()); + + // add another initial state + aut.initial.insert('r'); + REQUIRE(!aut.is_deterministic()); + + // add a final state + aut.final.insert('q'); + REQUIRE(!aut.is_deterministic()); + } + + SECTION("trivial automata") { + aut.initial.insert('q'); + aut.delta.add('q', 'a', 'r'); + REQUIRE(aut.is_deterministic()); + + // unreachable states + aut.delta.add('s', 'a', 'r'); + REQUIRE(aut.is_deterministic()); + + // transitions over a different symbol + aut.delta.add('q', 'b', 'h'); + REQUIRE(aut.is_deterministic()); + + // nondeterminism + aut.delta.add('q', 'a', 's'); + REQUIRE(!aut.is_deterministic()); + } + + SECTION("larger automaton 1") { + FILL_WITH_AUT_A(aut); + REQUIRE(!aut.is_deterministic()); + } + + SECTION("larger automaton 2") { + FILL_WITH_AUT_B(aut); + REQUIRE(!aut.is_deterministic()); + } +} // }}} + +TEST_CASE("mata::lvlfa::is_complete()") +{ // {{{ + Lvlfa aut('q'+1); + + SECTION("empty automaton") + { + OnTheFlyAlphabet alph{}; + + // is complete for the empty alphabet + REQUIRE(aut.is_complete(&alph)); + + alph.translate_symb("a1"); + alph.translate_symb("a2"); + + // the empty automaton is complete even for a non-empty alphabet + REQUIRE(aut.is_complete(&alph)); + + // add a non-reachable state (the automaton should still be complete) + aut.delta.add('q', alph["a1"], 'q'); + REQUIRE(aut.is_complete(&alph)); + } + + SECTION("small automaton") + { + OnTheFlyAlphabet alph{}; + + aut.initial.insert(4); + aut.delta.add(4, alph["a"], 8); + aut.delta.add(4, alph["c"], 8); + aut.delta.add(4, alph["a"], 6); + aut.delta.add(4, alph["b"], 6); + aut.delta.add(8, alph["b"], 4); + aut.delta.add(6, alph["a"], 2); + aut.delta.add(2, alph["b"], 2); + aut.delta.add(2, alph["a"], 0); + aut.delta.add(2, alph["c"], 12); + aut.delta.add(0, alph["a"], 2); + aut.delta.add(12, alph["a"], 14); + aut.delta.add(14, alph["b"], 12); + aut.final.insert({2, 12}); + + REQUIRE(!aut.is_complete(&alph)); + + aut.make_complete(alph, 100); + REQUIRE(aut.is_complete(&alph)); + } + + SECTION("using a non-alphabet symbol") + { + OnTheFlyAlphabet alph{}; + + aut.initial.insert(4); + aut.delta.add(4, alph["a"], 8); + aut.delta.add(4, alph["c"], 8); + aut.delta.add(4, alph["a"], 6); + aut.delta.add(4, alph["b"], 6); + aut.delta.add(6, 100, 4); + + CHECK_THROWS_WITH(aut.is_complete(&alph), + Catch::Contains("symbol that is not in the provided alphabet")); + } +} // }}} + +TEST_CASE("mata::lvlfa::is_prfx_in_lang()") +{ // {{{ + Lvlfa aut('q'+1); + + SECTION("empty automaton") + { + Run w; + w.word = {'a', 'b', 'd'}; + REQUIRE(!aut.is_prfx_in_lang(w)); + + w.word = { }; + REQUIRE(!aut.is_prfx_in_lang(w)); + } + + SECTION("automaton accepting only epsilon") + { + aut.initial.insert('q'); + aut.final.insert('q'); + + Run w; + w.word = { }; + REQUIRE(aut.is_prfx_in_lang(w)); + + w.word = {'a', 'b'}; + 
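+ // The empty word is a prefix of every word, so although "ab" itself is not
+ // accepted, one of its prefixes (epsilon) is, and the check below succeeds.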
REQUIRE(aut.is_prfx_in_lang(w)); + } + + SECTION("small automaton") + { + FILL_WITH_AUT_B(aut); + + Run w; + w.word = {'b', 'a'}; + REQUIRE(aut.is_prfx_in_lang(w)); + + w.word = { }; + REQUIRE(!aut.is_prfx_in_lang(w)); + + w.word = {'c', 'b', 'a'}; + REQUIRE(!aut.is_prfx_in_lang(w)); + + w.word = {'c', 'b', 'a', 'a'}; + REQUIRE(aut.is_prfx_in_lang(w)); + + w.word = {'a', 'a'}; + REQUIRE(aut.is_prfx_in_lang(w)); + + w.word = {'c', 'b', 'b', 'a', 'c', 'b'}; + REQUIRE(aut.is_prfx_in_lang(w)); + + w.word = Word(100000, 'a'); + REQUIRE(aut.is_prfx_in_lang(w)); + + w.word = Word(100000, 'b'); + REQUIRE(!aut.is_prfx_in_lang(w)); + } +} // }}} + +TEST_CASE("mata::lvlfa::fw-direct-simulation()") +{ // {{{ + Lvlfa aut; + + SECTION("empty automaton") + { + Simlib::Util::BinaryRelation result = compute_relation(aut); + + REQUIRE(result.size() == 0); + } + + aut.add_state(8); + SECTION("no-transition automaton") + { + aut.initial.insert(1); + aut.initial.insert(3); + + aut.final.insert(2); + aut.final.insert(5); + + Simlib::Util::BinaryRelation result = compute_relation(aut); + REQUIRE(result.get(1,3)); + REQUIRE(result.get(2,5)); + REQUIRE(!result.get(5,1)); + REQUIRE(!result.get(2,3)); + } + + SECTION("small automaton") + { + aut.initial.insert(1); + aut.final.insert(2); + aut.delta.add(1, 'a', 4); + aut.delta.add(4, 'b', 5); + aut.delta.add(2, 'b', 5); + aut.delta.add(1, 'b', 4); + + Simlib::Util::BinaryRelation result = compute_relation(aut); + REQUIRE(result.get(4,1)); + REQUIRE(!result.get(2,5)); + + } + + Lvlfa aut_big(9); + + SECTION("bigger automaton") + { + aut_big.initial = {1, 2}; + aut_big.delta.add(1, 'a', 2); + aut_big.delta.add(1, 'a', 3); + aut_big.delta.add(1, 'b', 4); + aut_big.delta.add(2, 'a', 2); + aut_big.delta.add(2, 'b', 2); + aut_big.delta.add(2, 'a', 3); + aut_big.delta.add(2, 'b', 4); + aut_big.delta.add(3, 'b', 4); + aut_big.delta.add(3, 'c', 7); + aut_big.delta.add(3, 'b', 2); + aut_big.delta.add(5, 'c', 3); + aut_big.delta.add(7, 'a', 8); + aut_big.final = {3}; + + Simlib::Util::BinaryRelation result = compute_relation(aut_big); + REQUIRE(result.get(1,2)); + REQUIRE(!result.get(2,1)); + REQUIRE(!result.get(3,1)); + REQUIRE(!result.get(3,2)); + REQUIRE(result.get(4,1)); + REQUIRE(result.get(4,2)); + REQUIRE(result.get(4,5)); + REQUIRE(!result.get(5,2)); + REQUIRE(!result.get(5,1)); + REQUIRE(result.get(7,1)); + REQUIRE(result.get(7,2)); + REQUIRE(result.get(8,1)); + REQUIRE(result.get(8,2)); + REQUIRE(result.get(8,5)); + } +} // }} + +TEST_CASE("mata::lvlfa::reduce_size_by_simulation()") +{ + Lvlfa aut; + StateRenaming state_renaming; + + SECTION("empty automaton") + { + Lvlfa result = reduce(aut, &state_renaming); + + REQUIRE(result.delta.empty()); + REQUIRE(result.initial.empty()); + REQUIRE(result.final.empty()); + } + + SECTION("simple automaton") + { + aut.add_state(2); + aut.initial.insert(1); + + aut.final.insert(2); + Lvlfa result = reduce(aut, &state_renaming); + + REQUIRE(result.delta.empty()); + REQUIRE(result.initial[state_renaming[1]]); + REQUIRE(result.final[state_renaming[2]]); + REQUIRE(result.num_of_states() == 2); + REQUIRE(state_renaming[1] == state_renaming[0]); + REQUIRE(state_renaming[2] != state_renaming[0]); + } + + SECTION("big automaton") + { + aut.add_state(9); + aut.initial = {1, 2}; + aut.delta.add(1, 'a', 2); + aut.delta.add(1, 'a', 3); + aut.delta.add(1, 'b', 4); + aut.delta.add(2, 'a', 2); + aut.delta.add(2, 'b', 2); + aut.delta.add(2, 'a', 3); + aut.delta.add(2, 'b', 4); + aut.delta.add(3, 'b', 4); + aut.delta.add(3, 'c', 7); + 
aut.delta.add(3, 'b', 2); + aut.delta.add(5, 'c', 3); + aut.delta.add(7, 'a', 8); + aut.delta.add(9, 'b', 2); + aut.delta.add(9, 'c', 0); + aut.delta.add(0, 'a', 4); + aut.final = {3, 9}; + + + Lvlfa result = reduce(aut, &state_renaming); + + REQUIRE(result.num_of_states() == 6); + REQUIRE(result.initial[state_renaming[1]]); + REQUIRE(result.initial[state_renaming[2]]); + REQUIRE(result.delta.contains(state_renaming[9], 'c', state_renaming[0])); + REQUIRE(result.delta.contains(state_renaming[9], 'c', state_renaming[7])); + REQUIRE(result.delta.contains(state_renaming[3], 'c', state_renaming[0])); + REQUIRE(result.delta.contains(state_renaming[0], 'a', state_renaming[8])); + REQUIRE(result.delta.contains(state_renaming[7], 'a', state_renaming[4])); + REQUIRE(result.delta.contains(state_renaming[1], 'a', state_renaming[3])); + REQUIRE(!result.delta.contains(state_renaming[3], 'b', state_renaming[4])); + REQUIRE(result.delta.contains(state_renaming[2], 'a', state_renaming[2])); + REQUIRE(result.final[state_renaming[9]]); + REQUIRE(result.final[state_renaming[3]]); + + result = reduce(aut.trim(), &state_renaming); + CHECK(result.num_of_states() == 3); + CHECK(result.initial == SparseSet{ 0, 1 }); + CHECK(result.final == SparseSet{ 2 }); + CHECK(result.delta.num_of_transitions() == 6); + CHECK(result.delta.contains(state_renaming[0], 'a', state_renaming[2])); + CHECK(result.delta.contains(state_renaming[0], 'a', state_renaming[1])); + CHECK(result.delta.contains(state_renaming[1], 'a', state_renaming[1])); + CHECK(result.delta.contains(state_renaming[1], 'b', state_renaming[1])); + CHECK(result.delta.contains(state_renaming[1], 'a', state_renaming[2])); + CHECK(result.delta.contains(state_renaming[2], 'b', state_renaming[1])); + } + + SECTION("no transitions from non-final state") + { + aut.delta.add(0, 'a', 1); + aut.initial = { 0 }; + Lvlfa result = reduce(aut.trim(), &state_renaming); + CHECK(are_equivalent(result, aut)); + } +} + +TEST_CASE("mata::lvlfa::union_norename()") { + Run one{{1},{}}; + Run zero{{0}, {}}; + + Lvlfa lhs(2); + lhs.initial.insert(0); + lhs.delta.add(0, 0, 1); + lhs.final.insert(1); + REQUIRE(!lhs.is_in_lang(one)); + REQUIRE(lhs.is_in_lang(zero)); + + Lvlfa rhs(2); + rhs.initial.insert(0); + rhs.delta.add(0, 1, 1); + rhs.final.insert(1); + REQUIRE(rhs.is_in_lang(one)); + REQUIRE(!rhs.is_in_lang(zero)); + + SECTION("failing minimal scenario") { + Lvlfa result = uni(lhs, rhs); + REQUIRE(result.is_in_lang(one)); + REQUIRE(result.is_in_lang(zero)); + } +} + +TEST_CASE("mata::lvlfa::union_inplace") { + Run one{{1},{}}; + Run zero{{0}, {}}; + + Lvlfa lhs(2); + lhs.initial.insert(0); + lhs.delta.add(0, 0, 1); + lhs.final.insert(1); + REQUIRE(!lhs.is_in_lang(one)); + REQUIRE(lhs.is_in_lang(zero)); + + Lvlfa rhs(2); + rhs.initial.insert(0); + rhs.delta.add(0, 1, 1); + rhs.final.insert(1); + REQUIRE(rhs.is_in_lang(one)); + REQUIRE(!rhs.is_in_lang(zero)); + + SECTION("failing minimal scenario") { + Lvlfa result = lhs.uni(rhs); + REQUIRE(result.is_in_lang(one)); + REQUIRE(result.is_in_lang(zero)); + } + + SECTION("same automata") { + size_t lhs_states = lhs.num_of_states(); + Lvlfa result = lhs.uni(lhs); + REQUIRE(result.num_of_states() == lhs_states * 2); + } +} + +TEST_CASE("mata::lvlfa::remove_final()") +{ + Lvlfa aut('q' + 1); + + SECTION("Automaton B") + { + FILL_WITH_AUT_B(aut); + REQUIRE(aut.final[2]); + REQUIRE(aut.final[12]); + aut.final.erase(12); + REQUIRE(aut.final[2]); + REQUIRE(!aut.final[12]); + } +} + +TEST_CASE("mata::lvlfa::delta.remove()") +{ + Lvlfa aut('q' + 
1); + + SECTION("Automaton B") + { + FILL_WITH_AUT_B(aut); + aut.delta.add(1, 3, 4); + aut.delta.add(1, 3, 5); + + SECTION("Simple remove") + { + REQUIRE(aut.delta.contains(1, 3, 4)); + REQUIRE(aut.delta.contains(1, 3, 5)); + aut.delta.remove(1, 3, 5); + REQUIRE(aut.delta.contains(1, 3, 4)); + REQUIRE(!aut.delta.contains(1, 3, 5)); + } + + SECTION("Remove missing transition") + { + REQUIRE_THROWS_AS(aut.delta.remove(1, 1, 5), std::invalid_argument); + } + + SECTION("Remove the last state_to from targets") { + REQUIRE(aut.delta.contains(6, 'a', 2)); + aut.delta.remove(6, 'a', 2); + REQUIRE(!aut.delta.contains(6, 'a', 2)); + REQUIRE(aut.delta[6].empty()); + + REQUIRE(aut.delta.contains(4, 'a', 8)); + REQUIRE(aut.delta.contains(4, 'c', 8)); + REQUIRE(aut.delta.contains(4, 'a', 6)); + REQUIRE(aut.delta.contains(4, 'b', 6)); + REQUIRE(aut.delta[4].size() == 3); + aut.delta.remove(4, 'a', 6); + REQUIRE(!aut.delta.contains(4, 'a', 6)); + REQUIRE(aut.delta.contains(4, 'b', 6)); + REQUIRE(aut.delta[4].size() == 3); + + aut.delta.remove(4, 'a', 8); + REQUIRE(!aut.delta.contains(4, 'a', 8)); + REQUIRE(aut.delta.contains(4, 'c', 8)); + REQUIRE(aut.delta[4].size() == 2); + + aut.delta.remove(4, 'c', 8); + REQUIRE(!aut.delta.contains(4, 'a', 8)); + REQUIRE(!aut.delta.contains(4, 'c', 8)); + REQUIRE(aut.delta[4].size() == 1); + } + } +} + +TEST_CASE("mata::lvlfa::get_trans_as_sequence(}") { + Lvlfa aut('q' + 1); + std::vector expected{}; + + aut.delta.add(1, 2, 3); + expected.emplace_back(1, 2, 3); + aut.delta.add(1, 3, 4); + expected.emplace_back(1, 3, 4); + aut.delta.add(2, 3, 4); + expected.emplace_back(2, 3, 4); + + + const Delta::Transitions transitions{ aut.delta.transitions() }; + REQUIRE(std::vector{ transitions.begin(), transitions.end() } == expected); +} + +TEST_CASE("mata::lvlfa::remove_epsilon()") +{ + Lvlfa aut{20}; + FILL_WITH_AUT_A(aut); + aut.remove_epsilon('c'); + REQUIRE(aut.delta.contains(10, 'a', 7)); + REQUIRE(aut.delta.contains(10, 'b', 7)); + REQUIRE(!aut.delta.contains(10, 'c', 7)); + REQUIRE(aut.delta.contains(7, 'a', 5)); + REQUIRE(aut.delta.contains(7, 'a', 3)); + REQUIRE(!aut.delta.contains(7, 'c', 3)); + REQUIRE(aut.delta.contains(7, 'b', 9)); + REQUIRE(aut.delta.contains(7, 'a', 7)); + REQUIRE(aut.delta.contains(5, 'a', 5)); + REQUIRE(!aut.delta.contains(5, 'c', 9)); + REQUIRE(aut.delta.contains(5, 'a', 9)); +} + +TEST_CASE("Profile mata::lvlfa::remove_epsilon()", "[.profiling]") +{ + for (size_t n{}; n < 100000; ++n) { + Lvlfa aut{20}; + FILL_WITH_AUT_A(aut); + aut.remove_epsilon('c'); + } +} + +TEST_CASE("mata::lvlfa::get_num_of_trans()") +{ + Lvlfa aut{20}; + FILL_WITH_AUT_A(aut); + REQUIRE(aut.delta.num_of_transitions() == 15); +} + +TEST_CASE("mata::lvlfa::get_one_letter_aut()") +{ + Lvlfa aut(11); + Symbol abstract_symbol{'x'}; + FILL_WITH_AUT_A(aut); + + Lvlfa digraph{aut.get_one_letter_aut() }; + + REQUIRE(digraph.num_of_states() == aut.num_of_states()); + REQUIRE(digraph.delta.num_of_transitions() == 12); + REQUIRE(digraph.delta.contains(1, abstract_symbol, 10)); + REQUIRE(digraph.delta.contains(10, abstract_symbol, 7)); + REQUIRE(!digraph.delta.contains(10, 'a', 7)); + REQUIRE(!digraph.delta.contains(10, 'b', 7)); + REQUIRE(!digraph.delta.contains(10, 'c', 7)); +} + +TEST_CASE("mata::lvlfa::get_reachable_states()") { + Lvlfa aut{20}; + + SECTION("Automaton A") { + FILL_WITH_AUT_A(aut); + aut.delta.remove(3, 'b', 9); + aut.delta.remove(5, 'c', 9); + aut.delta.remove(1, 'a', 10); + + StateSet reachable{ aut.get_reachable_states() }; + CHECK(!reachable.contains(0)); 
+ CHECK(reachable.contains(1)); + CHECK(!reachable.contains(2)); + CHECK(reachable.contains(3)); + CHECK(!reachable.contains(4)); + CHECK(reachable.contains(5)); + CHECK(!reachable.contains(6)); + CHECK(reachable.contains(7)); + CHECK(!reachable.contains(8)); + CHECK(!reachable.contains(9)); + CHECK(!reachable.contains(10)); + + aut.initial.erase(1); + aut.initial.erase(3); + reachable = aut.get_reachable_states(); + CHECK(reachable.empty()); + } + + SECTION("Automaton B") + { + FILL_WITH_AUT_B(aut); + aut.delta.remove(2, 'c', 12); + aut.delta.remove(4, 'c', 8); + aut.delta.remove(4, 'a', 8); + + auto reachable{aut.get_reachable_states()}; + CHECK(reachable.find(0) != reachable.end()); + CHECK(reachable.find(1) == reachable.end()); + CHECK(reachable.find(2) != reachable.end()); + CHECK(reachable.find(3) == reachable.end()); + CHECK(reachable.find(4) != reachable.end()); + CHECK(reachable.find(5) == reachable.end()); + CHECK(reachable.find(6) != reachable.end()); + CHECK(reachable.find(7) == reachable.end()); + CHECK(reachable.find(8) == reachable.end()); + CHECK(reachable.find(9) == reachable.end()); + CHECK(reachable.find(10) == reachable.end()); + CHECK(reachable.find(11) == reachable.end()); + CHECK(reachable.find(12) == reachable.end()); + CHECK(reachable.find(13) == reachable.end()); + CHECK(reachable.find(14) == reachable.end()); + + aut.final.erase(2); + reachable = aut.get_reachable_states(); + CHECK(reachable.size() == 4); + CHECK(reachable.find(0) != reachable.end()); + CHECK(reachable.find(2) != reachable.end()); + CHECK(reachable.find(4) != reachable.end()); + CHECK(reachable.find(6) != reachable.end()); + CHECK(aut.get_useful_states().count() == 0); + + aut.final.insert(4); + reachable = aut.get_reachable_states(); + CHECK(reachable.find(4) != reachable.end()); + } +} + +TEST_CASE("mata::lvlfa::trim() for profiling", "[.profiling],[trim]") +{ + Lvlfa aut{20}; + FILL_WITH_AUT_A(aut); + aut.delta.remove(1, 'a', 10); + + for (size_t i{ 0 }; i < 10000; ++i) { + Lvlfa new_aut{ aut }; + new_aut.trim(); + } +} + +//TODO: make this a test for the new version +TEST_CASE("mata::lvlfa::get_useful_states() for profiling", "[.profiling],[useful_states]") +{ + Lvlfa aut{20}; + FILL_WITH_AUT_A(aut); + aut.delta.remove(1, 'a', 10); + + for (size_t i{ 0 }; i < 10000; ++i) { + aut.get_useful_states(); + } +} + +TEST_CASE("mata::lvlfa::trim() trivial") { + Lvlfa aut{1}; + aut.initial.insert(0); + aut.final.insert(0); + aut.trim(); +} + +TEST_CASE("mata::lvlfa::trim()") +{ + Lvlfa orig_aut{20}; + FILL_WITH_AUT_A(orig_aut); + orig_aut.delta.remove(1, 'a', 10); + + + SECTION("Without state map") { + Lvlfa aut{orig_aut}; + aut.trim(); + CHECK(aut.initial.size() == orig_aut.initial.size()); + CHECK(aut.final.size() == orig_aut.final.size()); + CHECK(aut.num_of_states() == 4); + for (const Word& word: get_shortest_words(orig_aut)) + { + CHECK(aut.is_in_lang(Run{word,{}})); + } + + aut.final.erase(2); // '2' is the new final state in the earlier trimmed automaton. 
+ aut.trim(); + CHECK(aut.delta.empty()); + CHECK(aut.num_of_states() == 0); + } + + SECTION("With state map") { + Lvlfa aut{orig_aut}; + StateRenaming state_map{}; + aut.trim(&state_map); + CHECK(aut.initial.size() == orig_aut.initial.size()); + CHECK(aut.final.size() == orig_aut.final.size()); + CHECK(aut.num_of_states() == 4); + for (const Word& word: get_shortest_words(orig_aut)) + { + CHECK(aut.is_in_lang(Run{word,{}})); + } + REQUIRE(state_map.size() == 4); + CHECK(state_map.at(1) == 0); + CHECK(state_map.at(3) == 1); + CHECK(state_map.at(7) == 3); + CHECK(state_map.at(5) == 2); + + aut.final.erase(2); // '2' is the new final state in the earlier trimmed automaton. + aut.trim(&state_map); + CHECK(aut.delta.empty()); + CHECK(aut.num_of_states() == 0); + CHECK(state_map.empty()); + } +} + +TEST_CASE("mata::lvlfa::Lvlfa::delta.empty()") +{ + Lvlfa aut{}; + + SECTION("Empty automaton") + { + CHECK(aut.delta.empty()); + } + + SECTION("No transitions automaton") + { + aut.add_state(); + CHECK(aut.delta.empty()); + } + + SECTION("Single state automaton with no transitions") + { + aut.add_state(); + aut.initial.insert(0); + aut.final.insert(0); + CHECK(aut.delta.empty()); + } + + SECTION("Single state automaton with transitions") + { + aut.add_state(); + aut.initial.insert(0); + aut.final.insert(0); + aut.delta.add(0, 'a', 0); + CHECK(!aut.delta.empty()); + } + + SECTION("Single state automaton with transitions") + { + aut.add_state(1); + aut.initial.insert(0); + aut.final.insert(1); + CHECK(aut.delta.empty()); + } + + SECTION("Single state automaton with transitions") + { + aut.add_state(1); + aut.initial.insert(0); + aut.final.insert(1); + aut.delta.add(0, 'a', 1); + CHECK(!aut.delta.empty()); + } +} + +TEST_CASE("mata::lvlfa::delta.operator[]") +{ + Lvlfa aut{20}; + FILL_WITH_AUT_A(aut); + REQUIRE(aut.delta.num_of_transitions() == 15); + aut.delta[25]; + REQUIRE(aut.num_of_states() == 20); + + aut.delta.mutable_state_post(25); + REQUIRE(aut.num_of_states() == 26); + REQUIRE(aut.delta[25].empty()); + + aut.delta.mutable_state_post(50); + REQUIRE(aut.num_of_states() == 51); + REQUIRE(aut.delta[50].empty()); + + Lvlfa aut1 = aut; + aut1.delta.mutable_state_post(60); + REQUIRE(aut1.num_of_states() == 61); + REQUIRE(aut1.delta[60].empty()); + + const Lvlfa aut2 = aut; + aut2.delta[60]; + REQUIRE(aut2.num_of_states() == 51); + REQUIRE(aut2.delta[60].empty()); +} + +TEST_CASE("mata::lvlfa::Lvlfa::unify_(initial/final)()") { + Lvlfa lvlfa{10}; + + SECTION("No initial") { + lvlfa.unify_initial(); + CHECK(lvlfa.num_of_states() == 10); + CHECK(lvlfa.initial.empty()); + } + + SECTION("initial==final unify final") { + lvlfa.initial.insert(0); + lvlfa.final.insert(0); + lvlfa.final.insert(1); + lvlfa.unify_final(); + REQUIRE(lvlfa.num_of_states() == 11); + CHECK(lvlfa.final.size() == 1); + CHECK(lvlfa.final[10]); + CHECK(lvlfa.initial[10]); + } + + SECTION("initial==final unify initial") { + lvlfa.initial.insert(0); + lvlfa.initial.insert(1); + lvlfa.final.insert(0); + lvlfa.unify_initial(); + REQUIRE(lvlfa.num_of_states() == 11); + CHECK(lvlfa.initial.size() == 1); + CHECK(lvlfa.initial[10]); + CHECK(lvlfa.final[10]); + } + + SECTION("Single initial") { + lvlfa.initial.insert(0); + lvlfa.unify_initial(); + CHECK(lvlfa.num_of_states() == 10); + CHECK(lvlfa.initial.size() == 1); + CHECK(lvlfa.initial[0]); + } + + SECTION("Multiple initial") { + lvlfa.initial.insert(0); + lvlfa.initial.insert(1); + lvlfa.unify_initial(); + CHECK(lvlfa.num_of_states() == 11); + CHECK(lvlfa.initial.size() == 1); + 
CHECK(lvlfa.initial[10]); + } + + SECTION("With transitions") { + lvlfa.initial.insert(0); + lvlfa.initial.insert(1); + lvlfa.delta.add(0, 'a', 3); + lvlfa.delta.add(1, 'b', 0); + lvlfa.delta.add(1, 'c', 1); + lvlfa.unify_initial(); + CHECK(lvlfa.num_of_states() == 11); + CHECK(lvlfa.initial.size() == 1); + CHECK(lvlfa.initial[10]); + CHECK(lvlfa.delta.contains(10, 'a', 3)); + CHECK(lvlfa.delta.contains(10, 'b', 0)); + CHECK(lvlfa.delta.contains(10, 'c', 1)); + CHECK(lvlfa.delta.contains(0, 'a', 3)); + CHECK(lvlfa.delta.contains(1, 'b', 0)); + CHECK(lvlfa.delta.contains(1, 'c', 1)); + } + + SECTION("No final") { + lvlfa.unify_final(); + CHECK(lvlfa.num_of_states() == 10); + CHECK(lvlfa.final.empty()); + } + + SECTION("Single final") { + lvlfa.final.insert(0); + lvlfa.unify_final(); + CHECK(lvlfa.num_of_states() == 10); + CHECK(lvlfa.final.size() == 1); + CHECK(lvlfa.final[0]); + } + + SECTION("Multiple final") { + lvlfa.final.insert(0); + lvlfa.final.insert(1); + lvlfa.unify_final(); + CHECK(lvlfa.num_of_states() == 11); + CHECK(lvlfa.final.size() == 1); + CHECK(lvlfa.final[10]); + } + + SECTION("With transitions") { + lvlfa.final.insert(0); + lvlfa.final.insert(1); + lvlfa.delta.add(3, 'a', 0); + lvlfa.delta.add(4, 'b', 1); + lvlfa.delta.add(1, 'c', 1); + lvlfa.unify_final(); + CHECK(lvlfa.num_of_states() == 11); + CHECK(lvlfa.final.size() == 1); + CHECK(lvlfa.final[10]); + CHECK(lvlfa.delta.contains(3, 'a', 10)); + CHECK(lvlfa.delta.contains(4, 'b', 10)); + CHECK(lvlfa.delta.contains(1, 'c', 10)); + CHECK(lvlfa.delta.contains(3, 'a', 0)); + CHECK(lvlfa.delta.contains(4, 'b', 1)); + CHECK(lvlfa.delta.contains(1, 'c', 1)); + } + + SECTION("Bug: LVLFA with empty string unifying initial/final repeatedly") { + Lvlfa aut; + mata::parser::create_nfa(&aut, "a*b*"); + for (size_t i{ 0 }; i < 8; ++i) { + aut.unify_initial(); + aut.unify_final(); + } + CHECK(true); // Check that the program does not seg fault. 
+ } +} + +TEST_CASE("mata::lvlfa::Lvlfa::get_delta.epsilon_symbol_posts()") { + Lvlfa aut{20}; + FILL_WITH_AUT_A(aut); + aut.delta.add(0, EPSILON, 3); + aut.delta.add(3, EPSILON, 3); + aut.delta.add(3, EPSILON, 4); + + auto state_eps_trans{ aut.delta.epsilon_symbol_posts(0) }; + CHECK(state_eps_trans->symbol == EPSILON); + CHECK(state_eps_trans->targets == StateSet{3 }); + state_eps_trans = aut.delta.epsilon_symbol_posts(3); + CHECK(state_eps_trans->symbol == EPSILON); + CHECK(state_eps_trans->targets == StateSet{3, 4 }); + + aut.delta.add(8, 42, 3); + aut.delta.add(8, 42, 4); + aut.delta.add(8, 42, 6); + + state_eps_trans = aut.delta.epsilon_symbol_posts(8, 42); + CHECK(state_eps_trans->symbol == 42); + CHECK(state_eps_trans->targets == StateSet{3, 4, 6 }); + + CHECK(aut.delta.epsilon_symbol_posts(1) == aut.delta.state_post(1).end()); + CHECK(aut.delta.epsilon_symbol_posts(5) == aut.delta.state_post(5).end()); + CHECK(aut.delta.epsilon_symbol_posts(19) == aut.delta.state_post(19).end()); + + StatePost state_post{ aut.delta[0] }; + state_eps_trans = aut.delta.epsilon_symbol_posts(state_post); + CHECK(state_eps_trans->symbol == EPSILON); + CHECK(state_eps_trans->targets == StateSet{3 }); + state_post = aut.delta[3]; + state_eps_trans = Delta::epsilon_symbol_posts(state_post); + CHECK(state_eps_trans->symbol == EPSILON); + CHECK(state_eps_trans->targets == StateSet{3, 4 }); + + state_post = aut.delta.state_post(1); + CHECK(aut.delta.epsilon_symbol_posts(state_post) == state_post.end()); + state_post = aut.delta.state_post(5); + CHECK(aut.delta.epsilon_symbol_posts(state_post) == state_post.end()); + state_post = aut.delta.state_post(19); + CHECK(aut.delta.epsilon_symbol_posts(state_post) == state_post.end()); +} + +TEST_CASE("mata::lvlfa::Lvlfa::delta()") { + Delta delta(6); +} + +TEST_CASE("A segmentation fault in the make_complement") { + Lvlfa r(1); + OnTheFlyAlphabet alph{}; + alph["a"]; + alph["b"]; + + r.initial = {0}; + r.delta.add(0, 0, 0); + REQUIRE(not r.is_complete(&alph)); + r.make_complete(alph, 1); + REQUIRE(r.is_complete(&alph)); +} + +TEST_CASE("mata::lvlfa:: create simple automata") { + Lvlfa lvlfa{ builder::create_empty_string_lvlfa() }; + CHECK(lvlfa.is_in_lang(Word{})); + CHECK(get_word_lengths(lvlfa) == std::set>{ std::make_pair(0, 0) }); + + OnTheFlyAlphabet alphabet{ { "a", 0 }, { "b", 1 }, { "c", 2 } }; + lvlfa = builder::create_sigma_star_lvlfa(&alphabet); + CHECK(lvlfa.is_in_lang({ {}, {} })); + CHECK(lvlfa.is_in_lang({ 0 , {} })); + CHECK(lvlfa.is_in_lang({ 1 , {} })); + CHECK(lvlfa.is_in_lang({ 2 , {} })); + CHECK(lvlfa.is_in_lang({ { 0, 1 }, {} })); + CHECK(lvlfa.is_in_lang({ { 1, 0 }, {} })); + CHECK(lvlfa.is_in_lang({ { 2, 2, 2 }, {} })); + CHECK(lvlfa.is_in_lang({ { 0, 1, 2, 2, 0, 1, 2, 1, 0, 0, 2, 1 }, {} })); + CHECK(!lvlfa.is_in_lang({ 3 , {} })); +} + +TEST_CASE("mata::lvlfa:: print_to_mata") { + Lvlfa aut_big; + aut_big.initial = {1, 2}; + aut_big.delta.add(1, 'a', 2); + aut_big.delta.add(1, 'a', 3); + aut_big.delta.add(1, 'b', 4); + aut_big.delta.add(2, 'a', 2); + aut_big.delta.add(2, 'b', 2); + aut_big.delta.add(2, 'a', 3); + aut_big.delta.add(2, 'b', 4); + aut_big.delta.add(3, 'b', 4); + aut_big.delta.add(3, 'c', 7); + aut_big.delta.add(3, 'b', 2); + aut_big.delta.add(5, 'c', 3); + aut_big.delta.add(7, 'a', 8); + aut_big.final = {3}; + + std::string aut_big_mata = aut_big.print_to_mata(); + // for parsing output of print_to_mata() we need to use IntAlphabet to get the same alphabet + IntAlphabet int_alph; + Lvlfa aut_big_from_mata = 
builder::construct(mata::IntermediateAut::parse_from_mf(parse_mf(aut_big_mata))[0], &int_alph); + + CHECK(are_equivalent(aut_big, aut_big_from_mata)); +} + +TEST_CASE("mata::lvlfa::Lvlfa::trim() bug") { + Lvlfa aut(5, {0}, {4}); + aut.delta.add(0, 122, 1); + aut.delta.add(1, 98, 1); + aut.delta.add(1, 122, 1); + aut.delta.add(1, 97, 2); + aut.delta.add(2, 122, 1); + aut.delta.add(2, 97, 1); + aut.delta.add(1, 97, 4); + aut.delta.add(3, 97, 4); + + Lvlfa aut_copy {aut}; + CHECK(are_equivalent(aut_copy.trim(), aut)); +} + +TEST_CASE("mata::lvlfa::get_useful_states_tarjan") { + SECTION("Lvlfa 1") { + Lvlfa aut(5, {0}, {4}); + aut.delta.add(0, 122, 1); + aut.delta.add(1, 98, 1); + aut.delta.add(1, 122, 1); + aut.delta.add(1, 97, 2); + aut.delta.add(2, 122, 1); + aut.delta.add(2, 97, 1); + aut.delta.add(1, 97, 4); + aut.delta.add(3, 97, 4); + + mata::BoolVector bv = aut.get_useful_states(); + mata::BoolVector ref({ 1, 1, 1, 0, 1}); + CHECK(bv == ref); + } + + SECTION("Empty LVLFA") { + Lvlfa aut; + mata::BoolVector bv = aut.get_useful_states(); + CHECK(bv == mata::BoolVector({})); + } + + SECTION("Single-state LVLFA") { + Lvlfa aut(1, {0}, {}); + mata::BoolVector bv = aut.get_useful_states(); + CHECK(bv == mata::BoolVector({ 0})); + } + + SECTION("Single-state LVLFA acc") { + Lvlfa aut(1, {0}, {0}); + mata::BoolVector bv = aut.get_useful_states(); + CHECK(bv == mata::BoolVector({ 1})); + } + + SECTION("Lvlfa 2") { + Lvlfa aut(5, {0, 1}, {2}); + aut.delta.add(0, 122, 2); + aut.delta.add(2, 98, 3); + aut.delta.add(1, 98, 4); + aut.delta.add(4, 97, 3); + + mata::BoolVector bv = aut.get_useful_states(); + mata::BoolVector ref({ 1, 0, 1, 0, 0}); + CHECK(bv == ref); + } + + SECTION("Lvlfa 3") { + Lvlfa aut(2, {0, 1}, {0, 1}); + aut.delta.add(0, 122, 0); + aut.delta.add(1, 98, 1); + + mata::BoolVector bv = aut.get_useful_states(); + mata::BoolVector ref({ 1, 1}); + CHECK(bv == ref); + } + + SECTION("Lvlfa no final") { + Lvlfa aut(5, {0}, {}); + aut.delta.add(0, 122, 1); + aut.delta.add(1, 98, 1); + aut.delta.add(1, 122, 1); + aut.delta.add(1, 97, 2); + aut.delta.add(2, 122, 1); + aut.delta.add(2, 97, 1); + aut.delta.add(1, 97, 4); + aut.delta.add(3, 97, 4); + + mata::BoolVector bv = aut.get_useful_states(); + mata::BoolVector ref({ 0, 0, 0, 0, 0}); + CHECK(bv == ref); + } + + SECTION("from regex (a+b*a*)") { + Lvlfa aut; + mata::parser::create_nfa(&aut, "(a+b*a*)", false, EPSILON, false); + + mata::BoolVector bv = aut.get_useful_states(); + mata::BoolVector ref({ 1, 0, 1, 0, 1, 0, 1, 0, 0}); + CHECK(bv == ref); + + aut = reduce(aut.trim()); + bv = aut.get_useful_states(); + CHECK(bv == mata::BoolVector({ 1, 1, 1, 1})); + } + + SECTION("more initials") { + Lvlfa aut(4, {0, 1, 2}, {0, 3}); + aut.delta.add(1, 48, 0); + aut.delta.add(2, 53, 3); + CHECK(aut.get_useful_states() == mata::BoolVector{ 1, 1, 1, 1}); + } +} + +TEST_CASE("mata::lvlfa::Lvlfa::get_words") { + SECTION("empty") { + Lvlfa aut; + CHECK(aut.get_words(0) == std::set()); + CHECK(aut.get_words(1) == std::set()); + CHECK(aut.get_words(5) == std::set()); + } + + SECTION("empty word") { + Lvlfa aut(1, {0}, {0}); + CHECK(aut.get_words(0) == std::set{{}}); + CHECK(aut.get_words(1) == std::set{{}}); + CHECK(aut.get_words(5) == std::set{{}}); + } + + SECTION("noodle - one final") { + Lvlfa aut(3, {0}, {2}); + aut.delta.add(0, 0, 1); + aut.delta.add(1, 1, 2); + CHECK(aut.get_words(0) == std::set{}); + CHECK(aut.get_words(1) == std::set{}); + CHECK(aut.get_words(2) == std::set{{0, 1}}); + CHECK(aut.get_words(3) == std::set{{0, 1}}); + 
CHECK(aut.get_words(5) == std::set{{0, 1}}); + } + + SECTION("noodle - two finals") { + Lvlfa aut(3, {0}, {1,2}); + aut.delta.add(0, 0, 1); + aut.delta.add(1, 1, 2); + CHECK(aut.get_words(0) == std::set{}); + CHECK(aut.get_words(1) == std::set{{0}}); + CHECK(aut.get_words(2) == std::set{{0}, {0, 1}}); + CHECK(aut.get_words(3) == std::set{{0}, {0, 1}}); + CHECK(aut.get_words(5) == std::set{{0}, {0, 1}}); + } + + SECTION("noodle - three finals") { + Lvlfa aut(3, {0}, {0,1,2}); + aut.delta.add(0, 0, 1); + aut.delta.add(1, 1, 2); + CHECK(aut.get_words(0) == std::set{{}}); + CHECK(aut.get_words(1) == std::set{{}, {0}}); + CHECK(aut.get_words(2) == std::set{{}, {0}, {0, 1}}); + CHECK(aut.get_words(3) == std::set{{}, {0}, {0, 1}}); + CHECK(aut.get_words(5) == std::set{{}, {0}, {0, 1}}); + } + + SECTION("more complex") { + Lvlfa aut(6, {0,1}, {1,3,4,5}); + aut.delta.add(0, 0, 3); + aut.delta.add(3, 1, 4); + aut.delta.add(0, 2, 2); + aut.delta.add(3, 3, 2); + aut.delta.add(1, 4, 2); + aut.delta.add(2, 5, 5); + CHECK(aut.get_words(0) == std::set{{}}); + CHECK(aut.get_words(1) == std::set{{}, {0}}); + CHECK(aut.get_words(2) == std::set{{}, {0}, {0, 1}, {2,5}, {4,5}}); + CHECK(aut.get_words(3) == std::set{{}, {0}, {0, 1}, {2,5}, {4,5}, {0,3,5}}); + CHECK(aut.get_words(4) == std::set{{}, {0}, {0, 1}, {2,5}, {4,5}, {0,3,5}}); + CHECK(aut.get_words(5) == std::set{{}, {0}, {0, 1}, {2,5}, {4,5}, {0,3,5}}); + } + + SECTION("cycle") { + Lvlfa aut(6, {0,1}, {0,1}); + aut.delta.add(0, 0, 1); + aut.delta.add(1, 1, 0); + CHECK(aut.get_words(0) == std::set{{}}); + CHECK(aut.get_words(1) == std::set{{}, {0}, {1}}); + CHECK(aut.get_words(2) == std::set{{}, {0}, {1}, {0, 1}, {1, 0}}); + CHECK(aut.get_words(3) == std::set{{}, {0}, {1}, {0, 1}, {1, 0}, {0,1,0}, {1,0,1}}); + CHECK(aut.get_words(4) == std::set{{}, {0}, {1}, {0, 1}, {1, 0}, {0,1,0}, {1,0,1}, {0,1,0,1}, {1,0,1,0}}); + CHECK(aut.get_words(5) == std::set{{}, {0}, {1}, {0, 1}, {1, 0}, {0,1,0}, {1,0,1}, {0,1,0,1}, {1,0,1,0}, {0,1,0,1,0}, {1,0,1,0,1}}); + } +} diff --git a/tests/lvlfa/utils.hh b/tests/lvlfa/utils.hh new file mode 100644 index 00000000..83679caa --- /dev/null +++ b/tests/lvlfa/utils.hh @@ -0,0 +1,98 @@ +// Automaton A +#define FILL_WITH_AUT_A(x) \ + x.initial = {1, 3}; \ + x.final = {5}; \ + x.delta.add(1, 'a', 3); \ + x.delta.add(1, 'a', 10); \ + x.delta.add(1, 'b', 7); \ + x.delta.add(3, 'a', 7); \ + x.delta.add(3, 'b', 9); \ + x.delta.add(9, 'a', 9); \ + x.delta.add(7, 'b', 1); \ + x.delta.add(7, 'a', 3); \ + x.delta.add(7, 'c', 3); \ + x.delta.add(10, 'a', 7); \ + x.delta.add(10, 'b', 7); \ + x.delta.add(10, 'c', 7); \ + x.delta.add(7, 'a', 5); \ + x.delta.add(5, 'a', 5); \ + x.delta.add(5, 'c', 9); \ + +// Automaton B +#define FILL_WITH_AUT_B(x) \ + x.initial = {4}; \ + x.final = {2, 12}; \ + x.delta.add(4, 'c', 8); \ + x.delta.add(4, 'a', 8); \ + x.delta.add(8, 'b', 4); \ + x.delta.add(4, 'a', 6); \ + x.delta.add(4, 'b', 6); \ + x.delta.add(6, 'a', 2); \ + x.delta.add(2, 'b', 2); \ + x.delta.add(2, 'a', 0); \ + x.delta.add(0, 'a', 2); \ + x.delta.add(2, 'c', 12); \ + x.delta.add(12, 'a', 14); \ + x.delta.add(14, 'b', 12); \ + +// Automaton C +// the same as B, but with small symbols +#define FILL_WITH_AUT_C(x) \ + x.initial = {4}; \ + x.final = {2, 12}; \ + x.delta.add(4, 3, 8); \ + x.delta.add(4, 1, 8); \ + x.delta.add(8, 2, 4); \ + x.delta.add(4, 1, 6); \ + x.delta.add(4, 2, 6); \ + x.delta.add(6, 1, 2); \ + x.delta.add(2, 2, 2); \ + x.delta.add(2, 1, 0); \ + x.delta.add(0, 1, 2); \ + x.delta.add(2, 3, 12); \ + x.delta.add(12, 1, 14); \ 
+ x.delta.add(14, 2, 12); \ + +// Automaton D // shomewhat larger +#define FILL_WITH_AUT_D(x) \ + x.initial = {0}; \ + x.final = {3}; \ + x.delta.add(0, 46, 0); \ + x.delta.add(0, 47, 0); \ + x.delta.add(0, 58, 0); \ + x.delta.add(0, 58, 1); \ + x.delta.add(0, 64, 1); \ + x.delta.add(0, 64, 1); \ + x.delta.add(0, 82, 2); \ + x.delta.add(0, 92, 2); \ + x.delta.add(0, 98, 2); \ + x.delta.add(0, 100, 1);\ + x.delta.add(0, 103, 0);\ + x.delta.add(0, 109, 0);\ + x.delta.add(0, 110, 1);\ + x.delta.add(0, 111, 2);\ + x.delta.add(0, 114, 0);\ + x.delta.add(1, 47, 2); \ + x.delta.add(2, 47, 3); \ + x.delta.add(3, 46, 2); \ + x.delta.add(3, 47, 0); \ + x.delta.add(3, 58, 2); \ + x.delta.add(3, 64, 3); \ + x.delta.add(3, 82, 2); \ + x.delta.add(3, 92, 0); \ + x.delta.add(3, 98, 2); \ + x.delta.add(3, 100, 2);\ + x.delta.add(3, 103, 3);\ + x.delta.add(3, 109, 2);\ + x.delta.add(3, 110, 3);\ + x.delta.add(3, 111, 2);\ + x.delta.add(3, 114, 3);\ + +// Automaton E +#define FILL_WITH_AUT_E(x) \ + x.initial = {1, 3}; \ + x.final = {4}; \ + x.delta.add(1, 'b', 2); \ + x.delta.add(2, 'a', 4); \ + x.delta.add(1, 'a', 3); \ + From 465dbea6273078ce4e8dd47f4a3e3748e7d5e213 Mon Sep 17 00:00:00 2001 From: koniksedy Date: Thu, 8 Feb 2024 15:02:53 +0100 Subject: [PATCH 02/24] lvlfa builder done --- include/mata/lvlfa/builder.hh | 7 + include/mata/lvlfa/lvlfa.hh | 25 ++-- src/lvlfa/builder.cc | 53 +++----- src/lvlfa/lvlfa.cc | 47 +++---- src/lvlfa/operations.cc | 13 +- tests/CMakeLists.txt | 14 +- tests/lvlfa/builder.cc | 209 ++++++++++++++++++++++++++++- tests/lvlfa/lvlfa-concatenation.cc | 6 +- tests/lvlfa/lvlfa-intersection.cc | 2 +- tests/lvlfa/lvlfa.cc | 2 +- 10 files changed, 279 insertions(+), 99 deletions(-) diff --git a/include/mata/lvlfa/builder.hh b/include/mata/lvlfa/builder.hh index 08d23bbf..e0f67f2c 100644 --- a/include/mata/lvlfa/builder.hh +++ b/include/mata/lvlfa/builder.hh @@ -6,6 +6,8 @@ #include "lvlfa.hh" #include +#include "mata/nfa/builder.hh" + #include @@ -38,6 +40,11 @@ Lvlfa create_single_word_lvlfa(const std::vector& word, Alphabet* a */ Lvlfa create_empty_string_lvlfa(); +/** + * Create automaton accepting sigma star over the passed alphabet using DONT_CARE symbol. + */ +Lvlfa create_sigma_star_lvlfa(); + /** * Create automaton accepting sigma star over the passed alphabet. * diff --git a/include/mata/lvlfa/lvlfa.hh b/include/mata/lvlfa/lvlfa.hh index 28b632a8..70620f20 100644 --- a/include/mata/lvlfa/lvlfa.hh +++ b/include/mata/lvlfa/lvlfa.hh @@ -55,7 +55,7 @@ public: * The set of states of this automaton are the numbers from 0 to the number of states minus one. */ std::vector levels{}; - Level max_level = 0; + Level levels_cnt = 0; /// Key value store for additional attributes for the LVLFA. Keys are attribute names as strings and the value types /// are up to the user. 
/// For example, we can set up attributes such as "state_dict" for state dictionary attribute mapping states to their @@ -66,8 +66,8 @@ public: public: explicit Lvlfa(Delta delta = {}, utils::SparseSet initial_states = {}, - utils::SparseSet final_states = {}, std::vector levels = {}, Level max_level = 0, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(delta, initial_states, final_states, alphabet), levels(std::move(levels)), max_level(max_level) {} + utils::SparseSet final_states = {}, std::vector levels = {}, Level levels_cnt = 1, Alphabet* alphabet = nullptr) + : mata::nfa::Nfa(delta, initial_states, final_states, alphabet), levels(std::move(levels)), levels_cnt(levels_cnt) {} /** * @brief Construct a new explicit LVLFA with num_of_states states and optionally set initial and final states. @@ -75,12 +75,12 @@ public: * @param[in] num_of_states Number of states for which to preallocate Delta. */ explicit Lvlfa(const unsigned long num_of_states, StateSet initial_states = {}, - StateSet final_states = {}, std::vector levels = {}, Level max_level = 0, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(num_of_states, initial_states, final_states, alphabet), levels(levels), max_level(max_level) {} + StateSet final_states = {}, std::vector levels = {}, Level levels_cnt = 1, Alphabet* alphabet = nullptr) + : mata::nfa::Nfa(num_of_states, initial_states, final_states, alphabet), levels(levels), levels_cnt(levels_cnt) {} - // Lvlfa(const mata::nfa::Nfa& other) - // : delta(std::move(other.delta)), initial(std::move(other.initial)), final(std::move(other.final)), - // levels(std::move(std::vector(other.num_of_states(), 0))), max_level(0), alphabet(other.alphabet) {} + explicit Lvlfa(const mata::nfa::Nfa& other) + : mata::nfa::Nfa(other.delta, other.initial, other.final, other.alphabet), + levels(std::move(std::vector(other.num_of_states(), 0))), levels_cnt(1) {} /** * @brief Construct a new explicit LVLFA from other LVLFA. @@ -88,7 +88,7 @@ public: Lvlfa(const Lvlfa& other) = default; Lvlfa(Lvlfa&& other) noexcept - : levels { std::move(other.levels) }, max_level{ other.max_level } { + : levels { std::move(other.levels) }, levels_cnt{ other.levels_cnt } { delta = std::move(other.delta); initial = std::move(other.initial); final = std::move(other.final); @@ -192,13 +192,6 @@ public: */ void print_to_mata(std::ostream &output) const; - /** - * Fill @p alphabet with symbols from @p lvlfa. - * @param[in] lvlfa LVLFA with symbols to fill @p alphabet with. - * @param[out] alphabet Alphabet to be filled with symbols from @p lvlfa. - */ - void fill_alphabet(mata::OnTheFlyAlphabet& alphabet) const; - /// Is the language of the automaton universal? 
bool is_universal(const Alphabet& alphabet, Run* cex = nullptr, const ParameterMap& params = {{ "algorithm", "antichains" }}) const; diff --git a/src/lvlfa/builder.cc b/src/lvlfa/builder.cc index 7513d0dd..3392f82d 100644 --- a/src/lvlfa/builder.cc +++ b/src/lvlfa/builder.cc @@ -3,6 +3,8 @@ #include "mata/lvlfa/builder.hh" #include "mata/parser/mintermization.hh" +#include "mata/nfa/builder.hh" + #include using namespace mata::lvlfa; @@ -13,7 +15,8 @@ Lvlfa builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphab Lvlfa aut; assert(nullptr != alphabet); - if (parsec.type != TYPE_NFA) { + // HACK - it should be only "parsec.type != TYPE_NFA" without the conjunction + if (parsec.type != TYPE_NFA && parsec.type != TYPE_NFA + "-explicit") { throw std::runtime_error(std::string(__FUNCTION__) + ": expecting type \"" + TYPE_NFA + "\""); } @@ -62,11 +65,9 @@ Lvlfa builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphab } } - aut.levels.clear(); it = parsec.dict.find("Levels"); if (parsec.dict.end() != it) { - aut.levels.resize(it->second.size(), 0); for (const auto &str : it->second) { std::stringstream ss(str); @@ -95,20 +96,20 @@ Lvlfa builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphab } } - it = parsec.dict.find("MaxLevel"); + it = parsec.dict.find("LevelsCnt"); if (parsec.dict.end() != it) { if (it->second.size() == 0) { - throw std::runtime_error("MaxLevel has to be specified."); + throw std::runtime_error("LevelsCnt has to be specified."); } if (it->second.size() > 1) { - throw std::runtime_error("Only one MexLevel can be specified."); + throw std::runtime_error("Only one LevelsCnt can be specified."); } try { long level = std::stol(it->second[0]); if (level < 0) { throw std::runtime_error("Bad format of levels: level " + it->second[0] + " is out of range."); } - aut.max_level = static_cast(level); + aut.levels_cnt = static_cast(level); } catch (const std::invalid_argument &ex) { throw std::runtime_error("Bad format of levels: unsupported level " + it->second[0]); } catch (const std::out_of_range &ex) { @@ -149,7 +150,6 @@ Lvlfa builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphab } // construct(). 
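// Illustrative sketch of the textual section that the construct() above consumes for a
// leveled automaton. The %Levels and %LevelsCnt keys mirror the parsing code above and the
// output of Lvlfa::print_to_mata(); the @LVLFA-explicit type line and the %Initial/%Final
// sections are assumptions carried over from the usual NFA .mata conventions.
//
//     @LVLFA-explicit
//     %Initial q0
//     %Final q2
//     %Levels q0:0 q1:1 q2:0
//     %LevelsCnt 2
//     q0 97 q1
//     q1 98 q2
//
// A round trip through the builder would then look roughly as follows (levels_cnt and
// levels being the members introduced by this patch; state numbering after parsing is
// assumed to follow order of appearance):
//
//     std::istringstream input{ lvlfa_text };  // lvlfa_text holds the section sketched above
//     Lvlfa aut{ builder::parse_from_mata(input) };
//     assert(aut.levels_cnt == 2);
//     assert(aut.levels[1] == 1);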
Lvlfa builder::construct(const mata::IntermediateAut& inter_aut, mata::Alphabet* alphabet, NameStateMap* state_map) { - // throw std::runtime_error("Constructor via IntermediateAut is not implemented for LVLFA."); Lvlfa aut; assert(nullptr != alphabet); @@ -238,40 +238,27 @@ void builder::construct( } Lvlfa builder::create_single_word_lvlfa(const std::vector& word) { - const size_t word_size{ word.size() }; - Lvlfa lvlfa{ word_size + 1, { 0 }, { word_size } }; - - for (State state{ 0 }; state < word_size; ++state) { - lvlfa.delta.add(state, word[state], state + 1); - } - return lvlfa; + return Lvlfa(mata::nfa::builder::create_single_word_nfa(word)); } Lvlfa builder::create_single_word_lvlfa(const std::vector& word, mata::Alphabet *alphabet) { - if (!alphabet) { - alphabet = new OnTheFlyAlphabet{ word }; - } - const size_t word_size{ word.size() }; - Lvlfa lvlfa{ word_size + 1, { 0 }, { word_size }, std::vector(word_size + 1, 0), 0, alphabet }; - - for (State state{ 0 }; state < word_size; ++state) { - lvlfa.delta.add(state, alphabet->translate_symb(word[state]), state + 1); - } - return lvlfa; + return Lvlfa(mata::nfa::builder::create_single_word_nfa(word, alphabet)); } Lvlfa builder::create_empty_string_lvlfa() { - return Lvlfa{ 1, StateSet{ 0 }, StateSet{ 0 } }; + return Lvlfa(mata::nfa::builder::create_empty_string_nfa()); } -Lvlfa builder::create_sigma_star_lvlfa(mata::Alphabet* alphabet) { - Lvlfa lvlfa{ 1, StateSet{ 0 }, StateSet{ 0 }, { 0 }, 0, alphabet }; - for (const mata::Symbol& symbol : alphabet->get_alphabet_symbols()) { - lvlfa.delta.add(0, symbol, 0); - } +Lvlfa builder::create_sigma_star_lvlfa() { + Lvlfa lvlfa{ 1, { 0 }, { 0 }, { 0 }, 1 }; + lvlfa.delta.add(0, DONT_CARE, 0); return lvlfa; } +Lvlfa builder::create_sigma_star_lvlfa(mata::Alphabet* alphabet) { + return Lvlfa(mata::nfa::builder::create_sigma_star_nfa(alphabet)); +} + Lvlfa builder::parse_from_mata(std::istream& lvlfa_stream) { const std::string lvlfa_str = "LVLFA"; parser::Parsed parsed{ parser::parse_mf(lvlfa_stream) }; @@ -284,8 +271,8 @@ Lvlfa builder::parse_from_mata(std::istream& lvlfa_stream) { throw std::runtime_error("The type of input automaton is '" + automaton_type + "'. 
Required is 'LVLFA'\n"); } IntAlphabet alphabet; - return construct(IntermediateAut::parse_from_mf(parsed)[0], &alphabet); - // return construct(parsed, &alphabet); + // return construct(IntermediateAut::parse_from_mf(parsed)[0], &alphabet); + return construct(parsed[0], &alphabet); } Lvlfa builder::parse_from_mata(const std::filesystem::path& lvlfa_file) { diff --git a/src/lvlfa/lvlfa.cc b/src/lvlfa/lvlfa.cc index e4734cb5..f2883d63 100644 --- a/src/lvlfa/lvlfa.cc +++ b/src/lvlfa/lvlfa.cc @@ -23,11 +23,6 @@ using StateBoolArray = std::vector; ///< Bool array for states in the auto const std::string mata::lvlfa::TYPE_NFA = "LVLFA"; -// const State Limits::min_state; -// const State Limits::max_state; -// const Symbol Limits::min_symbol; -// const Symbol Limits::max_symbol; - Lvlfa& Lvlfa::trim(StateRenaming* state_renaming) { @@ -147,14 +142,27 @@ void Lvlfa::print_to_mata(std::ostream &output) const { output << std::endl; } - // if (!levels.empty()) { - // output << "%Levels"; - // for (State s{ 0 }; s < num_of_states(); s++) { - // output << " " << "q" << s << ":" << levels[s]; - // } - // output << std::endl; - // output << "MaxLevel " << max_level << std::endl; - // } + if (!levels.empty()) { + BoolVector live_states(num_of_states(), false); + for (const State &s : initial) { + live_states[s] = true; + } + for (const State &s : final) { + live_states[s] = true; + } + for (const Transition &trans: delta.transitions()) { + live_states[trans.source] = true; + live_states[trans.target] = true; + } + output << "%Levels"; + for (State s{ 0 }; s < num_of_states(); s++) { + if (live_states[s]) { + output << " " << "q" << s << ":" << levels[s]; + } + } + output << std::endl; + output << "%LevelsCnt " << levels_cnt << std::endl; + } for (const Transition& trans: delta.transitions()) { output << "q" << trans.source << " " << trans.symbol << " q" << trans.target << std::endl; @@ -162,14 +170,7 @@ void Lvlfa::print_to_mata(std::ostream &output) const { } Lvlfa Lvlfa::get_one_letter_aut(Symbol abstract_symbol) const { - Lvlfa digraph{num_of_states(), StateSet(initial), StateSet(final), std::vector(num_of_states(), 0), 0 }; - // Add directed transitions for digraph. - for (const Transition& transition: delta.transitions()) { - // Directly try to add the transition. Finding out whether the transition is already in the digraph - // only iterates through transition relation again. 
- digraph.delta.add(transition.source, abstract_symbol, transition.target); - } - return digraph; + return Lvlfa(mata::nfa::Nfa::get_one_letter_aut(abstract_symbol)); } void Lvlfa::get_one_letter_aut(Lvlfa& result) const { @@ -182,7 +183,7 @@ Lvlfa& Lvlfa::operator=(Lvlfa&& other) noexcept { initial = std::move(other.initial); final = std::move(other.final); levels = std::move(other.levels); - max_level = other.max_level; + levels_cnt = other.levels_cnt; alphabet = other.alphabet; attributes = std::move(other.attributes); other.alphabet = nullptr; @@ -206,5 +207,5 @@ void Lvlfa::clear() { } bool Lvlfa::is_identical(const Lvlfa& aut) const { - return max_level == aut.max_level && levels == aut.levels && mata::nfa::Nfa::is_identical(aut); + return levels_cnt == aut.levels_cnt && levels == aut.levels && mata::nfa::Nfa::is_identical(aut); } diff --git a/src/lvlfa/operations.cc b/src/lvlfa/operations.cc index 1c79aa0d..c33653fb 100644 --- a/src/lvlfa/operations.cc +++ b/src/lvlfa/operations.cc @@ -159,7 +159,7 @@ Lvlfa mata::lvlfa::remove_epsilon(const Lvlfa& aut, Symbol epsilon) { } // Construct the automaton without epsilon transitions. - Lvlfa result{ Delta{}, aut.initial, aut.final, aut.levels, aut.max_level, aut.alphabet }; + Lvlfa result{ Delta{}, aut.initial, aut.final, aut.levels, aut.levels_cnt, aut.alphabet }; for (const auto& state_closure_pair : eps_closure) { // For every state. State src_state = state_closure_pair.first; for (State eps_cl_state : state_closure_pair.second) { // For every state in its epsilon closure. @@ -598,17 +598,8 @@ std::ostream& std::operator<<(std::ostream& os, const Lvlfa& lvlfa) { return os; } -void mata::lvlfa::Lvlfa::fill_alphabet(OnTheFlyAlphabet& alphabet) const { - for (const StatePost& state_post: this->delta) { - for (const SymbolPost& symbol_post: state_post) { - alphabet.update_next_symbol_value(symbol_post.symbol); - alphabet.try_add_new_symbol(std::to_string(symbol_post.symbol), symbol_post.symbol); - } - } -} - Run mata::lvlfa::encode_word(const Alphabet* alphabet, const std::vector& input) { - return { .word = alphabet->translate_word(input) }; + return mata::nfa::encode_word(alphabet, input); } std::set mata::lvlfa::Lvlfa::get_words(unsigned max_length) { diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index d09407fa..29ae7e31 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -7,13 +7,13 @@ add_executable(tests parser.cc re2parser.cc mintermization.cc - # nfa/delta.cc - # nfa/nfa.cc - # nfa/builder.cc - # nfa/nfa-concatenation.cc - # nfa/nfa-intersection.cc - # nfa/nfa-profiling.cc - # nfa/nfa-plumbing.cc + nfa/delta.cc + nfa/nfa.cc + nfa/builder.cc + nfa/nfa-concatenation.cc + nfa/nfa-intersection.cc + nfa/nfa-profiling.cc + nfa/nfa-plumbing.cc lvlfa/delta.cc lvlfa/lvlfa.cc lvlfa/builder.cc diff --git a/tests/lvlfa/builder.cc b/tests/lvlfa/builder.cc index f81a8e05..08e680ae 100644 --- a/tests/lvlfa/builder.cc +++ b/tests/lvlfa/builder.cc @@ -16,14 +16,14 @@ using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; using Word = std::vector; -TEST_CASE("parse_from_mata()") { +TEST_CASE("lvlfa::parse_from_mata()") { Delta delta; SECTION("Simple automaton") { delta.add(0, 0, 0); delta.add(0, 1, 1); delta.add(1, 2, 0); - Lvlfa lvlfa{ delta, { 0 }, { 1 }, {}, 0}; + Lvlfa lvlfa{ delta, { 0 }, { 1 }, { 0 }, 1}; SECTION("from string") { Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; @@ -69,14 +69,25 @@ TEST_CASE("parse_from_mata()") { lvlfa.delta.add(1, 'b', 40); lvlfa.delta.add(51, 'z', 42); lvlfa.final 
= { 3, 103 }; - // lvlfa.levels = std::vector(lvlfa.num_of_states(), 0); - // lvlfa.max_level = 0; + lvlfa.levels = std::vector(lvlfa.num_of_states(), 0); + lvlfa.levels[3] = 42; + lvlfa.levels[103] = 42; + lvlfa.levels_cnt = 43; SECTION("from string") { Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; parsed.final.contains(103); parsed.initial.contains(50); parsed.delta.contains(51, 'z', 42); + CHECK(parsed.levels_cnt == 43); + + std::vector test_levels(parsed.levels); + for (const State &s : parsed.final) { + CHECK(test_levels[s] == 42); + test_levels[s] = 0; + } + CHECK(std::all_of(test_levels.begin(), test_levels.end(), [](Level l) { return l==0; })); + CHECK(are_equivalent(parsed, lvlfa)); } @@ -87,6 +98,15 @@ TEST_CASE("parse_from_mata()") { parsed.final.contains(103); parsed.initial.contains(50); parsed.delta.contains(51, 'z', 42); + CHECK(parsed.levels_cnt == 43); + + std::vector test_levels(parsed.levels); + for (const State &s : parsed.final) { + CHECK(test_levels[s] == 42); + test_levels[s] = 0; + } + CHECK(std::all_of(test_levels.begin(), test_levels.end(), [](Level l) { return l==0; })); + CHECK(are_equivalent(parsed, lvlfa)); } @@ -101,7 +121,188 @@ TEST_CASE("parse_from_mata()") { parsed.final.contains(103); parsed.initial.contains(50); parsed.delta.contains(51, 'z', 42); + CHECK(parsed.levels_cnt == 43); + + std::vector test_levels(parsed.levels); + for (const State &s : parsed.final) { + CHECK(test_levels[s] == 42); + test_levels[s] = 0; + } + CHECK(std::all_of(test_levels.begin(), test_levels.end(), [](Level l) { return l==0; })); + CHECK(are_equivalent(parsed, lvlfa)); } } + + SECTION("levels testing") { + SECTION("ascending") { + Lvlfa lvlfa; + lvlfa.delta.add(0, 1, 1); + lvlfa.delta.add(1, 1, 2); + lvlfa.delta.add(2, 1, 3); + lvlfa.delta.add(3, 1, 4); + lvlfa.delta.add(4, 1, 5); + lvlfa.delta.add(5, 1, 6); + lvlfa.delta.add(6, 1, 7); + lvlfa.delta.add(7, 1, 8); + lvlfa.delta.add(8, 1, 9); + lvlfa.delta.add(9, 1, 10); + lvlfa.initial.insert(0); + lvlfa.final.insert(10); + lvlfa.levels = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; + lvlfa.levels_cnt = 11; + + SECTION("from string") { + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; + + REQUIRE(parsed.initial.size() == 1); + REQUIRE(parsed.final.size() == 1); + CHECK(parsed.levels_cnt == 11); + State s{ *parsed.initial.begin() }; + Level level = 0; + while (s != *parsed.final.begin()) { + CHECK(parsed.levels[s] == level); + REQUIRE(parsed.delta[s].size() == 1); + SymbolPost symbol_post = *parsed.delta[s].begin(); + REQUIRE(symbol_post.targets.size() == 1); + s = *symbol_post.targets.begin(); + level++; + } + CHECK(parsed.final.contains(s)); + CHECK(parsed.levels[s] == 10); + } + + SECTION("from stream") { + std::stringstream lvlfa_stream; + lvlfa.print_to_mata(lvlfa_stream); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; + + REQUIRE(parsed.initial.size() == 1); + REQUIRE(parsed.final.size() == 1); + CHECK(parsed.levels_cnt == 11); + State s{ *parsed.initial.begin() }; + Level level = 0; + while (s != *parsed.final.begin()) { + CHECK(parsed.levels[s] == level); + REQUIRE(parsed.delta[s].size() == 1); + SymbolPost symbol_post = *parsed.delta[s].begin(); + REQUIRE(symbol_post.targets.size() == 1); + s = *symbol_post.targets.begin(); + level++; + } + CHECK(parsed.final.contains(s)); + CHECK(parsed.levels[s] == 10); + } + + SECTION("from file") { + std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-levels_testing.mata" }; + std::fstream 
file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; + lvlfa.print_to_mata(file); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + file.close(); + std::filesystem::remove(lvlfa_file); + + REQUIRE(parsed.initial.size() == 1); + REQUIRE(parsed.final.size() == 1); + CHECK(parsed.levels_cnt == 11); + State s{ *parsed.initial.begin() }; + Level level = 0; + while (s != *parsed.final.begin()) { + CHECK(parsed.levels[s] == level); + REQUIRE(parsed.delta[s].size() == 1); + SymbolPost symbol_post = *parsed.delta[s].begin(); + REQUIRE(symbol_post.targets.size() == 1); + s = *symbol_post.targets.begin(); + level++; + } + CHECK(parsed.final.contains(s)); + CHECK(parsed.levels[s] == 10); + } + } + + SECTION("descending") { + Lvlfa lvlfa; + lvlfa.delta.add(0, 1, 1); + lvlfa.delta.add(1, 1, 2); + lvlfa.delta.add(2, 1, 3); + lvlfa.delta.add(3, 1, 4); + lvlfa.delta.add(4, 1, 5); + lvlfa.delta.add(5, 1, 6); + lvlfa.delta.add(6, 1, 7); + lvlfa.delta.add(7, 1, 8); + lvlfa.delta.add(8, 1, 9); + lvlfa.delta.add(9, 1, 10); + lvlfa.initial.insert(0); + lvlfa.final.insert(10); + lvlfa.levels = { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }; + lvlfa.levels_cnt = 11; + + SECTION("from string") { + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; + + REQUIRE(parsed.initial.size() == 1); + REQUIRE(parsed.final.size() == 1); + CHECK(parsed.levels_cnt == 11); + State s{ *parsed.initial.begin() }; + Level level = 10; + while (s != *parsed.final.begin()) { + CHECK(parsed.levels[s] == level); + REQUIRE(parsed.delta[s].size() == 1); + SymbolPost symbol_post = *parsed.delta[s].begin(); + REQUIRE(symbol_post.targets.size() == 1); + s = *symbol_post.targets.begin(); + level--; + } + CHECK(parsed.final.contains(s)); + CHECK(parsed.levels[s] == 0); + } + + SECTION("from stream") { + std::stringstream lvlfa_stream; + lvlfa.print_to_mata(lvlfa_stream); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; + + REQUIRE(parsed.initial.size() == 1); + REQUIRE(parsed.final.size() == 1); + CHECK(parsed.levels_cnt == 11); + State s{ *parsed.initial.begin() }; + Level level = 10; + while (s != *parsed.final.begin()) { + CHECK(parsed.levels[s] == level); + REQUIRE(parsed.delta[s].size() == 1); + SymbolPost symbol_post = *parsed.delta[s].begin(); + REQUIRE(symbol_post.targets.size() == 1); + s = *symbol_post.targets.begin(); + level--; + } + CHECK(parsed.final.contains(s)); + CHECK(parsed.levels[s] == 0); + } + + SECTION("from file") { + std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-levels_testing.mata" }; + std::fstream file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; + lvlfa.print_to_mata(file); + Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + file.close(); + std::filesystem::remove(lvlfa_file); + + REQUIRE(parsed.initial.size() == 1); + REQUIRE(parsed.final.size() == 1); + CHECK(parsed.levels_cnt == 11); + State s{ *parsed.initial.begin() }; + Level level = 10; + while (s != *parsed.final.begin()) { + CHECK(parsed.levels[s] == level); + REQUIRE(parsed.delta[s].size() == 1); + SymbolPost symbol_post = *parsed.delta[s].begin(); + REQUIRE(symbol_post.targets.size() == 1); + s = *symbol_post.targets.begin(); + level--; + } + CHECK(parsed.final.contains(s)); + CHECK(parsed.levels[s] == 0); + } + } + } } diff --git a/tests/lvlfa/lvlfa-concatenation.cc b/tests/lvlfa/lvlfa-concatenation.cc index 53c37a24..6b953766 100644 --- a/tests/lvlfa/lvlfa-concatenation.cc +++ 
b/tests/lvlfa/lvlfa-concatenation.cc @@ -576,7 +576,7 @@ TEST_CASE("mata::lvlfa::concatenate() over epsilon symbol") { } } -TEST_CASE("(a|b)*") { +TEST_CASE("Lvlfa (a|b)*") { Lvlfa aut1; mata::parser::create_nfa(&aut1, "a*"); Lvlfa aut2; @@ -587,7 +587,7 @@ TEST_CASE("(a|b)*") { CHECK(are_equivalent(concatenated_aut, aut3)); } -TEST_CASE("Bug with epsilon transitions") { +TEST_CASE("Bug with epsilon transitions in Lvlfa") { Lvlfa lvlfa1{}; lvlfa1.initial.insert(0); lvlfa1.final.insert(3); @@ -736,7 +736,7 @@ TEST_CASE("mata::lvlfa::concatenate() inplace") { } -TEST_CASE("Concat_inplace performance", "[.profiling]") { +TEST_CASE("Concat_inplace performance of LVLFA", "[.profiling]") { Lvlfa base; base.initial.insert(0); base.final.insert(4); diff --git a/tests/lvlfa/lvlfa-intersection.cc b/tests/lvlfa/lvlfa-intersection.cc index dc586d03..a4e9c046 100644 --- a/tests/lvlfa/lvlfa-intersection.cc +++ b/tests/lvlfa/lvlfa-intersection.cc @@ -304,7 +304,7 @@ TEST_CASE("mata::lvlfa::intersection() for profiling", "[.profiling],[intersecti } } -TEST_CASE("Move semantics", "[.profiling][std::move]") { +TEST_CASE("Move semantics of LVLFA", "[.profiling][std::move]") { Lvlfa b{10}; b.initial.insert(0); b.final.insert({2, 4, 8, 7}); diff --git a/tests/lvlfa/lvlfa.cc b/tests/lvlfa/lvlfa.cc index 775f8653..53957e41 100644 --- a/tests/lvlfa/lvlfa.cc +++ b/tests/lvlfa/lvlfa.cc @@ -2743,7 +2743,7 @@ TEST_CASE("mata::lvlfa::Lvlfa::delta()") { Delta delta(6); } -TEST_CASE("A segmentation fault in the make_complement") { +TEST_CASE("A segmentation fault in the lvlfa::make_complement") { Lvlfa r(1); OnTheFlyAlphabet alph{}; alph["a"]; From 931dfc28cf32e7c5a1c8d75c062c850f2eedabc9 Mon Sep 17 00:00:00 2001 From: koniksedy Date: Mon, 12 Feb 2024 23:31:18 +0100 Subject: [PATCH 03/24] make_one_level nontested --- include/mata/lvlfa/lvlfa.hh | 6 +++ src/lvlfa/lvlfa.cc | 79 ++++++++++++++++++++++++++++++++++--- 2 files changed, 79 insertions(+), 6 deletions(-) diff --git a/include/mata/lvlfa/lvlfa.hh b/include/mata/lvlfa/lvlfa.hh index 70620f20..96e34d3b 100644 --- a/include/mata/lvlfa/lvlfa.hh +++ b/include/mata/lvlfa/lvlfa.hh @@ -164,6 +164,12 @@ public: */ void get_one_letter_aut(Lvlfa& result) const; + void make_one_level_aut(const utils::OrdVector &dcare_replacements = { DONT_CARE }); + + Lvlfa get_one_level_aut(const utils::OrdVector &dcare_replacements = { DONT_CARE }) const; + + void get_one_level_aut(Lvlfa& result, const utils::OrdVector &dcare_replacements = { DONT_CARE }) const; + /** * @brief Prints the automaton in DOT format * diff --git a/src/lvlfa/lvlfa.cc b/src/lvlfa/lvlfa.cc index f2883d63..d699a2fe 100644 --- a/src/lvlfa/lvlfa.cc +++ b/src/lvlfa/lvlfa.cc @@ -177,16 +177,83 @@ void Lvlfa::get_one_letter_aut(Lvlfa& result) const { result = get_one_letter_aut(); } +void Lvlfa::make_one_level_aut(const utils::OrdVector &dcare_replacements) { + bool dcare_for_dcare = dcare_replacements == utils::OrdVector({ DONT_CARE }); + std::vector transitions_to_del; + std::vector transitions_to_add; + + auto add_inner_transitions = [&](State src, Symbol symbol, State trg) { + if (symbol == DONT_CARE && !dcare_for_dcare) { + for (const Symbol replace_symbol : dcare_replacements) { + transitions_to_add.push_back({ src, replace_symbol, trg }); + } + } else { + transitions_to_add.push_back({ src, symbol, trg }); + } + }; + + Level src_lvl, trg_lvl, diff_lvl; + for (const auto &transition : delta.transitions()) { + src_lvl = levels[transition.source]; + trg_lvl = levels[transition.target]; + diff_lvl = (trg_lvl == 0) ? 
(levels_cnt - src_lvl) : trg_lvl - src_lvl; + + if (diff_lvl == 1 && transition.symbol == DONT_CARE && !dcare_for_dcare) { + transitions_to_del.push_back(transition); + for (const Symbol replace_symbol : dcare_replacements) { + transitions_to_add.push_back({ transition.source, replace_symbol, transition.target }); + } + } else if (diff_lvl > 1) { + transitions_to_del.push_back(transition); + State inner_src = transition.source; + Level inner_src_lvl = src_lvl; + + // The first iteration connecting original source state with inner state. + State inner_trg = add_state(); + Level inner_trg_lvl = src_lvl + 1; + levels[inner_trg] = inner_trg_lvl; + add_inner_transitions(inner_src, transition.symbol, inner_trg); + inner_src = inner_trg; + inner_src_lvl++; + inner_trg_lvl++; + + // Iterations 1 to n-1 connecting inner states. + Level pre_trg_lvl = (trg_lvl == 0) ? (levels_cnt - 1) : (trg_lvl - 1); + for (; inner_src_lvl < pre_trg_lvl; inner_src_lvl++, inner_trg_lvl++) { + inner_trg = add_state(); + levels[inner_trg] = inner_trg_lvl; + add_inner_transitions(inner_src, DONT_CARE, inner_trg); + inner_src = inner_trg; + } + + // The last iteration connecting last inner state with the original target state. + add_inner_transitions(inner_src, DONT_CARE, transition.target); + } + } + + for (const Transition &transition : transitions_to_add) { + delta.add(transition); + } + for (const Transition &transition : transitions_to_del) { + delta.remove(transition); + } +} + +Lvlfa Lvlfa::get_one_level_aut(const utils::OrdVector &dcare_replacements) const { + Lvlfa result{ *this }; + result.make_one_level_aut(dcare_replacements); + return result; +} + +void Lvlfa::get_one_level_aut(Lvlfa& result, const utils::OrdVector &dcare_replacements) const { + result = get_one_level_aut(dcare_replacements); +} + Lvlfa& Lvlfa::operator=(Lvlfa&& other) noexcept { if (this != &other) { - delta = std::move(other.delta); - initial = std::move(other.initial); - final = std::move(other.final); + mata::nfa::Nfa::operator=(other); levels = std::move(other.levels); levels_cnt = other.levels_cnt; - alphabet = other.alphabet; - attributes = std::move(other.attributes); - other.alphabet = nullptr; } return *this; } From 8a78b8948d0ea67eb2bf4da05052b1f9d25116a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 14 Feb 2024 08:41:35 +0100 Subject: [PATCH 04/24] Improve levels description --- include/mata/lvlfa/lvlfa.hh | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/include/mata/lvlfa/lvlfa.hh b/include/mata/lvlfa/lvlfa.hh index 96e34d3b..29f4f9b5 100644 --- a/include/mata/lvlfa/lvlfa.hh +++ b/include/mata/lvlfa/lvlfa.hh @@ -49,11 +49,7 @@ namespace mata::lvlfa { */ struct Lvlfa : public mata::nfa::Nfa { public: - /** - * @brief For state q, delta[q] keeps the list of transitions ordered by symbols. - * - * The set of states of this automaton are the numbers from 0 to the number of states minus one. - */ + /// @brief For state q, levels[q] gives the state a level. std::vector levels{}; Level levels_cnt = 0; /// Key value store for additional attributes for the LVLFA. 
Keys are attribute names as strings and the value types From 7f5757a28484f8c2914d70f7d5778d39cc3e75e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 14 Feb 2024 08:43:53 +0100 Subject: [PATCH 05/24] Create identity NFT --- include/mata/alphabet.hh | 13 ++++- include/mata/lvlfa/strings.hh | 11 ++++- src/CMakeLists.txt | 1 + src/lvlfa/strings.cc | 48 ++++++++++++++++++ tests/CMakeLists.txt | 1 + tests/lvlfa/lvlfa.cc | 13 +++++ tests/lvlfa/strings.cc | 93 +++++++++++++++++++++++++++++++++++ 7 files changed, 178 insertions(+), 2 deletions(-) create mode 100644 src/lvlfa/strings.cc create mode 100644 tests/lvlfa/strings.cc diff --git a/include/mata/alphabet.hh b/include/mata/alphabet.hh index 43460301..bc11c030 100644 --- a/include/mata/alphabet.hh +++ b/include/mata/alphabet.hh @@ -82,8 +82,13 @@ public: bool operator==(const Alphabet &) const = delete; + /** + * Checks whether the alphabet has any symbols. + */ + virtual bool empty() const = 0; + protected: - virtual const void *address() const { return this; } + virtual const void* address() const { return this; } }; // class Alphabet. /** @@ -115,6 +120,8 @@ public: IntAlphabet& operator=(const IntAlphabet& int_alphabet) = delete; + bool empty() const override { return false; } + protected: const void* address() const override { return &alphabet_instance; } @@ -234,6 +241,8 @@ public: */ size_t get_number_of_symbols() const { return symbols_.size(); } + bool empty() const override { return symbols_.empty(); } + private: mata::utils::OrdVector symbols_{}; ///< Map of string transition symbols to symbol values. Symbol next_symbol_value_{ 0 }; ///< Next value to be used for a newly added symbol. @@ -362,6 +371,8 @@ public: */ const StringToSymbolMap& get_symbol_map() const { return symbol_map_; } + bool empty() const override { return symbol_map_.empty(); } + private: StringToSymbolMap symbol_map_{}; ///< Map of string transition symbols to symbol values. Symbol next_symbol_value_{}; ///< Next value to be used for a newly added symbol. diff --git a/include/mata/lvlfa/strings.hh b/include/mata/lvlfa/strings.hh index e06a5c8c..b3dd87f1 100644 --- a/include/mata/lvlfa/strings.hh +++ b/include/mata/lvlfa/strings.hh @@ -5,5 +5,14 @@ #define MATA_LVLFA_STRING_SOLVING_HH_ #include "mata/nfa/strings.hh" +#include "lvlfa.hh" -#endif // MATA_NFA_STRING_SOLVING_HH_. +namespace mata::lvlfa { +/** + * Create identity transducer over the @p alphabet with @p level_cnt levels. + */ +Lvlfa create_identity(mata::Alphabet* alphabet, Level level_cnt = 2); + +} // Namespace mata::lvlfa. + +#endif // MATA_LVLFA_STRING_SOLVING_HH_. diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index afc04cc2..d8d688c6 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -30,6 +30,7 @@ add_library(libmata STATIC lvlfa/concatenation.cc lvlfa/operations.cc lvlfa/builder.cc + lvlfa/strings.cc ) # libmata needs at least c++20 diff --git a/src/lvlfa/strings.cc b/src/lvlfa/strings.cc new file mode 100644 index 00000000..429cf013 --- /dev/null +++ b/src/lvlfa/strings.cc @@ -0,0 +1,48 @@ +/* nfa-strings.hh -- Operations on NFAs for string solving. 
+ */ + +#include "mata/lvlfa/strings.hh" +#include "mata/lvlfa/lvlfa.hh" + +//using mata::lvlfa::Lvlfa; +using mata::lvlfa::Level; +using mata::Symbol; +using mata::lvlfa::State; +using mata::nfa::StatePost; +using mata::nfa::SymbolPost; +using namespace mata::lvlfa; + +Lvlfa mata::lvlfa::create_identity(mata::Alphabet* alphabet, Level level_cnt) { + if (level_cnt == 0) { throw std::runtime_error("NFT must have at least one level"); } + const auto alphabet_symbols{ alphabet->get_alphabet_symbols() }; + const size_t additional_states_per_symbol_num{ level_cnt - 1 }; + const size_t num_of_states{ alphabet_symbols.size() * additional_states_per_symbol_num + 1 }; + std::vector levels(num_of_states); + levels[0] = 0; + Level level{ 1 }; + for (State state{ 1 }; state < num_of_states; ++state) { + levels[state] = level; + const Level new_level{ level + 1 }; + level = new_level < level_cnt ? new_level : 1; + } + Lvlfa nft{ num_of_states, { 0 }, { 0 }, std::move(levels), level_cnt, alphabet }; + State state{ 0 }; + State new_state; + + for (const Symbol symbol: alphabet_symbols) { + level = 0; + new_state = 0; + for (; level < additional_states_per_symbol_num; ++level) { + new_state = state + 1; + if (level == 0) { + nft.delta.add(0, symbol, new_state); + } else { + nft.delta.add(state, symbol, new_state); + } + ++state; + } + nft.delta.add(new_state, symbol, 0); + } + return nft; +} + diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 29ae7e31..aec4af63 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -21,6 +21,7 @@ add_executable(tests lvlfa/lvlfa-intersection.cc lvlfa/lvlfa-profiling.cc lvlfa/lvlfa-plumbing.cc + lvlfa/strings.cc strings/nfa-noodlification.cc strings/nfa-segmentation.cc strings/nfa-string-solving.cc diff --git a/tests/lvlfa/lvlfa.cc b/tests/lvlfa/lvlfa.cc index 53957e41..f4a60a34 100644 --- a/tests/lvlfa/lvlfa.cc +++ b/tests/lvlfa/lvlfa.cc @@ -27,6 +27,19 @@ using Word = mata::Word; using IntAlphabet = mata::IntAlphabet; using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; +TEST_CASE("mata::lvlfa::Lvlfa()") { + Lvlfa nft{}; + nft.levels.resize(3); + nft.levels_cnt = 5; + CHECK(nft.levels_cnt == 5); + CHECK(nft.levels.size() == 3); + nft.levels[0] = 0; + nft.levels[1] = 3; + nft.levels[2] = 1; + CHECK(nft.levels[2] == 1); + CHECK(nft.levels == std::vector{ 0, 3, 1 }); +} + TEST_CASE("mata::lvlfa::size()") { Lvlfa lvlfa{}; CHECK(lvlfa.num_of_states() == 0); diff --git a/tests/lvlfa/strings.cc b/tests/lvlfa/strings.cc new file mode 100644 index 00000000..a7211382 --- /dev/null +++ b/tests/lvlfa/strings.cc @@ -0,0 +1,93 @@ +// TODO: some header + +#include +#include +#include + +#include + +#include "mata/lvlfa/lvlfa.hh" +#include "mata/lvlfa/builder.hh" +#include "mata/lvlfa/strings.hh" + +using namespace mata::lvlfa; +using Symbol = mata::Symbol; +using IntAlphabet = mata::IntAlphabet; +using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; +using mata::EnumAlphabet; + +using Word = std::vector; + +TEST_CASE("lvlfa::create_identity()") { + Lvlfa nft{}; + nft.initial = { 0 }; + nft.final = { 0 }; + SECTION("small identity nft") { + EnumAlphabet alphabet{ 0, 1, 2, 3 }; + nft.alphabet = &alphabet; + nft.delta.add(0, 0, 1); + nft.delta.add(1, 0, 2); + nft.delta.add(2, 0, 0); + nft.delta.add(0, 1, 3); + nft.delta.add(3, 1, 4); + nft.delta.add(4, 1, 0); + nft.delta.add(0, 2, 5); + nft.delta.add(5, 2, 6); + nft.delta.add(6, 2, 0); + nft.delta.add(0, 3, 7); + nft.delta.add(7, 3, 8); + nft.delta.add(8, 3, 0); + nft.levels_cnt = 3; + nft.levels.resize(nft.levels_cnt * ( 
alphabet.get_number_of_symbols() - 1)); + nft.levels[0] = 0; + nft.levels[1] = 1; + nft.levels[2] = 2; + nft.levels[3] = 1; + nft.levels[4] = 2; + nft.levels[5] = 1; + nft.levels[6] = 2; + nft.levels[7] = 1; + nft.levels[8] = 2; + Lvlfa nft_identity{ create_identity(&alphabet, 3) }; + CHECK(nft_identity.is_identical(nft)); + } + + SECTION("identity nft no symbols") { + EnumAlphabet alphabet{ }; + nft.alphabet = &alphabet; + nft.levels_cnt = 3; + nft.levels.resize(1); + nft.levels[0] = 0; + Lvlfa nft_identity{ create_identity(&alphabet, 3) }; + CHECK(nft_identity.is_identical(nft)); + } + + SECTION("identity nft one symbol") { + EnumAlphabet alphabet{ 0 }; + nft.alphabet = &alphabet; + nft.levels_cnt = 2; + nft.levels.resize(2); + nft.levels[0] = 0; + nft.levels[1] = 1; + nft.delta.add(0, 0, 1); + nft.delta.add(1, 0, 0); + Lvlfa nft_identity{ create_identity(&alphabet, 2) }; + CHECK(nft_identity.is_identical(nft)); + nft_identity = create_identity(&alphabet); + CHECK(nft_identity.is_identical(nft)); + } + + SECTION("small identity nft one level") { + EnumAlphabet alphabet{ 0, 1, 2, 3 }; + nft.alphabet = &alphabet; + nft.delta.add(0, 0, 0); + nft.delta.add(0, 1, 0); + nft.delta.add(0, 2, 0); + nft.delta.add(0, 3, 0); + nft.levels_cnt = 1; + nft.levels.resize(1); + nft.levels[0] = 0; + Lvlfa nft_identity{ create_identity(&alphabet, 1) }; + CHECK(nft_identity.is_identical(nft)); + } +} From c546f82f3723a710db839d7f9f8b3c1939ef7595 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 14 Feb 2024 08:45:33 +0100 Subject: [PATCH 06/24] Add moves wherever possible --- include/mata/lvlfa/lvlfa.hh | 13 ++++++++----- src/lvlfa/lvlfa.cc | 1 - 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/include/mata/lvlfa/lvlfa.hh b/include/mata/lvlfa/lvlfa.hh index 29f4f9b5..35717418 100644 --- a/include/mata/lvlfa/lvlfa.hh +++ b/include/mata/lvlfa/lvlfa.hh @@ -62,8 +62,10 @@ public: public: explicit Lvlfa(Delta delta = {}, utils::SparseSet initial_states = {}, - utils::SparseSet final_states = {}, std::vector levels = {}, Level levels_cnt = 1, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(delta, initial_states, final_states, alphabet), levels(std::move(levels)), levels_cnt(levels_cnt) {} + utils::SparseSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, + Alphabet* alphabet = nullptr) + : mata::nfa::Nfa(std::move(delta), std::move(initial_states), std::move(final_states), alphabet), + levels(std::move(levels)), levels_cnt(levels_cnt) {} /** * @brief Construct a new explicit LVLFA with num_of_states states and optionally set initial and final states. @@ -71,12 +73,13 @@ public: * @param[in] num_of_states Number of states for which to preallocate Delta. 
*/ explicit Lvlfa(const unsigned long num_of_states, StateSet initial_states = {}, - StateSet final_states = {}, std::vector levels = {}, Level levels_cnt = 1, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(num_of_states, initial_states, final_states, alphabet), levels(levels), levels_cnt(levels_cnt) {} + StateSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, Alphabet* + alphabet = nullptr) + : mata::nfa::Nfa(num_of_states, std::move(initial_states), std::move(final_states), alphabet), levels(std::move(levels)), levels_cnt(levels_cnt) {} explicit Lvlfa(const mata::nfa::Nfa& other) : mata::nfa::Nfa(other.delta, other.initial, other.final, other.alphabet), - levels(std::move(std::vector(other.num_of_states(), 0))), levels_cnt(1) {} + levels(std::vector(other.num_of_states(), 0)), levels_cnt(1) {} /** * @brief Construct a new explicit LVLFA from other LVLFA. diff --git a/src/lvlfa/lvlfa.cc b/src/lvlfa/lvlfa.cc index d699a2fe..9247984f 100644 --- a/src/lvlfa/lvlfa.cc +++ b/src/lvlfa/lvlfa.cc @@ -25,7 +25,6 @@ const std::string mata::lvlfa::TYPE_NFA = "LVLFA"; Lvlfa& Lvlfa::trim(StateRenaming* state_renaming) { - #ifdef _STATIC_STRUCTURES_ BoolVector useful_states{ useful_states() }; useful_states.clear(); From ac8881013a593d350ed6a4a1985c8b21a0ddf272 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 14 Feb 2024 10:37:40 +0100 Subject: [PATCH 07/24] Create identity with single replace --- include/mata/lvlfa/strings.hh | 6 +++++ include/mata/nfa/delta.hh | 1 + src/lvlfa/strings.cc | 9 +++++++ tests/lvlfa/strings.cc | 45 +++++++++++++++++++++++++++++++++++ 4 files changed, 61 insertions(+) diff --git a/include/mata/lvlfa/strings.hh b/include/mata/lvlfa/strings.hh index b3dd87f1..ab1085ad 100644 --- a/include/mata/lvlfa/strings.hh +++ b/include/mata/lvlfa/strings.hh @@ -13,6 +13,12 @@ namespace mata::lvlfa { */ Lvlfa create_identity(mata::Alphabet* alphabet, Level level_cnt = 2); +/** + * Create identity input/output transducer with 2 levels over the @p alphabet with @p level_cnt levels with single + * symbol @p from_symbol replaced with @to_symbol. + */ +Lvlfa create_identity_with_single_replace(mata::Alphabet* alphabet, Symbol from_symbol, Symbol to_symbol); + } // Namespace mata::lvlfa. #endif // MATA_LVLFA_STRING_SOLVING_HH_. diff --git a/include/mata/nfa/delta.hh b/include/mata/nfa/delta.hh index 0811d49c..e4e6f873 100644 --- a/include/mata/nfa/delta.hh +++ b/include/mata/nfa/delta.hh @@ -113,6 +113,7 @@ public: // dangerous, breaks the sortedness invariant using super::push_back; // is adding non-const version as well ok? 
+ using super::front; using super::back; using super::filter; diff --git a/src/lvlfa/strings.cc b/src/lvlfa/strings.cc index 429cf013..21694d8b 100644 --- a/src/lvlfa/strings.cc +++ b/src/lvlfa/strings.cc @@ -46,3 +46,12 @@ Lvlfa mata::lvlfa::create_identity(mata::Alphabet* alphabet, Level level_cnt) { return nft; } +Lvlfa mata::lvlfa::create_identity_with_single_replace( + mata::Alphabet *alphabet, const Symbol from_symbol, const Symbol to_symbol) { + Lvlfa nft{ create_identity(alphabet) }; + if (alphabet->empty()) { throw std::runtime_error("Alphabet does not contain symbol being replaced."); } + auto symbol_post_to_state_with_replace{ nft.delta.mutable_state_post(0).find(from_symbol) }; + const State from_replace_state{ symbol_post_to_state_with_replace->targets.front() }; + nft.delta.mutable_state_post(from_replace_state).front().symbol = to_symbol; + return nft; +} diff --git a/tests/lvlfa/strings.cc b/tests/lvlfa/strings.cc index a7211382..a164ef67 100644 --- a/tests/lvlfa/strings.cc +++ b/tests/lvlfa/strings.cc @@ -91,3 +91,48 @@ TEST_CASE("lvlfa::create_identity()") { CHECK(nft_identity.is_identical(nft)); } } + +TEST_CASE("lvlfa::create_identity_with_single_replace()") { + Lvlfa nft{}; + nft.initial = { 0 }; + nft.final = { 0 }; + SECTION("small identity nft") { + EnumAlphabet alphabet{ 0, 1, 2, 3 }; + nft.alphabet = &alphabet; + nft.delta.add(0, 0, 1); + nft.delta.add(1, 0, 0); + nft.delta.add(0, 1, 2); + nft.delta.add(2, 3, 0); + nft.delta.add(0, 2, 3); + nft.delta.add(3, 2, 0); + nft.delta.add(0, 3, 4); + nft.delta.add(4, 3, 0); + nft.levels_cnt = 2; + nft.levels.resize(5); + nft.levels[0] = 0; + nft.levels[1] = 1; + nft.levels[2] = 1; + nft.levels[3] = 1; + nft.levels[4] = 1; + Lvlfa nft_identity_with_replace{ create_identity_with_single_replace(&alphabet, 1, 3) }; + CHECK(nft_identity_with_replace.is_identical(nft)); + } + + SECTION("identity nft no symbols") { + EnumAlphabet alphabet{}; + CHECK_THROWS(create_identity_with_single_replace(&alphabet, 1, 2)); + } + + SECTION("identity nft one symbol") { + EnumAlphabet alphabet{ 0 }; + nft.alphabet = &alphabet; + nft.levels_cnt = 2; + nft.levels.resize(2); + nft.levels[0] = 0; + nft.levels[1] = 1; + nft.delta.add(0, 0, 1); + nft.delta.add(1, 1, 0); + Lvlfa nft_identity{ create_identity_with_single_replace(&alphabet, 0, 1) }; + CHECK(nft_identity.is_identical(nft)); + } +} From 6ba4248c24ccdc1e59f27cc5fc7a3923fa4dd123 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 14 Feb 2024 11:11:18 +0100 Subject: [PATCH 08/24] Rename 'lvlfa' to 'nft' --- include/mata/lvlfa/strings.hh | 24 - include/mata/{lvlfa => nft}/algorithms.hh | 48 +- include/mata/{lvlfa => nft}/builder.hh | 46 +- include/mata/{lvlfa => nft}/delta.hh | 10 +- include/mata/{lvlfa/lvlfa.hh => nft/nft.hh} | 126 ++-- include/mata/{lvlfa => nft}/plumbing.hh | 34 +- include/mata/nft/strings.hh | 24 + include/mata/{lvlfa => nft}/types.hh | 10 +- include/mata/parser/inter-aut.hh | 4 +- src/CMakeLists.txt | 18 +- src/inter-aut.cc | 10 +- src/{lvlfa => nft}/builder.cc | 80 +-- src/{lvlfa => nft}/complement.cc | 20 +- src/{lvlfa => nft}/concatenation.cc | 30 +- src/{lvlfa => nft}/inclusion.cc | 42 +- src/{lvlfa => nft}/intersection.cc | 28 +- src/{lvlfa/lvlfa.cc => nft/nft.cc} | 46 +- src/{lvlfa => nft}/operations.cc | 82 +-- src/{lvlfa => nft}/strings.cc | 20 +- src/{lvlfa => nft}/universal.cc | 22 +- tests/CMakeLists.txt | 16 +- tests/{lvlfa => nft}/builder.cc | 198 +++--- tests/{lvlfa => nft}/delta.cc | 248 ++++---- .../nft-concatenation.cc} | 120 
++-- .../nft-intersection.cc} | 32 +- .../lvlfa-plumbing.cc => nft/nft-plumbing.cc} | 50 +- .../nft-profiling.cc} | 52 +- tests/{lvlfa/lvlfa.cc => nft/nft.cc} | 602 +++++++++--------- tests/{lvlfa => nft}/strings.cc | 28 +- tests/{lvlfa => nft}/utils.hh | 0 30 files changed, 1035 insertions(+), 1035 deletions(-) delete mode 100644 include/mata/lvlfa/strings.hh rename include/mata/{lvlfa => nft}/algorithms.hh (72%) rename include/mata/{lvlfa => nft}/builder.hh (58%) rename include/mata/{lvlfa => nft}/delta.hh (94%) rename include/mata/{lvlfa/lvlfa.hh => nft/nft.hh} (78%) rename include/mata/{lvlfa => nft}/plumbing.hh (69%) create mode 100644 include/mata/nft/strings.hh rename include/mata/{lvlfa => nft}/types.hh (84%) rename src/{lvlfa => nft}/builder.cc (78%) rename src/{lvlfa => nft}/complement.cc (81%) rename src/{lvlfa => nft}/concatenation.cc (85%) rename src/{lvlfa => nft}/inclusion.cc (91%) rename src/{lvlfa => nft}/intersection.cc (93%) rename src/{lvlfa/lvlfa.cc => nft/nft.cc} (87%) rename src/{lvlfa => nft}/operations.cc (91%) rename src/{lvlfa => nft}/strings.cc (79%) rename src/{lvlfa => nft}/universal.cc (87%) rename tests/{lvlfa => nft}/builder.cc (58%) rename tests/{lvlfa => nft}/delta.cc (72%) rename tests/{lvlfa/lvlfa-concatenation.cc => nft/nft-concatenation.cc} (94%) rename tests/{lvlfa/lvlfa-intersection.cc => nft/nft-intersection.cc} (94%) rename tests/{lvlfa/lvlfa-plumbing.cc => nft/nft-plumbing.cc} (62%) rename tests/{lvlfa/lvlfa-profiling.cc => nft/nft-profiling.cc} (67%) rename tests/{lvlfa/lvlfa.cc => nft/nft.cc} (88%) rename tests/{lvlfa => nft}/strings.cc (83%) rename tests/{lvlfa => nft}/utils.hh (100%) diff --git a/include/mata/lvlfa/strings.hh b/include/mata/lvlfa/strings.hh deleted file mode 100644 index ab1085ad..00000000 --- a/include/mata/lvlfa/strings.hh +++ /dev/null @@ -1,24 +0,0 @@ -/* nfa-strings.hh -- Operations on NFAs for string solving. - */ - -#ifndef MATA_LVLFA_STRING_SOLVING_HH_ -#define MATA_LVLFA_STRING_SOLVING_HH_ - -#include "mata/nfa/strings.hh" -#include "lvlfa.hh" - -namespace mata::lvlfa { -/** - * Create identity transducer over the @p alphabet with @p level_cnt levels. - */ -Lvlfa create_identity(mata::Alphabet* alphabet, Level level_cnt = 2); - -/** - * Create identity input/output transducer with 2 levels over the @p alphabet with @p level_cnt levels with single - * symbol @p from_symbol replaced with @to_symbol. - */ -Lvlfa create_identity_with_single_replace(mata::Alphabet* alphabet, Symbol from_symbol, Symbol to_symbol); - -} // Namespace mata::lvlfa. - -#endif // MATA_LVLFA_STRING_SOLVING_HH_. diff --git a/include/mata/lvlfa/algorithms.hh b/include/mata/nft/algorithms.hh similarity index 72% rename from include/mata/lvlfa/algorithms.hh rename to include/mata/nft/algorithms.hh index 56f8067a..bcc351e7 100644 --- a/include/mata/lvlfa/algorithms.hh +++ b/include/mata/nft/algorithms.hh @@ -1,17 +1,17 @@ -/* algorithms.hh -- Wrapping up algorithms for Lvlfa manipulation which would be otherwise in anonymous namespaces. +/* algorithms.hh -- Wrapping up algorithms for Nft manipulation which would be otherwise in anonymous namespaces. */ -#ifndef MATA_LVLFA_INTERNALS_HH_ -#define MATA_LVLFA_INTERNALS_HH_ +#ifndef MATA_NFT_INTERNALS_HH_ +#define MATA_NFT_INTERNALS_HH_ -#include "lvlfa.hh" +#include "nft.hh" #include "mata/simlib/util/binary_relation.hh" /** - * Concrete LVLFA implementations of algorithms, such as complement, inclusion, or universality checking. 
+ * Concrete NFT implementations of algorithms, such as complement, inclusion, or universality checking. * - * This is a separation of the implementation from the interface defined in mata::lvlfa. - * Note, that in mata::lvlfa interface, there are particular dispatch functions calling + * This is a separation of the implementation from the interface defined in mata::nft. + * Note, that in mata::nft interface, there are particular dispatch functions calling * these function according to parameters provided by a user. * E.g. we can call the following function: `is_universal(aut, alph, {{'algorithm', 'antichains'}})` * to check for universality based on antichain-based algorithm. @@ -23,14 +23,14 @@ * 4. Intersection/concatenation with epsilon transitions, or, * 5. Computing relation. */ -namespace mata::lvlfa::algorithms { +namespace mata::nft::algorithms { /** * Brzozowski minimization of automata (revert -> determinize -> revert -> determinize). * @param[in] aut Automaton to be minimized. * @return Minimized automaton. */ -Lvlfa minimize_brzozowski(const Lvlfa& aut); +Nft minimize_brzozowski(const Nft& aut); /** * Complement implemented by determization, adding sink state and making automaton complete. Then it adds final states @@ -41,7 +41,7 @@ Lvlfa minimize_brzozowski(const Lvlfa& aut); * minimization. * @return Complemented automaton. */ -Lvlfa complement_classical(const Lvlfa& aut, const mata::utils::OrdVector& symbols, +Nft complement_classical(const Nft& aut, const mata::utils::OrdVector& symbols, bool minimize_during_determinization = false); /** @@ -55,7 +55,7 @@ Lvlfa complement_classical(const Lvlfa& aut, const mata::utils::OrdVector && final_condition, +Nft product(const Nft& lhs, const Nft& rhs, const std::function && final_condition, const Symbol first_epsilon = EPSILON, std::unordered_map, State> *prod_map = nullptr); /** - * @brief Concatenate two LVLFAs. + * @brief Concatenate two NFTs. * * Supports epsilon symbols when @p use_epsilon is set to true. * @param[in] lhs First automaton to concatenate. @@ -117,9 +117,9 @@ Lvlfa product(const Lvlfa& lhs, const Lvlfa& rhs, const std::function #include "mata/nfa/builder.hh" @@ -14,16 +14,16 @@ /** * Namespace providing options to build NFAs. */ -namespace mata::lvlfa::builder { +namespace mata::nft::builder { -using namespace mata::lvlfa; +using namespace mata::nft; using NameStateMap = std::unordered_map; /** * Create an automaton accepting only a single @p word. */ -Lvlfa create_single_word_lvlfa(const std::vector& word); +Nft create_single_word_nft(const std::vector& word); /** * Create an automaton accepting only a single @p word. @@ -33,17 +33,17 @@ Lvlfa create_single_word_lvlfa(const std::vector& word); * translations for all of the word symbols. If left empty, a new alphabet with only the symbols of the word will be * created. */ -Lvlfa create_single_word_lvlfa(const std::vector& word, Alphabet* alphabet = nullptr); +Nft create_single_word_nft(const std::vector& word, Alphabet* alphabet = nullptr); /** * Create automaton accepting only epsilon string. */ -Lvlfa create_empty_string_lvlfa(); +Nft create_empty_string_nft(); /** * Create automaton accepting sigma star over the passed alphabet using DONT_CARE symbol. */ -Lvlfa create_sigma_star_lvlfa(); +Nft create_sigma_star_nft(); /** * Create automaton accepting sigma star over the passed alphabet. @@ -51,21 +51,21 @@ Lvlfa create_sigma_star_lvlfa(); * @param[in] alphabet Alphabet to construct sigma star automaton with. 
When alphabet is left empty, the default empty * alphabet is used, creating an automaton accepting only the empty string. */ -Lvlfa create_sigma_star_lvlfa(Alphabet* alphabet = new OnTheFlyAlphabet{}); +Nft create_sigma_star_nft(Alphabet* alphabet = new OnTheFlyAlphabet{}); /** Loads an automaton from Parsed object */ // TODO this function should the same thing as the one taking IntermediateAut or be deleted -Lvlfa construct(const mata::parser::ParsedSection& parsec, Alphabet* alphabet, NameStateMap* state_map = nullptr); +Nft construct(const mata::parser::ParsedSection& parsec, Alphabet* alphabet, NameStateMap* state_map = nullptr); /** Loads an automaton from Parsed object */ -Lvlfa construct(const mata::IntermediateAut& inter_aut, Alphabet* alphabet, NameStateMap* state_map = nullptr); +Nft construct(const mata::IntermediateAut& inter_aut, Alphabet* alphabet, NameStateMap* state_map = nullptr); /** Loads an automaton from Parsed object; version for python binding */ void construct( - Lvlfa* result, const mata::IntermediateAut& inter_aut, Alphabet* alphabet, NameStateMap* state_map = nullptr + Nft* result, const mata::IntermediateAut& inter_aut, Alphabet* alphabet, NameStateMap* state_map = nullptr ); template -Lvlfa construct(const ParsedObject& parsed, Alphabet* alphabet = nullptr, +Nft construct(const ParsedObject& parsed, Alphabet* alphabet = nullptr, NameStateMap* state_map = nullptr) { OnTheFlyAlphabet tmp_alphabet{}; if (!alphabet) { @@ -77,28 +77,28 @@ Lvlfa construct(const ParsedObject& parsed, Alphabet* alphabet = nullptr, /** * Parse NFA from the mata format in an input stream. * - * @param lvlfa_stream Input stream containing NFA in mata format. + * @param nft_stream Input stream containing NFA in mata format. * @throws std::runtime_error Parsing of NFA fails. */ -Lvlfa parse_from_mata(std::istream& lvlfa_stream); +Nft parse_from_mata(std::istream& nft_stream); /** * Parse NFA from the mata format in a string. * - * @param lvlfa_stream String containing NFA in mata format. + * @param nft_stream String containing NFA in mata format. * @throws std::runtime_error Parsing of NFA fails. */ -Lvlfa parse_from_mata(const std::string& lvlfa_in_mata); +Nft parse_from_mata(const std::string& nft_in_mata); /** * Parse NFA from the mata format in a file. * - * @param lvlfa_stream Path to the file containing NFA in mata format. - * @throws std::runtime_error @p lvlfa_file does not exist. + * @param nft_stream Path to the file containing NFA in mata format. + * @throws std::runtime_error @p nft_file does not exist. * @throws std::runtime_error Parsing of NFA fails. */ -Lvlfa parse_from_mata(const std::filesystem::path& lvlfa_file); +Nft parse_from_mata(const std::filesystem::path& nft_file); -} // namespace mata::lvlfa::builder. +} // namespace mata::nft::builder. -#endif //LIBMATA_LVLFA_BUILDER_HH +#endif //LIBMATA_NFT_BUILDER_HH diff --git a/include/mata/lvlfa/delta.hh b/include/mata/nft/delta.hh similarity index 94% rename from include/mata/lvlfa/delta.hh rename to include/mata/nft/delta.hh index 1a5d15a3..9130fc8f 100644 --- a/include/mata/lvlfa/delta.hh +++ b/include/mata/nft/delta.hh @@ -1,18 +1,18 @@ // TODO: Insert file header. 
-#ifndef MATA_LVLFA_DELTA_HH -#define MATA_LVLFA_DELTA_HH +#ifndef MATA_NFT_DELTA_HH +#define MATA_NFT_DELTA_HH #include "mata/utils/sparse-set.hh" #include "mata/utils/synchronized-iterator.hh" #include "mata/alphabet.hh" -#include "mata/lvlfa/types.hh" +#include "mata/nft/types.hh" #include "mata/nfa/delta.hh" #include -namespace mata::lvlfa { +namespace mata::nft { /// A single transition in Delta represented as a triple(source, symbol, target). using Transition = mata::nfa::Transition; @@ -60,6 +60,6 @@ using SynchronizedExistentialSymbolPostIterator = mata::nfa::SynchronizedExisten */ using Delta = mata::nfa::Delta; -} // namespace mata::lvlfa. +} // namespace mata::nft. #endif //MATA_DELTA_HH diff --git a/include/mata/lvlfa/lvlfa.hh b/include/mata/nft/nft.hh similarity index 78% rename from include/mata/lvlfa/lvlfa.hh rename to include/mata/nft/nft.hh index 35717418..c3ed2f80 100644 --- a/include/mata/lvlfa/lvlfa.hh +++ b/include/mata/nft/nft.hh @@ -1,8 +1,8 @@ -/* lvlfa.hh -- Nondeterministic finite automaton (over finite words). +/* nft.hh -- Nondeterministic finite automaton (over finite words). */ -#ifndef MATA_LVLFA_HH_ -#define MATA_LVLFA_HH_ +#ifndef MATA_NFT_HH_ +#define MATA_NFT_HH_ // Static data structures, such as search stack, in algorithms. Might have some effect on some algorithms (like // fragile_revert). @@ -39,20 +39,20 @@ * 2. Algorithms (operations, checks, tests), * 3. Constructions. * - * Other algorithms are included in mata::lvlfa::Plumbing (simplified API for, e.g., binding) - * and mata::lvlfa::algorithms (concrete implementations of algorithms, such as for complement). + * Other algorithms are included in mata::nft::Plumbing (simplified API for, e.g., binding) + * and mata::nft::algorithms (concrete implementations of algorithms, such as for complement). */ -namespace mata::lvlfa { +namespace mata::nft { /** - * A struct representing an LVLFA. + * A struct representing an NFT. */ -struct Lvlfa : public mata::nfa::Nfa { +struct Nft : public mata::nfa::Nfa { public: /// @brief For state q, levels[q] gives the state a level. std::vector levels{}; Level levels_cnt = 0; - /// Key value store for additional attributes for the LVLFA. Keys are attribute names as strings and the value types + /// Key value store for additional attributes for the NFT. Keys are attribute names as strings and the value types /// are up to the user. /// For example, we can set up attributes such as "state_dict" for state dictionary attribute mapping states to their /// respective names, or "transition_dict" for transition dictionary adding a human-readable meaning to each @@ -61,32 +61,32 @@ public: // dictionary in the attributes. public: - explicit Lvlfa(Delta delta = {}, utils::SparseSet initial_states = {}, + explicit Nft(Delta delta = {}, utils::SparseSet initial_states = {}, utils::SparseSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, Alphabet* alphabet = nullptr) : mata::nfa::Nfa(std::move(delta), std::move(initial_states), std::move(final_states), alphabet), levels(std::move(levels)), levels_cnt(levels_cnt) {} /** - * @brief Construct a new explicit LVLFA with num_of_states states and optionally set initial and final states. + * @brief Construct a new explicit NFT with num_of_states states and optionally set initial and final states. * * @param[in] num_of_states Number of states for which to preallocate Delta. 
*/ - explicit Lvlfa(const unsigned long num_of_states, StateSet initial_states = {}, + explicit Nft(const unsigned long num_of_states, StateSet initial_states = {}, StateSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, Alphabet* alphabet = nullptr) : mata::nfa::Nfa(num_of_states, std::move(initial_states), std::move(final_states), alphabet), levels(std::move(levels)), levels_cnt(levels_cnt) {} - explicit Lvlfa(const mata::nfa::Nfa& other) + explicit Nft(const mata::nfa::Nfa& other) : mata::nfa::Nfa(other.delta, other.initial, other.final, other.alphabet), levels(std::vector(other.num_of_states(), 0)), levels_cnt(1) {} /** - * @brief Construct a new explicit LVLFA from other LVLFA. + * @brief Construct a new explicit NFT from other NFT. */ - Lvlfa(const Lvlfa& other) = default; + Nft(const Nft& other) = default; - Lvlfa(Lvlfa&& other) noexcept + Nft(Nft&& other) noexcept : levels { std::move(other.levels) }, levels_cnt{ other.levels_cnt } { delta = std::move(other.delta); initial = std::move(other.initial); @@ -96,8 +96,8 @@ public: other.alphabet = nullptr; } - Lvlfa& operator=(const Lvlfa& other) = default; - Lvlfa& operator=(Lvlfa&& other) noexcept; + Nft& operator=(const Nft& other) = default; + Nft& operator=(Nft&& other) noexcept; /** * Add a new (fresh) state to the automaton. @@ -112,9 +112,9 @@ public: State add_state(State state); /** - * @brief Clear the underlying LVLFA to a blank LVLFA. + * @brief Clear the underlying NFT to a blank NFT. * - * The whole LVLFA is cleared, each member is set to its zero value. + * The whole NFT is cleared, each member is set to its zero value. */ void clear(); @@ -125,7 +125,7 @@ public: * essentially only useful for testing purposes. * @return True if automata are exactly identical, false otherwise. */ - bool is_identical(const Lvlfa& aut) const; + bool is_identical(const Nft& aut) const; /** * @brief Remove inaccessible (unreachable) and not co-accessible (non-terminating) states in-place. @@ -137,37 +137,37 @@ public: * @param[out] state_renaming Mapping of trimmed states to new states. * @return @c this after trimming. */ - Lvlfa& trim(StateRenaming* state_renaming = nullptr); + Nft& trim(StateRenaming* state_renaming = nullptr); /** * @brief In-place concatenation. */ - Lvlfa& concatenate(const Lvlfa& aut); + Nft& concatenate(const Nft& aut); /** * @brief In-place union */ - Lvlfa& uni(const Lvlfa &aut); + Nft& uni(const Nft &aut); /** * Unify transitions to create a directed graph with at most a single transition between two states. * @param[in] abstract_symbol Abstract symbol to use for transitions in digraph. * @return An automaton representing a directed graph. */ - Lvlfa get_one_letter_aut(Symbol abstract_symbol = 'x') const; + Nft get_one_letter_aut(Symbol abstract_symbol = 'x') const; /** * Unify transitions to create a directed graph with at most a single transition between two states. * * @param[out] result An automaton representing a directed graph. 
*/ - void get_one_letter_aut(Lvlfa& result) const; + void get_one_letter_aut(Nft& result) const; void make_one_level_aut(const utils::OrdVector &dcare_replacements = { DONT_CARE }); - Lvlfa get_one_level_aut(const utils::OrdVector &dcare_replacements = { DONT_CARE }) const; + Nft get_one_level_aut(const utils::OrdVector &dcare_replacements = { DONT_CARE }) const; - void get_one_level_aut(Lvlfa& result, const utils::OrdVector &dcare_replacements = { DONT_CARE }) const; + void get_one_level_aut(Nft& result, const utils::OrdVector &dcare_replacements = { DONT_CARE }) const; /** * @brief Prints the automaton in DOT format @@ -222,7 +222,7 @@ public: */ std::set get_words(unsigned max_length); -}; // struct Lvlfa. +}; // struct Nft. // Allow variadic number of arguments of the same type. // @@ -235,27 +235,27 @@ template using conjunction = std::is_same using AreAllOfType = typename conjunction...>::type; -Lvlfa uni(const Lvlfa &lhs, const Lvlfa &rhs); +Nft uni(const Nft &lhs, const Nft &rhs); /** - * @brief Compute intersection of two LVLFAs. + * @brief Compute intersection of two NFTs. * * Both automata can contain ε-transitions. The product preserves the ε-transitions, i.e., * for each each product state `(s, t)` with`s -ε-> p`, `(s, t) -ε-> (p, t)` is created, and vice versa. * * Automata must share alphabets. //TODO: this is not implemented yet. * - * @param[in] lhs First LVLFA to compute intersection for. - * @param[in] rhs Second LVLFA to compute intersection for. + * @param[in] lhs First NFT to compute intersection for. + * @param[in] rhs Second NFT to compute intersection for. * @param[in] first_epsilon smallest epsilon. //TODO: this should eventually be taken from the alphabet as anything larger than the largest symbol? * @param[out] prod_map Mapping of pairs of the original states (lhs_state, rhs_state) to new product states (not used internally, allocated only when !=nullptr, expensive). - * @return LVLFA as a product of LVLFAs @p lhs and @p rhs with ε-transitions preserved. + * @return NFT as a product of NFTs @p lhs and @p rhs with ε-transitions preserved. */ -Lvlfa intersection(const Lvlfa& lhs, const Lvlfa& rhs, +Nft intersection(const Nft& lhs, const Nft& rhs, const Symbol first_epsilon = EPSILON, std::unordered_map, State> *prod_map = nullptr); /** - * @brief Concatenate two LVLFAs. + * @brief Concatenate two NFTs. * * Supports epsilon symbols when @p use_epsilon is set to true. * @param[in] lhs First automaton to concatenate. @@ -266,7 +266,7 @@ Lvlfa intersection(const Lvlfa& lhs, const Lvlfa& rhs, * @return Concatenated automaton. */ // TODO: check how fast is using just concatenate over epsilon and then call remove_epsilon(). -Lvlfa concatenate(const Lvlfa& lhs, const Lvlfa& rhs, bool use_epsilon = false, +Nft concatenate(const Nft& lhs, const Nft& rhs, bool use_epsilon = false, StateRenaming* lhs_state_renaming = nullptr, StateRenaming* rhs_state_renaming = nullptr); /** @@ -279,7 +279,7 @@ Lvlfa concatenate(const Lvlfa& lhs, const Lvlfa& rhs, bool use_epsilon = false, * - "minimize": "true"/"false" (whether to compute minimal deterministic automaton for classical algorithm); * @return Complemented automaton. 
*/ -Lvlfa complement(const Lvlfa& aut, const Alphabet& alphabet, +Nft complement(const Nft& aut, const Alphabet& alphabet, const ParameterMap& params = {{ "algorithm", "classical" }, { "minimize", "false" }}); /** @@ -297,7 +297,7 @@ Lvlfa complement(const Lvlfa& aut, const Alphabet& alphabet, * - "minimize": "true"/"false" (whether to compute minimal deterministic automaton for classical algorithm); * @return Complemented automaton. */ -Lvlfa complement(const Lvlfa& aut, const utils::OrdVector& symbols, +Nft complement(const Nft& aut, const utils::OrdVector& symbols, const ParameterMap& params = {{ "algorithm", "classical" }, { "minimize", "false" }}); /** @@ -308,7 +308,7 @@ Lvlfa complement(const Lvlfa& aut, const utils::OrdVector& symbols, * - "algorithm": "brzozowski" * @return Minimal deterministic automaton. */ -Lvlfa minimize(const Lvlfa &aut, const ParameterMap& params = {{ "algorithm", "brzozowski" }}); +Nft minimize(const Nft &aut, const ParameterMap& params = {{ "algorithm", "brzozowski" }}); /** * @brief Determinize automaton. @@ -317,7 +317,7 @@ Lvlfa minimize(const Lvlfa &aut, const ParameterMap& params = {{ "algorithm", "b * @param[out] subset_map Map that maps sets of states of input automaton to states of determinized automaton. * @return Determinized automaton. */ -Lvlfa determinize(const Lvlfa& aut, std::unordered_map *subset_map = nullptr); +Nft determinize(const Nft& aut, std::unordered_map *subset_map = nullptr); /** * @brief Reduce the size of the automaton. @@ -328,55 +328,55 @@ Lvlfa determinize(const Lvlfa& aut, std::unordered_map *subset_ * - "algorithm": "simulation". * @return Reduced automaton. */ -Lvlfa reduce(const Lvlfa &aut, StateRenaming *state_renaming = nullptr, +Nft reduce(const Nft &aut, StateRenaming *state_renaming = nullptr, const ParameterMap& params = {{ "algorithm", "simulation" } }); /** - * @brief Checks inclusion of languages of two LVLFAs: @p smaller and @p bigger (smaller <= bigger). + * @brief Checks inclusion of languages of two NFTs: @p smaller and @p bigger (smaller <= bigger). * * @param[in] smaller First automaton to concatenate. * @param[in] bigger Second automaton to concatenate. * @param[out] cex Counterexample for the inclusion. - * @param[in] alphabet Alphabet of both LVLFAs to compute with. + * @param[in] alphabet Alphabet of both NFTs to compute with. * @param[in] params Optional parameters to control the equivalence check algorithm: * - "algorithm": "naive", "antichains" (Default: "antichains") * @return True if @p smaller is included in @p bigger, false otherwise. */ -bool is_included(const Lvlfa& smaller, const Lvlfa& bigger, Run* cex, const Alphabet* alphabet = nullptr, +bool is_included(const Nft& smaller, const Nft& bigger, Run* cex, const Alphabet* alphabet = nullptr, const ParameterMap& params = {{ "algorithm", "antichains" }}); /** - * @brief Checks inclusion of languages of two LVLFAs: @p smaller and @p bigger (smaller <= bigger). + * @brief Checks inclusion of languages of two NFTs: @p smaller and @p bigger (smaller <= bigger). * * @param[in] smaller First automaton to concatenate. * @param[in] bigger Second automaton to concatenate. - * @param[in] alphabet Alphabet of both LVLFAs to compute with. + * @param[in] alphabet Alphabet of both NFTs to compute with. * @param[in] params Optional parameters to control the equivalence check algorithm: * - "algorithm": "naive", "antichains" (Default: "antichains") * @return True if @p smaller is included in @p bigger, false otherwise. 
*/ -inline bool is_included(const Lvlfa& smaller, const Lvlfa& bigger, const Alphabet* const alphabet = nullptr, +inline bool is_included(const Nft& smaller, const Nft& bigger, const Alphabet* const alphabet = nullptr, const ParameterMap& params = {{ "algorithm", "antichains" }}) { return is_included(smaller, bigger, nullptr, alphabet, params); } /** - * @brief Perform equivalence check of two LVLFAs: @p lhs and @p rhs. + * @brief Perform equivalence check of two NFTs: @p lhs and @p rhs. * * @param[in] lhs First automaton to concatenate. * @param[in] rhs Second automaton to concatenate. - * @param[in] alphabet Alphabet of both LVLFAs to compute with. + * @param[in] alphabet Alphabet of both NFTs to compute with. * @param[in] params[ Optional parameters to control the equivalence check algorithm: * - "algorithm": "naive", "antichains" (Default: "antichains") * @return True if @p lhs and @p rhs are equivalent, false otherwise. */ -bool are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const Alphabet* alphabet, +bool are_equivalent(const Nft& lhs, const Nft& rhs, const Alphabet* alphabet, const ParameterMap& params = {{ "algorithm", "antichains"}}); /** - * @brief Perform equivalence check of two LVLFAs: @p lhs and @p rhs. + * @brief Perform equivalence check of two NFTs: @p lhs and @p rhs. * - * The current implementation of @c Lvlfa does not accept input alphabet. For this reason, an alphabet + * The current implementation of @c Nft does not accept input alphabet. For this reason, an alphabet * has to be created from all transitions each time an operation on alphabet is called. When calling this function, * the alphabet has to be computed first. * @@ -390,30 +390,30 @@ bool are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const Alphabet* alphabet * - "algorithm": "naive", "antichains" (Default: "antichains") * @return True if @p lhs and @p rhs are equivalent, false otherwise. */ -bool are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const ParameterMap& params = {{ "algorithm", "antichains"}}); +bool are_equivalent(const Nft& lhs, const Nft& rhs, const ParameterMap& params = {{ "algorithm", "antichains"}}); // Reverting the automaton by one of the three functions below, // currently simple_revert seems best (however, not tested enough). -Lvlfa revert(const Lvlfa& aut); +Nft revert(const Nft& aut); -// This revert algorithm is fragile, uses low level accesses to Lvlfa and static data structures, +// This revert algorithm is fragile, uses low level accesses to Nft and static data structures, // and it is potentially dangerous when there are used symbols with large numbers (allocates an array indexed by symbols) // It is faster asymptotically and for somewhat dense automata, // the same or a little bit slower than simple_revert otherwise. // Not affected by pre-reserving vectors. -Lvlfa fragile_revert(const Lvlfa& aut); +Nft fragile_revert(const Nft& aut); // Reverting the automaton by a simple algorithm, which does a lot of random access addition to Post and Move. // Much affected by pre-reserving vectors. -Lvlfa simple_revert(const Lvlfa& aut); +Nft simple_revert(const Nft& aut); // Reverting the automaton by a modification of the simple algorithm. // It replaces random access addition to SymbolPost by push_back and sorting later, so far seems the slowest of all, except on // dense automata, where it is almost as slow as simple_revert. Candidate for removal. 
-Lvlfa somewhat_simple_revert(const Lvlfa& aut); +Nft somewhat_simple_revert(const Nft& aut); // Removing epsilon transitions -Lvlfa remove_epsilon(const Lvlfa& aut, Symbol epsilon = EPSILON); +Nft remove_epsilon(const Nft& aut, Symbol epsilon = EPSILON); /** Encodes a vector of strings (each corresponding to one symbol) into a * @c Word instance @@ -423,10 +423,10 @@ Lvlfa remove_epsilon(const Lvlfa& aut, Symbol epsilon = EPSILON); // What are the symbol names and their sequences? Run encode_word(const Alphabet* alphabet, const std::vector& input); -} // namespace mata::lvlfa. +} // namespace mata::nft. namespace std { -std::ostream& operator<<(std::ostream& os, const mata::lvlfa::Lvlfa& lvlfa); +std::ostream& operator<<(std::ostream& os, const mata::nft::Nft& nft); } // namespace std. -#endif /* MATA_LVLFA_HH_ */ +#endif /* MATA_NFT_HH_ */ diff --git a/include/mata/lvlfa/plumbing.hh b/include/mata/nft/plumbing.hh similarity index 69% rename from include/mata/lvlfa/plumbing.hh rename to include/mata/nft/plumbing.hh index 2514e68c..86e578e4 100644 --- a/include/mata/lvlfa/plumbing.hh +++ b/include/mata/nft/plumbing.hh @@ -1,23 +1,23 @@ /* nfa-plumbings.hh -- Wrapping up different supporting functions. */ -#ifndef MATA_LVLFA_PLUMBING_HH_ -#define MATA_LVLFA_PLUMBING_HH_ +#ifndef MATA_NFT_PLUMBING_HH_ +#define MATA_NFT_PLUMBING_HH_ -#include "lvlfa.hh" +#include "nft.hh" #include "builder.hh" -using namespace mata::lvlfa::builder; +using namespace mata::nft::builder; /** * Simplified NFA API, used in binding to call NFA algorithms. * * In particular, this mostly includes operations and checks, that do not return Automaton, - * but instead take resulting automaton as pointer (e.g. `void f(Lvlfa* result, const Lvlfa& lhs, const Lvlfa& rhs)`). + * but instead take resulting automaton as pointer (e.g. `void f(Nft* result, const Nft& lhs, const Nft& rhs)`). 
*/ -namespace mata::lvlfa::plumbing { +namespace mata::nft::plumbing { inline void get_elements(StateSet* element_set, const BoolVector& bool_vec) { @@ -31,38 +31,38 @@ inline void get_elements(StateSet* element_set, const BoolVector& bool_vec) { } inline void complement( - Lvlfa* result, - const Lvlfa& aut, + Nft* result, + const Nft& aut, const Alphabet& alphabet, const ParameterMap& params = {{ "algorithm", "classical"}, { "minimize", "false"}}) { *result = complement(aut, alphabet, params); } -inline void minimize(Lvlfa* res, const Lvlfa &aut) { *res = minimize(aut); } +inline void minimize(Nft* res, const Nft &aut) { *res = minimize(aut); } -inline void determinize(Lvlfa* result, const Lvlfa& aut, std::unordered_map *subset_map = nullptr) { +inline void determinize(Nft* result, const Nft& aut, std::unordered_map *subset_map = nullptr) { *result = determinize(aut, subset_map); } -inline void reduce(Lvlfa* result, const Lvlfa &aut, StateRenaming *state_renaming = nullptr, +inline void reduce(Nft* result, const Nft &aut, StateRenaming *state_renaming = nullptr, const ParameterMap& params = {{ "algorithm", "simulation"}}) { *result = reduce(aut, state_renaming, params); } -inline void revert(Lvlfa* result, const Lvlfa& aut) { *result = revert(aut); } +inline void revert(Nft* result, const Nft& aut) { *result = revert(aut); } -inline void remove_epsilon(Lvlfa* result, const Lvlfa& aut, Symbol epsilon = EPSILON) { *result = remove_epsilon(aut, epsilon); } +inline void remove_epsilon(Nft* result, const Nft& aut, Symbol epsilon = EPSILON) { *result = remove_epsilon(aut, epsilon); } /** Loads an automaton from Parsed object */ template -void construct(Lvlfa* result, const ParsedObject& parsed, Alphabet* alphabet = nullptr, +void construct(Nft* result, const ParsedObject& parsed, Alphabet* alphabet = nullptr, NameStateMap* state_map = nullptr) { OnTheFlyAlphabet tmp_alphabet{}; if (!alphabet) { alphabet = &tmp_alphabet; } *result = builder::construct(parsed, alphabet, state_map); } -inline void uni(Lvlfa *unionAutomaton, const Lvlfa &lhs, const Lvlfa &rhs) { *unionAutomaton = uni(lhs, rhs); } +inline void uni(Nft *unionAutomaton, const Nft &lhs, const Nft &rhs) { *unionAutomaton = uni(lhs, rhs); } /** * @brief Compute intersection of two NFAs. @@ -79,7 +79,7 @@ inline void uni(Lvlfa *unionAutomaton, const Lvlfa &lhs, const Lvlfa &rhs) { *un * @param[out] prod_map Mapping of pairs of the original states (lhs_state, rhs_state) to new product states (not used internally, allocated only when !=nullptr, expensive). * @return NFA as a product of NFAs @p lhs and @p rhs with ε-transitions preserved. */ -inline void intersection(Lvlfa* res, const Lvlfa& lhs, const Lvlfa& rhs, Symbol first_epsilon = EPSILON, +inline void intersection(Nft* res, const Nft& lhs, const Nft& rhs, Symbol first_epsilon = EPSILON, std::unordered_map, State> *prod_map = nullptr) { *res = intersection(lhs, rhs, first_epsilon, prod_map); } @@ -89,7 +89,7 @@ inline void intersection(Lvlfa* res, const Lvlfa& lhs, const Lvlfa& rhs, Symbol * @param[out] lhs_result_state_renaming Map mapping lhs states to result states. * @param[out] rhs_result_state_renaming Map mapping rhs states to result states. 
*/ -inline void concatenate(Lvlfa* res, const Lvlfa& lhs, const Lvlfa& rhs, bool use_epsilon = false, +inline void concatenate(Nft* res, const Nft& lhs, const Nft& rhs, bool use_epsilon = false, StateRenaming* lhs_result_state_renaming = nullptr, StateRenaming* rhs_result_state_renaming = nullptr) { *res = concatenate(lhs, rhs, use_epsilon, lhs_result_state_renaming, rhs_result_state_renaming); } diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh new file mode 100644 index 00000000..c22c186d --- /dev/null +++ b/include/mata/nft/strings.hh @@ -0,0 +1,24 @@ +/* nfa-strings.hh -- Operations on NFAs for string solving. + */ + +#ifndef MATA_NFT_STRING_SOLVING_HH_ +#define MATA_NFT_STRING_SOLVING_HH_ + +#include "mata/nfa/strings.hh" +#include "nft.hh" + +namespace mata::nft { +/** + * Create identity transducer over the @p alphabet with @p level_cnt levels. + */ +Nft create_identity(mata::Alphabet* alphabet, Level level_cnt = 2); + +/** + * Create identity input/output transducer with 2 levels over the @p alphabet with @p level_cnt levels with single + * symbol @p from_symbol replaced with @to_symbol. + */ +Nft create_identity_with_single_replace(mata::Alphabet* alphabet, Symbol from_symbol, Symbol to_symbol); + +} // Namespace mata::nft. + +#endif // MATA_NFT_STRING_SOLVING_HH_. diff --git a/include/mata/lvlfa/types.hh b/include/mata/nft/types.hh similarity index 84% rename from include/mata/lvlfa/types.hh rename to include/mata/nft/types.hh index eb2eb889..2fb38269 100644 --- a/include/mata/lvlfa/types.hh +++ b/include/mata/nft/types.hh @@ -1,7 +1,7 @@ // TODO: Insert file header. -#ifndef MATA_LVLFA_TYPES_HH -#define MATA_LVLFA_TYPES_HH +#ifndef MATA_NFT_TYPES_HH +#define MATA_NFT_TYPES_HH #include "mata/alphabet.hh" #include "mata/parser/parser.hh" @@ -11,9 +11,9 @@ #include -namespace mata::lvlfa { +namespace mata::nft { -extern const std::string TYPE_NFA; +extern const std::string TYPE_NFT; using Level = unsigned; using State = mata::nfa::State; @@ -38,7 +38,7 @@ using ParameterMap = mata::nfa::ParameterMap; using Limits = mata::nfa::Limits; -struct Lvlfa; ///< A non-deterministic finite automaton. +struct Nft; ///< A non-deterministic finite automaton. /// An epsilon symbol which is now defined as the maximal value of data type used for symbols. 
constexpr Symbol EPSILON = mata::nfa::EPSILON; diff --git a/include/mata/parser/inter-aut.hh b/include/mata/parser/inter-aut.hh index 634b55f8..f6729d31 100644 --- a/include/mata/parser/inter-aut.hh +++ b/include/mata/parser/inter-aut.hh @@ -148,7 +148,7 @@ public: enum class AutomatonType { NFA, AFA, - LVLFA + NFT }; /** @@ -230,7 +230,7 @@ public: bool are_nodes_enum_type() const {return node_naming == Naming::ENUM;} bool is_bitvector() const {return alphabet_type == AlphabetType::BITVECTOR;} - bool is_lvlfa() const {return automaton_type == AutomatonType::LVLFA;} + bool is_nft() const {return automaton_type == AutomatonType::NFT;} bool is_nfa() const {return automaton_type == AutomatonType::NFA;} bool is_afa() const {return automaton_type == AutomatonType::AFA;} diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index d8d688c6..2c8bf801 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -22,15 +22,15 @@ add_library(libmata STATIC nfa/operations.cc nfa/builder.cc - lvlfa/lvlfa.cc - lvlfa/inclusion.cc - lvlfa/universal.cc - lvlfa/complement.cc - lvlfa/intersection.cc - lvlfa/concatenation.cc - lvlfa/operations.cc - lvlfa/builder.cc - lvlfa/strings.cc + nft/nft.cc + nft/inclusion.cc + nft/universal.cc + nft/complement.cc + nft/intersection.cc + nft/concatenation.cc + nft/operations.cc + nft/builder.cc + nft/strings.cc ) # libmata needs at least c++20 diff --git a/src/inter-aut.cc b/src/inter-aut.cc index 4e5f0bf2..c3780bba 100644 --- a/src/inter-aut.cc +++ b/src/inter-aut.cc @@ -344,8 +344,8 @@ bool has_atmost_one_auto_naming(const mata::IntermediateAut& aut) { aut.automaton_type = mata::IntermediateAut::AutomatonType::NFA; } else if (section.type.find("AFA") != std::string::npos) { aut.automaton_type = mata::IntermediateAut::AutomatonType::AFA; - } else if (section.type.find("LVLFA") != std::string::npos) { - aut.automaton_type = mata::IntermediateAut::AutomatonType::LVLFA; + } else if (section.type.find("NFT") != std::string::npos) { + aut.automaton_type = mata::IntermediateAut::AutomatonType::NFT; } aut.alphabet_type = get_alphabet_type(section.type); @@ -457,7 +457,7 @@ void mata::IntermediateAut::parse_transition(mata::IntermediateAut &aut, const s assert(false && "Unknown NFA type"); postfix.emplace_back(mata::FormulaNode::Type::OPERATOR, "&", "&", mata::FormulaNode::OperatorType::AND); - } else if (aut.automaton_type == mata::IntermediateAut::AutomatonType::LVLFA && tokens[tokens.size() - 2] != "&") { + } else if (aut.automaton_type == mata::IntermediateAut::AutomatonType::NFT && tokens[tokens.size() - 2] != "&") { // we need to take care about this case manually since user does not need to determine // symbol and state naming and put conjunction to transition if (aut.alphabet_type != mata::IntermediateAut::AlphabetType::BITVECTOR) { @@ -471,7 +471,7 @@ void mata::IntermediateAut::parse_transition(mata::IntermediateAut &aut, const s postfix = infix_to_postfix(aut, rhs); postfix.emplace_back(create_node(aut, last_token)); } else - assert(false && "Unknown LVLFA type"); + assert(false && "Unknown NFT type"); postfix.emplace_back(mata::FormulaNode::Type::OPERATOR, "&", "&", mata::FormulaNode::OperatorType::AND); } else @@ -539,7 +539,7 @@ std::vector mata::IntermediateAut::parse_from_mf(const ma result.reserve(parsed.size()); for (const parser::ParsedSection& parsed_section: parsed) { - if (parsed_section.type.find("FA") == std::string::npos) { + if (parsed_section.type.find("FA") == std::string::npos && parsed_section.type.find("FT") == std::string::npos) { continue; } 
result.push_back(mf_to_aut(parsed_section)); diff --git a/src/lvlfa/builder.cc b/src/nft/builder.cc similarity index 78% rename from src/lvlfa/builder.cc rename to src/nft/builder.cc index 3392f82d..c88f291d 100644 --- a/src/lvlfa/builder.cc +++ b/src/nft/builder.cc @@ -1,24 +1,24 @@ // TODO: Insert header file. -#include "mata/lvlfa/builder.hh" +#include "mata/nft/builder.hh" #include "mata/parser/mintermization.hh" -#include "mata/nfa/builder.hh" +#include "mata/nft/builder.hh" #include -using namespace mata::lvlfa; -using mata::lvlfa::Lvlfa; +using namespace mata::nft; +using mata::nft::Nft; using mata::Symbol; -Lvlfa builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphabet* alphabet, NameStateMap* state_map) { - Lvlfa aut; +Nft builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphabet* alphabet, NameStateMap* state_map) { + Nft aut; assert(nullptr != alphabet); // HACK - it should be only "parsec.type != TYPE_NFA" without the conjunction - if (parsec.type != TYPE_NFA && parsec.type != TYPE_NFA + "-explicit") { + if (parsec.type != TYPE_NFT && parsec.type != TYPE_NFT + "-explicit") { throw std::runtime_error(std::string(__FUNCTION__) + ": expecting type \"" + - TYPE_NFA + "\""); + TYPE_NFT + "\""); } bool remove_state_map = false; @@ -149,13 +149,13 @@ Lvlfa builder::construct(const mata::parser::ParsedSection& parsec, mata::Alphab return aut; } // construct(). -Lvlfa builder::construct(const mata::IntermediateAut& inter_aut, mata::Alphabet* alphabet, NameStateMap* state_map) { - Lvlfa aut; +Nft builder::construct(const mata::IntermediateAut& inter_aut, mata::Alphabet* alphabet, NameStateMap* state_map) { + Nft aut; assert(nullptr != alphabet); - if (!inter_aut.is_lvlfa()) { + if (!inter_aut.is_nft()) { throw std::runtime_error(std::string(__FUNCTION__) + ": expecting type \"" + - TYPE_NFA + "\""); + TYPE_NFT + "\""); } NameStateMap tmp_state_map; @@ -229,69 +229,69 @@ Lvlfa builder::construct(const mata::IntermediateAut& inter_aut, mata::Alphabet* } // construct(). 
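For orientation at this point in the rename, a minimal usage sketch of the builder entry points touched here, assuming the renamed headers mata/nft/builder.hh and mata/nft/nft.hh; the concrete word { 1, 2, 3 } and the final checks are illustrative only.

    #include "mata/nft/builder.hh"
    #include "mata/nft/nft.hh"

    using namespace mata::nft;

    int main() {
        // A transducer accepting exactly the (illustrative) word 1 2 3.
        Nft word_nft = builder::create_single_word_nft({ 1, 2, 3 });

        // Sigma star built without an alphabet: a single state with a DONT_CARE self-loop.
        Nft sigma_star = builder::create_sigma_star_nft();

        // The word automaton is non-empty and sigma star has exactly one state.
        return (!word_nft.is_lang_empty() && sigma_star.num_of_states() == 1) ? 0 : 1;
    }
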
void builder::construct( - mata::lvlfa::Lvlfa *result, + mata::nft::Nft *result, const mata::IntermediateAut &inter_aut, mata::Alphabet *alphabet, - mata::lvlfa::builder::NameStateMap *state_map + mata::nft::builder::NameStateMap *state_map ) { *result = construct(inter_aut, alphabet, state_map); } -Lvlfa builder::create_single_word_lvlfa(const std::vector& word) { - return Lvlfa(mata::nfa::builder::create_single_word_nfa(word)); +Nft builder::create_single_word_nft(const std::vector& word) { + return Nft(mata::nfa::builder::create_single_word_nfa(word)); } -Lvlfa builder::create_single_word_lvlfa(const std::vector& word, mata::Alphabet *alphabet) { - return Lvlfa(mata::nfa::builder::create_single_word_nfa(word, alphabet)); +Nft builder::create_single_word_nft(const std::vector& word, mata::Alphabet *alphabet) { + return Nft(mata::nfa::builder::create_single_word_nfa(word, alphabet)); } -Lvlfa builder::create_empty_string_lvlfa() { - return Lvlfa(mata::nfa::builder::create_empty_string_nfa()); +Nft builder::create_empty_string_nft() { + return Nft(mata::nfa::builder::create_empty_string_nfa()); } -Lvlfa builder::create_sigma_star_lvlfa() { - Lvlfa lvlfa{ 1, { 0 }, { 0 }, { 0 }, 1 }; - lvlfa.delta.add(0, DONT_CARE, 0); - return lvlfa; +Nft builder::create_sigma_star_nft() { + Nft nft{ 1, { 0 }, { 0 }, { 0 }, 1 }; + nft.delta.add(0, DONT_CARE, 0); + return nft; } -Lvlfa builder::create_sigma_star_lvlfa(mata::Alphabet* alphabet) { - return Lvlfa(mata::nfa::builder::create_sigma_star_nfa(alphabet)); +Nft builder::create_sigma_star_nft(mata::Alphabet* alphabet) { + return Nft(mata::nfa::builder::create_sigma_star_nfa(alphabet)); } -Lvlfa builder::parse_from_mata(std::istream& lvlfa_stream) { - const std::string lvlfa_str = "LVLFA"; - parser::Parsed parsed{ parser::parse_mf(lvlfa_stream) }; +Nft builder::parse_from_mata(std::istream& nft_stream) { + const std::string nft_str = "NFT"; + parser::Parsed parsed{ parser::parse_mf(nft_stream) }; if (parsed.size() != 1) { throw std::runtime_error("The number of sections in the input file is '" + std::to_string(parsed.size()) + "'. Required is '1'.\n"); } const std::string automaton_type{ parsed[0].type }; - if (automaton_type.compare(0, lvlfa_str.length(), lvlfa_str) != 0) { - throw std::runtime_error("The type of input automaton is '" + automaton_type + "'. Required is 'LVLFA'\n"); + if (automaton_type.compare(0, nft_str.length(), nft_str) != 0) { + throw std::runtime_error("The type of input automaton is '" + automaton_type + "'. 
Required is 'NFT'\n"); } IntAlphabet alphabet; // return construct(IntermediateAut::parse_from_mf(parsed)[0], &alphabet); return construct(parsed[0], &alphabet); } -Lvlfa builder::parse_from_mata(const std::filesystem::path& lvlfa_file) { - std::ifstream file_stream{ lvlfa_file }; +Nft builder::parse_from_mata(const std::filesystem::path& nft_file) { + std::ifstream file_stream{ nft_file }; if (!file_stream) { - throw std::runtime_error("Could not open file \'" + lvlfa_file.string() + "'\n"); + throw std::runtime_error("Could not open file \'" + nft_file.string() + "'\n"); } - Lvlfa lvlfa; + Nft nft; try { - lvlfa = parse_from_mata(file_stream); + nft = parse_from_mata(file_stream); } catch (const std::exception& ex) { file_stream.close(); throw; } - return lvlfa; + return nft; } -Lvlfa builder::parse_from_mata(const std::string& lvlfa_in_mata) { - std::istringstream lvlfa_stream(lvlfa_in_mata); - return parse_from_mata(lvlfa_stream); +Nft builder::parse_from_mata(const std::string& nft_in_mata) { + std::istringstream nft_stream(nft_in_mata); + return parse_from_mata(nft_stream); } diff --git a/src/lvlfa/complement.cc b/src/nft/complement.cc similarity index 81% rename from src/lvlfa/complement.cc rename to src/nft/complement.cc index 814ae405..7d592665 100644 --- a/src/lvlfa/complement.cc +++ b/src/nft/complement.cc @@ -1,16 +1,16 @@ -/* lvlfa-complement.cc -- LVLFA complement +/* nft-complement.cc -- NFT complement */ // MATA headers -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/algorithms.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/algorithms.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using namespace mata::utils; -Lvlfa mata::lvlfa::algorithms::complement_classical(const Lvlfa& aut, const OrdVector& symbols, +Nft mata::nft::algorithms::complement_classical(const Nft& aut, const OrdVector& symbols, bool minimize_during_determinization) { - Lvlfa result; + Nft result; State sink_state; if (minimize_during_determinization) { result = minimize_brzozowski(aut); // brzozowski minimization makes it deterministic @@ -39,12 +39,12 @@ Lvlfa mata::lvlfa::algorithms::complement_classical(const Lvlfa& aut, const OrdV return result; } -Lvlfa mata::lvlfa::complement(const Lvlfa& aut, const Alphabet& alphabet, const ParameterMap& params) { - return mata::lvlfa::complement(aut, alphabet.get_alphabet_symbols(), params); +Nft mata::nft::complement(const Nft& aut, const Alphabet& alphabet, const ParameterMap& params) { + return mata::nft::complement(aut, alphabet.get_alphabet_symbols(), params); } -Lvlfa mata::lvlfa::complement(const Lvlfa& aut, const mata::utils::OrdVector& symbols, const ParameterMap& params) { - Lvlfa result; +Nft mata::nft::complement(const Nft& aut, const mata::utils::OrdVector& symbols, const ParameterMap& params) { + Nft result; // Setting the requested algorithm. 
decltype(algorithms::complement_classical)* algo = algorithms::complement_classical; if (!haskey(params, "algorithm")) { diff --git a/src/lvlfa/concatenation.cc b/src/nft/concatenation.cc similarity index 85% rename from src/lvlfa/concatenation.cc rename to src/nft/concatenation.cc index 4c1ef80e..e9072942 100644 --- a/src/lvlfa/concatenation.cc +++ b/src/nft/concatenation.cc @@ -1,28 +1,28 @@ -/* lvlfa-concatenation.cc -- Concatenation of LVLFAs +/* nft-concatenation.cc -- Concatenation of NFTs */ // MATA headers -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/algorithms.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/algorithms.hh" -using namespace mata::lvlfa; +using namespace mata::nft; -namespace mata::lvlfa { +namespace mata::nft { -Lvlfa concatenate(const Lvlfa& lhs, const Lvlfa& rhs, bool use_epsilon, +Nft concatenate(const Nft& lhs, const Nft& rhs, bool use_epsilon, StateRenaming* lhs_state_renaming, StateRenaming* rhs_state_renaming) { return algorithms::concatenate_eps(lhs, rhs, EPSILON, use_epsilon, lhs_state_renaming, rhs_state_renaming); } -Lvlfa& Lvlfa::concatenate(const Lvlfa& aut) { +Nft& Nft::concatenate(const Nft& aut) { size_t n = this->num_of_states(); auto upd_fnc = [&](State st) { return st + n; }; // copy the information about aut to save the case when this is the same object as aut. - utils::SparseSet aut_initial = aut.initial; - utils::SparseSet aut_final = aut.final; + utils::SparseSet aut_initial = aut.initial; + utils::SparseSet aut_final = aut.final; size_t aut_n = aut.num_of_states(); this->delta.allocate(n); @@ -57,24 +57,24 @@ Lvlfa& Lvlfa::concatenate(const Lvlfa& aut) { return *this; } -Lvlfa algorithms::concatenate_eps(const Lvlfa& lhs, const Lvlfa& rhs, const Symbol& epsilon, bool use_epsilon, +Nft algorithms::concatenate_eps(const Nft& lhs, const Nft& rhs, const Symbol& epsilon, bool use_epsilon, StateRenaming* lhs_state_renaming, StateRenaming* rhs_state_renaming) { // Compute concatenation of given automata. // Concatenation will proceed in the order of the passed automata: Result is 'lhs . rhs'. if (lhs.num_of_states() == 0 || rhs.num_of_states() == 0 || lhs.initial.empty() || lhs.final.empty() || rhs.initial.empty() || rhs.final.empty()) { - return Lvlfa{}; + return Nft{}; } const unsigned long lhs_states_num{lhs.num_of_states() }; const unsigned long rhs_states_num{rhs.num_of_states() }; - Lvlfa result{}; // Concatenated automaton. + Nft result{}; // Concatenated automaton. StateRenaming _lhs_states_renaming{}; // Map mapping rhs states to result states. StateRenaming _rhs_states_renaming{}; // Map mapping rhs states to result states. const size_t result_num_of_states{lhs_states_num + rhs_states_num}; - if (result_num_of_states == 0) { return Lvlfa{}; } + if (result_num_of_states == 0) { return Nft{}; } // Map lhs states to result states. _lhs_states_renaming.reserve(lhs_states_num); @@ -90,7 +90,7 @@ Lvlfa algorithms::concatenate_eps(const Lvlfa& lhs, const Lvlfa& rhs, const Symb ++result_state_index; } - result = Lvlfa(); + result = Nft(); result.delta = lhs.delta; result.initial = lhs.initial; result.add_state(result_num_of_states-1); @@ -131,4 +131,4 @@ Lvlfa algorithms::concatenate_eps(const Lvlfa& lhs, const Lvlfa& rhs, const Symb if (rhs_state_renaming != nullptr) { *rhs_state_renaming = _rhs_states_renaming; } return result; } // concatenate_eps(). -} // Namespace mata::lvlfa. +} // Namespace mata::nft. 
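The concatenation unit above is a straight rename of the NFA code into the mata::nft namespace. For orientation, a minimal usage sketch of the renamed interface follows; it relies only on calls that appear elsewhere in this patch (Nft, delta.add, the free concatenate(), is_in_lang()), and the two toy single-word automata are hypothetical, chosen purely for illustration.

// Sketch: lhs accepts exactly "a", rhs accepts exactly "b",
// so concatenate(lhs, rhs) should accept exactly "ab".
#include <cassert>
#include "mata/nft/nft.hh"

using namespace mata::nft;

int main() {
    Nft lhs{};
    lhs.initial.insert(0);
    lhs.final.insert(1);
    lhs.delta.add(0, 'a', 1);

    Nft rhs{};
    rhs.initial.insert(0);
    rhs.final.insert(1);
    rhs.delta.add(0, 'b', 1);

    // Non-destructive free function; Nft::concatenate() is the in-place member variant.
    Nft result{ concatenate(lhs, rhs) };

    Run ab{};
    ab.word = { 'a', 'b' };
    assert(result.is_in_lang(ab));
    assert(!result.is_lang_empty());
    return 0;
}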
diff --git a/src/lvlfa/inclusion.cc b/src/nft/inclusion.cc similarity index 91% rename from src/lvlfa/inclusion.cc rename to src/nft/inclusion.cc index 45857a8d..8d47077d 100644 --- a/src/lvlfa/inclusion.cc +++ b/src/nft/inclusion.cc @@ -1,37 +1,37 @@ -/* lvlfa-incl.cc -- LVLFA language inclusion +/* nft-incl.cc -- NFT language inclusion */ // MATA headers -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/algorithms.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/algorithms.hh" #include "mata/utils/sparse-set.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using namespace mata::utils; /// naive language inclusion check (complementation + intersection + emptiness) -bool mata::lvlfa::algorithms::is_included_naive( - const Lvlfa &smaller, - const Lvlfa &bigger, +bool mata::nft::algorithms::is_included_naive( + const Nft &smaller, + const Nft &bigger, const Alphabet *const alphabet,//TODO: this should not be needed, likewise for equivalence Run *cex) { // {{{ - Lvlfa bigger_cmpl; + Nft bigger_cmpl; if (alphabet == nullptr) { bigger_cmpl = complement(bigger, create_alphabet(smaller, bigger)); } else { bigger_cmpl = complement(bigger, *alphabet); } - Lvlfa lvlfa_isect = intersection(smaller, bigger_cmpl); + Nft nft_isect = intersection(smaller, bigger_cmpl); - return lvlfa_isect.is_lang_empty(cex); + return nft_isect.is_lang_empty(cex); } // is_included_naive }}} /// language inclusion check using Antichains // TODO, what about to construct the separator from this? -bool mata::lvlfa::algorithms::is_included_antichains( - const Lvlfa& smaller, - const Lvlfa& bigger, +bool mata::nft::algorithms::is_included_antichains( + const Nft& smaller, + const Nft& bigger, const Alphabet* const alphabet, //TODO: this parameter is not used Run* cex) { // {{{ @@ -41,7 +41,7 @@ bool mata::lvlfa::algorithms::is_included_antichains( using ProdStateType = std::tuple; using ProdStatesType = std::vector; - // ProcessedType is indexed by states of the smaller lvlfa + // ProcessedType is indexed by states of the smaller nft // tailored for pure antichain approach ... the simulation-based antichain will not work (without changes). using ProcessedType = std::vector; @@ -63,7 +63,7 @@ bool mata::lvlfa::algorithms::is_included_antichains( ProdStatesType worklist{};//Pairs (q,S) to be processed. It sometimes gives a huge speed-up when they are kept sorted by the size of S, // worklist.reserve(32); // so those with smaller popped for processing first. - ProcessedType processed(smaller.num_of_states()); // Allocate to the number of states of the smaller lvlfa. + ProcessedType processed(smaller.num_of_states()); // Allocate to the number of states of the smaller nft. // The pairs of each state are also kept sorted. It allows slightly faster antichain pruning - no need to test inclusion in sets that have less elements. //Is |S| < |S'| for the inut pairs (q,S) and (q',S')? 
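The hunks above port both inclusion checks: the naive one (complement the bigger automaton, intersect with the smaller one, test emptiness) and the antichain-based one. Below is a compact sketch of calling the dispatching wrapper. It assumes the declarations sit in mata/nft/nft.hh as they do for the NFA counterpart, and it passes the "algorithm" parameter explicitly ("antichains" here, "naive" for the other variant) instead of relying on header defaults that this hunk does not show; the two toy automata are hypothetical.

// Sketch: L(smaller) = { "a" } is a subset of L(bigger) = { "a", "b" },
// but not the other way around.
#include <cassert>
#include "mata/nft/nft.hh"

using namespace mata::nft;

int main() {
    Nft smaller{};
    smaller.initial.insert(0);
    smaller.final.insert(1);
    smaller.delta.add(0, 'a', 1);

    Nft bigger{};
    bigger.initial.insert(0);
    bigger.final.insert(1);
    bigger.delta.add(0, 'a', 1);
    bigger.delta.add(0, 'b', 1);

    Run cex{};  // filled with a counterexample word when inclusion does not hold
    assert(is_included(smaller, bigger, &cex, nullptr, {{ "algorithm", "antichains" }}));
    assert(!is_included(bigger, smaller, &cex, nullptr, {{ "algorithm", "antichains" }}));
    return 0;
}

The are_equivalent() wrapper defined in the same file simply runs the chosen inclusion algorithm in both directions.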
@@ -224,7 +224,7 @@ bool mata::lvlfa::algorithms::is_included_antichains( namespace { using AlgoType = decltype(algorithms::is_included_naive)*; - bool compute_equivalence(const Lvlfa &lhs, const Lvlfa &rhs, const mata::Alphabet *const alphabet, const AlgoType &algo) { + bool compute_equivalence(const Nft &lhs, const Nft &rhs, const mata::Alphabet *const alphabet, const AlgoType &algo) { //alphabet should not be needed as input parameter if (algo(lhs, rhs, alphabet, nullptr)) { if (algo(rhs, lhs, alphabet, nullptr)) { @@ -259,9 +259,9 @@ namespace { } // The dispatching method that calls the correct one based on parameters -bool mata::lvlfa::is_included( - const Lvlfa &smaller, - const Lvlfa &bigger, +bool mata::nft::is_included( + const Nft &smaller, + const Nft &bigger, Run *cex, const Alphabet *const alphabet, const ParameterMap ¶ms) { // {{{ @@ -269,7 +269,7 @@ bool mata::lvlfa::is_included( return algo(smaller, bigger, alphabet, cex); } // is_included }}} -bool mata::lvlfa::are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const Alphabet *alphabet, const ParameterMap& params) +bool mata::nft::are_equivalent(const Nft& lhs, const Nft& rhs, const Alphabet *alphabet, const ParameterMap& params) { //TODO: add comment on what this is doing, what is __func__ ... AlgoType algo{ set_algorithm(std::to_string(__func__), params) }; @@ -284,6 +284,6 @@ bool mata::lvlfa::are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const Alpha return compute_equivalence(lhs, rhs, alphabet, algo); } -bool mata::lvlfa::are_equivalent(const Lvlfa& lhs, const Lvlfa& rhs, const ParameterMap& params) { +bool mata::nft::are_equivalent(const Nft& lhs, const Nft& rhs, const ParameterMap& params) { return are_equivalent(lhs, rhs, nullptr, params); } diff --git a/src/lvlfa/intersection.cc b/src/nft/intersection.cc similarity index 93% rename from src/lvlfa/intersection.cc rename to src/nft/intersection.cc index 51d610e6..d61265a0 100644 --- a/src/lvlfa/intersection.cc +++ b/src/nft/intersection.cc @@ -1,14 +1,14 @@ -/* lvlfa-intersection.cc -- Intersection of LVLFAs +/* nft-intersection.cc -- Intersection of NFTs */ // MATA headers -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/algorithms.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/algorithms.hh" #include #include -using namespace mata::lvlfa; +using namespace mata::nft; namespace { @@ -20,26 +20,26 @@ using InvertedProductStorage = std::vector; } // Anonymous namespace. -namespace mata::lvlfa { +namespace mata::nft { -Lvlfa intersection(const Lvlfa& lhs, const Lvlfa& rhs, const Symbol first_epsilon, ProductMap *prod_map) { +Nft intersection(const Nft& lhs, const Nft& rhs, const Symbol first_epsilon, ProductMap *prod_map) { auto both_final = [&](const State lhs_state,const State rhs_state) { return lhs.final.contains(lhs_state) && rhs.final.contains(rhs_state); }; if (lhs.final.empty() || lhs.initial.empty() || rhs.initial.empty() || rhs.final.empty()) - return Lvlfa{}; + return Nft{}; return algorithms::product(lhs, rhs, both_final, first_epsilon, prod_map); } -//TODO: move this method to lvlfa.hh? It is something one might want to use (e.g. for union, inclusion, equivalence of DFAs). -Lvlfa mata::lvlfa::algorithms::product( - const Lvlfa& lhs, const Lvlfa& rhs, const std::function&& final_condition, +//TODO: move this method to nft.hh? It is something one might want to use (e.g. for union, inclusion, equivalence of DFAs). 
+Nft mata::nft::algorithms::product( + const Nft& lhs, const Nft& rhs, const std::function&& final_condition, const Symbol first_epsilon, ProductMap *product_map) { - Lvlfa product{}; // The product automaton. + Nft product{}; // The product automaton. // Set of product states to process. std::deque worklist{}; @@ -130,8 +130,8 @@ Lvlfa mata::lvlfa::algorithms::product( /** * Create product state if it does not exist in storage yet and fill in its symbol_post from lhs and rhs targets. - * @param[in] lhs_target Target state in LVLFA @c lhs. - * @param[in] rhs_target Target state in LVLFA @c rhs. + * @param[in] lhs_target Target state in NFT @c lhs. + * @param[in] rhs_target Target state in NFT @c rhs. * @param[out] product_symbol_post New SymbolPost of the product state. */ auto create_product_state_and_symbol_post = [&](const State lhs_target, const State rhs_target, SymbolPost& product_symbol_post) @@ -237,4 +237,4 @@ Lvlfa mata::lvlfa::algorithms::product( return product; } // intersection(). -} // namespace mata::lvlfa. +} // namespace mata::nft. diff --git a/src/lvlfa/lvlfa.cc b/src/nft/nft.cc similarity index 87% rename from src/lvlfa/lvlfa.cc rename to src/nft/nft.cc index 9247984f..fb8b112a 100644 --- a/src/lvlfa/lvlfa.cc +++ b/src/nft/nft.cc @@ -1,4 +1,4 @@ -/* lvlfa.cc -- operations for NFA +/* nft.cc -- operations for NFA */ #include @@ -8,22 +8,22 @@ // MATA headers #include "mata/utils/sparse-set.hh" -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/algorithms.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/algorithms.hh" #include using namespace mata::utils; -using namespace mata::lvlfa; +using namespace mata::nft; using mata::Symbol; using mata::Word; using mata::BoolVector; using StateBoolArray = std::vector; ///< Bool array for states in the automaton. 
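The product() routine that closes the intersection unit above pairs states of the two operands on matching symbols and marks a pair final according to the supplied condition. A minimal usage sketch of the public intersection() wrapper, using only calls visible in this patch and two hypothetical toy automata:

// Sketch: L(lhs) = { "a", "b" }, L(rhs) = { "b", "c" }; the product accepts only "b".
#include <cassert>
#include "mata/nft/nft.hh"

using namespace mata::nft;

int main() {
    Nft lhs{};
    lhs.initial.insert(0);
    lhs.final.insert(1);
    lhs.delta.add(0, 'a', 1);
    lhs.delta.add(0, 'b', 1);

    Nft rhs{};
    rhs.initial.insert(0);
    rhs.final.insert(1);
    rhs.delta.add(0, 'b', 1);
    rhs.delta.add(0, 'c', 1);

    // A product pair is final iff both of its components are final.
    Nft product_aut{ intersection(lhs, rhs) };

    Run word_b{}, word_a{};
    word_b.word = { 'b' };
    word_a.word = { 'a' };
    assert(product_aut.is_in_lang(word_b));
    assert(!product_aut.is_in_lang(word_a));
    return 0;
}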
-const std::string mata::lvlfa::TYPE_NFA = "LVLFA"; +const std::string mata::nft::TYPE_NFT = "NFT"; -Lvlfa& Lvlfa::trim(StateRenaming* state_renaming) { +Nft& Nft::trim(StateRenaming* state_renaming) { #ifdef _STATIC_STRUCTURES_ BoolVector useful_states{ useful_states() }; @@ -76,13 +76,13 @@ Lvlfa& Lvlfa::trim(StateRenaming* state_renaming) { return *this; } -std::string Lvlfa::print_to_DOT() const { +std::string Nft::print_to_DOT() const { std::stringstream output; print_to_DOT(output); return output.str(); } -void Lvlfa::print_to_DOT(std::ostream &output) const { +void Nft::print_to_DOT(std::ostream &output) const { output << "digraph finiteAutomaton {" << std::endl << "node [shape=circle];" << std::endl; @@ -113,14 +113,14 @@ void Lvlfa::print_to_DOT(std::ostream &output) const { output << "}" << std::endl; } -std::string Lvlfa::print_to_mata() const { +std::string Nft::print_to_mata() const { std::stringstream output; print_to_mata(output); return output.str(); } -void Lvlfa::print_to_mata(std::ostream &output) const { - output << "@LVLFA-explicit" << std::endl +void Nft::print_to_mata(std::ostream &output) const { + output << "@NFT-explicit" << std::endl << "%Alphabet-auto" << std::endl; // TODO should be this, but we cannot parse %Alphabet-numbers yet //<< "%Alphabet-numbers" << std::endl; @@ -168,15 +168,15 @@ void Lvlfa::print_to_mata(std::ostream &output) const { } } -Lvlfa Lvlfa::get_one_letter_aut(Symbol abstract_symbol) const { - return Lvlfa(mata::nfa::Nfa::get_one_letter_aut(abstract_symbol)); +Nft Nft::get_one_letter_aut(Symbol abstract_symbol) const { + return Nft(mata::nfa::Nfa::get_one_letter_aut(abstract_symbol)); } -void Lvlfa::get_one_letter_aut(Lvlfa& result) const { +void Nft::get_one_letter_aut(Nft& result) const { result = get_one_letter_aut(); } -void Lvlfa::make_one_level_aut(const utils::OrdVector &dcare_replacements) { +void Nft::make_one_level_aut(const utils::OrdVector &dcare_replacements) { bool dcare_for_dcare = dcare_replacements == utils::OrdVector({ DONT_CARE }); std::vector transitions_to_del; std::vector transitions_to_add; @@ -238,17 +238,17 @@ void Lvlfa::make_one_level_aut(const utils::OrdVector &dcare_replacement } } -Lvlfa Lvlfa::get_one_level_aut(const utils::OrdVector &dcare_replacements) const { - Lvlfa result{ *this }; +Nft Nft::get_one_level_aut(const utils::OrdVector &dcare_replacements) const { + Nft result{ *this }; result.make_one_level_aut(dcare_replacements); return result; } -void Lvlfa::get_one_level_aut(Lvlfa& result, const utils::OrdVector &dcare_replacements) const { +void Nft::get_one_level_aut(Nft& result, const utils::OrdVector &dcare_replacements) const { result = get_one_level_aut(dcare_replacements); } -Lvlfa& Lvlfa::operator=(Lvlfa&& other) noexcept { +Nft& Nft::operator=(Nft&& other) noexcept { if (this != &other) { mata::nfa::Nfa::operator=(other); levels = std::move(other.levels); @@ -257,21 +257,21 @@ Lvlfa& Lvlfa::operator=(Lvlfa&& other) noexcept { return *this; } -State Lvlfa::add_state() { +State Nft::add_state() { levels.push_back(0); return mata::nfa::Nfa::add_state(); } -State Lvlfa::add_state(State state) { +State Nft::add_state(State state) { levels.push_back(0); return mata::nfa::Nfa::add_state(state); } -void Lvlfa::clear() { +void Nft::clear() { mata::nfa::Nfa::clear(); levels.clear(); } -bool Lvlfa::is_identical(const Lvlfa& aut) const { +bool Nft::is_identical(const Nft& aut) const { return levels_cnt == aut.levels_cnt && levels == aut.levels && mata::nfa::Nfa::is_identical(aut); } diff --git 
a/src/lvlfa/operations.cc b/src/nft/operations.cc similarity index 91% rename from src/lvlfa/operations.cc rename to src/nft/operations.cc index c33653fb..58c3ea32 100644 --- a/src/lvlfa/operations.cc +++ b/src/nft/operations.cc @@ -1,4 +1,4 @@ -/* lvlfa.cc -- operations for LVLFA +/* nft.cc -- operations for NFT */ #include @@ -7,23 +7,23 @@ #include // MATA headers -#include "mata/lvlfa/delta.hh" +#include "mata/nft/delta.hh" #include "mata/utils/sparse-set.hh" -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/algorithms.hh" -#include "mata/lvlfa/builder.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/algorithms.hh" +#include "mata/nft/builder.hh" #include using std::tie; using namespace mata::utils; -using namespace mata::lvlfa; +using namespace mata::nft; using mata::Symbol; using StateBoolArray = std::vector; ///< Bool array for states in the automaton. namespace { - Simlib::Util::BinaryRelation compute_fw_direct_simulation(const Lvlfa& aut) { + Simlib::Util::BinaryRelation compute_fw_direct_simulation(const Nft& aut) { Symbol maxSymbol{ aut.delta.get_max_symbol() }; const size_t state_num{ aut.num_of_states() }; Simlib::ExplicitLTS LTSforSimulation(state_num); @@ -41,8 +41,8 @@ namespace { return LTSforSimulation.compute_simulation(); } - Lvlfa reduce_size_by_simulation(const Lvlfa& aut, StateRenaming &state_renaming) { - Lvlfa result; + Nft reduce_size_by_simulation(const Nft& aut, StateRenaming &state_renaming) { + Nft result; const auto sim_relation = algorithms::compute_relation( aut, ParameterMap{{ "relation", "simulation"}, { "direction", "forward"}}); @@ -115,7 +115,7 @@ namespace { } //TODO: based on the comments inside, this function needs to be rewritten in a more optimal way. -Lvlfa mata::lvlfa::remove_epsilon(const Lvlfa& aut, Symbol epsilon) { +Nft mata::nft::remove_epsilon(const Nft& aut, Symbol epsilon) { // cannot use multimap, because it can contain multiple occurrences of (a -> a), (a -> a) std::unordered_map eps_closure; @@ -159,7 +159,7 @@ Lvlfa mata::lvlfa::remove_epsilon(const Lvlfa& aut, Symbol epsilon) { } // Construct the automaton without epsilon transitions. - Lvlfa result{ Delta{}, aut.initial, aut.final, aut.levels, aut.levels_cnt, aut.alphabet }; + Nft result{ Delta{}, aut.initial, aut.final, aut.levels, aut.levels_cnt, aut.alphabet }; for (const auto& state_closure_pair : eps_closure) { // For every state. State src_state = state_closure_pair.first; for (State eps_cl_state : state_closure_pair.second) { // For every state in its epsilon closure. 
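The remove_epsilon() ported above first saturates an epsilon-closure map and then rebuilds the delta so that every symbol move reachable through epsilon steps is issued directly from the closure's source state. A small sketch of the intended effect, assuming remove_epsilon() is declared in mata/nft/nft.hh like its NFA counterpart; the three-state chain is hypothetical.

// Sketch: 0 --EPSILON--> 1 --'a'--> 2 (2 final). After removal, "a" is accepted
// straight from the initial state and no EPSILON transition survives.
#include <cassert>
#include "mata/nft/nft.hh"

using namespace mata::nft;

int main() {
    Nft aut{};
    aut.initial.insert(0);
    aut.final.insert(2);
    aut.delta.add(0, EPSILON, 1);
    aut.delta.add(1, 'a', 2);

    Nft no_eps{ remove_epsilon(aut, EPSILON) };

    Run word_a{};
    word_a.word = { 'a' };
    assert(no_eps.is_in_lang(word_a));
    assert(!no_eps.delta.contains(0, EPSILON, 1));
    return 0;
}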
@@ -176,10 +176,10 @@ Lvlfa mata::lvlfa::remove_epsilon(const Lvlfa& aut, Symbol epsilon) { return result; } -Lvlfa mata::lvlfa::fragile_revert(const Lvlfa& aut) { +Nft mata::nft::fragile_revert(const Nft& aut) { const size_t num_of_states{ aut.num_of_states() }; - Lvlfa result(num_of_states); + Nft result(num_of_states); result.initial = aut.final; result.final = aut.initial; @@ -298,8 +298,8 @@ Lvlfa mata::lvlfa::fragile_revert(const Lvlfa& aut) { return result; } -Lvlfa mata::lvlfa::simple_revert(const Lvlfa& aut) { - Lvlfa result; +Nft mata::nft::simple_revert(const Nft& aut) { + Nft result; result.clear(); const size_t num_of_states{ aut.num_of_states() }; @@ -320,10 +320,10 @@ Lvlfa mata::lvlfa::simple_revert(const Lvlfa& aut) { } //not so great, can be removed -Lvlfa mata::lvlfa::somewhat_simple_revert(const Lvlfa& aut) { +Nft mata::nft::somewhat_simple_revert(const Nft& aut) { const size_t num_of_states{ aut.num_of_states() }; - Lvlfa result(num_of_states); + Nft result(num_of_states); result.initial = aut.final; result.final = aut.initial; @@ -355,13 +355,13 @@ Lvlfa mata::lvlfa::somewhat_simple_revert(const Lvlfa& aut) { return result; } -Lvlfa mata::lvlfa::revert(const Lvlfa& aut) { +Nft mata::nft::revert(const Nft& aut) { return simple_revert(aut); //return fragile_revert(aut); //return somewhat_simple_revert(aut); } -std::pair mata::lvlfa::Lvlfa::get_word_for_path(const Run& run) const { +std::pair mata::nft::Nft::get_word_for_path(const Run& run) const { if (run.path.empty()) { return {{}, true}; } Run word; @@ -388,7 +388,7 @@ std::pair mata::lvlfa::Lvlfa::get_word_for_path(const Run& run) const } //TODO: this is not efficient -bool mata::lvlfa::Lvlfa::is_in_lang(const Run& run) const { +bool mata::nft::Nft::is_in_lang(const Run& run) const { StateSet current_post(this->initial); for (const Symbol sym : run.word) { current_post = this->post(current_post, sym); @@ -399,7 +399,7 @@ bool mata::lvlfa::Lvlfa::is_in_lang(const Run& run) const { /// Checks whether the prefix of a string is in the language of an automaton // TODO: slow and it should share code with is_in_lang -bool mata::lvlfa::Lvlfa::is_prfx_in_lang(const Run& run) const { +bool mata::nft::Nft::is_prfx_in_lang(const Run& run) const { StateSet current_post{ this->initial }; for (const Symbol sym : run.word) { if (this->final.intersects_with(current_post)) { return true; } @@ -409,16 +409,16 @@ bool mata::lvlfa::Lvlfa::is_prfx_in_lang(const Run& run) const { return this->final.intersects_with(current_post); } -Lvlfa mata::lvlfa::algorithms::minimize_brzozowski(const Lvlfa& aut) { +Nft mata::nft::algorithms::minimize_brzozowski(const Nft& aut) { //compute the minimal deterministic automaton, Brzozovski algorithm return determinize(revert(determinize(revert(aut)))); } -Lvlfa mata::lvlfa::minimize( - const Lvlfa& aut, +Nft mata::nft::minimize( + const Nft& aut, const ParameterMap& params) { - Lvlfa result; + Nft result; // setting the default algorithm decltype(algorithms::minimize_brzozowski)* algo = algorithms::minimize_brzozowski; if (!haskey(params, "algorithm")) { @@ -437,12 +437,12 @@ Lvlfa mata::lvlfa::minimize( return algo(aut); } -Lvlfa mata::lvlfa::uni(const Lvlfa &lhs, const Lvlfa &rhs) { - Lvlfa union_lvlfa{ lhs }; - return union_lvlfa.uni(rhs); +Nft mata::nft::uni(const Nft &lhs, const Nft &rhs) { + Nft union_nft{ lhs }; + return union_nft.uni(rhs); } -Lvlfa& Lvlfa::uni(const Lvlfa& aut) { +Nft& Nft::uni(const Nft& aut) { size_t n = this->num_of_states(); auto upd_fnc = [&](State st) { return st + n; @@ 
-450,8 +450,8 @@ Lvlfa& Lvlfa::uni(const Lvlfa& aut) { // copy the information about aut to save the case when this is the same object as aut. size_t aut_states = aut.num_of_states(); - SparseSet aut_final_copy = aut.final; - SparseSet aut_initial_copy = aut.initial; + SparseSet aut_final_copy = aut.final; + SparseSet aut_initial_copy = aut.initial; this->delta.allocate(n); this->delta.append(aut.delta.renumber_targets(upd_fnc)); @@ -470,7 +470,7 @@ Lvlfa& Lvlfa::uni(const Lvlfa& aut) { return *this; } -Simlib::Util::BinaryRelation mata::lvlfa::algorithms::compute_relation(const Lvlfa& aut, const ParameterMap& params) { +Simlib::Util::BinaryRelation mata::nft::algorithms::compute_relation(const Nft& aut, const ParameterMap& params) { if (!haskey(params, "relation")) { throw std::runtime_error(std::to_string(__func__) + " requires setting the \"relation\" key in the \"params\" argument; " @@ -493,14 +493,14 @@ Simlib::Util::BinaryRelation mata::lvlfa::algorithms::compute_relation(const Lvl } } -Lvlfa mata::lvlfa::reduce(const Lvlfa &aut, StateRenaming *state_renaming, const ParameterMap& params) { +Nft mata::nft::reduce(const Nft &aut, StateRenaming *state_renaming, const ParameterMap& params) { if (!haskey(params, "algorithm")) { throw std::runtime_error(std::to_string(__func__) + " requires setting the \"algorithm\" key in the \"params\" argument; " "received: " + std::to_string(params)); } - Lvlfa result; + Nft result; std::unordered_map reduced_state_map; const std::string& algorithm = params.at("algorithm"); if ("simulation" == algorithm) { @@ -517,11 +517,11 @@ Lvlfa mata::lvlfa::reduce(const Lvlfa &aut, StateRenaming *state_renaming, const return result; } -Lvlfa mata::lvlfa::determinize( - const Lvlfa& aut, +Nft mata::nft::determinize( + const Nft& aut, std::unordered_map *subset_map) { - Lvlfa result; + Nft result; //assuming all sets targets are non-empty std::vector> worklist; bool deallocate_subset_map = false; @@ -593,16 +593,16 @@ Lvlfa mata::lvlfa::determinize( return result; } -std::ostream& std::operator<<(std::ostream& os, const Lvlfa& lvlfa) { - lvlfa.print_to_mata(os); +std::ostream& std::operator<<(std::ostream& os, const Nft& nft) { + nft.print_to_mata(os); return os; } -Run mata::lvlfa::encode_word(const Alphabet* alphabet, const std::vector& input) { +Run mata::nft::encode_word(const Alphabet* alphabet, const std::vector& input) { return mata::nfa::encode_word(alphabet, input); } -std::set mata::lvlfa::Lvlfa::get_words(unsigned max_length) { +std::set mata::nft::Nft::get_words(unsigned max_length) { std::set result; // contains a pair: a state s and the word with which we got to the state s diff --git a/src/lvlfa/strings.cc b/src/nft/strings.cc similarity index 79% rename from src/lvlfa/strings.cc rename to src/nft/strings.cc index 21694d8b..1fab2af4 100644 --- a/src/lvlfa/strings.cc +++ b/src/nft/strings.cc @@ -1,18 +1,18 @@ /* nfa-strings.hh -- Operations on NFAs for string solving. 
*/ -#include "mata/lvlfa/strings.hh" -#include "mata/lvlfa/lvlfa.hh" +#include "mata/nft/strings.hh" +#include "mata/nft/nft.hh" -//using mata::lvlfa::Lvlfa; -using mata::lvlfa::Level; +//using mata::nft::Nft; +using mata::nft::Level; using mata::Symbol; -using mata::lvlfa::State; +using mata::nft::State; using mata::nfa::StatePost; using mata::nfa::SymbolPost; -using namespace mata::lvlfa; +using namespace mata::nft; -Lvlfa mata::lvlfa::create_identity(mata::Alphabet* alphabet, Level level_cnt) { +Nft mata::nft::create_identity(mata::Alphabet* alphabet, Level level_cnt) { if (level_cnt == 0) { throw std::runtime_error("NFT must have at least one level"); } const auto alphabet_symbols{ alphabet->get_alphabet_symbols() }; const size_t additional_states_per_symbol_num{ level_cnt - 1 }; @@ -25,7 +25,7 @@ Lvlfa mata::lvlfa::create_identity(mata::Alphabet* alphabet, Level level_cnt) { const Level new_level{ level + 1 }; level = new_level < level_cnt ? new_level : 1; } - Lvlfa nft{ num_of_states, { 0 }, { 0 }, std::move(levels), level_cnt, alphabet }; + Nft nft{ num_of_states, { 0 }, { 0 }, std::move(levels), level_cnt, alphabet }; State state{ 0 }; State new_state; @@ -46,9 +46,9 @@ Lvlfa mata::lvlfa::create_identity(mata::Alphabet* alphabet, Level level_cnt) { return nft; } -Lvlfa mata::lvlfa::create_identity_with_single_replace( +Nft mata::nft::create_identity_with_single_replace( mata::Alphabet *alphabet, const Symbol from_symbol, const Symbol to_symbol) { - Lvlfa nft{ create_identity(alphabet) }; + Nft nft{ create_identity(alphabet) }; if (alphabet->empty()) { throw std::runtime_error("Alphabet does not contain symbol being replaced."); } auto symbol_post_to_state_with_replace{ nft.delta.mutable_state_post(0).find(from_symbol) }; const State from_replace_state{ symbol_post_to_state_with_replace->targets.front() }; diff --git a/src/lvlfa/universal.cc b/src/nft/universal.cc similarity index 87% rename from src/lvlfa/universal.cc rename to src/nft/universal.cc index eee68a00..0c5bde97 100644 --- a/src/lvlfa/universal.cc +++ b/src/nft/universal.cc @@ -1,12 +1,12 @@ -/* lvlfa-universal.cc -- LVLFA universality +/* nft-universal.cc -- NFT universality */ // MATA headers -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/algorithms.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/algorithms.hh" #include "mata/utils/sparse-set.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using namespace mata::utils; //TODO: this could be merged with inclusion, or even removed, universality could be implemented using inclusion, @@ -14,20 +14,20 @@ using namespace mata::utils; /// naive universality check (complementation + emptiness) -bool mata::lvlfa::algorithms::is_universal_naive( - const Lvlfa& aut, +bool mata::nft::algorithms::is_universal_naive( + const Nft& aut, const Alphabet& alphabet, Run* cex) { // {{{ - Lvlfa cmpl = complement(aut, alphabet); + Nft cmpl = complement(aut, alphabet); return cmpl.is_lang_empty(cex); } // is_universal_naive }}} /// universality check using Antichains -bool mata::lvlfa::algorithms::is_universal_antichains( - const Lvlfa& aut, +bool mata::nft::algorithms::is_universal_antichains( + const Nft& aut, const Alphabet& alphabet, Run* cex) { // {{{ @@ -131,7 +131,7 @@ bool mata::lvlfa::algorithms::is_universal_antichains( } // }}} // The dispatching method that calls the correct one based on parameters. 
-bool mata::lvlfa::Lvlfa::is_universal(const Alphabet& alphabet, Run* cex, const ParameterMap& params) const { +bool mata::nft::Nft::is_universal(const Alphabet& alphabet, Run* cex, const ParameterMap& params) const { // setting the default algorithm decltype(algorithms::is_universal_naive)* algo = algorithms::is_universal_naive; if (!haskey(params, "algorithm")) { @@ -151,6 +151,6 @@ bool mata::lvlfa::Lvlfa::is_universal(const Alphabet& alphabet, Run* cex, const return algo(*this, alphabet, cex); } // is_universal() -bool mata::lvlfa::Lvlfa::is_universal(const Alphabet& alphabet, const ParameterMap& params) const { +bool mata::nft::Nft::is_universal(const Alphabet& alphabet, const ParameterMap& params) const { return this->is_universal(alphabet, nullptr, params); } diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index aec4af63..bef83996 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -14,14 +14,14 @@ add_executable(tests nfa/nfa-intersection.cc nfa/nfa-profiling.cc nfa/nfa-plumbing.cc - lvlfa/delta.cc - lvlfa/lvlfa.cc - lvlfa/builder.cc - lvlfa/lvlfa-concatenation.cc - lvlfa/lvlfa-intersection.cc - lvlfa/lvlfa-profiling.cc - lvlfa/lvlfa-plumbing.cc - lvlfa/strings.cc + nft/delta.cc + nft/nft.cc + nft/builder.cc + nft/nft-concatenation.cc + nft/nft-intersection.cc + nft/nft-profiling.cc + nft/nft-plumbing.cc + nft/strings.cc strings/nfa-noodlification.cc strings/nfa-segmentation.cc strings/nfa-string-solving.cc diff --git a/tests/lvlfa/builder.cc b/tests/nft/builder.cc similarity index 58% rename from tests/lvlfa/builder.cc rename to tests/nft/builder.cc index 08e680ae..4b61215c 100644 --- a/tests/lvlfa/builder.cc +++ b/tests/nft/builder.cc @@ -6,76 +6,76 @@ #include -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/builder.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/builder.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using Symbol = mata::Symbol; using IntAlphabet = mata::IntAlphabet; using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; using Word = std::vector; -TEST_CASE("lvlfa::parse_from_mata()") { +TEST_CASE("nft::parse_from_mata()") { Delta delta; SECTION("Simple automaton") { delta.add(0, 0, 0); delta.add(0, 1, 1); delta.add(1, 2, 0); - Lvlfa lvlfa{ delta, { 0 }, { 1 }, { 0 }, 1}; + Nft nft{ delta, { 0 }, { 1 }, { 0 }, 1}; SECTION("from string") { - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; - CHECK(are_equivalent(parsed, lvlfa)); + Nft parsed{ mata::nft::builder::parse_from_mata(nft.print_to_mata()) }; + CHECK(are_equivalent(parsed, nft)); } SECTION("from stream") { - std::stringstream lvlfa_stream; - lvlfa.print_to_mata(lvlfa_stream); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; - CHECK(are_equivalent(parsed, lvlfa)); + std::stringstream nft_stream; + nft.print_to_mata(nft_stream); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_stream) }; + CHECK(are_equivalent(parsed, nft)); } SECTION("from file") { - std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-simple_lvlfa.mata" }; - std::fstream file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc}; - lvlfa.print_to_mata(file); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + std::filesystem::path nft_file{ "./temp-test-parse_from_mata-simple_nft.mata" }; + std::fstream file{ nft_file, std::fstream::in | std::fstream::out | std::fstream::trunc}; + nft.print_to_mata(file); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_file) }; file.close(); - 
std::filesystem::remove(lvlfa_file); + std::filesystem::remove(nft_file); - CHECK(are_equivalent(parsed, lvlfa)); + CHECK(are_equivalent(parsed, nft)); } } SECTION("larger automaton") { - Lvlfa lvlfa; - lvlfa.initial = { 1, 2, 50 }; - lvlfa.delta.add(1, 'a', 2); - lvlfa.delta.add(1, 'a', 3); - lvlfa.delta.add(1, 'b', 4); - lvlfa.delta.add(2, 'a', 2); - lvlfa.delta.add(2, 'b', 2); - lvlfa.delta.add(2, 'a', 3); - lvlfa.delta.add(2, 'b', 4); - lvlfa.delta.add(3, 'b', 4); - lvlfa.delta.add(3, 'c', 7); - lvlfa.delta.add(3, 'b', 2); - lvlfa.delta.add(5, 'c', 3); - lvlfa.delta.add(7, 'a', 8); - lvlfa.delta.add(12, 'b', 15); - lvlfa.delta.add(1, 'b', 40); - lvlfa.delta.add(51, 'z', 42); - lvlfa.final = { 3, 103 }; - lvlfa.levels = std::vector(lvlfa.num_of_states(), 0); - lvlfa.levels[3] = 42; - lvlfa.levels[103] = 42; - lvlfa.levels_cnt = 43; + Nft nft; + nft.initial = { 1, 2, 50 }; + nft.delta.add(1, 'a', 2); + nft.delta.add(1, 'a', 3); + nft.delta.add(1, 'b', 4); + nft.delta.add(2, 'a', 2); + nft.delta.add(2, 'b', 2); + nft.delta.add(2, 'a', 3); + nft.delta.add(2, 'b', 4); + nft.delta.add(3, 'b', 4); + nft.delta.add(3, 'c', 7); + nft.delta.add(3, 'b', 2); + nft.delta.add(5, 'c', 3); + nft.delta.add(7, 'a', 8); + nft.delta.add(12, 'b', 15); + nft.delta.add(1, 'b', 40); + nft.delta.add(51, 'z', 42); + nft.final = { 3, 103 }; + nft.levels = std::vector(nft.num_of_states(), 0); + nft.levels[3] = 42; + nft.levels[103] = 42; + nft.levels_cnt = 43; SECTION("from string") { - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; + Nft parsed{ mata::nft::builder::parse_from_mata(nft.print_to_mata()) }; parsed.final.contains(103); parsed.initial.contains(50); parsed.delta.contains(51, 'z', 42); @@ -88,13 +88,13 @@ TEST_CASE("lvlfa::parse_from_mata()") { } CHECK(std::all_of(test_levels.begin(), test_levels.end(), [](Level l) { return l==0; })); - CHECK(are_equivalent(parsed, lvlfa)); + CHECK(are_equivalent(parsed, nft)); } SECTION("from stream") { - std::stringstream lvlfa_stream; - lvlfa.print_to_mata(lvlfa_stream); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; + std::stringstream nft_stream; + nft.print_to_mata(nft_stream); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_stream) }; parsed.final.contains(103); parsed.initial.contains(50); parsed.delta.contains(51, 'z', 42); @@ -107,16 +107,16 @@ TEST_CASE("lvlfa::parse_from_mata()") { } CHECK(std::all_of(test_levels.begin(), test_levels.end(), [](Level l) { return l==0; })); - CHECK(are_equivalent(parsed, lvlfa)); + CHECK(are_equivalent(parsed, nft)); } SECTION("from file") { - std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-larger_lvlfa.mata" }; - std::fstream file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; - lvlfa.print_to_mata(file); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + std::filesystem::path nft_file{ "./temp-test-parse_from_mata-larger_nft.mata" }; + std::fstream file{ nft_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; + nft.print_to_mata(file); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_file) }; file.close(); - std::filesystem::remove(lvlfa_file); + std::filesystem::remove(nft_file); parsed.final.contains(103); parsed.initial.contains(50); @@ -130,30 +130,30 @@ TEST_CASE("lvlfa::parse_from_mata()") { } CHECK(std::all_of(test_levels.begin(), test_levels.end(), [](Level l) { return l==0; })); - CHECK(are_equivalent(parsed, lvlfa)); + CHECK(are_equivalent(parsed, nft)); } } 
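    // Illustrative sketch only (hypothetical SECTION, not taken from the patch): `levels`
    // stores one level per state and `levels_cnt` the number of levels of the automaton;
    // the "levels testing" sections that follow verify that both survive the
    // print_to_mata()/parse_from_mata() round trip, which this compact variant also
    // exercises through language equivalence.
    SECTION("levels round-trip sketch") {
        Nft sketch;
        sketch.initial.insert(0);
        sketch.final.insert(2);
        sketch.delta.add(0, 'a', 1);
        sketch.delta.add(1, 'b', 2);
        sketch.levels = { 0, 1, 0 };
        sketch.levels_cnt = 2;
        Nft parsed{ mata::nft::builder::parse_from_mata(sketch.print_to_mata()) };
        CHECK(are_equivalent(parsed, sketch));
    }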
SECTION("levels testing") { SECTION("ascending") { - Lvlfa lvlfa; - lvlfa.delta.add(0, 1, 1); - lvlfa.delta.add(1, 1, 2); - lvlfa.delta.add(2, 1, 3); - lvlfa.delta.add(3, 1, 4); - lvlfa.delta.add(4, 1, 5); - lvlfa.delta.add(5, 1, 6); - lvlfa.delta.add(6, 1, 7); - lvlfa.delta.add(7, 1, 8); - lvlfa.delta.add(8, 1, 9); - lvlfa.delta.add(9, 1, 10); - lvlfa.initial.insert(0); - lvlfa.final.insert(10); - lvlfa.levels = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; - lvlfa.levels_cnt = 11; + Nft nft; + nft.delta.add(0, 1, 1); + nft.delta.add(1, 1, 2); + nft.delta.add(2, 1, 3); + nft.delta.add(3, 1, 4); + nft.delta.add(4, 1, 5); + nft.delta.add(5, 1, 6); + nft.delta.add(6, 1, 7); + nft.delta.add(7, 1, 8); + nft.delta.add(8, 1, 9); + nft.delta.add(9, 1, 10); + nft.initial.insert(0); + nft.final.insert(10); + nft.levels = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; + nft.levels_cnt = 11; SECTION("from string") { - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; + Nft parsed{ mata::nft::builder::parse_from_mata(nft.print_to_mata()) }; REQUIRE(parsed.initial.size() == 1); REQUIRE(parsed.final.size() == 1); @@ -173,9 +173,9 @@ TEST_CASE("lvlfa::parse_from_mata()") { } SECTION("from stream") { - std::stringstream lvlfa_stream; - lvlfa.print_to_mata(lvlfa_stream); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; + std::stringstream nft_stream; + nft.print_to_mata(nft_stream); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_stream) }; REQUIRE(parsed.initial.size() == 1); REQUIRE(parsed.final.size() == 1); @@ -195,12 +195,12 @@ TEST_CASE("lvlfa::parse_from_mata()") { } SECTION("from file") { - std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-levels_testing.mata" }; - std::fstream file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; - lvlfa.print_to_mata(file); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + std::filesystem::path nft_file{ "./temp-test-parse_from_mata-levels_testing.mata" }; + std::fstream file{ nft_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; + nft.print_to_mata(file); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_file) }; file.close(); - std::filesystem::remove(lvlfa_file); + std::filesystem::remove(nft_file); REQUIRE(parsed.initial.size() == 1); REQUIRE(parsed.final.size() == 1); @@ -221,24 +221,24 @@ TEST_CASE("lvlfa::parse_from_mata()") { } SECTION("descending") { - Lvlfa lvlfa; - lvlfa.delta.add(0, 1, 1); - lvlfa.delta.add(1, 1, 2); - lvlfa.delta.add(2, 1, 3); - lvlfa.delta.add(3, 1, 4); - lvlfa.delta.add(4, 1, 5); - lvlfa.delta.add(5, 1, 6); - lvlfa.delta.add(6, 1, 7); - lvlfa.delta.add(7, 1, 8); - lvlfa.delta.add(8, 1, 9); - lvlfa.delta.add(9, 1, 10); - lvlfa.initial.insert(0); - lvlfa.final.insert(10); - lvlfa.levels = { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }; - lvlfa.levels_cnt = 11; + Nft nft; + nft.delta.add(0, 1, 1); + nft.delta.add(1, 1, 2); + nft.delta.add(2, 1, 3); + nft.delta.add(3, 1, 4); + nft.delta.add(4, 1, 5); + nft.delta.add(5, 1, 6); + nft.delta.add(6, 1, 7); + nft.delta.add(7, 1, 8); + nft.delta.add(8, 1, 9); + nft.delta.add(9, 1, 10); + nft.initial.insert(0); + nft.final.insert(10); + nft.levels = { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }; + nft.levels_cnt = 11; SECTION("from string") { - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa.print_to_mata()) }; + Nft parsed{ mata::nft::builder::parse_from_mata(nft.print_to_mata()) }; REQUIRE(parsed.initial.size() == 1); REQUIRE(parsed.final.size() == 1); @@ -258,9 +258,9 @@ 
TEST_CASE("lvlfa::parse_from_mata()") { } SECTION("from stream") { - std::stringstream lvlfa_stream; - lvlfa.print_to_mata(lvlfa_stream); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_stream) }; + std::stringstream nft_stream; + nft.print_to_mata(nft_stream); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_stream) }; REQUIRE(parsed.initial.size() == 1); REQUIRE(parsed.final.size() == 1); @@ -280,12 +280,12 @@ TEST_CASE("lvlfa::parse_from_mata()") { } SECTION("from file") { - std::filesystem::path lvlfa_file{ "./temp-test-parse_from_mata-levels_testing.mata" }; - std::fstream file{ lvlfa_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; - lvlfa.print_to_mata(file); - Lvlfa parsed{ mata::lvlfa::builder::parse_from_mata(lvlfa_file) }; + std::filesystem::path nft_file{ "./temp-test-parse_from_mata-levels_testing.mata" }; + std::fstream file{ nft_file, std::fstream::in | std::fstream::out | std::fstream::trunc }; + nft.print_to_mata(file); + Nft parsed{ mata::nft::builder::parse_from_mata(nft_file) }; file.close(); - std::filesystem::remove(lvlfa_file); + std::filesystem::remove(nft_file); REQUIRE(parsed.initial.size() == 1); REQUIRE(parsed.final.size() == 1); diff --git a/tests/lvlfa/delta.cc b/tests/nft/delta.cc similarity index 72% rename from tests/lvlfa/delta.cc rename to tests/nft/delta.cc index de8b52a9..20b49823 100644 --- a/tests/lvlfa/delta.cc +++ b/tests/nft/delta.cc @@ -3,17 +3,17 @@ #include "utils.hh" #include "mata/alphabet.hh" -#include "mata/lvlfa/types.hh" -#include "mata/lvlfa/delta.hh" -#include "mata/lvlfa/lvlfa.hh" +#include "mata/nft/types.hh" +#include "mata/nft/delta.hh" +#include "mata/nft/nft.hh" #include -using namespace mata::lvlfa; +using namespace mata::nft; using Symbol = mata::Symbol; -TEST_CASE("mata::lvlfa::SymbolPost") { +TEST_CASE("mata::nft::SymbolPost") { CHECK(SymbolPost{ 0, StateSet{} } == SymbolPost{ 0, StateSet{ 0, 1 } }); CHECK(SymbolPost{ 1, StateSet{} } != SymbolPost{ 0, StateSet{} }); CHECK(SymbolPost{ 0, StateSet{ 1 } } < SymbolPost{ 1, StateSet{} }); @@ -24,8 +24,8 @@ TEST_CASE("mata::lvlfa::SymbolPost") { CHECK(SymbolPost{ 1, StateSet{ 0 } } >= SymbolPost{ 0, StateSet{ 1 } }); } -TEST_CASE("mata::lvlfa::Delta::state_post()") { - Lvlfa aut{}; +TEST_CASE("mata::nft::Delta::state_post()") { + Nft aut{}; SECTION("Add new states within the limit") { aut.add_state(19); @@ -77,69 +77,69 @@ TEST_CASE("mata::lvlfa::Delta::state_post()") { } } -TEST_CASE("mata::lvlfa::Delta::contains()") { - Lvlfa lvlfa; - CHECK(!lvlfa.delta.contains(0, 1, 0)); - CHECK(!lvlfa.delta.contains(Transition{ 0, 1, 0 })); - lvlfa.delta.add(0, 1, 0); - CHECK(lvlfa.delta.contains(0, 1, 0)); - CHECK(lvlfa.delta.contains(Transition{ 0, 1, 0 })); +TEST_CASE("mata::nft::Delta::contains()") { + Nft nft; + CHECK(!nft.delta.contains(0, 1, 0)); + CHECK(!nft.delta.contains(Transition{ 0, 1, 0 })); + nft.delta.add(0, 1, 0); + CHECK(nft.delta.contains(0, 1, 0)); + CHECK(nft.delta.contains(Transition{ 0, 1, 0 })); } -TEST_CASE("mata::lvlfa::Delta::remove()") { - Lvlfa lvlfa; +TEST_CASE("mata::nft::Delta::remove()") { + Nft nft; SECTION("Simple remove") { - lvlfa.delta.add(0, 1, 0); - CHECK_NOTHROW(lvlfa.delta.remove(3, 5, 6)); - CHECK_NOTHROW(lvlfa.delta.remove(0, 1, 0)); - CHECK(lvlfa.delta.empty()); - lvlfa.delta.add(10, 1, 0); - CHECK_THROWS_AS(lvlfa.delta.remove(3, 5, 6), std::invalid_argument); + nft.delta.add(0, 1, 0); + CHECK_NOTHROW(nft.delta.remove(3, 5, 6)); + CHECK_NOTHROW(nft.delta.remove(0, 1, 0)); + CHECK(nft.delta.empty()); + 
nft.delta.add(10, 1, 0); + CHECK_THROWS_AS(nft.delta.remove(3, 5, 6), std::invalid_argument); } } -TEST_CASE("mata::lvlfa::Delta::mutable_post()") { - Lvlfa lvlfa; +TEST_CASE("mata::nft::Delta::mutable_post()") { + Nft nft; SECTION("Default initialized") { - CHECK(lvlfa.delta.num_of_states() == 0); - CHECK(!lvlfa.delta.uses_state(0)); - CHECK(lvlfa.delta.mutable_state_post(0).empty()); - CHECK(lvlfa.delta.num_of_states() == 1); - CHECK(lvlfa.delta.uses_state(0)); - - CHECK(lvlfa.delta.mutable_state_post(9).empty()); - CHECK(lvlfa.delta.num_of_states() == 10); - CHECK(lvlfa.delta.uses_state(1)); - CHECK(lvlfa.delta.uses_state(2)); - CHECK(lvlfa.delta.uses_state(9)); - CHECK(!lvlfa.delta.uses_state(10)); - - CHECK(lvlfa.delta.mutable_state_post(9).empty()); - CHECK(lvlfa.delta.num_of_states() == 10); - CHECK(lvlfa.delta.uses_state(9)); - CHECK(!lvlfa.delta.uses_state(10)); + CHECK(nft.delta.num_of_states() == 0); + CHECK(!nft.delta.uses_state(0)); + CHECK(nft.delta.mutable_state_post(0).empty()); + CHECK(nft.delta.num_of_states() == 1); + CHECK(nft.delta.uses_state(0)); + + CHECK(nft.delta.mutable_state_post(9).empty()); + CHECK(nft.delta.num_of_states() == 10); + CHECK(nft.delta.uses_state(1)); + CHECK(nft.delta.uses_state(2)); + CHECK(nft.delta.uses_state(9)); + CHECK(!nft.delta.uses_state(10)); + + CHECK(nft.delta.mutable_state_post(9).empty()); + CHECK(nft.delta.num_of_states() == 10); + CHECK(nft.delta.uses_state(9)); + CHECK(!nft.delta.uses_state(10)); } } -TEST_CASE("mata::lvlfa::StatePost iteration over moves") { - Lvlfa lvlfa; +TEST_CASE("mata::nft::StatePost iteration over moves") { + Nft nft; std::vector iterated_moves{}; std::vector expected_moves{}; StatePost state_post{}; - SECTION("Simple LVLFA") { - lvlfa.initial.insert(0); - lvlfa.final.insert(3); - lvlfa.delta.add(0, 1, 1); - lvlfa.delta.add(0, 2, 1); - lvlfa.delta.add(0, 5, 1); - lvlfa.delta.add(1, 3, 2); - lvlfa.delta.add(2, 0, 1); - lvlfa.delta.add(2, 0, 3); - - state_post = lvlfa.delta.state_post(0); + SECTION("Simple NFT") { + nft.initial.insert(0); + nft.final.insert(3); + nft.delta.add(0, 1, 1); + nft.delta.add(0, 2, 1); + nft.delta.add(0, 5, 1); + nft.delta.add(1, 3, 2); + nft.delta.add(2, 0, 1); + nft.delta.add(2, 0, 3); + + state_post = nft.delta.state_post(0); expected_moves = std::vector{ { 1, 1 }, { 2, 1 }, { 5, 1 } }; StatePost::Moves moves{ state_post.moves() }; iterated_moves.clear(); @@ -158,7 +158,7 @@ TEST_CASE("mata::lvlfa::StatePost iteration over moves") { StatePost::Moves epsilon_moves{ state_post.moves_epsilons() }; CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); - state_post = lvlfa.delta.state_post(1); + state_post = nft.delta.state_post(1); moves = state_post.moves(); StatePost::Moves moves_custom; moves_custom = moves; @@ -181,7 +181,7 @@ TEST_CASE("mata::lvlfa::StatePost iteration over moves") { epsilon_moves = state_post.moves_epsilons(); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); - state_post = lvlfa.delta.state_post(2); + state_post = nft.delta.state_post(2); moves = state_post.moves(); iterated_moves.clear(); for (auto move_it{ moves.begin() }; move_it != moves.end(); ++move_it) { @@ -197,7 +197,7 @@ TEST_CASE("mata::lvlfa::StatePost iteration over moves") { epsilon_moves = state_post.moves_epsilons(); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); - state_post = lvlfa.delta.state_post(3); + state_post = nft.delta.state_post(3); moves = state_post.moves(); iterated_moves.clear(); for (auto move_it{ 
moves.begin() }; move_it != moves.end(); ++move_it) { @@ -213,7 +213,7 @@ TEST_CASE("mata::lvlfa::StatePost iteration over moves") { epsilon_moves = state_post.moves_epsilons(); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); - state_post = lvlfa.delta.state_post(4); + state_post = nft.delta.state_post(4); moves = state_post.moves(); iterated_moves.clear(); for (auto move_it{ moves.begin() }; move_it != moves.end(); ++move_it) { @@ -228,36 +228,36 @@ TEST_CASE("mata::lvlfa::StatePost iteration over moves") { epsilon_moves = state_post.moves_epsilons(); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); - lvlfa.delta.add(0, EPSILON, 2); - state_post = lvlfa.delta.state_post(0); + nft.delta.add(0, EPSILON, 2); + state_post = nft.delta.state_post(0); epsilon_moves = state_post.moves_epsilons(); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 2 } }); - lvlfa.delta.add(1, EPSILON, 3); - state_post = lvlfa.delta.state_post(1); + nft.delta.add(1, EPSILON, 3); + state_post = nft.delta.state_post(1); epsilon_moves = state_post.moves_epsilons(); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 3 } }); - lvlfa.delta.add(4, EPSILON, 4); - state_post = lvlfa.delta.state_post(4); + nft.delta.add(4, EPSILON, 4); + state_post = nft.delta.state_post(4); epsilon_moves = state_post.moves_epsilons(); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 4 } }); - state_post = lvlfa.delta.state_post(0); + state_post = nft.delta.state_post(0); epsilon_moves = state_post.moves_epsilons(3); iterated_moves.clear(); for (const Move& move: epsilon_moves) { iterated_moves.push_back(move); } CHECK(iterated_moves == std::vector{ { 5, 1 }, { EPSILON, 2 }}); - state_post = lvlfa.delta.state_post(1); + state_post = nft.delta.state_post(1); epsilon_moves = state_post.moves_epsilons(3); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { 3, 2 }, { EPSILON, 3 } }); - state_post = lvlfa.delta.state_post(2); + state_post = nft.delta.state_post(2); epsilon_moves = state_post.moves_epsilons(3); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() }.empty()); - state_post = lvlfa.delta.state_post(4); + state_post = nft.delta.state_post(4); epsilon_moves = state_post.moves_epsilons(3); CHECK(std::vector{ epsilon_moves.begin(), epsilon_moves.end() } == std::vector{ { EPSILON, 4 } }); - state_post = lvlfa.delta.state_post(0); + state_post = nft.delta.state_post(0); StatePost::Moves symbol_moves = state_post.moves_symbols(3); iterated_moves.clear(); for (const Move& move: symbol_moves) { iterated_moves.push_back(move); } @@ -267,54 +267,54 @@ TEST_CASE("mata::lvlfa::StatePost iteration over moves") { for (const Move& move: symbol_moves) { iterated_moves.push_back(move); } CHECK(iterated_moves.empty()); - state_post = lvlfa.delta.state_post(1); + state_post = nft.delta.state_post(1); symbol_moves = state_post.moves_symbols(3); CHECK(std::vector{ symbol_moves.begin(), symbol_moves.end() } == std::vector{ { 3, 2 } }); - state_post = lvlfa.delta.state_post(2); + state_post = nft.delta.state_post(2); symbol_moves = state_post.moves_symbols(3); CHECK(std::vector{ symbol_moves.begin(), symbol_moves.end() } == std::vector{ { 0, 1 }, { 0 , 3 } }); - state_post = lvlfa.delta.state_post(4); + state_post = nft.delta.state_post(4); symbol_moves = state_post.moves_symbols(3); CHECK(std::vector{ symbol_moves.begin(), symbol_moves.end() 
}.empty()); // Create custom moves iterator. - state_post = lvlfa.delta[0]; + state_post = nft.delta[0]; moves = { state_post, state_post.cbegin(), state_post.cbegin() + 2 }; iterated_moves = { moves.begin(), moves.end() }; CHECK(iterated_moves == std::vector{ { 1, 1 }, { 2, 1 } }); - state_post = lvlfa.delta[20]; + state_post = nft.delta[20]; moves = { state_post, state_post.cbegin(), state_post.cend() }; iterated_moves = { moves.begin(), moves.end() }; CHECK(iterated_moves.empty()); } } -TEST_CASE("mata::lvlfa::Delta iteration over transitions") { - Lvlfa lvlfa; +TEST_CASE("mata::nft::Delta iteration over transitions") { + Nft nft; std::vector iterated_transitions{}; std::vector expected_transitions{}; SECTION("empty automaton") { - Delta::Transitions transitions{ lvlfa.delta.transitions() }; + Delta::Transitions transitions{ nft.delta.transitions() }; CHECK(transitions.begin() == transitions.end()); - Delta::Transitions::const_iterator transition_it{ lvlfa.delta }; + Delta::Transitions::const_iterator transition_it{ nft.delta }; CHECK(transition_it == transitions.end()); - transition_it = { lvlfa.delta, 0 }; + transition_it = { nft.delta, 0 }; CHECK(transition_it == transitions.end()); } - SECTION("Simple LVLFA") { - lvlfa.initial.insert(0); - lvlfa.final.insert(3); - lvlfa.delta.add(0, 1, 1); - lvlfa.delta.add(0, 2, 1); - lvlfa.delta.add(0, 5, 1); - lvlfa.delta.add(1, 3, 2); - lvlfa.delta.add(2, 0, 1); - lvlfa.delta.add(2, 0, 3); - - Delta::Transitions transitions{ lvlfa.delta.transitions() }; + SECTION("Simple NFT") { + nft.initial.insert(0); + nft.final.insert(3); + nft.delta.add(0, 1, 1); + nft.delta.add(0, 2, 1); + nft.delta.add(0, 5, 1); + nft.delta.add(1, 3, 2); + nft.delta.add(2, 0, 1); + nft.delta.add(2, 0, 3); + + Delta::Transitions transitions{ nft.delta.transitions() }; iterated_transitions.clear(); for (auto transitions_it{ transitions.begin() }; transitions_it != transitions.end(); ++transitions_it) { @@ -329,10 +329,10 @@ TEST_CASE("mata::lvlfa::Delta iteration over transitions") { CHECK(iterated_transitions == expected_transitions); iterated_transitions.clear(); - for (const Transition& transition: lvlfa.delta.transitions()) { iterated_transitions.push_back(transition); } + for (const Transition& transition: nft.delta.transitions()) { iterated_transitions.push_back(transition); } CHECK(iterated_transitions == expected_transitions); - Delta::Transitions::const_iterator transitions_it{ lvlfa.delta.transitions().begin() }; + Delta::Transitions::const_iterator transitions_it{ nft.delta.transitions().begin() }; CHECK(*transitions_it == Transition{ 0, 1, 1 }); transitions_it++; CHECK(*transitions_it == Transition{ 0, 2, 1 }); @@ -340,9 +340,9 @@ TEST_CASE("mata::lvlfa::Delta iteration over transitions") { transitions_it++; CHECK(*transitions_it == Transition{ 1, 3, 2 }); - Delta::Transitions::const_iterator transitions_from_1_to_end_it{ lvlfa.delta, 1 }; + Delta::Transitions::const_iterator transitions_from_1_to_end_it{ nft.delta, 1 }; iterated_transitions.clear(); - while (transitions_from_1_to_end_it != lvlfa.delta.transitions().end()) { + while (transitions_from_1_to_end_it != nft.delta.transitions().end()) { iterated_transitions.push_back(*transitions_from_1_to_end_it); transitions_from_1_to_end_it++; } @@ -352,11 +352,11 @@ TEST_CASE("mata::lvlfa::Delta iteration over transitions") { SECTION("Sparse automaton") { const size_t state_num = 'r'+1; - lvlfa.delta.reserve(state_num); + nft.delta.reserve(state_num); - lvlfa.delta.add('q', 'a', 'r'); - lvlfa.delta.add('q', 'b', 
'r'); - const Delta::Transitions transitions{ lvlfa.delta.transitions() }; + nft.delta.add('q', 'a', 'r'); + nft.delta.add('q', 'b', 'r'); + const Delta::Transitions transitions{ nft.delta.transitions() }; Delta::Transitions::const_iterator it{ transitions.begin() }; Delta::Transitions::const_iterator jt{ transitions.begin() }; CHECK(it == jt); @@ -379,43 +379,43 @@ TEST_CASE("mata::lvlfa::Delta iteration over transitions") { } } -TEST_CASE("mata::lvlfa::Delta::operator=()") { - Lvlfa lvlfa{}; - lvlfa.initial.insert(0); - lvlfa.final.insert(1); - lvlfa.delta.add(0, 'a', 1); +TEST_CASE("mata::nft::Delta::operator=()") { + Nft nft{}; + nft.initial.insert(0); + nft.final.insert(1); + nft.delta.add(0, 'a', 1); - Lvlfa copied_lvlfa{ lvlfa }; - lvlfa.delta.add(1, 'b', 0); - CHECK(lvlfa.delta.num_of_transitions() == 2); - CHECK(copied_lvlfa.delta.num_of_transitions() == 1); + Nft copied_nft{ nft }; + nft.delta.add(1, 'b', 0); + CHECK(nft.delta.num_of_transitions() == 2); + CHECK(copied_nft.delta.num_of_transitions() == 1); } -TEST_CASE("mata::lvlfa::StatePost::Moves") { - Lvlfa lvlfa{}; - lvlfa.initial.insert(0); - lvlfa.final.insert(5); - lvlfa.delta.add(0, 'a', 1); - lvlfa.delta.add(1, 'b', 2); - lvlfa.delta.add(1, 'c', 2); - lvlfa.delta.add(1, 'd', 2); - lvlfa.delta.add(2, 'e', 3); - lvlfa.delta.add(3, 'e', 4); - lvlfa.delta.add(4, 'f', 5); +TEST_CASE("mata::nft::StatePost::Moves") { + Nft nft{}; + nft.initial.insert(0); + nft.final.insert(5); + nft.delta.add(0, 'a', 1); + nft.delta.add(1, 'b', 2); + nft.delta.add(1, 'c', 2); + nft.delta.add(1, 'd', 2); + nft.delta.add(2, 'e', 3); + nft.delta.add(3, 'e', 4); + nft.delta.add(4, 'f', 5); // TODO: rewrite in a check of moves. - StatePost::Moves moves_from_source{ lvlfa.delta[0].moves() }; + StatePost::Moves moves_from_source{ nft.delta[0].moves() }; CHECK(std::vector{ moves_from_source.begin(), moves_from_source.end() } == std::vector{ { 'a', 1 }}); - moves_from_source = lvlfa.delta[1].moves(); + moves_from_source = nft.delta[1].moves(); CHECK(std::vector{ moves_from_source.begin(), moves_from_source.end() } == std::vector{ { 'b', 2 }, { 'c', 2 }, { 'd', 2 } }); StatePost::Moves::const_iterator move_incremented_it{ moves_from_source.begin() }; move_incremented_it++; CHECK(*move_incremented_it == Move{ 'c', 2 }); - CHECK(*StatePost::Moves::const_iterator{ lvlfa.delta.state_post(1) } == Move{ 'b', 2 }); + CHECK(*StatePost::Moves::const_iterator{ nft.delta.state_post(1) } == Move{ 'b', 2 }); CHECK(move_incremented_it != moves_from_source.begin()); CHECK(move_incremented_it == ++moves_from_source.begin()); - StatePost::Moves moves_from_source_copy_constructed{ lvlfa.delta[12].moves() }; + StatePost::Moves moves_from_source_copy_constructed{ nft.delta[12].moves() }; CHECK( std::vector{ moves_from_source_copy_constructed.begin(), moves_from_source_copy_constructed.end() } .empty() @@ -423,7 +423,7 @@ TEST_CASE("mata::lvlfa::StatePost::Moves") { } -TEST_CASE("mata::lvlfa::Delta::operator==()") { +TEST_CASE("mata::nft::Delta::operator==()") { Delta delta{}; Delta delta2{}; CHECK(delta == delta2); @@ -444,7 +444,7 @@ TEST_CASE("mata::lvlfa::Delta::operator==()") { CHECK(delta == delta2); } -TEST_CASE("mata::lvlfa::Delta::add_symbols_to()") { +TEST_CASE("mata::nft::Delta::add_symbols_to()") { mata::OnTheFlyAlphabet empty_alphabet{}; mata::OnTheFlyAlphabet alphabet{}; Delta delta{}; diff --git a/tests/lvlfa/lvlfa-concatenation.cc b/tests/nft/nft-concatenation.cc similarity index 94% rename from tests/lvlfa/lvlfa-concatenation.cc rename to 
tests/nft/nft-concatenation.cc index 6b953766..7067395b 100644 --- a/tests/lvlfa/lvlfa-concatenation.cc +++ b/tests/nft/nft-concatenation.cc @@ -1,4 +1,4 @@ -/* tests-lvlfa-concatenation.cc -- Tests for concatenation of NFAs +/* tests-nft-concatenation.cc -- Tests for concatenation of NFAs */ @@ -6,11 +6,11 @@ #include -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/strings.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/strings.hh" #include "mata/parser/re2parser.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using namespace mata::strings; using namespace mata::utils; using namespace mata::parser; @@ -59,10 +59,10 @@ using Symbol = mata::Symbol; // }}} -TEST_CASE("mata::lvlfa::concatenate()") { - Lvlfa lhs{}; - Lvlfa rhs{}; - Lvlfa result{}; +TEST_CASE("mata::nft::concatenate()") { + Nft lhs{}; + Nft rhs{}; + Nft result{}; SECTION("Empty automaton without states") { result = concatenate(lhs, rhs); @@ -348,10 +348,10 @@ TEST_CASE("mata::lvlfa::concatenate()") { } } -TEST_CASE("mata::lvlfa::concatenate() over epsilon symbol") { - Lvlfa lhs{}; - Lvlfa rhs{}; - Lvlfa result{}; +TEST_CASE("mata::nft::concatenate() over epsilon symbol") { + Nft lhs{}; + Nft rhs{}; + Nft result{}; SECTION("Empty automaton") { lhs.add_state(); @@ -576,40 +576,40 @@ TEST_CASE("mata::lvlfa::concatenate() over epsilon symbol") { } } -TEST_CASE("Lvlfa (a|b)*") { - Lvlfa aut1; +TEST_CASE("Nft (a|b)*") { + Nft aut1; mata::parser::create_nfa(&aut1, "a*"); - Lvlfa aut2; + Nft aut2; mata::parser::create_nfa(&aut2, "b*"); - Lvlfa aut3; + Nft aut3; mata::parser::create_nfa(&aut3, "a*b*"); auto concatenated_aut{ concatenate(aut1, aut2) }; CHECK(are_equivalent(concatenated_aut, aut3)); } -TEST_CASE("Bug with epsilon transitions in Lvlfa") { - Lvlfa lvlfa1{}; - lvlfa1.initial.insert(0); - lvlfa1.final.insert(3); - lvlfa1.delta.add(0, 97, 0); - lvlfa1.delta.add(0, 98, 0); - lvlfa1.delta.add(0, 99, 0); - lvlfa1.delta.add(0, 100, 0); - lvlfa1.delta.add(0, EPSILON, 1); - lvlfa1.delta.add(1, 97, 2); - lvlfa1.delta.add(2, 98, 3); - - Lvlfa lvlfa2{}; - lvlfa2.initial.insert(0); - lvlfa2.final.insert(0); - lvlfa2.delta.add(0, 97, 0); - lvlfa2.delta.add(0, 98, 0); - lvlfa2.delta.add(0, 99, 0); - lvlfa2.delta.add(0, 100, 0); - - auto result{ concatenate(lvlfa1, lvlfa2, true) }; - - Lvlfa expected{ lvlfa1 }; +TEST_CASE("Bug with epsilon transitions in Nft") { + Nft nft1{}; + nft1.initial.insert(0); + nft1.final.insert(3); + nft1.delta.add(0, 97, 0); + nft1.delta.add(0, 98, 0); + nft1.delta.add(0, 99, 0); + nft1.delta.add(0, 100, 0); + nft1.delta.add(0, EPSILON, 1); + nft1.delta.add(1, 97, 2); + nft1.delta.add(2, 98, 3); + + Nft nft2{}; + nft2.initial.insert(0); + nft2.final.insert(0); + nft2.delta.add(0, 97, 0); + nft2.delta.add(0, 98, 0); + nft2.delta.add(0, 99, 0); + nft2.delta.add(0, 100, 0); + + auto result{ concatenate(nft1, nft2, true) }; + + Nft expected{ nft1 }; expected.delta.add(3, EPSILON, 4); expected.delta.add(4, 97, 4); expected.delta.add(4, 98, 4); @@ -620,13 +620,13 @@ TEST_CASE("Bug with epsilon transitions in Lvlfa") { CHECK(are_equivalent(result, expected)); } -TEST_CASE("mata::lvlfa::concatenate() inplace") { +TEST_CASE("mata::nft::concatenate() inplace") { SECTION("Empty automaton without states") { - Lvlfa lhs{}; - Lvlfa rhs{}; - Lvlfa result{}; + Nft lhs{}; + Nft rhs{}; + Nft result{}; result = lhs.concatenate(rhs); CHECK(result.initial.empty()); @@ -636,9 +636,9 @@ TEST_CASE("mata::lvlfa::concatenate() inplace") { } SECTION("One empty automaton without states") { - Lvlfa lhs{}; - Lvlfa 
rhs{}; - Lvlfa result{}; + Nft lhs{}; + Nft rhs{}; + Nft result{}; rhs.add_state(); result = lhs.concatenate(rhs); @@ -649,9 +649,9 @@ TEST_CASE("mata::lvlfa::concatenate() inplace") { } SECTION("Automaton A concatenate automaton B") { - Lvlfa lhs{}; - Lvlfa rhs{}; - Lvlfa result{}; + Nft lhs{}; + Nft rhs{}; + Nft result{}; lhs.add_state(10); FILL_WITH_AUT_A(lhs); rhs.add_state(14); @@ -668,9 +668,9 @@ TEST_CASE("mata::lvlfa::concatenate() inplace") { } SECTION("Sample automata") { - Lvlfa lhs{}; - Lvlfa rhs{}; - Lvlfa result{}; + Nft lhs{}; + Nft rhs{}; + Nft result{}; lhs.add_state(); lhs.initial.insert(0); lhs.final.insert(0); @@ -699,8 +699,8 @@ TEST_CASE("mata::lvlfa::concatenate() inplace") { } SECTION("Delta smaller than states") { - Lvlfa lhs{}; - Lvlfa rhs{}; + Nft lhs{}; + Nft rhs{}; lhs.delta.add(0, 65, 5); lhs.initial.insert(0); @@ -712,12 +712,12 @@ TEST_CASE("mata::lvlfa::concatenate() inplace") { rhs.initial.insert(0); rhs.final.insert(7); - Lvlfa result = lhs.concatenate(rhs); + Nft result = lhs.concatenate(rhs); CHECK(!result.is_lang_empty()); } SECTION("the same automata") { - Lvlfa lhs{}; + Nft lhs{}; lhs.add_state(); lhs.initial.insert(0); @@ -730,14 +730,14 @@ TEST_CASE("mata::lvlfa::concatenate() inplace") { lhs.delta.add(0, 116, 0); size_t lhs_size = lhs.num_of_states(); - Lvlfa result = lhs.concatenate(lhs); + Nft result = lhs.concatenate(lhs); CHECK(result.num_of_states() == lhs_size * 2); } } -TEST_CASE("Concat_inplace performance of LVLFA", "[.profiling]") { - Lvlfa base; +TEST_CASE("Concat_inplace performance of NFT", "[.profiling]") { + Nft base; base.initial.insert(0); base.final.insert(4); base.delta.add(0, 45, 1); @@ -1005,7 +1005,7 @@ TEST_CASE("Concat_inplace performance of LVLFA", "[.profiling]") { base.delta.add(3, 122, 4); base.delta.add(3, 124, 4); - Lvlfa concat; + Nft concat; concat.initial.insert(1); concat.final.insert(0); concat.final.insert(1); diff --git a/tests/lvlfa/lvlfa-intersection.cc b/tests/nft/nft-intersection.cc similarity index 94% rename from tests/lvlfa/lvlfa-intersection.cc rename to tests/nft/nft-intersection.cc index a4e9c046..a4a2cf07 100644 --- a/tests/lvlfa/lvlfa-intersection.cc +++ b/tests/nft/nft-intersection.cc @@ -1,4 +1,4 @@ -/* tests-lvlfa-intersection.cc -- Tests for intersection of LVLFAs +/* tests-nft-intersection.cc -- Tests for intersection of NFTs */ @@ -6,9 +6,9 @@ #include -#include "mata/lvlfa/lvlfa.hh" +#include "mata/nft/nft.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using namespace mata::utils; using namespace mata::parser; @@ -54,9 +54,9 @@ using namespace mata::parser; // }}} -TEST_CASE("mata::lvlfa::intersection()") +TEST_CASE("mata::nft::intersection()") { // {{{ - Lvlfa a, b, res; + Nft a, b, res; std::unordered_map, State> prod_map; SECTION("Intersection of empty automata") @@ -172,11 +172,11 @@ TEST_CASE("mata::lvlfa::intersection()") } } // }}} -TEST_CASE("mata::lvlfa::intersection() with preserving epsilon transitions") +TEST_CASE("mata::nft::intersection() with preserving epsilon transitions") { std::unordered_map, State> prod_map; - Lvlfa a{6}; + Nft a{6}; a.initial.insert(0); a.final.insert({1, 4, 5}); a.delta.add(0, EPSILON, 1); @@ -187,7 +187,7 @@ TEST_CASE("mata::lvlfa::intersection() with preserving epsilon transitions") a.delta.add(2, EPSILON, 3); a.delta.add(3, 'a', 5); - Lvlfa b{10}; + Nft b{10}; b.initial.insert(0); b.final.insert({2, 4, 8, 7}); b.delta.add(0, 'b', 1); @@ -201,7 +201,7 @@ TEST_CASE("mata::lvlfa::intersection() with preserving epsilon transitions") 
b.delta.add(6, 'a', 9); b.delta.add(6, 'b', 7); - Lvlfa result{intersection(a, b, EPSILON, &prod_map) }; + Nft result{intersection(a, b, EPSILON, &prod_map) }; // Check states. CHECK(result.is_state(prod_map[{0, 0}])); @@ -272,9 +272,9 @@ TEST_CASE("mata::lvlfa::intersection() with preserving epsilon transitions") CHECK(result.delta.state_post(prod_map[{ 5, 8 }]).empty()); } -TEST_CASE("mata::lvlfa::intersection() for profiling", "[.profiling],[intersection]") +TEST_CASE("mata::nft::intersection() for profiling", "[.profiling],[intersection]") { - Lvlfa a{6}; + Nft a{6}; a.initial.insert(0); a.final.insert({1, 4, 5}); a.delta.add(0, EPSILON, 1); @@ -285,7 +285,7 @@ TEST_CASE("mata::lvlfa::intersection() for profiling", "[.profiling],[intersecti a.delta.add(2, EPSILON, 3); a.delta.add(3, 'a', 5); - Lvlfa b{10}; + Nft b{10}; b.initial.insert(0); b.final.insert({2, 4, 8, 7}); b.delta.add(0, 'b', 1); @@ -300,12 +300,12 @@ TEST_CASE("mata::lvlfa::intersection() for profiling", "[.profiling],[intersecti b.delta.add(6, 'b', 7); for (size_t i{ 0 }; i < 10000; ++i) { - Lvlfa result{intersection(a, b) }; + Nft result{intersection(a, b) }; } } -TEST_CASE("Move semantics of LVLFA", "[.profiling][std::move]") { - Lvlfa b{10}; +TEST_CASE("Move semantics of NFT", "[.profiling][std::move]") { + Nft b{10}; b.initial.insert(0); b.final.insert({2, 4, 8, 7}); b.delta.add(0, 'b', 1); @@ -320,7 +320,7 @@ TEST_CASE("Move semantics of LVLFA", "[.profiling][std::move]") { b.delta.add(6, 'b', 7); for (size_t i{ 0 }; i < 1'000'000; ++i) { - Lvlfa a{ std::move(b) }; + Nft a{ std::move(b) }; a.initial.insert(1); b = std::move(a); } diff --git a/tests/lvlfa/lvlfa-plumbing.cc b/tests/nft/nft-plumbing.cc similarity index 62% rename from tests/lvlfa/lvlfa-plumbing.cc rename to tests/nft/nft-plumbing.cc index b5aec645..d23f4833 100644 --- a/tests/lvlfa/lvlfa-plumbing.cc +++ b/tests/nft/nft-plumbing.cc @@ -1,4 +1,4 @@ -/* tests-lvlfa-plumbing.cc -- Tests plumbing versions of functions +/* tests-nft-plumbing.cc -- Tests plumbing versions of functions */ @@ -6,8 +6,8 @@ #include -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/plumbing.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/plumbing.hh" using Symbol = mata::Symbol; using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; @@ -54,67 +54,67 @@ using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; // }}} -TEST_CASE("Mata::lvlfa::Plumbing") { - mata::lvlfa::Lvlfa lhs{}; - mata::lvlfa::Lvlfa rhs{}; - mata::lvlfa::Lvlfa result{}; +TEST_CASE("Mata::nft::Plumbing") { + mata::nft::Nft lhs{}; + mata::nft::Nft rhs{}; + mata::nft::Nft result{}; OnTheFlyAlphabet alph{ std::vector{ "a", "b", "c" } }; - SECTION("Mata::lvlfa::Plumbing::concatenate") { + SECTION("Mata::nft::Plumbing::concatenate") { FILL_WITH_AUT_A(lhs); FILL_WITH_AUT_B(lhs); - mata::lvlfa::plumbing::concatenate(&result, lhs, rhs); + mata::nft::plumbing::concatenate(&result, lhs, rhs); CHECK(result.is_lang_empty()); } - SECTION("Mata::lvlfa::Plumbing::intersection") { + SECTION("Mata::nft::Plumbing::intersection") { FILL_WITH_AUT_A(lhs); FILL_WITH_AUT_B(lhs); - mata::lvlfa::plumbing::intersection(&result, lhs, rhs); + mata::nft::plumbing::intersection(&result, lhs, rhs); CHECK(result.is_lang_empty()); } - SECTION("Mata::lvlfa::Plumbing::union") { + SECTION("Mata::nft::Plumbing::union") { FILL_WITH_AUT_A(lhs); FILL_WITH_AUT_B(lhs); - mata::lvlfa::plumbing::uni(&result, lhs, rhs); + mata::nft::plumbing::uni(&result, lhs, rhs); CHECK(!result.is_lang_empty()); } - SECTION("Mata::lvlfa::Plumbing::remove_epsilon") { + 
SECTION("Mata::nft::Plumbing::remove_epsilon") { FILL_WITH_AUT_A(lhs); - mata::lvlfa::plumbing::remove_epsilon(&result, lhs); + mata::nft::plumbing::remove_epsilon(&result, lhs); CHECK(!result.is_lang_empty()); } - SECTION("Mata::lvlfa::Plumbing::revert") { + SECTION("Mata::nft::Plumbing::revert") { FILL_WITH_AUT_A(lhs); - mata::lvlfa::plumbing::revert(&result, lhs); + mata::nft::plumbing::revert(&result, lhs); CHECK(!result.is_lang_empty()); } - SECTION("Mata::lvlfa::Plumbing::reduce") { + SECTION("Mata::nft::Plumbing::reduce") { FILL_WITH_AUT_A(lhs); - mata::lvlfa::plumbing::reduce(&result, lhs); + mata::nft::plumbing::reduce(&result, lhs); CHECK(!result.is_lang_empty()); CHECK(result.num_of_states() <= lhs.num_of_states()); } - SECTION("Mata::lvlfa::Plumbing::determinize") { + SECTION("Mata::nft::Plumbing::determinize") { FILL_WITH_AUT_A(lhs); - mata::lvlfa::plumbing::determinize(&result, lhs); + mata::nft::plumbing::determinize(&result, lhs); CHECK(!result.is_lang_empty()); } - SECTION("Mata::lvlfa::Plumbing::minimize") { + SECTION("Mata::nft::Plumbing::minimize") { FILL_WITH_AUT_A(lhs); - mata::lvlfa::plumbing::minimize(&result, lhs); + mata::nft::plumbing::minimize(&result, lhs); CHECK(!result.is_lang_empty()); } - SECTION("Mata::lvlfa::Plumbing::complement") { + SECTION("Mata::nft::Plumbing::complement") { FILL_WITH_AUT_A(lhs); - mata::lvlfa::plumbing::complement(&result, lhs, alph); + mata::nft::plumbing::complement(&result, lhs, alph); CHECK(!result.is_lang_empty()); } } diff --git a/tests/lvlfa/lvlfa-profiling.cc b/tests/nft/nft-profiling.cc similarity index 67% rename from tests/lvlfa/lvlfa-profiling.cc rename to tests/nft/nft-profiling.cc index 576c4f90..1a8a2a3b 100644 --- a/tests/lvlfa/lvlfa-profiling.cc +++ b/tests/nft/nft-profiling.cc @@ -3,34 +3,34 @@ #include #include "utils.hh" -#include "mata/lvlfa/lvlfa.hh" +#include "mata/nft/nft.hh" #include "mata/parser/re2parser.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using namespace mata::parser; ///////////////////////////// // Profiling revert and trim ///////////////////////////// -TEST_CASE("mata::lvlfa::fragile_revert() speed, simple ", "[.profiling]") { - Lvlfa B; +TEST_CASE("mata::nft::fragile_revert() speed, simple ", "[.profiling]") { + Nft B; FILL_WITH_AUT_D(B); for (int i = 0; i < 300000; i++) { B = fragile_revert(B); } } -TEST_CASE("mata::lvlfa::simple_revert() speed, simple ", "[.profiling]") { - Lvlfa B; +TEST_CASE("mata::nft::simple_revert() speed, simple ", "[.profiling]") { + Nft B; FILL_WITH_AUT_B(B); for (int i = 0; i < 300000; i++) { B = simple_revert(B); } } -TEST_CASE("mata::lvlfa::simple_revert() speed, harder", "[.profiling]") { - Lvlfa B; +TEST_CASE("mata::nft::simple_revert() speed, harder", "[.profiling]") { + Nft B; //this gives an interesting test case if the parser is not trimming and reducing mata::parser::create_nfa(&B, "((.*){10})*"); for (int i = 0; i < 200; i++) { @@ -38,8 +38,8 @@ TEST_CASE("mata::lvlfa::simple_revert() speed, harder", "[.profiling]") { } } -TEST_CASE("mata::lvlfa::fragile_revert() speed, harder", "[.profiling]") { - Lvlfa B; +TEST_CASE("mata::nft::fragile_revert() speed, harder", "[.profiling]") { + Nft B; //this gives an interesting test case if the parser is not trimming and reducing create_nfa(&B, "((.*){10})*"); for (int i = 0; i < 200; i++) { @@ -47,8 +47,8 @@ TEST_CASE("mata::lvlfa::fragile_revert() speed, harder", "[.profiling]") { } } -TEST_CASE("mata::lvlfa::somewhat_simple_revert() speed, harder", "[.profiling]") { - Lvlfa B; 
+TEST_CASE("mata::nft::somewhat_simple_revert() speed, harder", "[.profiling]") { + Nft B; //this gives an interesting test case if the parser is not trimming and reducing create_nfa(&B, "((.*){10})*"); //FILL_WITH_AUT_C(B); @@ -57,8 +57,8 @@ TEST_CASE("mata::lvlfa::somewhat_simple_revert() speed, harder", "[.profiling]") } } -TEST_CASE("mata::lvlfa::trim_inplace() speed, simple", "[.profiling]") { - Lvlfa A, B; +TEST_CASE("mata::nft::trim_inplace() speed, simple", "[.profiling]") { + Nft A, B; //this gives an interesting test case if the parser is not trimming and reducing FILL_WITH_AUT_B(B); for (int i = 0; i < 300000; i++) { @@ -67,8 +67,8 @@ TEST_CASE("mata::lvlfa::trim_inplace() speed, simple", "[.profiling]") { } } -TEST_CASE("mata::lvlfa::trim_inplace() speed, harder", "[.profiling]") { - Lvlfa A, B; +TEST_CASE("mata::nft::trim_inplace() speed, harder", "[.profiling]") { + Nft A, B; //this gives an interesting test case if the parser is not trimming and reducing create_nfa(&B, "((.*){10})*"); for (int i = 0; i < 200; i++) { @@ -81,40 +81,40 @@ TEST_CASE("mata::lvlfa::trim_inplace() speed, harder", "[.profiling]") { // Profiling get_used_symbols ////////////////////////////// -TEST_CASE("mata::lvlfa::get_used_symbols speed, harder", "[.profiling]") { - Lvlfa A; +TEST_CASE("mata::nft::get_used_symbols speed, harder", "[.profiling]") { + Nft A; create_nfa(&A, "((.*){10})*"); for (int i = 0; i < 2000000; i++) { A.delta.get_used_symbols(); } } -TEST_CASE("mata::lvlfa::get_used_symbols_bv speed, harder", "[.profiling]") { - Lvlfa A; +TEST_CASE("mata::nft::get_used_symbols_bv speed, harder", "[.profiling]") { + Nft A; create_nfa(&A, "((.*){10})*"); for (int i = 0; i < 2000000; i++) { A.delta.get_used_symbols_bv(); } } -TEST_CASE("mata::lvlfa::get_used_symbols_vec speed, harder", "[.profiling]") { - Lvlfa A; +TEST_CASE("mata::nft::get_used_symbols_vec speed, harder", "[.profiling]") { + Nft A; create_nfa(&A, "((.*){10})*"); for (int i = 0; i < 2000000; i++) { A.delta.get_used_symbols_vec(); } } -TEST_CASE("mata::lvlfa::get_used_symbols_set speed, harder", "[.profiling]") { - Lvlfa A; +TEST_CASE("mata::nft::get_used_symbols_set speed, harder", "[.profiling]") { + Nft A; create_nfa(&A, "((.*){10})*"); for (int i = 0; i < 2000000; i++) { A.delta.get_used_symbols_set(); } } -TEST_CASE("mata::lvlfa::get_used_symbols_sps speed, harder", "[.profiling]") { - Lvlfa A; +TEST_CASE("mata::nft::get_used_symbols_sps speed, harder", "[.profiling]") { + Nft A; create_nfa(&A, "((.*){10})*"); for (int i = 0; i < 2000000; i++) { A.delta.get_used_symbols_sps(); diff --git a/tests/lvlfa/lvlfa.cc b/tests/nft/nft.cc similarity index 88% rename from tests/lvlfa/lvlfa.cc rename to tests/nft/nft.cc index f4a60a34..ff41a1d9 100644 --- a/tests/lvlfa/lvlfa.cc +++ b/tests/nft/nft.cc @@ -7,19 +7,19 @@ #include "utils.hh" #include "mata/utils/sparse-set.hh" -#include "mata/lvlfa/delta.hh" -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/strings.hh" -#include "mata/lvlfa/builder.hh" -#include "mata/lvlfa/plumbing.hh" -#include "mata/lvlfa/algorithms.hh" +#include "mata/nft/delta.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/strings.hh" +#include "mata/nft/builder.hh" +#include "mata/nft/plumbing.hh" +#include "mata/nft/algorithms.hh" #include "mata/parser/re2parser.hh" using namespace mata; -using namespace mata::lvlfa::algorithms; -using namespace mata::lvlfa; +using namespace mata::nft::algorithms; +using namespace mata::nft; using namespace mata::strings; -using namespace mata::lvlfa::plumbing; +using 
namespace mata::nft::plumbing; using namespace mata::utils; using namespace mata::parser; using Symbol = mata::Symbol; @@ -27,8 +27,8 @@ using Word = mata::Word; using IntAlphabet = mata::IntAlphabet; using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; -TEST_CASE("mata::lvlfa::Lvlfa()") { - Lvlfa nft{}; +TEST_CASE("mata::nft::Nft()") { + Nft nft{}; nft.levels.resize(3); nft.levels_cnt = 5; CHECK(nft.levels_cnt == 5); @@ -40,42 +40,42 @@ TEST_CASE("mata::lvlfa::Lvlfa()") { CHECK(nft.levels == std::vector{ 0, 3, 1 }); } -TEST_CASE("mata::lvlfa::size()") { - Lvlfa lvlfa{}; - CHECK(lvlfa.num_of_states() == 0); +TEST_CASE("mata::nft::size()") { + Nft nft{}; + CHECK(nft.num_of_states() == 0); - lvlfa.add_state(3); - CHECK(lvlfa.num_of_states() == 4); + nft.add_state(3); + CHECK(nft.num_of_states() == 4); - lvlfa.clear(); - lvlfa.add_state(); - CHECK(lvlfa.num_of_states() == 1); + nft.clear(); + nft.add_state(); + CHECK(nft.num_of_states() == 1); - lvlfa.clear(); - FILL_WITH_AUT_A(lvlfa); - CHECK(lvlfa.num_of_states() == 11); + nft.clear(); + FILL_WITH_AUT_A(nft); + CHECK(nft.num_of_states() == 11); - lvlfa.clear(); - FILL_WITH_AUT_B(lvlfa); - CHECK(lvlfa.num_of_states() == 15); + nft.clear(); + FILL_WITH_AUT_B(nft); + CHECK(nft.num_of_states() == 15); - lvlfa = Lvlfa{ 0, {}, {} }; - CHECK(lvlfa.num_of_states() == 0); + nft = Nft{ 0, {}, {} }; + CHECK(nft.num_of_states() == 0); } -TEST_CASE("mata::lvlfa::Trans::operator<<") { +TEST_CASE("mata::nft::Trans::operator<<") { Transition trans(1, 2, 3); REQUIRE(std::to_string(trans) == "(1, 2, 3)"); } -TEST_CASE("mata::lvlfa::create_alphabet()") { - Lvlfa a{1}; +TEST_CASE("mata::nft::create_alphabet()") { + Nft a{1}; a.delta.add(0, 'a', 0); - Lvlfa b{1}; + Nft b{1}; b.delta.add(0, 'b', 0); b.delta.add(0, 'a', 0); - Lvlfa c{1}; + Nft c{1}; b.delta.add(0, 'c', 0); auto alphabet{ create_alphabet(a, b, c) }; @@ -87,9 +87,9 @@ TEST_CASE("mata::lvlfa::create_alphabet()") { // create_alphabet(a, b, 4); // Will not compile: '4' is not of the required type. 
} -TEST_CASE("mata::lvlfa::Lvlfa::delta.add()/delta.contains()") +TEST_CASE("mata::nft::Nft::delta.add()/delta.contains()") { // {{{ - Lvlfa a(3); + Nft a(3); SECTION("Empty automata have now transitions") { @@ -159,9 +159,9 @@ TEST_CASE("mata::lvlfa::Lvlfa::delta.add()/delta.contains()") } // }}} -TEST_CASE("mata::lvlfa::Delta.transform/append") +TEST_CASE("mata::nft::Delta.transform/append") { // {{{ - Lvlfa a(3); + Nft a(3); a.delta.add(1, 'a', 1); a.delta.add(2, 'b', {2,1,0}); @@ -181,9 +181,9 @@ TEST_CASE("mata::lvlfa::Delta.transform/append") } // }}} -TEST_CASE("mata::lvlfa::is_lang_empty()") +TEST_CASE("mata::nft::is_lang_empty()") { // {{{ - Lvlfa aut(14); + Nft aut(14); Run cex; SECTION("An empty automaton has an empty language") @@ -276,9 +276,9 @@ TEST_CASE("mata::lvlfa::is_lang_empty()") } } // }}} -TEST_CASE("mata::lvlfa::is_acyclic") +TEST_CASE("mata::nft::is_acyclic") { // {{{ - Lvlfa aut(14); + Nft aut(14); SECTION("An empty automaton is acyclic") { @@ -325,7 +325,7 @@ TEST_CASE("mata::lvlfa::is_acyclic") SECTION("Automaton with self-loops") { - Lvlfa aut(2); + Nft aut(2); aut.initial = {0}; aut.final = {1}; aut.delta.add(0, 'c', 1); @@ -334,9 +334,9 @@ TEST_CASE("mata::lvlfa::is_acyclic") } } // }}} -TEST_CASE("mata::lvlfa::get_word_for_path()") +TEST_CASE("mata::nft::get_word_for_path()") { // {{{ - Lvlfa aut(5); + Nft aut(5); Run path; Word word; @@ -412,9 +412,9 @@ TEST_CASE("mata::lvlfa::get_word_for_path()") } -TEST_CASE("mata::lvlfa::is_lang_empty_cex()") +TEST_CASE("mata::nft::is_lang_empty_cex()") { - Lvlfa aut(10); + Nft aut(10); Run cex; SECTION("Counterexample of an automaton with non-empty language") @@ -439,10 +439,10 @@ TEST_CASE("mata::lvlfa::is_lang_empty_cex()") } -TEST_CASE("mata::lvlfa::determinize()") +TEST_CASE("mata::nft::determinize()") { - Lvlfa aut(3); - Lvlfa result; + Nft aut(3); + Nft result; std::unordered_map subset_map; SECTION("empty automaton") @@ -479,7 +479,7 @@ TEST_CASE("mata::lvlfa::determinize()") SECTION("This broke Delta when delta[q] could cause re-allocation of post") { - Lvlfa x{}; + Nft x{}; x.initial.insert(0); x.final.insert(4); x.delta.add(0, 1, 3); @@ -498,9 +498,9 @@ TEST_CASE("mata::lvlfa::determinize()") } } // }}} -TEST_CASE("mata::lvlfa::minimize() for profiling", "[.profiling],[minimize]") { - Lvlfa aut(4); - Lvlfa result; +TEST_CASE("mata::nft::minimize() for profiling", "[.profiling],[minimize]") { + Nft aut(4); + Nft result; std::unordered_map subset_map; aut.initial.insert(0); @@ -538,15 +538,15 @@ TEST_CASE("mata::lvlfa::minimize() for profiling", "[.profiling],[minimize]") { minimize(&result, aut); } -TEST_CASE("mata::lvlfa::construct() correct calls") +TEST_CASE("mata::nft::construct() correct calls") { // {{{ - Lvlfa aut(10); + Nft aut(10); mata::parser::ParsedSection parsec; OnTheFlyAlphabet alphabet; SECTION("construct an empty automaton") { - parsec.type = lvlfa::TYPE_NFA; + parsec.type = nft::TYPE_NFT; aut = builder::construct(parsec); @@ -555,7 +555,7 @@ TEST_CASE("mata::lvlfa::construct() correct calls") SECTION("construct a simple non-empty automaton accepting the empty word") { - parsec.type = lvlfa::TYPE_NFA; + parsec.type = nft::TYPE_NFT; parsec.dict.insert({"Initial", {"q1"}}); parsec.dict.insert({"Final", {"q1"}}); @@ -566,7 +566,7 @@ TEST_CASE("mata::lvlfa::construct() correct calls") SECTION("construct an automaton with more than one initial/final states") { - parsec.type = lvlfa::TYPE_NFA; + parsec.type = nft::TYPE_NFT; parsec.dict.insert({"Initial", {"q1", "q2"}}); parsec.dict.insert({"Final", 
{"q1", "q2", "q3"}}); @@ -578,7 +578,7 @@ TEST_CASE("mata::lvlfa::construct() correct calls") SECTION("construct a simple non-empty automaton accepting only the word 'a'") { - parsec.type = lvlfa::TYPE_NFA; + parsec.type = nft::TYPE_NFT; parsec.dict.insert({"Initial", {"q1"}}); parsec.dict.insert({"Final", {"q2"}}); parsec.body = { {"q1", "a", "q2"} }; @@ -596,7 +596,7 @@ TEST_CASE("mata::lvlfa::construct() correct calls") SECTION("construct a more complicated non-empty automaton") { - parsec.type = lvlfa::TYPE_NFA; + parsec.type = nft::TYPE_NFT; parsec.dict.insert({"Initial", {"q1", "q3"}}); parsec.dict.insert({"Final", {"q5"}}); parsec.body.push_back({"q1", "a", "q3"}); @@ -629,9 +629,9 @@ TEST_CASE("mata::lvlfa::construct() correct calls") } } // }}} -TEST_CASE("mata::lvlfa::construct() invalid calls") +TEST_CASE("mata::nft::construct() invalid calls") { // {{{ - Lvlfa aut; + Nft aut; mata::parser::ParsedSection parsec; SECTION("construct() call with invalid ParsedSection object") @@ -644,7 +644,7 @@ TEST_CASE("mata::lvlfa::construct() invalid calls") SECTION("construct() call with an epsilon transition") { - parsec.type = lvlfa::TYPE_NFA; + parsec.type = nft::TYPE_NFT; parsec.body = { {"q1", "q2"} }; CHECK_THROWS_WITH(builder::construct(parsec), @@ -653,7 +653,7 @@ TEST_CASE("mata::lvlfa::construct() invalid calls") SECTION("construct() call with a nonsense transition") { - parsec.type = lvlfa::TYPE_NFA; + parsec.type = nft::TYPE_NFT; parsec.body = { {"q1", "a", "q2", "q3"} }; CHECK_THROWS_WITH(plumbing::construct(&aut, parsec), @@ -661,15 +661,15 @@ TEST_CASE("mata::lvlfa::construct() invalid calls") } } // }}} -TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") +TEST_CASE("mata::nft::construct() from IntermediateAut correct calls") { // {{{ - Lvlfa aut; + Nft aut; mata::IntermediateAut inter_aut; OnTheFlyAlphabet alphabet; SECTION("construct an empty automaton") { - inter_aut.automaton_type = mata::IntermediateAut::AutomatonType::LVLFA; + inter_aut.automaton_type = mata::IntermediateAut::AutomatonType::NFT; REQUIRE(aut.is_lang_empty()); aut = builder::construct(inter_aut); REQUIRE(aut.is_lang_empty()); @@ -678,7 +678,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct a simple non-empty automaton accepting the empty word from intermediate automaton") { std::string file = - "@LVLFA-explicit\n" + "@NFT-explicit\n" "%States-enum p q r\n" "%Alphabet-auto\n" "%Initial p | q\n" @@ -694,7 +694,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct an automaton with more than one initial/final states from intermediate automaton") { std::string file = - "@LVLFA-explicit\n" + "@NFT-explicit\n" "%States-enum p q 3\n" "%Alphabet-auto\n" "%Initial p | q\n" @@ -711,7 +711,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct an automaton with implicit operator completion one initial/final states from intermediate automaton") { std::string file = - "@LVLFA-explicit\n" + "@NFT-explicit\n" "%States-enum p q r\n" "%Alphabet-auto\n" "%Initial p q\n" @@ -728,7 +728,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct an automaton with implicit operator completion one initial/final states from intermediate automaton") { std::string file = - "@LVLFA-explicit\n" + "@NFT-explicit\n" "%States-enum p q r m n\n" "%Alphabet-auto\n" "%Initial p q r\n" @@ -745,7 +745,7 @@ TEST_CASE("mata::lvlfa::construct() from 
IntermediateAut correct calls") SECTION("construct a simple non-empty automaton accepting only the word 'a' from intermediate automaton") { std::string file = - "@LVLFA-explicit\n" + "@NFT-explicit\n" "%States-enum p q 3\n" "%Alphabet-auto\n" "%Initial q1\n" @@ -768,7 +768,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct a more complicated non-empty automaton from intermediate automaton") { std::string file = - "@LVLFA-explicit\n" + "@NFT-explicit\n" "%States-enum p q 3\n" "%Alphabet-auto\n" "%Initial q1 | q3\n" @@ -807,7 +807,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct - final states from negation") { std::string file = - "@LVLFA-bits\n" + "@NFT-bits\n" "%Alphabet-auto\n" "%Initial q0 q8\n" "%Final !q0 & !q1 & !q4 & !q5 & !q6\n" @@ -833,7 +833,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct - final states given as true") { std::string file = - "@LVLFA-bits\n" + "@NFT-bits\n" "%Alphabet-auto\n" "%Initial q0 q8\n" "%Final \\true\n" @@ -848,7 +848,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); inter_aut = auts[0]; - lvlfa::builder::NameStateMap state_map; + nft::builder::NameStateMap state_map; plumbing::construct(&aut, inter_aut, &alphabet, &state_map); CHECK(aut.final.size() == 9); CHECK(aut.final[state_map.at("0")]); @@ -865,7 +865,7 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") SECTION("construct - final states given as false") { std::string file = - "@LVLFA-bits\n" + "@NFT-bits\n" "%Alphabet-auto\n" "%Initial q0 q8\n" "%Final \\false\n" @@ -880,15 +880,15 @@ TEST_CASE("mata::lvlfa::construct() from IntermediateAut correct calls") const auto auts = mata::IntermediateAut::parse_from_mf(parse_mf(file)); inter_aut = auts[0]; - lvlfa::builder::NameStateMap state_map; + nft::builder::NameStateMap state_map; plumbing::construct(&aut, inter_aut, &alphabet, &state_map); CHECK(aut.final.empty()); } } // }}} -TEST_CASE("mata::lvlfa::make_complete()") +TEST_CASE("mata::nft::make_complete()") { // {{{ - Lvlfa aut(11); + Nft aut(11); SECTION("empty automaton, empty alphabet") { @@ -986,10 +986,10 @@ TEST_CASE("mata::lvlfa::make_complete()") } } // }}} -TEST_CASE("mata::lvlfa::complement()") +TEST_CASE("mata::nft::complement()") { // {{{ - Lvlfa aut(3); - Lvlfa cmpl; + Nft aut(3); + Nft cmpl; SECTION("empty automaton, empty alphabet") { @@ -997,8 +997,8 @@ TEST_CASE("mata::lvlfa::complement()") cmpl = complement(aut, alph, {{"algorithm", "classical"}, {"minimize", "false"}}); - Lvlfa empty_string_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; - CHECK(are_equivalent(cmpl, empty_string_lvlfa)); + Nft empty_string_nft{ nft::builder::create_sigma_star_nft(&alph) }; + CHECK(are_equivalent(cmpl, empty_string_nft)); } SECTION("empty automaton") @@ -1014,8 +1014,8 @@ TEST_CASE("mata::lvlfa::complement()") REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["a"]}, {}})); REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["b"], alph["b"], alph["a"] }, {}})); - Lvlfa sigma_star_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; - CHECK(are_equivalent(cmpl, sigma_star_lvlfa)); + Nft sigma_star_nft{ nft::builder::create_sigma_star_nft(&alph) }; + CHECK(are_equivalent(cmpl, sigma_star_nft)); } SECTION("empty automaton accepting epsilon, empty alphabet") @@ -1081,8 +1081,8 @@ TEST_CASE("mata::lvlfa::complement()") cmpl = 
complement(aut, alph, {{"algorithm", "classical"}, {"minimize", "true"}}); - Lvlfa empty_string_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; - CHECK(are_equivalent(empty_string_lvlfa, cmpl)); + Nft empty_string_nft{ nft::builder::create_sigma_star_nft(&alph) }; + CHECK(are_equivalent(empty_string_nft, cmpl)); } SECTION("empty automaton, minimization") @@ -1098,8 +1098,8 @@ TEST_CASE("mata::lvlfa::complement()") REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["a"]}, {}})); REQUIRE(cmpl.is_in_lang(Run{{ alph["a"], alph["b"], alph["b"], alph["a"] }, {}})); - Lvlfa sigma_star_lvlfa{ lvlfa::builder::create_sigma_star_lvlfa(&alph) }; - CHECK(are_equivalent(sigma_star_lvlfa, cmpl)); + Nft sigma_star_nft{ nft::builder::create_sigma_star_nft(&alph) }; + CHECK(are_equivalent(sigma_star_nft, cmpl)); } SECTION("minimization vs no minimization") @@ -1117,7 +1117,7 @@ TEST_CASE("mata::lvlfa::complement()") cmpl = complement(aut, alph, {{"algorithm", "classical"}, {"minimize", "false"}}); - Lvlfa cmpl_min = complement(aut, alph, {{"algorithm", "classical"}, + Nft cmpl_min = complement(aut, alph, {{"algorithm", "classical"}, {"minimize", "true"}}); CHECK(are_equivalent(cmpl, cmpl_min, &alph)); @@ -1127,9 +1127,9 @@ TEST_CASE("mata::lvlfa::complement()") } // }}} -TEST_CASE("mata::lvlfa::is_universal()") +TEST_CASE("mata::nft::is_universal()") { // {{{ - Lvlfa aut(6); + Nft aut(6); Run cex; ParameterMap params; @@ -1343,10 +1343,10 @@ TEST_CASE("mata::lvlfa::is_universal()") } } // }}} -TEST_CASE("mata::lvlfa::is_included()") +TEST_CASE("mata::nft::is_included()") { // {{{ - Lvlfa smaller(10); - Lvlfa bigger(16); + Nft smaller(10); + Nft bigger(16); Run cex; ParameterMap params; @@ -1542,10 +1542,10 @@ TEST_CASE("mata::lvlfa::is_included()") } } // }}} -TEST_CASE("mata::lvlfa::are_equivalent") +TEST_CASE("mata::nft::are_equivalent") { - Lvlfa smaller(10); - Lvlfa bigger(16); + Nft smaller(10); + Nft bigger(16); Word cex; ParameterMap params; @@ -1642,9 +1642,9 @@ TEST_CASE("mata::lvlfa::are_equivalent") SECTION("a* != (a|b)*, was throwing exception") { - Lvlfa aut; + Nft aut; mata::parser::create_nfa(&aut, "a*"); - Lvlfa aut2; + Nft aut2; mata::parser::create_nfa(&aut2, "(a|b)*"); CHECK(!are_equivalent(aut, aut2)); } @@ -1708,13 +1708,13 @@ TEST_CASE("mata::lvlfa::are_equivalent") } } -TEST_CASE("mata::lvlfa::revert()") +TEST_CASE("mata::nft::revert()") { // {{{ - Lvlfa aut(9); + Nft aut(9); SECTION("empty automaton") { - Lvlfa result = revert(aut); + Nft result = revert(aut); REQUIRE(result.delta.empty()); REQUIRE(result.initial.empty()); @@ -1729,7 +1729,7 @@ TEST_CASE("mata::lvlfa::revert()") aut.final.insert(2); aut.final.insert(5); - Lvlfa result = revert(aut); + Nft result = revert(aut); REQUIRE(result.delta.empty()); REQUIRE(result.initial[2]); @@ -1744,7 +1744,7 @@ TEST_CASE("mata::lvlfa::revert()") aut.final.insert(2); aut.delta.add(1, 'a', 2); - Lvlfa result = revert(aut); + Nft result = revert(aut); REQUIRE(result.initial[2]); REQUIRE(result.final[1]); @@ -1767,7 +1767,7 @@ TEST_CASE("mata::lvlfa::revert()") aut.delta.add(7, 'a', 8); aut.final = {3}; - Lvlfa result = revert(aut); + Nft result = revert(aut); //REQUIRE(result.final == StateSet({1, 2})); REQUIRE(StateSet(result.final) == StateSet({1, 2})); REQUIRE(result.delta.contains(2, 'a', 1)); @@ -1784,9 +1784,9 @@ TEST_CASE("mata::lvlfa::revert()") } SECTION("Automaton A") { - Lvlfa lvlfa{ 11 }; - FILL_WITH_AUT_A(lvlfa); - Lvlfa res = revert(lvlfa); + Nft nft{ 11 }; + FILL_WITH_AUT_A(nft); + Nft res = revert(nft); 
CHECK(res.initial[5]); CHECK(res.final[1]); CHECK(res.final[3]); @@ -1809,9 +1809,9 @@ TEST_CASE("mata::lvlfa::revert()") } SECTION("Automaton B") { - Lvlfa lvlfa{ 15 }; - FILL_WITH_AUT_B(lvlfa); - Lvlfa res = revert(lvlfa); + Nft nft{ 15 }; + FILL_WITH_AUT_B(nft); + Nft res = revert(nft); CHECK(res.initial[2]); CHECK(res.initial[12]); CHECK(res.final[4]); @@ -1832,9 +1832,9 @@ TEST_CASE("mata::lvlfa::revert()") } // }}} -TEST_CASE("mata::lvlfa::Lvlfa::is_deterministic()") +TEST_CASE("mata::nft::Nft::is_deterministic()") { // {{{ - Lvlfa aut('s'+1); + Nft aut('s'+1); SECTION("(almost) empty automaton") { // no initial states @@ -1886,9 +1886,9 @@ TEST_CASE("mata::lvlfa::Lvlfa::is_deterministic()") } } // }}} -TEST_CASE("mata::lvlfa::is_complete()") +TEST_CASE("mata::nft::is_complete()") { // {{{ - Lvlfa aut('q'+1); + Nft aut('q'+1); SECTION("empty automaton") { @@ -1949,9 +1949,9 @@ TEST_CASE("mata::lvlfa::is_complete()") } } // }}} -TEST_CASE("mata::lvlfa::is_prfx_in_lang()") +TEST_CASE("mata::nft::is_prfx_in_lang()") { // {{{ - Lvlfa aut('q'+1); + Nft aut('q'+1); SECTION("empty automaton") { @@ -2007,9 +2007,9 @@ TEST_CASE("mata::lvlfa::is_prfx_in_lang()") } } // }}} -TEST_CASE("mata::lvlfa::fw-direct-simulation()") +TEST_CASE("mata::nft::fw-direct-simulation()") { // {{{ - Lvlfa aut; + Nft aut; SECTION("empty automaton") { @@ -2049,7 +2049,7 @@ TEST_CASE("mata::lvlfa::fw-direct-simulation()") } - Lvlfa aut_big(9); + Nft aut_big(9); SECTION("bigger automaton") { @@ -2086,14 +2086,14 @@ TEST_CASE("mata::lvlfa::fw-direct-simulation()") } } // }} -TEST_CASE("mata::lvlfa::reduce_size_by_simulation()") +TEST_CASE("mata::nft::reduce_size_by_simulation()") { - Lvlfa aut; + Nft aut; StateRenaming state_renaming; SECTION("empty automaton") { - Lvlfa result = reduce(aut, &state_renaming); + Nft result = reduce(aut, &state_renaming); REQUIRE(result.delta.empty()); REQUIRE(result.initial.empty()); @@ -2106,7 +2106,7 @@ TEST_CASE("mata::lvlfa::reduce_size_by_simulation()") aut.initial.insert(1); aut.final.insert(2); - Lvlfa result = reduce(aut, &state_renaming); + Nft result = reduce(aut, &state_renaming); REQUIRE(result.delta.empty()); REQUIRE(result.initial[state_renaming[1]]); @@ -2138,7 +2138,7 @@ TEST_CASE("mata::lvlfa::reduce_size_by_simulation()") aut.final = {3, 9}; - Lvlfa result = reduce(aut, &state_renaming); + Nft result = reduce(aut, &state_renaming); REQUIRE(result.num_of_states() == 6); REQUIRE(result.initial[state_renaming[1]]); @@ -2171,23 +2171,23 @@ TEST_CASE("mata::lvlfa::reduce_size_by_simulation()") { aut.delta.add(0, 'a', 1); aut.initial = { 0 }; - Lvlfa result = reduce(aut.trim(), &state_renaming); + Nft result = reduce(aut.trim(), &state_renaming); CHECK(are_equivalent(result, aut)); } } -TEST_CASE("mata::lvlfa::union_norename()") { +TEST_CASE("mata::nft::union_norename()") { Run one{{1},{}}; Run zero{{0}, {}}; - Lvlfa lhs(2); + Nft lhs(2); lhs.initial.insert(0); lhs.delta.add(0, 0, 1); lhs.final.insert(1); REQUIRE(!lhs.is_in_lang(one)); REQUIRE(lhs.is_in_lang(zero)); - Lvlfa rhs(2); + Nft rhs(2); rhs.initial.insert(0); rhs.delta.add(0, 1, 1); rhs.final.insert(1); @@ -2195,24 +2195,24 @@ TEST_CASE("mata::lvlfa::union_norename()") { REQUIRE(!rhs.is_in_lang(zero)); SECTION("failing minimal scenario") { - Lvlfa result = uni(lhs, rhs); + Nft result = uni(lhs, rhs); REQUIRE(result.is_in_lang(one)); REQUIRE(result.is_in_lang(zero)); } } -TEST_CASE("mata::lvlfa::union_inplace") { +TEST_CASE("mata::nft::union_inplace") { Run one{{1},{}}; Run zero{{0}, {}}; - Lvlfa lhs(2); + Nft 
lhs(2); lhs.initial.insert(0); lhs.delta.add(0, 0, 1); lhs.final.insert(1); REQUIRE(!lhs.is_in_lang(one)); REQUIRE(lhs.is_in_lang(zero)); - Lvlfa rhs(2); + Nft rhs(2); rhs.initial.insert(0); rhs.delta.add(0, 1, 1); rhs.final.insert(1); @@ -2220,21 +2220,21 @@ TEST_CASE("mata::lvlfa::union_inplace") { REQUIRE(!rhs.is_in_lang(zero)); SECTION("failing minimal scenario") { - Lvlfa result = lhs.uni(rhs); + Nft result = lhs.uni(rhs); REQUIRE(result.is_in_lang(one)); REQUIRE(result.is_in_lang(zero)); } SECTION("same automata") { size_t lhs_states = lhs.num_of_states(); - Lvlfa result = lhs.uni(lhs); + Nft result = lhs.uni(lhs); REQUIRE(result.num_of_states() == lhs_states * 2); } } -TEST_CASE("mata::lvlfa::remove_final()") +TEST_CASE("mata::nft::remove_final()") { - Lvlfa aut('q' + 1); + Nft aut('q' + 1); SECTION("Automaton B") { @@ -2247,9 +2247,9 @@ TEST_CASE("mata::lvlfa::remove_final()") } } -TEST_CASE("mata::lvlfa::delta.remove()") +TEST_CASE("mata::nft::delta.remove()") { - Lvlfa aut('q' + 1); + Nft aut('q' + 1); SECTION("Automaton B") { @@ -2300,8 +2300,8 @@ TEST_CASE("mata::lvlfa::delta.remove()") } } -TEST_CASE("mata::lvlfa::get_trans_as_sequence(}") { - Lvlfa aut('q' + 1); +TEST_CASE("mata::nft::get_trans_as_sequence(}") { + Nft aut('q' + 1); std::vector expected{}; aut.delta.add(1, 2, 3); @@ -2316,9 +2316,9 @@ TEST_CASE("mata::lvlfa::get_trans_as_sequence(}") { REQUIRE(std::vector{ transitions.begin(), transitions.end() } == expected); } -TEST_CASE("mata::lvlfa::remove_epsilon()") +TEST_CASE("mata::nft::remove_epsilon()") { - Lvlfa aut{20}; + Nft aut{20}; FILL_WITH_AUT_A(aut); aut.remove_epsilon('c'); REQUIRE(aut.delta.contains(10, 'a', 7)); @@ -2334,29 +2334,29 @@ TEST_CASE("mata::lvlfa::remove_epsilon()") REQUIRE(aut.delta.contains(5, 'a', 9)); } -TEST_CASE("Profile mata::lvlfa::remove_epsilon()", "[.profiling]") +TEST_CASE("Profile mata::nft::remove_epsilon()", "[.profiling]") { for (size_t n{}; n < 100000; ++n) { - Lvlfa aut{20}; + Nft aut{20}; FILL_WITH_AUT_A(aut); aut.remove_epsilon('c'); } } -TEST_CASE("mata::lvlfa::get_num_of_trans()") +TEST_CASE("mata::nft::get_num_of_trans()") { - Lvlfa aut{20}; + Nft aut{20}; FILL_WITH_AUT_A(aut); REQUIRE(aut.delta.num_of_transitions() == 15); } -TEST_CASE("mata::lvlfa::get_one_letter_aut()") +TEST_CASE("mata::nft::get_one_letter_aut()") { - Lvlfa aut(11); + Nft aut(11); Symbol abstract_symbol{'x'}; FILL_WITH_AUT_A(aut); - Lvlfa digraph{aut.get_one_letter_aut() }; + Nft digraph{aut.get_one_letter_aut() }; REQUIRE(digraph.num_of_states() == aut.num_of_states()); REQUIRE(digraph.delta.num_of_transitions() == 12); @@ -2367,8 +2367,8 @@ TEST_CASE("mata::lvlfa::get_one_letter_aut()") REQUIRE(!digraph.delta.contains(10, 'c', 7)); } -TEST_CASE("mata::lvlfa::get_reachable_states()") { - Lvlfa aut{20}; +TEST_CASE("mata::nft::get_reachable_states()") { + Nft aut{20}; SECTION("Automaton A") { FILL_WITH_AUT_A(aut); @@ -2434,22 +2434,22 @@ TEST_CASE("mata::lvlfa::get_reachable_states()") { } } -TEST_CASE("mata::lvlfa::trim() for profiling", "[.profiling],[trim]") +TEST_CASE("mata::nft::trim() for profiling", "[.profiling],[trim]") { - Lvlfa aut{20}; + Nft aut{20}; FILL_WITH_AUT_A(aut); aut.delta.remove(1, 'a', 10); for (size_t i{ 0 }; i < 10000; ++i) { - Lvlfa new_aut{ aut }; + Nft new_aut{ aut }; new_aut.trim(); } } //TODO: make this a test for the new version -TEST_CASE("mata::lvlfa::get_useful_states() for profiling", "[.profiling],[useful_states]") +TEST_CASE("mata::nft::get_useful_states() for profiling", "[.profiling],[useful_states]") { - Lvlfa 
aut{20}; + Nft aut{20}; FILL_WITH_AUT_A(aut); aut.delta.remove(1, 'a', 10); @@ -2458,22 +2458,22 @@ TEST_CASE("mata::lvlfa::get_useful_states() for profiling", "[.profiling],[usefu } } -TEST_CASE("mata::lvlfa::trim() trivial") { - Lvlfa aut{1}; +TEST_CASE("mata::nft::trim() trivial") { + Nft aut{1}; aut.initial.insert(0); aut.final.insert(0); aut.trim(); } -TEST_CASE("mata::lvlfa::trim()") +TEST_CASE("mata::nft::trim()") { - Lvlfa orig_aut{20}; + Nft orig_aut{20}; FILL_WITH_AUT_A(orig_aut); orig_aut.delta.remove(1, 'a', 10); SECTION("Without state map") { - Lvlfa aut{orig_aut}; + Nft aut{orig_aut}; aut.trim(); CHECK(aut.initial.size() == orig_aut.initial.size()); CHECK(aut.final.size() == orig_aut.final.size()); @@ -2490,7 +2490,7 @@ TEST_CASE("mata::lvlfa::trim()") } SECTION("With state map") { - Lvlfa aut{orig_aut}; + Nft aut{orig_aut}; StateRenaming state_map{}; aut.trim(&state_map); CHECK(aut.initial.size() == orig_aut.initial.size()); @@ -2514,9 +2514,9 @@ TEST_CASE("mata::lvlfa::trim()") } } -TEST_CASE("mata::lvlfa::Lvlfa::delta.empty()") +TEST_CASE("mata::nft::Nft::delta.empty()") { - Lvlfa aut{}; + Nft aut{}; SECTION("Empty automaton") { @@ -2564,9 +2564,9 @@ TEST_CASE("mata::lvlfa::Lvlfa::delta.empty()") } } -TEST_CASE("mata::lvlfa::delta.operator[]") +TEST_CASE("mata::nft::delta.operator[]") { - Lvlfa aut{20}; + Nft aut{20}; FILL_WITH_AUT_A(aut); REQUIRE(aut.delta.num_of_transitions() == 15); aut.delta[25]; @@ -2580,126 +2580,126 @@ TEST_CASE("mata::lvlfa::delta.operator[]") REQUIRE(aut.num_of_states() == 51); REQUIRE(aut.delta[50].empty()); - Lvlfa aut1 = aut; + Nft aut1 = aut; aut1.delta.mutable_state_post(60); REQUIRE(aut1.num_of_states() == 61); REQUIRE(aut1.delta[60].empty()); - const Lvlfa aut2 = aut; + const Nft aut2 = aut; aut2.delta[60]; REQUIRE(aut2.num_of_states() == 51); REQUIRE(aut2.delta[60].empty()); } -TEST_CASE("mata::lvlfa::Lvlfa::unify_(initial/final)()") { - Lvlfa lvlfa{10}; +TEST_CASE("mata::nft::Nft::unify_(initial/final)()") { + Nft nft{10}; SECTION("No initial") { - lvlfa.unify_initial(); - CHECK(lvlfa.num_of_states() == 10); - CHECK(lvlfa.initial.empty()); + nft.unify_initial(); + CHECK(nft.num_of_states() == 10); + CHECK(nft.initial.empty()); } SECTION("initial==final unify final") { - lvlfa.initial.insert(0); - lvlfa.final.insert(0); - lvlfa.final.insert(1); - lvlfa.unify_final(); - REQUIRE(lvlfa.num_of_states() == 11); - CHECK(lvlfa.final.size() == 1); - CHECK(lvlfa.final[10]); - CHECK(lvlfa.initial[10]); + nft.initial.insert(0); + nft.final.insert(0); + nft.final.insert(1); + nft.unify_final(); + REQUIRE(nft.num_of_states() == 11); + CHECK(nft.final.size() == 1); + CHECK(nft.final[10]); + CHECK(nft.initial[10]); } SECTION("initial==final unify initial") { - lvlfa.initial.insert(0); - lvlfa.initial.insert(1); - lvlfa.final.insert(0); - lvlfa.unify_initial(); - REQUIRE(lvlfa.num_of_states() == 11); - CHECK(lvlfa.initial.size() == 1); - CHECK(lvlfa.initial[10]); - CHECK(lvlfa.final[10]); + nft.initial.insert(0); + nft.initial.insert(1); + nft.final.insert(0); + nft.unify_initial(); + REQUIRE(nft.num_of_states() == 11); + CHECK(nft.initial.size() == 1); + CHECK(nft.initial[10]); + CHECK(nft.final[10]); } SECTION("Single initial") { - lvlfa.initial.insert(0); - lvlfa.unify_initial(); - CHECK(lvlfa.num_of_states() == 10); - CHECK(lvlfa.initial.size() == 1); - CHECK(lvlfa.initial[0]); + nft.initial.insert(0); + nft.unify_initial(); + CHECK(nft.num_of_states() == 10); + CHECK(nft.initial.size() == 1); + CHECK(nft.initial[0]); } SECTION("Multiple initial") { - 
lvlfa.initial.insert(0); - lvlfa.initial.insert(1); - lvlfa.unify_initial(); - CHECK(lvlfa.num_of_states() == 11); - CHECK(lvlfa.initial.size() == 1); - CHECK(lvlfa.initial[10]); + nft.initial.insert(0); + nft.initial.insert(1); + nft.unify_initial(); + CHECK(nft.num_of_states() == 11); + CHECK(nft.initial.size() == 1); + CHECK(nft.initial[10]); } SECTION("With transitions") { - lvlfa.initial.insert(0); - lvlfa.initial.insert(1); - lvlfa.delta.add(0, 'a', 3); - lvlfa.delta.add(1, 'b', 0); - lvlfa.delta.add(1, 'c', 1); - lvlfa.unify_initial(); - CHECK(lvlfa.num_of_states() == 11); - CHECK(lvlfa.initial.size() == 1); - CHECK(lvlfa.initial[10]); - CHECK(lvlfa.delta.contains(10, 'a', 3)); - CHECK(lvlfa.delta.contains(10, 'b', 0)); - CHECK(lvlfa.delta.contains(10, 'c', 1)); - CHECK(lvlfa.delta.contains(0, 'a', 3)); - CHECK(lvlfa.delta.contains(1, 'b', 0)); - CHECK(lvlfa.delta.contains(1, 'c', 1)); + nft.initial.insert(0); + nft.initial.insert(1); + nft.delta.add(0, 'a', 3); + nft.delta.add(1, 'b', 0); + nft.delta.add(1, 'c', 1); + nft.unify_initial(); + CHECK(nft.num_of_states() == 11); + CHECK(nft.initial.size() == 1); + CHECK(nft.initial[10]); + CHECK(nft.delta.contains(10, 'a', 3)); + CHECK(nft.delta.contains(10, 'b', 0)); + CHECK(nft.delta.contains(10, 'c', 1)); + CHECK(nft.delta.contains(0, 'a', 3)); + CHECK(nft.delta.contains(1, 'b', 0)); + CHECK(nft.delta.contains(1, 'c', 1)); } SECTION("No final") { - lvlfa.unify_final(); - CHECK(lvlfa.num_of_states() == 10); - CHECK(lvlfa.final.empty()); + nft.unify_final(); + CHECK(nft.num_of_states() == 10); + CHECK(nft.final.empty()); } SECTION("Single final") { - lvlfa.final.insert(0); - lvlfa.unify_final(); - CHECK(lvlfa.num_of_states() == 10); - CHECK(lvlfa.final.size() == 1); - CHECK(lvlfa.final[0]); + nft.final.insert(0); + nft.unify_final(); + CHECK(nft.num_of_states() == 10); + CHECK(nft.final.size() == 1); + CHECK(nft.final[0]); } SECTION("Multiple final") { - lvlfa.final.insert(0); - lvlfa.final.insert(1); - lvlfa.unify_final(); - CHECK(lvlfa.num_of_states() == 11); - CHECK(lvlfa.final.size() == 1); - CHECK(lvlfa.final[10]); + nft.final.insert(0); + nft.final.insert(1); + nft.unify_final(); + CHECK(nft.num_of_states() == 11); + CHECK(nft.final.size() == 1); + CHECK(nft.final[10]); } SECTION("With transitions") { - lvlfa.final.insert(0); - lvlfa.final.insert(1); - lvlfa.delta.add(3, 'a', 0); - lvlfa.delta.add(4, 'b', 1); - lvlfa.delta.add(1, 'c', 1); - lvlfa.unify_final(); - CHECK(lvlfa.num_of_states() == 11); - CHECK(lvlfa.final.size() == 1); - CHECK(lvlfa.final[10]); - CHECK(lvlfa.delta.contains(3, 'a', 10)); - CHECK(lvlfa.delta.contains(4, 'b', 10)); - CHECK(lvlfa.delta.contains(1, 'c', 10)); - CHECK(lvlfa.delta.contains(3, 'a', 0)); - CHECK(lvlfa.delta.contains(4, 'b', 1)); - CHECK(lvlfa.delta.contains(1, 'c', 1)); - } - - SECTION("Bug: LVLFA with empty string unifying initial/final repeatedly") { - Lvlfa aut; + nft.final.insert(0); + nft.final.insert(1); + nft.delta.add(3, 'a', 0); + nft.delta.add(4, 'b', 1); + nft.delta.add(1, 'c', 1); + nft.unify_final(); + CHECK(nft.num_of_states() == 11); + CHECK(nft.final.size() == 1); + CHECK(nft.final[10]); + CHECK(nft.delta.contains(3, 'a', 10)); + CHECK(nft.delta.contains(4, 'b', 10)); + CHECK(nft.delta.contains(1, 'c', 10)); + CHECK(nft.delta.contains(3, 'a', 0)); + CHECK(nft.delta.contains(4, 'b', 1)); + CHECK(nft.delta.contains(1, 'c', 1)); + } + + SECTION("Bug: NFT with empty string unifying initial/final repeatedly") { + Nft aut; mata::parser::create_nfa(&aut, "a*b*"); for (size_t i{ 0 }; 
i < 8; ++i) { aut.unify_initial(); @@ -2709,8 +2709,8 @@ TEST_CASE("mata::lvlfa::Lvlfa::unify_(initial/final)()") { } } -TEST_CASE("mata::lvlfa::Lvlfa::get_delta.epsilon_symbol_posts()") { - Lvlfa aut{20}; +TEST_CASE("mata::nft::Nft::get_delta.epsilon_symbol_posts()") { + Nft aut{20}; FILL_WITH_AUT_A(aut); aut.delta.add(0, EPSILON, 3); aut.delta.add(3, EPSILON, 3); @@ -2752,12 +2752,12 @@ TEST_CASE("mata::lvlfa::Lvlfa::get_delta.epsilon_symbol_posts()") { CHECK(aut.delta.epsilon_symbol_posts(state_post) == state_post.end()); } -TEST_CASE("mata::lvlfa::Lvlfa::delta()") { +TEST_CASE("mata::nft::Nft::delta()") { Delta delta(6); } -TEST_CASE("A segmentation fault in the lvlfa::make_complement") { - Lvlfa r(1); +TEST_CASE("A segmentation fault in the nft::make_complement") { + Nft r(1); OnTheFlyAlphabet alph{}; alph["a"]; alph["b"]; @@ -2769,26 +2769,26 @@ TEST_CASE("A segmentation fault in the lvlfa::make_complement") { REQUIRE(r.is_complete(&alph)); } -TEST_CASE("mata::lvlfa:: create simple automata") { - Lvlfa lvlfa{ builder::create_empty_string_lvlfa() }; - CHECK(lvlfa.is_in_lang(Word{})); - CHECK(get_word_lengths(lvlfa) == std::set>{ std::make_pair(0, 0) }); +TEST_CASE("mata::nft:: create simple automata") { + Nft nft{ builder::create_empty_string_nft() }; + CHECK(nft.is_in_lang(Word{})); + CHECK(get_word_lengths(nft) == std::set>{ std::make_pair(0, 0) }); OnTheFlyAlphabet alphabet{ { "a", 0 }, { "b", 1 }, { "c", 2 } }; - lvlfa = builder::create_sigma_star_lvlfa(&alphabet); - CHECK(lvlfa.is_in_lang({ {}, {} })); - CHECK(lvlfa.is_in_lang({ 0 , {} })); - CHECK(lvlfa.is_in_lang({ 1 , {} })); - CHECK(lvlfa.is_in_lang({ 2 , {} })); - CHECK(lvlfa.is_in_lang({ { 0, 1 }, {} })); - CHECK(lvlfa.is_in_lang({ { 1, 0 }, {} })); - CHECK(lvlfa.is_in_lang({ { 2, 2, 2 }, {} })); - CHECK(lvlfa.is_in_lang({ { 0, 1, 2, 2, 0, 1, 2, 1, 0, 0, 2, 1 }, {} })); - CHECK(!lvlfa.is_in_lang({ 3 , {} })); + nft = builder::create_sigma_star_nft(&alphabet); + CHECK(nft.is_in_lang({ {}, {} })); + CHECK(nft.is_in_lang({ 0 , {} })); + CHECK(nft.is_in_lang({ 1 , {} })); + CHECK(nft.is_in_lang({ 2 , {} })); + CHECK(nft.is_in_lang({ { 0, 1 }, {} })); + CHECK(nft.is_in_lang({ { 1, 0 }, {} })); + CHECK(nft.is_in_lang({ { 2, 2, 2 }, {} })); + CHECK(nft.is_in_lang({ { 0, 1, 2, 2, 0, 1, 2, 1, 0, 0, 2, 1 }, {} })); + CHECK(!nft.is_in_lang({ 3 , {} })); } -TEST_CASE("mata::lvlfa:: print_to_mata") { - Lvlfa aut_big; +TEST_CASE("mata::nft::print_to_mata()") { + Nft aut_big; aut_big.initial = {1, 2}; aut_big.delta.add(1, 'a', 2); aut_big.delta.add(1, 'a', 3); @@ -2807,13 +2807,13 @@ TEST_CASE("mata::lvlfa:: print_to_mata") { std::string aut_big_mata = aut_big.print_to_mata(); // for parsing output of print_to_mata() we need to use IntAlphabet to get the same alphabet IntAlphabet int_alph; - Lvlfa aut_big_from_mata = builder::construct(mata::IntermediateAut::parse_from_mf(parse_mf(aut_big_mata))[0], &int_alph); + Nft aut_big_from_mata = builder::construct(mata::IntermediateAut::parse_from_mf(parse_mf(aut_big_mata))[0], &int_alph); CHECK(are_equivalent(aut_big, aut_big_from_mata)); } -TEST_CASE("mata::lvlfa::Lvlfa::trim() bug") { - Lvlfa aut(5, {0}, {4}); +TEST_CASE("mata::nft::Nft::trim() bug") { + Nft aut(5, {0}, {4}); aut.delta.add(0, 122, 1); aut.delta.add(1, 98, 1); aut.delta.add(1, 122, 1); @@ -2823,13 +2823,13 @@ TEST_CASE("mata::lvlfa::Lvlfa::trim() bug") { aut.delta.add(1, 97, 4); aut.delta.add(3, 97, 4); - Lvlfa aut_copy {aut}; + Nft aut_copy {aut}; CHECK(are_equivalent(aut_copy.trim(), aut)); } 
-TEST_CASE("mata::lvlfa::get_useful_states_tarjan") { - SECTION("Lvlfa 1") { - Lvlfa aut(5, {0}, {4}); +TEST_CASE("mata::nft::get_useful_states_tarjan") { + SECTION("Nft 1") { + Nft aut(5, {0}, {4}); aut.delta.add(0, 122, 1); aut.delta.add(1, 98, 1); aut.delta.add(1, 122, 1); @@ -2844,26 +2844,26 @@ TEST_CASE("mata::lvlfa::get_useful_states_tarjan") { CHECK(bv == ref); } - SECTION("Empty LVLFA") { - Lvlfa aut; + SECTION("Empty NFT") { + Nft aut; mata::BoolVector bv = aut.get_useful_states(); CHECK(bv == mata::BoolVector({})); } - SECTION("Single-state LVLFA") { - Lvlfa aut(1, {0}, {}); + SECTION("Single-state NFT") { + Nft aut(1, {0}, {}); mata::BoolVector bv = aut.get_useful_states(); CHECK(bv == mata::BoolVector({ 0})); } - SECTION("Single-state LVLFA acc") { - Lvlfa aut(1, {0}, {0}); + SECTION("Single-state NFT acc") { + Nft aut(1, {0}, {0}); mata::BoolVector bv = aut.get_useful_states(); CHECK(bv == mata::BoolVector({ 1})); } - SECTION("Lvlfa 2") { - Lvlfa aut(5, {0, 1}, {2}); + SECTION("Nft 2") { + Nft aut(5, {0, 1}, {2}); aut.delta.add(0, 122, 2); aut.delta.add(2, 98, 3); aut.delta.add(1, 98, 4); @@ -2874,8 +2874,8 @@ TEST_CASE("mata::lvlfa::get_useful_states_tarjan") { CHECK(bv == ref); } - SECTION("Lvlfa 3") { - Lvlfa aut(2, {0, 1}, {0, 1}); + SECTION("Nft 3") { + Nft aut(2, {0, 1}, {0, 1}); aut.delta.add(0, 122, 0); aut.delta.add(1, 98, 1); @@ -2884,8 +2884,8 @@ TEST_CASE("mata::lvlfa::get_useful_states_tarjan") { CHECK(bv == ref); } - SECTION("Lvlfa no final") { - Lvlfa aut(5, {0}, {}); + SECTION("Nft no final") { + Nft aut(5, {0}, {}); aut.delta.add(0, 122, 1); aut.delta.add(1, 98, 1); aut.delta.add(1, 122, 1); @@ -2901,7 +2901,7 @@ TEST_CASE("mata::lvlfa::get_useful_states_tarjan") { } SECTION("from regex (a+b*a*)") { - Lvlfa aut; + Nft aut; mata::parser::create_nfa(&aut, "(a+b*a*)", false, EPSILON, false); mata::BoolVector bv = aut.get_useful_states(); @@ -2914,30 +2914,30 @@ TEST_CASE("mata::lvlfa::get_useful_states_tarjan") { } SECTION("more initials") { - Lvlfa aut(4, {0, 1, 2}, {0, 3}); + Nft aut(4, {0, 1, 2}, {0, 3}); aut.delta.add(1, 48, 0); aut.delta.add(2, 53, 3); CHECK(aut.get_useful_states() == mata::BoolVector{ 1, 1, 1, 1}); } } -TEST_CASE("mata::lvlfa::Lvlfa::get_words") { +TEST_CASE("mata::nft::Nft::get_words") { SECTION("empty") { - Lvlfa aut; + Nft aut; CHECK(aut.get_words(0) == std::set()); CHECK(aut.get_words(1) == std::set()); CHECK(aut.get_words(5) == std::set()); } SECTION("empty word") { - Lvlfa aut(1, {0}, {0}); + Nft aut(1, {0}, {0}); CHECK(aut.get_words(0) == std::set{{}}); CHECK(aut.get_words(1) == std::set{{}}); CHECK(aut.get_words(5) == std::set{{}}); } SECTION("noodle - one final") { - Lvlfa aut(3, {0}, {2}); + Nft aut(3, {0}, {2}); aut.delta.add(0, 0, 1); aut.delta.add(1, 1, 2); CHECK(aut.get_words(0) == std::set{}); @@ -2948,7 +2948,7 @@ TEST_CASE("mata::lvlfa::Lvlfa::get_words") { } SECTION("noodle - two finals") { - Lvlfa aut(3, {0}, {1,2}); + Nft aut(3, {0}, {1,2}); aut.delta.add(0, 0, 1); aut.delta.add(1, 1, 2); CHECK(aut.get_words(0) == std::set{}); @@ -2959,7 +2959,7 @@ TEST_CASE("mata::lvlfa::Lvlfa::get_words") { } SECTION("noodle - three finals") { - Lvlfa aut(3, {0}, {0,1,2}); + Nft aut(3, {0}, {0,1,2}); aut.delta.add(0, 0, 1); aut.delta.add(1, 1, 2); CHECK(aut.get_words(0) == std::set{{}}); @@ -2970,7 +2970,7 @@ TEST_CASE("mata::lvlfa::Lvlfa::get_words") { } SECTION("more complex") { - Lvlfa aut(6, {0,1}, {1,3,4,5}); + Nft aut(6, {0,1}, {1,3,4,5}); aut.delta.add(0, 0, 3); aut.delta.add(3, 1, 4); aut.delta.add(0, 2, 2); @@ -2986,7 +2986,7 
@@ TEST_CASE("mata::lvlfa::Lvlfa::get_words") { } SECTION("cycle") { - Lvlfa aut(6, {0,1}, {0,1}); + Nft aut(6, {0,1}, {0,1}); aut.delta.add(0, 0, 1); aut.delta.add(1, 1, 0); CHECK(aut.get_words(0) == std::set{{}}); diff --git a/tests/lvlfa/strings.cc b/tests/nft/strings.cc similarity index 83% rename from tests/lvlfa/strings.cc rename to tests/nft/strings.cc index a164ef67..d3985fc6 100644 --- a/tests/lvlfa/strings.cc +++ b/tests/nft/strings.cc @@ -6,11 +6,11 @@ #include -#include "mata/lvlfa/lvlfa.hh" -#include "mata/lvlfa/builder.hh" -#include "mata/lvlfa/strings.hh" +#include "mata/nft/nft.hh" +#include "mata/nft/builder.hh" +#include "mata/nft/strings.hh" -using namespace mata::lvlfa; +using namespace mata::nft; using Symbol = mata::Symbol; using IntAlphabet = mata::IntAlphabet; using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; @@ -18,8 +18,8 @@ using mata::EnumAlphabet; using Word = std::vector; -TEST_CASE("lvlfa::create_identity()") { - Lvlfa nft{}; +TEST_CASE("nft::create_identity()") { + Nft nft{}; nft.initial = { 0 }; nft.final = { 0 }; SECTION("small identity nft") { @@ -48,7 +48,7 @@ TEST_CASE("lvlfa::create_identity()") { nft.levels[6] = 2; nft.levels[7] = 1; nft.levels[8] = 2; - Lvlfa nft_identity{ create_identity(&alphabet, 3) }; + Nft nft_identity{ create_identity(&alphabet, 3) }; CHECK(nft_identity.is_identical(nft)); } @@ -58,7 +58,7 @@ TEST_CASE("lvlfa::create_identity()") { nft.levels_cnt = 3; nft.levels.resize(1); nft.levels[0] = 0; - Lvlfa nft_identity{ create_identity(&alphabet, 3) }; + Nft nft_identity{ create_identity(&alphabet, 3) }; CHECK(nft_identity.is_identical(nft)); } @@ -71,7 +71,7 @@ TEST_CASE("lvlfa::create_identity()") { nft.levels[1] = 1; nft.delta.add(0, 0, 1); nft.delta.add(1, 0, 0); - Lvlfa nft_identity{ create_identity(&alphabet, 2) }; + Nft nft_identity{ create_identity(&alphabet, 2) }; CHECK(nft_identity.is_identical(nft)); nft_identity = create_identity(&alphabet); CHECK(nft_identity.is_identical(nft)); @@ -87,13 +87,13 @@ TEST_CASE("lvlfa::create_identity()") { nft.levels_cnt = 1; nft.levels.resize(1); nft.levels[0] = 0; - Lvlfa nft_identity{ create_identity(&alphabet, 1) }; + Nft nft_identity{ create_identity(&alphabet, 1) }; CHECK(nft_identity.is_identical(nft)); } } -TEST_CASE("lvlfa::create_identity_with_single_replace()") { - Lvlfa nft{}; +TEST_CASE("nft::create_identity_with_single_replace()") { + Nft nft{}; nft.initial = { 0 }; nft.final = { 0 }; SECTION("small identity nft") { @@ -114,7 +114,7 @@ TEST_CASE("lvlfa::create_identity_with_single_replace()") { nft.levels[2] = 1; nft.levels[3] = 1; nft.levels[4] = 1; - Lvlfa nft_identity_with_replace{ create_identity_with_single_replace(&alphabet, 1, 3) }; + Nft nft_identity_with_replace{ create_identity_with_single_replace(&alphabet, 1, 3) }; CHECK(nft_identity_with_replace.is_identical(nft)); } @@ -132,7 +132,7 @@ TEST_CASE("lvlfa::create_identity_with_single_replace()") { nft.levels[1] = 1; nft.delta.add(0, 0, 1); nft.delta.add(1, 1, 0); - Lvlfa nft_identity{ create_identity_with_single_replace(&alphabet, 0, 1) }; + Nft nft_identity{ create_identity_with_single_replace(&alphabet, 0, 1) }; CHECK(nft_identity.is_identical(nft)); } } diff --git a/tests/lvlfa/utils.hh b/tests/nft/utils.hh similarity index 100% rename from tests/lvlfa/utils.hh rename to tests/nft/utils.hh From 94794dd270d49ff64ad5fe1403f9f3380d343f31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Thu, 15 Feb 2024 08:55:40 +0100 Subject: [PATCH 09/24] Create NFT from NFA --- include/mata/nft/builder.hh | 
9 +++++++
 src/nft/builder.cc | 53 +++++++++++++++++++++++++++++++++++--
 tests/nft/builder.cc | 41 ++++++++++++++++++++++++++++
 3 files changed, 101 insertions(+), 2 deletions(-)

diff --git a/include/mata/nft/builder.hh b/include/mata/nft/builder.hh
index 5a3c0521..01677838 100644
--- a/include/mata/nft/builder.hh
+++ b/include/mata/nft/builder.hh
@@ -99,6 +99,15 @@ Nft parse_from_mata(const std::string& nft_in_mata);
  */
 Nft parse_from_mata(const std::filesystem::path& nft_file);
+/**
+ * Create NFT from NFA.
+ * @param nfa_state NFA to create NFT from.
+ * @param level_cnt Number of levels of NFT.
+ * @param epsilons Which symbols to handle as epsilons.
+ * @return NFT representing @p nfa_state with @p level_cnt number of levels.
+ */
+Nft create_from_nfa(const mata::nfa::Nfa& nfa_state, Level level_cnt = 2, const std::set<Symbol>& epsilons = { EPSILON });
+
 } // namespace mata::nft::builder.
 #endif //LIBMATA_NFT_BUILDER_HH
diff --git a/src/nft/builder.cc b/src/nft/builder.cc
index c88f291d..7f7703e7 100644
--- a/src/nft/builder.cc
+++ b/src/nft/builder.cc
@@ -1,8 +1,7 @@
 // TODO: Insert header file.
-#include "mata/nft/builder.hh"
+#include "mata/utils/sparse-set.hh"
 #include "mata/parser/mintermization.hh"
-
+#include "mata/nft/builder.hh"
 #include
@@ -295,3 +294,53 @@ Nft builder::parse_from_mata(const std::string& nft_in_mata) {
     std::istringstream nft_stream(nft_in_mata);
     return parse_from_mata(nft_stream);
 }
+
+Nft builder::create_from_nfa(const mata::nfa::Nfa& nfa, Level level_cnt, const std::set<Symbol>& epsilons) {
+    const Level num_of_additional_states_per_nfa_trans{ level_cnt - 1 };
+    Nft nft{};
+    size_t nfa_num_of_states{ nfa.num_of_states() };
+    nft.levels_cnt = level_cnt;
+    nft.levels.resize(nfa_num_of_states + nfa.delta.num_of_transitions() * num_of_additional_states_per_nfa_trans);
+    std::unordered_map<State, State> state_mapping{};
+    state_mapping.reserve(nfa_num_of_states);
+    State nft_state{ 0 };
+    State curr_nft_state{ 0 };
+    for (State source{ 0 }; source < nfa.num_of_states(); ++source) {
+        const auto nft_state_it{ state_mapping.find(source) };
+        if (nft_state_it == state_mapping.end()) {
+            curr_nft_state = nft_state;
+            state_mapping[source] = curr_nft_state;
+            ++nft_state;
+        } else {
+            curr_nft_state = nft_state_it->second;
+        }
+        for (const SymbolPost& symbol_post: nfa.delta[source]) {
+            Level level{ 0 };
+            if (!epsilons.contains(symbol_post.symbol)) {
+                for (; level < num_of_additional_states_per_nfa_trans; ++level) {
+                    nft.levels[curr_nft_state] = level;
+                    nft.delta.add(curr_nft_state, symbol_post.symbol, nft_state);
+                    curr_nft_state = nft_state;
+                    ++nft_state;
+                }
+            }
+            for (State nft_target; State nfa_target: symbol_post.targets) {
+                auto nft_target_it{ state_mapping.find(nfa_target) };
+                if (nft_target_it == state_mapping.end()) {
+                    nft_target = nft_state;
+                    state_mapping[nfa_target] = nft_target;
+                    ++nft_state;
+                } else {
+                    nft_target = nft_target_it->second;
+                }
+                nft.levels[curr_nft_state] = level;
+                nft.delta.add(curr_nft_state, symbol_post.symbol, nft_target);
+            }
+        }
+    }
+    nft.initial.reserve(nfa.initial.size());
+    std::ranges::for_each(nfa.initial, [&](const State nfa_state){ nft.initial.insert(state_mapping[nfa_state]); });
+    nft.final.reserve(nfa.final.size());
+    std::ranges::for_each(nfa.final, [&](const State nfa_state){ nft.final.insert(state_mapping[nfa_state]); });
+    return nft;
+}
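For orientation, a minimal sketch of how the new builder::create_from_nfa() is meant to be called, following the declaration above and the tests added below; the standalone main() wrapper and the specific symbol used are illustrative assumptions, not part of this commit.

// Usage sketch (assumed wrapper; relies only on APIs introduced or touched in this series).
#include "mata/nfa/nfa.hh"
#include "mata/nft/builder.hh"

int main() {
    mata::nfa::Nfa nfa{};
    nfa.initial = { 0 };
    nfa.final = { 1 };
    nfa.delta.add(0, 'a', 1);    // single NFA transition 0 --a--> 1

    // With level_cnt == 2, this transition becomes a two-step chain
    // 0 --a--> q --a--> 1, where q is a fresh intermediate state at level 1
    // (cf. the %Levels lines in the expected automata of the tests below).
    mata::nft::Nft nft{ mata::nft::builder::create_from_nfa(nfa, 2) };
    (void) nft;
    return 0;
}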
namespace mata::nft; +using mata::nfa::Nfa; using Symbol = mata::Symbol; using IntAlphabet = mata::IntAlphabet; using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; using Word = std::vector; +TEST_CASE("nft::create_from_nfa()") { + Nft nft{}; + Nft expected{}; + Nfa nfa{}; + + SECTION("small nfa to 2 level NFT") { + constexpr Level LEVEL_CNT{ 2 }; + nfa.initial = { 0 }; + nfa.final = { 3 }; + nfa.delta.add(0, 1, 2); + nfa.delta.add(1, EPSILON, 3); + nfa.delta.add(3, 2, 3); + nfa.delta.add(2, 3, 1); + nfa.delta.add(2, 3, 0); + nft = builder::create_from_nfa(nfa, LEVEL_CNT); + expected = mata::nft::builder::parse_from_mata( + std::string("@NFT-explicit\n%Alphabet-auto\n%Initial q0\n%Final q4\n%Levels q0:0 q1:1 q2:0 q3:0 q4:0 q5:1 q6:1\n%LevelsCnt 2\nq0 1 q1\nq1 1 q2\nq2 3 q5\nq3 4294967295 q4\nq4 2 q6\nq5 3 q0\nq5 3 q3\nq6 2 q4\n") + ); + expected.levels_cnt = LEVEL_CNT; + CHECK(mata::nft::are_equivalent(nft, expected)); + } + + SECTION("small nfa to 3 level NFT") { + constexpr Level LEVEL_CNT{ 3 }; + nfa.initial = { 0 }; + nfa.final = { 3 }; + nfa.delta.add(0, 1, 2); + nfa.delta.add(1, EPSILON, 3); + nfa.delta.add(3, 2, 3); + nfa.delta.add(2, 3, 1); + nfa.delta.add(2, 3, 0); + nft = builder::create_from_nfa(nfa, LEVEL_CNT); + expected = mata::nft::builder::parse_from_mata( + std::string("@NFT-explicit\n%Alphabet-auto\n%Initial q0\n%Final q5\n%Levels q0:0 q1:1 q2:2 q3:0 q4:0 q5:0 q6:1 q7:2 q8:1 q9:2\n%LevelsCnt 3\nq0 1 q1\nq1 1 q2\nq2 1 q3\nq3 3 q6\nq4 4294967295 q5\nq5 2 q8\nq6 3 q7\nq7 3 q0\nq7 3 q4\nq8 2 q9\nq9 2 q5\n") + ); + expected.levels_cnt = LEVEL_CNT; + CHECK(mata::nft::are_equivalent(nft, expected)); + } +} + TEST_CASE("nft::parse_from_mata()") { Delta delta; From da1fc07f239234b3cb03128c045b2d663c4ae6a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Thu, 15 Feb 2024 08:55:58 +0100 Subject: [PATCH 10/24] Check number of levels in equivalence checking --- src/nft/inclusion.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nft/inclusion.cc b/src/nft/inclusion.cc index 8d47077d..1e8c3980 100644 --- a/src/nft/inclusion.cc +++ b/src/nft/inclusion.cc @@ -271,6 +271,7 @@ bool mata::nft::is_included( bool mata::nft::are_equivalent(const Nft& lhs, const Nft& rhs, const Alphabet *alphabet, const ParameterMap& params) { + if (lhs.levels_cnt != rhs.levels_cnt) { return false; } //TODO: add comment on what this is doing, what is __func__ ... AlgoType algo{ set_algorithm(std::to_string(__func__), params) }; From a0fdefbdba397bea74e43bb5afac49238bf3488e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Thu, 15 Feb 2024 12:21:45 +0100 Subject: [PATCH 11/24] Implement end marker DFT for reluctant replacement --- include/mata/nft/strings.hh | 17 ++++++++++ src/nft/strings.cc | 65 +++++++++++++++++++++++++++++++++++++ tests/nft/strings.cc | 21 ++++++++++++ 3 files changed, 103 insertions(+) diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh index c22c186d..0612fc66 100644 --- a/include/mata/nft/strings.hh +++ b/include/mata/nft/strings.hh @@ -19,6 +19,23 @@ Nft create_identity(mata::Alphabet* alphabet, Level level_cnt = 2); */ Nft create_identity_with_single_replace(mata::Alphabet* alphabet, Symbol from_symbol, Symbol to_symbol); +Nft reluctant_replace( + const std::string& regex, + const std::string& replacement, + // TODO: Change into constants? 
+ Symbol begin_marker = EPSILON - 101, + Symbol end_marker = EPSILON - 100 +); +Nft reluctant_replace( + mata::nfa::Nfa regex, + const std::string& replacement, + Symbol begin_marker = EPSILON - 101, + Symbol end_marker = EPSILON - 100 +); + +nfa::Nfa end_marker_dfa(nfa::Nfa regex); +nft::Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); + } // Namespace mata::nft. #endif // MATA_NFT_STRING_SOLVING_HH_. diff --git a/src/nft/strings.cc b/src/nft/strings.cc index 1fab2af4..2471b912 100644 --- a/src/nft/strings.cc +++ b/src/nft/strings.cc @@ -2,9 +2,12 @@ */ #include "mata/nft/strings.hh" +#include "mata/parser/re2parser.hh" #include "mata/nft/nft.hh" +#include "mata/nft/builder.hh" //using mata::nft::Nft; +using namespace mata; using mata::nft::Level; using mata::Symbol; using mata::nft::State; @@ -55,3 +58,65 @@ Nft mata::nft::create_identity_with_single_replace( nft.delta.mutable_state_post(from_replace_state).front().symbol = to_symbol; return nft; } + +Nft mata::nft::reluctant_replace( + const std::string& regex, + const std::string& replacement, + Symbol begin_marker, + Symbol end_marker +) { + nfa::Nfa regex_nfa{}; + parser::create_nfa(®ex_nfa, regex); + return reluctant_replace(std::move(regex_nfa), replacement); +} + +Nft mata::nft::reluctant_replace( + nfa::Nfa regex, + const std::string& replacement, + Symbol begin_marker, + Symbol end_marker +) { + regex = end_marker_dfa(std::move(regex)); + Nft dft_end_marker{ end_marker_dft(regex, end_marker) }; + + return Nft{}; +} + +nfa::Nfa mata::nft::end_marker_dfa(nfa::Nfa regex) { + if (!regex.is_deterministic()) { + regex = determinize(regex); + } + + State new_final; + for (State orig_final: regex.final) { + new_final = regex.add_state(); + regex.final.insert(new_final); + regex.final.erase(orig_final); + StatePost::Moves orig_moves{ regex.delta[orig_final].moves() }; + std::vector moves{ orig_moves.begin(), orig_moves.end() }; + for (const Move& move: moves) { + regex.delta.remove(orig_final, move.symbol, move.target); + regex.delta.add(new_final, move.symbol, move.target); + } + regex.delta.add(orig_final, EPSILON, new_final); + } + return regex; +} + +nft::Nft mata::nft::end_marker_dft(const nfa::Nfa& end_marker_dfa, const Symbol end_marker) { + assert(end_marker_dfa.is_deterministic()); + + Nft dft_end_marker{ nft::builder::create_from_nfa(end_marker_dfa) }; + const size_t dft_end_marker_num_of_states{ dft_end_marker.num_of_states() }; + for (State source{ 0 }; source < dft_end_marker_num_of_states; ++source) { + StatePost& state_post = dft_end_marker.delta.mutable_state_post(source); + for (const Move& move: state_post.moves_epsilons()) { + const State end_marker_state{ dft_end_marker.add_state() }; + SymbolPost& symbol_post{ *state_post.find(move.symbol) }; + symbol_post.targets.erase(move.target); + symbol_post.targets.insert(end_marker_state); + dft_end_marker.delta.add(end_marker_state, end_marker, move.target); + } + } + return dft_end_marker; +} diff --git a/tests/nft/strings.cc b/tests/nft/strings.cc index d3985fc6..068c04eb 100644 --- a/tests/nft/strings.cc +++ b/tests/nft/strings.cc @@ -9,6 +9,7 @@ #include "mata/nft/nft.hh" #include "mata/nft/builder.hh" #include "mata/nft/strings.hh" +#include "mata/parser/re2parser.hh" using namespace mata::nft; using Symbol = mata::Symbol; @@ -136,3 +137,23 @@ TEST_CASE("nft::create_identity_with_single_replace()") { CHECK(nft_identity.is_identical(nft)); } } + +TEST_CASE("nft::reluctant_replacement()") { + Nft nft{}; + mata::nfa::Nfa regex{}; + 
SECTION("nft::end_marker_dfa()") { + mata::parser::create_nfa(®ex, "cb+a+"); + mata::nfa::Nfa nfa_end_marker{ end_marker_dfa(regex) }; + mata::nfa::Nfa dfa_expected_end_marker{}; + dfa_expected_end_marker.initial = { 0 }; + dfa_expected_end_marker.final = { 4 }; + dfa_expected_end_marker.delta.add(0, 'c', 1); + dfa_expected_end_marker.delta.add(1, 'b', 2); + dfa_expected_end_marker.delta.add(2, 'b', 2); + dfa_expected_end_marker.delta.add(2, 'a', 3); + dfa_expected_end_marker.delta.add(3, EPSILON, 4); + dfa_expected_end_marker.delta.add(4, 'a', 3); + CHECK(nfa_end_marker.is_deterministic()); + CHECK(mata::nfa::are_equivalent(nfa_end_marker, dfa_expected_end_marker)); + } +} From be4f2f910bb3ac4200719d85368778b24c1462c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Thu, 15 Feb 2024 13:09:36 +0100 Subject: [PATCH 12/24] Move NFT string solving function into their own namespace --- include/mata/nft/strings.hh | 4 ++-- src/nft/builder.cc | 8 +++----- src/nft/strings.cc | 16 +++++++++------- tests/nft/strings.cc | 38 ++++++++++++++++++++++++++++--------- 4 files changed, 43 insertions(+), 23 deletions(-) diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh index 0612fc66..7f92f798 100644 --- a/include/mata/nft/strings.hh +++ b/include/mata/nft/strings.hh @@ -7,7 +7,7 @@ #include "mata/nfa/strings.hh" #include "nft.hh" -namespace mata::nft { +namespace mata::nft::strings { /** * Create identity transducer over the @p alphabet with @p level_cnt levels. */ @@ -36,6 +36,6 @@ Nft reluctant_replace( nfa::Nfa end_marker_dfa(nfa::Nfa regex); nft::Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); -} // Namespace mata::nft. +} // Namespace mata::nft::strings. #endif // MATA_NFT_STRING_SOLVING_HH_. diff --git a/src/nft/builder.cc b/src/nft/builder.cc index 7f7703e7..aebaf392 100644 --- a/src/nft/builder.cc +++ b/src/nft/builder.cc @@ -304,17 +304,15 @@ Nft builder::create_from_nfa(const mata::nfa::Nfa& nfa, Level level_cnt, const s std::unordered_map state_mapping{}; state_mapping.reserve(nfa_num_of_states); State nft_state{ 0 }; - State curr_nft_state{ 0 }; + State curr_nft_state; for (State source{ 0 }; source < nfa.num_of_states(); ++source) { const auto nft_state_it{ state_mapping.find(source) }; if (nft_state_it == state_mapping.end()) { - curr_nft_state = nft_state; - state_mapping[source] = curr_nft_state; + state_mapping[source] = nft_state; ++nft_state; - } else { - curr_nft_state = nft_state_it->second; } for (const SymbolPost& symbol_post: nfa.delta[source]) { + curr_nft_state = state_mapping[source]; Level level{ 0 }; if (!epsilons.contains(symbol_post.symbol)) { for (; level < num_of_additional_states_per_nfa_trans; ++level) { diff --git a/src/nft/strings.cc b/src/nft/strings.cc index 2471b912..86e606e2 100644 --- a/src/nft/strings.cc +++ b/src/nft/strings.cc @@ -15,7 +15,7 @@ using mata::nfa::StatePost; using mata::nfa::SymbolPost; using namespace mata::nft; -Nft mata::nft::create_identity(mata::Alphabet* alphabet, Level level_cnt) { +Nft mata::nft::strings::create_identity(mata::Alphabet* alphabet, Level level_cnt) { if (level_cnt == 0) { throw std::runtime_error("NFT must have at least one level"); } const auto alphabet_symbols{ alphabet->get_alphabet_symbols() }; const size_t additional_states_per_symbol_num{ level_cnt - 1 }; @@ -49,8 +49,8 @@ Nft mata::nft::create_identity(mata::Alphabet* alphabet, Level level_cnt) { return nft; } -Nft mata::nft::create_identity_with_single_replace( - mata::Alphabet *alphabet, const Symbol 
from_symbol, const Symbol to_symbol) { +Nft mata::nft::strings::create_identity_with_single_replace( + mata::Alphabet* alphabet, const Symbol from_symbol, const Symbol to_symbol) { Nft nft{ create_identity(alphabet) }; if (alphabet->empty()) { throw std::runtime_error("Alphabet does not contain symbol being replaced."); } auto symbol_post_to_state_with_replace{ nft.delta.mutable_state_post(0).find(from_symbol) }; @@ -59,7 +59,7 @@ Nft mata::nft::create_identity_with_single_replace( return nft; } -Nft mata::nft::reluctant_replace( +Nft mata::nft::strings::reluctant_replace( const std::string& regex, const std::string& replacement, Symbol begin_marker, @@ -70,7 +70,7 @@ Nft mata::nft::reluctant_replace( return reluctant_replace(std::move(regex_nfa), replacement); } -Nft mata::nft::reluctant_replace( +Nft mata::nft::strings::reluctant_replace( nfa::Nfa regex, const std::string& replacement, Symbol begin_marker, @@ -82,7 +82,7 @@ Nft mata::nft::reluctant_replace( return Nft{}; } -nfa::Nfa mata::nft::end_marker_dfa(nfa::Nfa regex) { +nfa::Nfa mata::nft::strings::end_marker_dfa(nfa::Nfa regex) { if (!regex.is_deterministic()) { regex = determinize(regex); } @@ -103,7 +103,7 @@ nfa::Nfa mata::nft::end_marker_dfa(nfa::Nfa regex) { return regex; } -nft::Nft mata::nft::end_marker_dft(const nfa::Nfa& end_marker_dfa, const Symbol end_marker) { +nft::Nft mata::nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, const Symbol end_marker) { assert(end_marker_dfa.is_deterministic()); Nft dft_end_marker{ nft::builder::create_from_nfa(end_marker_dfa) }; @@ -112,6 +112,8 @@ nft::Nft mata::nft::end_marker_dft(const nfa::Nfa& end_marker_dfa, const Symbol StatePost& state_post = dft_end_marker.delta.mutable_state_post(source); for (const Move& move: state_post.moves_epsilons()) { const State end_marker_state{ dft_end_marker.add_state() }; + dft_end_marker.levels.resize(end_marker_state + 1); + dft_end_marker.levels[end_marker_state] = 1; SymbolPost& symbol_post{ *state_post.find(move.symbol) }; symbol_post.targets.erase(move.target); symbol_post.targets.insert(end_marker_state); diff --git a/tests/nft/strings.cc b/tests/nft/strings.cc index 068c04eb..1f58998c 100644 --- a/tests/nft/strings.cc +++ b/tests/nft/strings.cc @@ -11,14 +11,13 @@ #include "mata/nft/strings.hh" #include "mata/parser/re2parser.hh" +using namespace mata; using namespace mata::nft; -using Symbol = mata::Symbol; +using namespace mata::nft::strings; using IntAlphabet = mata::IntAlphabet; using OnTheFlyAlphabet = mata::OnTheFlyAlphabet; using mata::EnumAlphabet; -using Word = std::vector; - TEST_CASE("nft::create_identity()") { Nft nft{}; nft.initial = { 0 }; @@ -140,11 +139,11 @@ TEST_CASE("nft::create_identity_with_single_replace()") { TEST_CASE("nft::reluctant_replacement()") { Nft nft{}; - mata::nfa::Nfa regex{}; + nfa::Nfa regex{}; SECTION("nft::end_marker_dfa()") { - mata::parser::create_nfa(®ex, "cb+a+"); - mata::nfa::Nfa nfa_end_marker{ end_marker_dfa(regex) }; - mata::nfa::Nfa dfa_expected_end_marker{}; + parser::create_nfa(®ex, "cb+a+"); + nfa::Nfa dfa_end_marker{ nft::strings::end_marker_dfa(regex) }; + nfa::Nfa dfa_expected_end_marker{}; dfa_expected_end_marker.initial = { 0 }; dfa_expected_end_marker.final = { 4 }; dfa_expected_end_marker.delta.add(0, 'c', 1); @@ -153,7 +152,28 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected_end_marker.delta.add(2, 'a', 3); dfa_expected_end_marker.delta.add(3, EPSILON, 4); dfa_expected_end_marker.delta.add(4, 'a', 3); - CHECK(nfa_end_marker.is_deterministic()); - 
CHECK(mata::nfa::are_equivalent(nfa_end_marker, dfa_expected_end_marker)); + CHECK(dfa_end_marker.is_deterministic()); + CHECK(nfa::are_equivalent(dfa_end_marker, dfa_expected_end_marker)); + constexpr Symbol END_MARKER{ EPSILON - 100 }; + Nft dft_end_marker{ nft::strings::end_marker_dft(dfa_end_marker, END_MARKER) }; + Nft dft_expected_end_marker{}; + dft_expected_end_marker.levels_cnt = 2; + dft_expected_end_marker.levels = { 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1 }; + dft_expected_end_marker.initial = { 0 }; + dft_expected_end_marker.final = { 9 }; + dft_expected_end_marker.delta.add(0, 'c', 1); + dft_expected_end_marker.delta.add(1, 'c', 2); + dft_expected_end_marker.delta.add(2, 'b', 3); + dft_expected_end_marker.delta.add(3, 'b', 4); + dft_expected_end_marker.delta.add(4, 'b', 5); + dft_expected_end_marker.delta.add(5, 'b', 4); + dft_expected_end_marker.delta.add(4, 'a', 6); + dft_expected_end_marker.delta.add(6, 'a', 7); + dft_expected_end_marker.delta.add(7, EPSILON, 8); + dft_expected_end_marker.delta.add(8, END_MARKER, 9); + dft_expected_end_marker.delta.add(9, 'a', 10); + dft_expected_end_marker.delta.add(10, 'a', 7); + CHECK(dft_end_marker.is_deterministic()); + CHECK(nft::are_equivalent(dft_end_marker, dft_expected_end_marker)); } } From cb7cebd573f2d6ee9846b192c23a0aebb7af5a92 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Fri, 16 Feb 2024 08:58:03 +0100 Subject: [PATCH 13/24] Create generic end marker DFT --- include/mata/nft/strings.hh | 6 +- src/nft/strings.cc | 82 +++++++++++++++++++- tests/nft/strings.cc | 149 +++++++++++++++++++++++++++++++++++- 3 files changed, 229 insertions(+), 8 deletions(-) diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh index 7f92f798..d5f3bdf0 100644 --- a/include/mata/nft/strings.hh +++ b/include/mata/nft/strings.hh @@ -22,6 +22,7 @@ Nft create_identity_with_single_replace(mata::Alphabet* alphabet, Symbol from_sy Nft reluctant_replace( const std::string& regex, const std::string& replacement, + Alphabet* alphabet, // TODO: Change into constants? Symbol begin_marker = EPSILON - 101, Symbol end_marker = EPSILON - 100 @@ -29,13 +30,16 @@ Nft reluctant_replace( Nft reluctant_replace( mata::nfa::Nfa regex, const std::string& replacement, + Alphabet* alphabet, Symbol begin_marker = EPSILON - 101, Symbol end_marker = EPSILON - 100 ); nfa::Nfa end_marker_dfa(nfa::Nfa regex); -nft::Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); +Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); +nfa::Nfa generic_end_marker_dfa(const std::string& regex, Alphabet* alphabet); +nfa::Nfa generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet); } // Namespace mata::nft::strings. #endif // MATA_NFT_STRING_SOLVING_HH_. diff --git a/src/nft/strings.cc b/src/nft/strings.cc index 86e606e2..d618a850 100644 --- a/src/nft/strings.cc +++ b/src/nft/strings.cc @@ -1,6 +1,8 @@ /* nfa-strings.hh -- Operations on NFAs for string solving. */ +#include + #include "mata/nft/strings.hh" #include "mata/parser/re2parser.hh" #include "mata/nft/nft.hh" @@ -15,6 +17,37 @@ using mata::nfa::StatePost; using mata::nfa::SymbolPost; using namespace mata::nft; +namespace { + /// Add transitions, optionally add @p source to @p dfa_generic_end_marker.final, and update @p labeling and @p labeling_inv functions. 
+ void process_source(const nfa::Nfa& regex, const Alphabet* alphabet, nfa::Nfa& dfa_generic_end_marker, + std::map& labeling, + std::unordered_map& labeling_inv, State source, + StateSet& source_label, std::vector& worklist) { + const State generic_initial_state{ *dfa_generic_end_marker.initial.begin() }; + for (const Symbol symbol: alphabet->get_alphabet_symbols()) { + StateSet target_label{ generic_initial_state }; + for (const State regex_state: source_label) { + const StatePost& state_post{ regex.delta[regex_state] }; + auto symbol_post_it{ state_post.find(symbol) }; + if (symbol_post_it == state_post.end()) { continue; } + target_label.insert(symbol_post_it->targets); + } + auto target_it{ labeling_inv.find(target_label) }; + State target; + if (target_it == labeling_inv.end()) { + target = dfa_generic_end_marker.add_state(); + labeling.emplace(target, target_label); + labeling_inv.emplace(target_label, target); + worklist.push_back(target); + } else { + target = target_it->second; + } + dfa_generic_end_marker.delta.add(source, symbol, target); + } + dfa_generic_end_marker.final.insert(source); + } +} + Nft mata::nft::strings::create_identity(mata::Alphabet* alphabet, Level level_cnt) { if (level_cnt == 0) { throw std::runtime_error("NFT must have at least one level"); } const auto alphabet_symbols{ alphabet->get_alphabet_symbols() }; @@ -62,22 +95,24 @@ Nft mata::nft::strings::create_identity_with_single_replace( Nft mata::nft::strings::reluctant_replace( const std::string& regex, const std::string& replacement, + Alphabet* alphabet, Symbol begin_marker, Symbol end_marker ) { nfa::Nfa regex_nfa{}; parser::create_nfa(®ex_nfa, regex); - return reluctant_replace(std::move(regex_nfa), replacement); + return reluctant_replace(std::move(regex_nfa), replacement, alphabet, begin_marker, end_marker); } Nft mata::nft::strings::reluctant_replace( nfa::Nfa regex, const std::string& replacement, + Alphabet* alphabet, Symbol begin_marker, Symbol end_marker ) { - regex = end_marker_dfa(std::move(regex)); - Nft dft_end_marker{ end_marker_dft(regex, end_marker) }; + nfa::Nfa dfa_generic_end_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; + Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, end_marker) }; return Nft{}; } @@ -103,7 +138,7 @@ nfa::Nfa mata::nft::strings::end_marker_dfa(nfa::Nfa regex) { return regex; } -nft::Nft mata::nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, const Symbol end_marker) { +Nft mata::nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, const Symbol end_marker) { assert(end_marker_dfa.is_deterministic()); Nft dft_end_marker{ nft::builder::create_from_nfa(end_marker_dfa) }; @@ -122,3 +157,42 @@ nft::Nft mata::nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, cons } return dft_end_marker; } + +nfa::Nfa nft::strings::generic_end_marker_dfa(const std::string& regex, Alphabet* alphabet) { + nfa::Nfa nfa{}; + parser::create_nfa(&nfa, regex); + return generic_end_marker_dfa(std::move(nfa), alphabet); +} + +nfa::Nfa nft::strings::generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet) { + if (!regex.is_deterministic()) { + regex = determinize(regex); + } + + nfa::Nfa dfa_generic_end_marker{}; + dfa_generic_end_marker.initial.insert(0); + std::map labeling{}; + std::unordered_map labeling_inv{}; + labeling.emplace(0, *regex.initial.begin()); + labeling_inv.emplace(*regex.initial.begin(), 0); + + std::vector worklist{ 0 }; + while (!worklist.empty()) { + State source{ worklist.back() }; + worklist.pop_back(); + 
StateSet& source_label{ labeling[source] }; + + if (regex.final.intersects_with(source_label)) { + const State end_marker_target{ dfa_generic_end_marker.add_state() }; + dfa_generic_end_marker.delta.add(source, EPSILON, end_marker_target); + process_source(regex, alphabet, dfa_generic_end_marker, labeling, labeling_inv, end_marker_target, + source_label, worklist); + } else { + process_source(regex, alphabet, dfa_generic_end_marker, labeling, labeling_inv, source, source_label, + worklist); + } + + } + + return dfa_generic_end_marker; +} diff --git a/tests/nft/strings.cc b/tests/nft/strings.cc index 1f58998c..0bc00f29 100644 --- a/tests/nft/strings.cc +++ b/tests/nft/strings.cc @@ -38,7 +38,7 @@ TEST_CASE("nft::create_identity()") { nft.delta.add(7, 3, 8); nft.delta.add(8, 3, 0); nft.levels_cnt = 3; - nft.levels.resize(nft.levels_cnt * ( alphabet.get_number_of_symbols() - 1)); + nft.levels.resize(nft.levels_cnt * (alphabet.get_number_of_symbols() - 1)); nft.levels[0] = 0; nft.levels[1] = 1; nft.levels[2] = 2; @@ -53,7 +53,7 @@ TEST_CASE("nft::create_identity()") { } SECTION("identity nft no symbols") { - EnumAlphabet alphabet{ }; + EnumAlphabet alphabet{}; nft.alphabet = &alphabet; nft.levels_cnt = 3; nft.levels.resize(1); @@ -140,6 +140,8 @@ TEST_CASE("nft::create_identity_with_single_replace()") { TEST_CASE("nft::reluctant_replacement()") { Nft nft{}; nfa::Nfa regex{}; + EnumAlphabet alphabet{ 'a', 'b', 'c' }; + constexpr Symbol END_MARKER{ EPSILON - 100 }; SECTION("nft::end_marker_dfa()") { parser::create_nfa(®ex, "cb+a+"); nfa::Nfa dfa_end_marker{ nft::strings::end_marker_dfa(regex) }; @@ -154,7 +156,6 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected_end_marker.delta.add(4, 'a', 3); CHECK(dfa_end_marker.is_deterministic()); CHECK(nfa::are_equivalent(dfa_end_marker, dfa_expected_end_marker)); - constexpr Symbol END_MARKER{ EPSILON - 100 }; Nft dft_end_marker{ nft::strings::end_marker_dft(dfa_end_marker, END_MARKER) }; Nft dft_expected_end_marker{}; dft_expected_end_marker.levels_cnt = 2; @@ -176,4 +177,146 @@ TEST_CASE("nft::reluctant_replacement()") { CHECK(dft_end_marker.is_deterministic()); CHECK(nft::are_equivalent(dft_end_marker, dft_expected_end_marker)); } + + SECTION("nft::generic_end_marker_dft() regex cb+a+") { + nfa::Nfa dfa_generic_end_marker{ generic_end_marker_dfa("cb+a+", &alphabet) }; + nfa::Nfa dfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; + dfa_expected.delta.add(0, 'a', 0); + dfa_expected.delta.add(0, 'b', 0); + dfa_expected.delta.add(0, 'c', 1); + dfa_expected.delta.add(1, 'a', 0); + dfa_expected.delta.add(1, 'b', 2); + dfa_expected.delta.add(1, 'c', 1); + dfa_expected.delta.add(2, 'a', 3); + dfa_expected.delta.add(2, 'b', 2); + dfa_expected.delta.add(2, 'c', 1); + dfa_expected.delta.add(3, EPSILON, 4); + dfa_expected.delta.add(4, 'a', 3); + dfa_expected.delta.add(4, 'b', 0); + dfa_expected.delta.add(4, 'c', 1); + CHECK(nfa::are_equivalent(dfa_generic_end_marker, dfa_expected)); + + Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, END_MARKER) }; + Nft dft_expected{}; + dft_expected.initial.insert(0); + dft_expected.final = { 0, 4, 7, 14 }; + dft_expected.levels_cnt = 2; + dft_expected.delta.add(0, 'a', 1); + dft_expected.delta.add(1, 'a', 0); + dft_expected.delta.add(0, 'b', 2); + dft_expected.delta.add(2, 'b', 0); + dft_expected.delta.add(0, 'c', 3); + dft_expected.delta.add(3, 'c', 4); + dft_expected.delta.add(4, 'a', 5); + dft_expected.delta.add(5, 'a', 0); + dft_expected.delta.add(4, 'b', 6); + dft_expected.delta.add(6, 'b', 7); 
+ dft_expected.delta.add(4, 'c', 8); + dft_expected.delta.add(8, 'c', 4); + dft_expected.delta.add(7, 'a', 9); + dft_expected.delta.add(9, 'a', 10); + dft_expected.delta.add(7, 'b', 11); + dft_expected.delta.add(11, 'b', 7); + dft_expected.delta.add(7, 'c', 12); + dft_expected.delta.add(12, 'c', 4); + dft_expected.delta.add(10, EPSILON, 13); + dft_expected.delta.add(13, END_MARKER, 14); + dft_expected.delta.add(14, 'a', 15); + dft_expected.delta.add(15, 'a', 10); + dft_expected.delta.add(14, 'b', 16); + dft_expected.delta.add(16, 'b', 0); + dft_expected.delta.add(14, 'c', 17); + dft_expected.delta.add(17, 'c', 4); + dft_expected.levels.resize(18); + dft_expected.levels[0] = 0; + dft_expected.levels[1] = 1; + dft_expected.levels[2] = 1; + dft_expected.levels[3] = 1; + dft_expected.levels[4] = 0; + dft_expected.levels[5] = 1; + dft_expected.levels[6] = 1; + dft_expected.levels[7] = 0; + dft_expected.levels[8] = 1; + dft_expected.levels[9] = 1; + dft_expected.levels[10] = 0; + dft_expected.levels[11] = 1; + dft_expected.levels[12] = 1; + dft_expected.levels[13] = 1; + dft_expected.levels[14] = 0; + dft_expected.levels[15] = 1; + dft_expected.levels[16] = 1; + dft_expected.levels[17] = 1; + CHECK(nft::are_equivalent(dft_generic_end_marker, dft_expected)); + } + + SECTION("nft::generic_end_marker_dft() regex ab+a+") { + nfa::Nfa dfa_generic_end_marker{ generic_end_marker_dfa("ab+a+", &alphabet) }; + nfa::Nfa dfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; + dfa_expected.delta.add(0, 'a', 1); + dfa_expected.delta.add(0, 'b', 0); + dfa_expected.delta.add(0, 'c', 0); + dfa_expected.delta.add(1, 'a', 1); + dfa_expected.delta.add(1, 'b', 2); + dfa_expected.delta.add(1, 'c', 0); + dfa_expected.delta.add(2, 'a', 3); + dfa_expected.delta.add(2, 'b', 2); + dfa_expected.delta.add(2, 'c', 0); + dfa_expected.delta.add(3, EPSILON, 4); + dfa_expected.delta.add(4, 'a', 3); + dfa_expected.delta.add(4, 'b', 2); + dfa_expected.delta.add(4, 'c', 0); + CHECK(nfa::are_equivalent(dfa_generic_end_marker, dfa_expected)); + + Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, END_MARKER) }; + Nft dft_expected{}; + dft_expected.initial.insert(0); + dft_expected.final = { 0, 2, 7, 14}; + dft_expected.levels_cnt = 2; + dft_expected.delta.add(0, 'a', 1); + dft_expected.delta.add(1, 'a', 2); + dft_expected.delta.add(0, 'b', 3); + dft_expected.delta.add(3, 'b', 0); + dft_expected.delta.add(0, 'c', 4); + dft_expected.delta.add(4, 'c', 0); + dft_expected.delta.add(2, 'a', 5); + dft_expected.delta.add(5, 'a', 2); + dft_expected.delta.add(2, 'b', 6); + dft_expected.delta.add(6, 'b', 7); + dft_expected.delta.add(2, 'c', 8); + dft_expected.delta.add(8, 'c', 0); + dft_expected.delta.add(7, 'a', 9); + dft_expected.delta.add(9, 'a', 10); + dft_expected.delta.add(7, 'b', 11); + dft_expected.delta.add(11, 'b', 7); + dft_expected.delta.add(7, 'c', 12); + dft_expected.delta.add(12, 'c', 0); + dft_expected.delta.add(10, EPSILON, 13); + dft_expected.delta.add(13, END_MARKER, 14); + dft_expected.delta.add(14, 'a', 15); + dft_expected.delta.add(15, 'a', 10); + dft_expected.delta.add(14, 'b', 16); + dft_expected.delta.add(16, 'b', 7); + dft_expected.delta.add(14, 'c', 17); + dft_expected.delta.add(17, 'c', 0); + dft_expected.levels.resize(18); + dft_expected.levels[0] = 0; + dft_expected.levels[1] = 1; + dft_expected.levels[2] = 0; + dft_expected.levels[3] = 1; + dft_expected.levels[4] = 1; + dft_expected.levels[5] = 1; + dft_expected.levels[6] = 1; + dft_expected.levels[7] = 0; + dft_expected.levels[8] = 1; + 
dft_expected.levels[9] = 1; + dft_expected.levels[10] = 0; + dft_expected.levels[11] = 1; + dft_expected.levels[12] = 1; + dft_expected.levels[13] = 1; + dft_expected.levels[14] = 0; + dft_expected.levels[15] = 1; + dft_expected.levels[16] = 1; + dft_expected.levels[17] = 1; + CHECK(nft::are_equivalent(dft_generic_end_marker, dft_expected)); + } } From e07c1c20e05a25f5f85c3ec0a74470c3a2d483e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Mon, 19 Feb 2024 06:51:41 +0100 Subject: [PATCH 14/24] Generalize end_marker_dft() for general marker DFTconstruction --- include/mata/nft/strings.hh | 2 +- src/nft/strings.cc | 26 +++++++++++++------------- tests/nft/strings.cc | 6 +++--- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh index d5f3bdf0..72cd92c1 100644 --- a/include/mata/nft/strings.hh +++ b/include/mata/nft/strings.hh @@ -36,7 +36,7 @@ Nft reluctant_replace( ); nfa::Nfa end_marker_dfa(nfa::Nfa regex); -Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); +Nft marker_dft(const nfa::Nfa& marker_dfa, Symbol marker); nfa::Nfa generic_end_marker_dfa(const std::string& regex, Alphabet* alphabet); nfa::Nfa generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet); diff --git a/src/nft/strings.cc b/src/nft/strings.cc index d618a850..86718810 100644 --- a/src/nft/strings.cc +++ b/src/nft/strings.cc @@ -112,7 +112,7 @@ Nft mata::nft::strings::reluctant_replace( Symbol end_marker ) { nfa::Nfa dfa_generic_end_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; - Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, end_marker) }; + Nft dft_generic_end_marker{ marker_dft(dfa_generic_end_marker, end_marker) }; return Nft{}; } @@ -138,24 +138,24 @@ nfa::Nfa mata::nft::strings::end_marker_dfa(nfa::Nfa regex) { return regex; } -Nft mata::nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, const Symbol end_marker) { - assert(end_marker_dfa.is_deterministic()); +Nft mata::nft::strings::marker_dft(const nfa::Nfa& marker_dfa, const Symbol marker) { + assert(marker_dfa.is_deterministic()); - Nft dft_end_marker{ nft::builder::create_from_nfa(end_marker_dfa) }; - const size_t dft_end_marker_num_of_states{ dft_end_marker.num_of_states() }; - for (State source{ 0 }; source < dft_end_marker_num_of_states; ++source) { - StatePost& state_post = dft_end_marker.delta.mutable_state_post(source); + Nft dft_marker{ nft::builder::create_from_nfa(marker_dfa) }; + const size_t dft_marker_num_of_states{ dft_marker.num_of_states() }; + for (State source{ 0 }; source < dft_marker_num_of_states; ++source) { + StatePost& state_post = dft_marker.delta.mutable_state_post(source); for (const Move& move: state_post.moves_epsilons()) { - const State end_marker_state{ dft_end_marker.add_state() }; - dft_end_marker.levels.resize(end_marker_state + 1); - dft_end_marker.levels[end_marker_state] = 1; + const State marker_state{ dft_marker.add_state() }; + dft_marker.levels.resize(marker_state + 1); + dft_marker.levels[marker_state] = 1; SymbolPost& symbol_post{ *state_post.find(move.symbol) }; symbol_post.targets.erase(move.target); - symbol_post.targets.insert(end_marker_state); - dft_end_marker.delta.add(end_marker_state, end_marker, move.target); + symbol_post.targets.insert(marker_state); + dft_marker.delta.add(marker_state, marker, move.target); } } - return dft_end_marker; + return dft_marker; } nfa::Nfa nft::strings::generic_end_marker_dfa(const std::string& regex, Alphabet* alphabet) 
{ diff --git a/tests/nft/strings.cc b/tests/nft/strings.cc index 0bc00f29..3962a14a 100644 --- a/tests/nft/strings.cc +++ b/tests/nft/strings.cc @@ -156,7 +156,7 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected_end_marker.delta.add(4, 'a', 3); CHECK(dfa_end_marker.is_deterministic()); CHECK(nfa::are_equivalent(dfa_end_marker, dfa_expected_end_marker)); - Nft dft_end_marker{ nft::strings::end_marker_dft(dfa_end_marker, END_MARKER) }; + Nft dft_end_marker{ nft::strings::marker_dft(dfa_end_marker, END_MARKER) }; Nft dft_expected_end_marker{}; dft_expected_end_marker.levels_cnt = 2; dft_expected_end_marker.levels = { 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1 }; @@ -196,7 +196,7 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected.delta.add(4, 'c', 1); CHECK(nfa::are_equivalent(dfa_generic_end_marker, dfa_expected)); - Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, END_MARKER) }; + Nft dft_generic_end_marker{ marker_dft(dfa_generic_end_marker, END_MARKER) }; Nft dft_expected{}; dft_expected.initial.insert(0); dft_expected.final = { 0, 4, 7, 14 }; @@ -267,7 +267,7 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected.delta.add(4, 'c', 0); CHECK(nfa::are_equivalent(dfa_generic_end_marker, dfa_expected)); - Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, END_MARKER) }; + Nft dft_generic_end_marker{ marker_dft(dfa_generic_end_marker, END_MARKER) }; Nft dft_expected{}; dft_expected.initial.insert(0); dft_expected.final = { 0, 2, 7, 14}; From 28b00f3eaeb116d0a818721dbfef2d414d29a05f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Mon, 19 Feb 2024 07:26:29 +0100 Subject: [PATCH 15/24] Rename functions --- include/mata/nft/strings.hh | 8 +++++--- src/nft/strings.cc | 15 ++++++++++----- tests/nft/strings.cc | 14 +++++++------- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh index 72cd92c1..a725453c 100644 --- a/include/mata/nft/strings.hh +++ b/include/mata/nft/strings.hh @@ -19,7 +19,7 @@ Nft create_identity(mata::Alphabet* alphabet, Level level_cnt = 2); */ Nft create_identity_with_single_replace(mata::Alphabet* alphabet, Symbol from_symbol, Symbol to_symbol); -Nft reluctant_replace( +Nft replace_reluctant( const std::string& regex, const std::string& replacement, Alphabet* alphabet, @@ -27,8 +27,8 @@ Nft reluctant_replace( Symbol begin_marker = EPSILON - 101, Symbol end_marker = EPSILON - 100 ); -Nft reluctant_replace( - mata::nfa::Nfa regex, +Nft replace_reluctant( + nfa::Nfa regex, const std::string& replacement, Alphabet* alphabet, Symbol begin_marker = EPSILON - 101, @@ -40,6 +40,8 @@ Nft marker_dft(const nfa::Nfa& marker_dfa, Symbol marker); nfa::Nfa generic_end_marker_dfa(const std::string& regex, Alphabet* alphabet); nfa::Nfa generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet); + +Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); } // Namespace mata::nft::strings. #endif // MATA_NFT_STRING_SOLVING_HH_. 
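The marker_dft()/end_marker_dft() construction declared in the header hunk above boils down to one local rewrite: every epsilon move of the (determinized) marker DFA is split through a fresh level-1 state, so the transducer consumes nothing on the input level and emits the marker on the output level. The standalone sketch below hand-builds the smallest such fragment using only the Nft API that already appears in this patch series (delta.add, levels, levels_cnt, initial, final); it is a reading aid, not part of any patch, and the state numbering and the MARKER value are illustrative assumptions.

    // Sketch only: how one epsilon move p --EPSILON--> q of the marker DFA
    // turns into p --EPSILON--> m --MARKER--> q in the two-level transducer,
    // where m is a fresh level-1 (output) state.
    #include "mata/nft/nft.hh"

    using namespace mata::nft;

    int main() {
        constexpr mata::Symbol MARKER{ EPSILON - 100 };  // assumed marker value, mirroring the tests

        Nft nft{};
        nft.levels_cnt = 2;
        nft.levels = { 0, 1, 0 };      // state 1 is the interposed level-1 state
        nft.initial = { 0 };
        nft.final = { 2 };

        nft.delta.add(0, EPSILON, 1);  // input level: consume nothing
        nft.delta.add(1, MARKER, 2);   // output level: emit the marker
        return 0;
    }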
diff --git a/src/nft/strings.cc b/src/nft/strings.cc index 86718810..cac5ac8e 100644 --- a/src/nft/strings.cc +++ b/src/nft/strings.cc @@ -92,7 +92,7 @@ Nft mata::nft::strings::create_identity_with_single_replace( return nft; } -Nft mata::nft::strings::reluctant_replace( +Nft mata::nft::strings::replace_reluctant( const std::string& regex, const std::string& replacement, Alphabet* alphabet, @@ -101,18 +101,18 @@ Nft mata::nft::strings::reluctant_replace( ) { nfa::Nfa regex_nfa{}; parser::create_nfa(®ex_nfa, regex); - return reluctant_replace(std::move(regex_nfa), replacement, alphabet, begin_marker, end_marker); + return replace_reluctant(std::move(regex_nfa), replacement, alphabet, begin_marker, end_marker); } -Nft mata::nft::strings::reluctant_replace( +Nft mata::nft::strings::replace_reluctant( nfa::Nfa regex, const std::string& replacement, Alphabet* alphabet, Symbol begin_marker, Symbol end_marker ) { - nfa::Nfa dfa_generic_end_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; - Nft dft_generic_end_marker{ marker_dft(dfa_generic_end_marker, end_marker) }; + nfa::Nfa dfa_generic_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; + Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_marker, end_marker) }; return Nft{}; } @@ -196,3 +196,8 @@ nfa::Nfa nft::strings::generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet return dfa_generic_end_marker; } + + +Nft nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker) { + return marker_dft(end_marker_dfa, end_marker); +} diff --git a/tests/nft/strings.cc b/tests/nft/strings.cc index 3962a14a..242dff10 100644 --- a/tests/nft/strings.cc +++ b/tests/nft/strings.cc @@ -141,7 +141,7 @@ TEST_CASE("nft::reluctant_replacement()") { Nft nft{}; nfa::Nfa regex{}; EnumAlphabet alphabet{ 'a', 'b', 'c' }; - constexpr Symbol END_MARKER{ EPSILON - 100 }; + constexpr Symbol MARKER{ EPSILON - 100 }; SECTION("nft::end_marker_dfa()") { parser::create_nfa(®ex, "cb+a+"); nfa::Nfa dfa_end_marker{ nft::strings::end_marker_dfa(regex) }; @@ -156,7 +156,7 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected_end_marker.delta.add(4, 'a', 3); CHECK(dfa_end_marker.is_deterministic()); CHECK(nfa::are_equivalent(dfa_end_marker, dfa_expected_end_marker)); - Nft dft_end_marker{ nft::strings::marker_dft(dfa_end_marker, END_MARKER) }; + Nft dft_end_marker{ end_marker_dft(dfa_end_marker, MARKER) }; Nft dft_expected_end_marker{}; dft_expected_end_marker.levels_cnt = 2; dft_expected_end_marker.levels = { 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1 }; @@ -171,7 +171,7 @@ TEST_CASE("nft::reluctant_replacement()") { dft_expected_end_marker.delta.add(4, 'a', 6); dft_expected_end_marker.delta.add(6, 'a', 7); dft_expected_end_marker.delta.add(7, EPSILON, 8); - dft_expected_end_marker.delta.add(8, END_MARKER, 9); + dft_expected_end_marker.delta.add(8, MARKER, 9); dft_expected_end_marker.delta.add(9, 'a', 10); dft_expected_end_marker.delta.add(10, 'a', 7); CHECK(dft_end_marker.is_deterministic()); @@ -196,7 +196,7 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected.delta.add(4, 'c', 1); CHECK(nfa::are_equivalent(dfa_generic_end_marker, dfa_expected)); - Nft dft_generic_end_marker{ marker_dft(dfa_generic_end_marker, END_MARKER) }; + Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, MARKER) }; Nft dft_expected{}; dft_expected.initial.insert(0); dft_expected.final = { 0, 4, 7, 14 }; @@ -220,7 +220,7 @@ TEST_CASE("nft::reluctant_replacement()") { dft_expected.delta.add(7, 'c', 12); dft_expected.delta.add(12, 'c', 4); 
dft_expected.delta.add(10, EPSILON, 13); - dft_expected.delta.add(13, END_MARKER, 14); + dft_expected.delta.add(13, MARKER, 14); dft_expected.delta.add(14, 'a', 15); dft_expected.delta.add(15, 'a', 10); dft_expected.delta.add(14, 'b', 16); @@ -267,7 +267,7 @@ TEST_CASE("nft::reluctant_replacement()") { dfa_expected.delta.add(4, 'c', 0); CHECK(nfa::are_equivalent(dfa_generic_end_marker, dfa_expected)); - Nft dft_generic_end_marker{ marker_dft(dfa_generic_end_marker, END_MARKER) }; + Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_end_marker, MARKER) }; Nft dft_expected{}; dft_expected.initial.insert(0); dft_expected.final = { 0, 2, 7, 14}; @@ -291,7 +291,7 @@ TEST_CASE("nft::reluctant_replacement()") { dft_expected.delta.add(7, 'c', 12); dft_expected.delta.add(12, 'c', 0); dft_expected.delta.add(10, EPSILON, 13); - dft_expected.delta.add(13, END_MARKER, 14); + dft_expected.delta.add(13, MARKER, 14); dft_expected.delta.add(14, 'a', 15); dft_expected.delta.add(15, 'a', 10); dft_expected.delta.add(14, 'b', 16); From 5ab75693e13130c747674fcd3d99d0dd4de63284 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Mon, 19 Feb 2024 07:55:17 +0100 Subject: [PATCH 16/24] Construct begin marker DFT --- include/mata/nft/strings.hh | 4 ++ src/nft/strings.cc | 28 +++++++- tests/nft/strings.cc | 133 ++++++++++++++++++++++++++++++++++++ 3 files changed, 164 insertions(+), 1 deletion(-) diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh index a725453c..9c2b8cdb 100644 --- a/include/mata/nft/strings.hh +++ b/include/mata/nft/strings.hh @@ -41,6 +41,10 @@ Nft marker_dft(const nfa::Nfa& marker_dfa, Symbol marker); nfa::Nfa generic_end_marker_dfa(const std::string& regex, Alphabet* alphabet); nfa::Nfa generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet); +nfa::Nfa begin_marker_dfa(const std::string& regex, Alphabet* alphabet); +nfa::Nfa begin_marker_dfa(nfa::Nfa regex, Alphabet* alphabet); + +Nft begin_marker_dft(const nfa::Nfa& begin_marker_dfa, Symbol begin_marker); Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); } // Namespace mata::nft::strings. 
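The begin-marker functions declared in the header hunk above are the mirror image of the end-marker ones: a match of the regex can start at a position exactly when a match of the reversed language can end there. The sketch below restates that reduction using only functions already present in this patch series (generic_end_marker_dfa and mata::nfa::revert, both used by the definitions that follow in src/nft/strings.cc); the name begin_marker_sketch and its arguments are illustrative only.

    // Sketch: a begin-marker automaton as the end-marker construction run backwards.
    #include <string>
    #include <utility>

    #include "mata/nfa/nfa.hh"
    #include "mata/nft/strings.hh"

    mata::nfa::Nfa begin_marker_sketch(const std::string& regex, mata::Alphabet* alphabet) {
        // 1. Mark every position where a match of `regex` can end.
        mata::nfa::Nfa aut{ mata::nft::strings::generic_end_marker_dfa(regex, alphabet) };
        // 2. Reverse all transitions; match ends become match beginnings.
        aut = mata::nfa::revert(aut);
        // 3. Swapping initial and final states completes the reversal.
        std::swap(aut.initial, aut.final);
        return aut;
    }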
diff --git a/src/nft/strings.cc b/src/nft/strings.cc index cac5ac8e..03ff9c59 100644 --- a/src/nft/strings.cc +++ b/src/nft/strings.cc @@ -113,6 +113,7 @@ Nft mata::nft::strings::replace_reluctant( ) { nfa::Nfa dfa_generic_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_marker, end_marker) }; + Nft dft_begin_marker{ begin_marker_dft(dfa_generic_marker, begin_marker) }; return Nft{}; } @@ -139,7 +140,7 @@ nfa::Nfa mata::nft::strings::end_marker_dfa(nfa::Nfa regex) { } Nft mata::nft::strings::marker_dft(const nfa::Nfa& marker_dfa, const Symbol marker) { - assert(marker_dfa.is_deterministic()); +// assert(marker_dfa.is_deterministic()); Nft dft_marker{ nft::builder::create_from_nfa(marker_dfa) }; const size_t dft_marker_num_of_states{ dft_marker.num_of_states() }; @@ -197,6 +198,31 @@ nfa::Nfa nft::strings::generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet return dfa_generic_end_marker; } +nfa::Nfa nft::strings::begin_marker_dfa(const std::string& regex, Alphabet* alphabet) { + nfa::Nfa nfa{}; + parser::create_nfa(&nfa, regex); + return begin_marker_dfa(std::move(nfa), alphabet); +} + +nfa::Nfa nft::strings::begin_marker_dfa(nfa::Nfa regex, Alphabet* alphabet) { + nfa::Nfa dfa_generic_end_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; + dfa_generic_end_marker = revert(dfa_generic_end_marker); + std::swap(dfa_generic_end_marker.initial, dfa_generic_end_marker.final); + return dfa_generic_end_marker; +} + +Nft nft::strings::begin_marker_dft(const nfa::Nfa& begin_marker_dfa, Symbol begin_marker) { + Nft begin_marker_dft{ marker_dft(begin_marker_dfa, begin_marker) }; + const State new_initial{ begin_marker_dft.add_state() }; + for (const State orig_final: begin_marker_dft.final) { + begin_marker_dft.delta.add(new_initial, EPSILON, orig_final); + } + begin_marker_dft.final = begin_marker_dft.initial; + begin_marker_dft.initial = { new_initial }; + begin_marker_dft.levels.resize(new_initial + 1); + begin_marker_dft.levels[new_initial] = 0; + return begin_marker_dft; +} Nft nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker) { return marker_dft(end_marker_dfa, end_marker); diff --git a/tests/nft/strings.cc b/tests/nft/strings.cc index 242dff10..f3ccf325 100644 --- a/tests/nft/strings.cc +++ b/tests/nft/strings.cc @@ -319,4 +319,137 @@ TEST_CASE("nft::reluctant_replacement()") { dft_expected.levels[17] = 1; CHECK(nft::are_equivalent(dft_generic_end_marker, dft_expected)); } + + SECTION("nft::begin_marker_dft() regex cb+a+") { + nfa::Nfa dfa_begin_marker{ begin_marker_dfa("cb+a+", &alphabet) }; + nfa::Nfa dfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; + dfa_expected.delta.add(0, 'a', 0); + dfa_expected.delta.add(0, 'b', 0); + dfa_expected.delta.add(1, 'c', 0); + dfa_expected.delta.add(0, 'a', 1); + dfa_expected.delta.add(2, 'b', 1); + dfa_expected.delta.add(1, 'c', 1); + dfa_expected.delta.add(3, 'a', 2); + dfa_expected.delta.add(2, 'b', 2); + dfa_expected.delta.add(1, 'c', 2); + dfa_expected.delta.add(4, EPSILON, 3); + dfa_expected.delta.add(3, 'a', 4); + dfa_expected.delta.add(0, 'b', 4); + dfa_expected.delta.add(1, 'c', 4); + CHECK(nfa::are_equivalent(dfa_begin_marker, dfa_expected)); + + Nft dft_begin_marker{ begin_marker_dft(dfa_begin_marker, MARKER) }; + Nft dft_expected{}; + dft_expected.initial.insert(0); + dft_expected.final.insert(1); + dft_expected.levels_cnt = 2; + dft_expected.delta.add(0, EPSILON, 1); + dft_expected.delta.add(0, EPSILON, 2); + 
dft_expected.delta.add(0, EPSILON, 3); + dft_expected.delta.add(0, EPSILON, 5); + dft_expected.delta.add(1, 'a', 6); + dft_expected.delta.add(6, 'a', 1); + dft_expected.delta.add(6, 'a', 2); + dft_expected.delta.add(1, 'b', 7); + dft_expected.delta.add(7, 'b', 1); + dft_expected.delta.add(7, 'b', 5); + dft_expected.delta.add(2, 'c', 8); + dft_expected.delta.add(8, 'c', 2); + dft_expected.delta.add(8, 'c', 1); + dft_expected.delta.add(8, 'c', 3); + dft_expected.delta.add(8, 'c', 5); + dft_expected.delta.add(3, 'b', 9); + dft_expected.delta.add(9, 'b', 3); + dft_expected.delta.add(9, 'b', 2); + dft_expected.delta.add(4, 'a', 10); + dft_expected.delta.add(10, 'a', 3); + dft_expected.delta.add(10, 'a', 5); + dft_expected.delta.add(5, EPSILON, 11); + dft_expected.delta.add(11, MARKER, 4); + dft_expected.levels.resize(12); + dft_expected.levels[0] = 0; + dft_expected.levels[1] = 0; + dft_expected.levels[2] = 0; + dft_expected.levels[3] = 0; + dft_expected.levels[4] = 0; + dft_expected.levels[5] = 0; + dft_expected.levels[6] = 1; + dft_expected.levels[7] = 1; + dft_expected.levels[8] = 1; + dft_expected.levels[9] = 1; + dft_expected.levels[10] = 1; + dft_expected.levels[11] = 1; + CHECK(nft::are_equivalent(dft_begin_marker, dft_expected)); + } + + SECTION("nft::begin_marker_dft() regex ab+a+") { + nfa::Nfa dfa_begin_marker{ begin_marker_dfa("ab+a+", &alphabet) }; + nfa::Nfa dfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; + dfa_expected.delta.add(1, 'a', 0); + dfa_expected.delta.add(0, 'b', 0); + dfa_expected.delta.add(0, 'c', 0); + dfa_expected.delta.add(1, 'a', 1); + dfa_expected.delta.add(2, 'b', 1); + dfa_expected.delta.add(0, 'c', 1); + dfa_expected.delta.add(3, 'a', 2); + dfa_expected.delta.add(2, 'b', 2); + dfa_expected.delta.add(0, 'c', 2); + dfa_expected.delta.add(4, EPSILON, 3); + dfa_expected.delta.add(3, 'a', 4); + dfa_expected.delta.add(2, 'b', 4); + dfa_expected.delta.add(0, 'c', 4); + CHECK(nfa::are_equivalent(dfa_begin_marker, dfa_expected)); + + Nft dft_generic_end_marker{ end_marker_dft(dfa_begin_marker, MARKER) }; + Nft dft_expected{}; + dft_expected.initial.insert(0); + dft_expected.final = { 0, 2, 7, 14}; + dft_expected.levels_cnt = 2; + dft_expected.delta.add(0, 'a', 1); + dft_expected.delta.add(1, 'a', 2); + dft_expected.delta.add(0, 'b', 3); + dft_expected.delta.add(3, 'b', 0); + dft_expected.delta.add(0, 'c', 4); + dft_expected.delta.add(4, 'c', 0); + dft_expected.delta.add(2, 'a', 5); + dft_expected.delta.add(5, 'a', 2); + dft_expected.delta.add(2, 'b', 6); + dft_expected.delta.add(6, 'b', 7); + dft_expected.delta.add(2, 'c', 8); + dft_expected.delta.add(8, 'c', 0); + dft_expected.delta.add(7, 'a', 9); + dft_expected.delta.add(9, 'a', 10); + dft_expected.delta.add(7, 'b', 11); + dft_expected.delta.add(11, 'b', 7); + dft_expected.delta.add(7, 'c', 12); + dft_expected.delta.add(12, 'c', 0); + dft_expected.delta.add(10, EPSILON, 13); + dft_expected.delta.add(13, MARKER, 14); + dft_expected.delta.add(14, 'a', 15); + dft_expected.delta.add(15, 'a', 10); + dft_expected.delta.add(14, 'b', 16); + dft_expected.delta.add(16, 'b', 7); + dft_expected.delta.add(14, 'c', 17); + dft_expected.delta.add(17, 'c', 0); + dft_expected.levels.resize(18); + dft_expected.levels[0] = 0; + dft_expected.levels[1] = 1; + dft_expected.levels[2] = 0; + dft_expected.levels[3] = 1; + dft_expected.levels[4] = 1; + dft_expected.levels[5] = 1; + dft_expected.levels[6] = 1; + dft_expected.levels[7] = 0; + dft_expected.levels[8] = 1; + dft_expected.levels[9] = 1; + dft_expected.levels[10] = 0; + 
dft_expected.levels[11] = 1; + dft_expected.levels[12] = 1; + dft_expected.levels[13] = 1; + dft_expected.levels[14] = 0; + dft_expected.levels[15] = 1; + dft_expected.levels[16] = 1; + dft_expected.levels[17] = 1; + CHECK(nft::are_equivalent(dft_generic_end_marker, dft_expected)); + } } From c7b3f0d62f9b5a9456ada57fae789e45839536c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Mon, 19 Feb 2024 08:35:49 +0100 Subject: [PATCH 17/24] Rename `dft` to `nft` where creating DFT is not guaranteed --- include/mata/nft/strings.hh | 8 +- src/nft/strings.cc | 17 ++- tests/nft/strings.cc | 245 +++++++++++++++++------------------- 3 files changed, 130 insertions(+), 140 deletions(-) diff --git a/include/mata/nft/strings.hh b/include/mata/nft/strings.hh index 9c2b8cdb..342ab4b0 100644 --- a/include/mata/nft/strings.hh +++ b/include/mata/nft/strings.hh @@ -36,15 +36,15 @@ Nft replace_reluctant( ); nfa::Nfa end_marker_dfa(nfa::Nfa regex); -Nft marker_dft(const nfa::Nfa& marker_dfa, Symbol marker); +Nft marker_nft(const nfa::Nfa& marker_dfa, Symbol marker); nfa::Nfa generic_end_marker_dfa(const std::string& regex, Alphabet* alphabet); nfa::Nfa generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet); -nfa::Nfa begin_marker_dfa(const std::string& regex, Alphabet* alphabet); -nfa::Nfa begin_marker_dfa(nfa::Nfa regex, Alphabet* alphabet); +nfa::Nfa begin_marker_nfa(const std::string& regex, Alphabet* alphabet); +nfa::Nfa begin_marker_nfa(nfa::Nfa regex, Alphabet* alphabet); -Nft begin_marker_dft(const nfa::Nfa& begin_marker_dfa, Symbol begin_marker); +Nft begin_marker_nft(const nfa::Nfa& begin_marker_dfa, Symbol begin_marker); Nft end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker); } // Namespace mata::nft::strings. diff --git a/src/nft/strings.cc b/src/nft/strings.cc index 03ff9c59..3688200b 100644 --- a/src/nft/strings.cc +++ b/src/nft/strings.cc @@ -113,7 +113,7 @@ Nft mata::nft::strings::replace_reluctant( ) { nfa::Nfa dfa_generic_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; Nft dft_generic_end_marker{ end_marker_dft(dfa_generic_marker, end_marker) }; - Nft dft_begin_marker{ begin_marker_dft(dfa_generic_marker, begin_marker) }; + Nft dft_begin_marker{ begin_marker_nft(dfa_generic_marker, begin_marker) }; return Nft{}; } @@ -139,8 +139,7 @@ nfa::Nfa mata::nft::strings::end_marker_dfa(nfa::Nfa regex) { return regex; } -Nft mata::nft::strings::marker_dft(const nfa::Nfa& marker_dfa, const Symbol marker) { -// assert(marker_dfa.is_deterministic()); +Nft mata::nft::strings::marker_nft(const nfa::Nfa& marker_dfa, Symbol marker) { Nft dft_marker{ nft::builder::create_from_nfa(marker_dfa) }; const size_t dft_marker_num_of_states{ dft_marker.num_of_states() }; @@ -198,21 +197,21 @@ nfa::Nfa nft::strings::generic_end_marker_dfa(nfa::Nfa regex, Alphabet* alphabet return dfa_generic_end_marker; } -nfa::Nfa nft::strings::begin_marker_dfa(const std::string& regex, Alphabet* alphabet) { +nfa::Nfa nft::strings::begin_marker_nfa(const std::string& regex, Alphabet* alphabet) { nfa::Nfa nfa{}; parser::create_nfa(&nfa, regex); - return begin_marker_dfa(std::move(nfa), alphabet); + return begin_marker_nfa(std::move(nfa), alphabet); } -nfa::Nfa nft::strings::begin_marker_dfa(nfa::Nfa regex, Alphabet* alphabet) { +nfa::Nfa nft::strings::begin_marker_nfa(nfa::Nfa regex, Alphabet* alphabet) { nfa::Nfa dfa_generic_end_marker{ generic_end_marker_dfa(std::move(regex), alphabet) }; dfa_generic_end_marker = revert(dfa_generic_end_marker); 
std::swap(dfa_generic_end_marker.initial, dfa_generic_end_marker.final); return dfa_generic_end_marker; } -Nft nft::strings::begin_marker_dft(const nfa::Nfa& begin_marker_dfa, Symbol begin_marker) { - Nft begin_marker_dft{ marker_dft(begin_marker_dfa, begin_marker) }; +Nft nft::strings::begin_marker_nft(const nfa::Nfa& begin_marker_dfa, Symbol begin_marker) { + Nft begin_marker_dft{ marker_nft(begin_marker_dfa, begin_marker) }; const State new_initial{ begin_marker_dft.add_state() }; for (const State orig_final: begin_marker_dft.final) { begin_marker_dft.delta.add(new_initial, EPSILON, orig_final); @@ -225,5 +224,5 @@ Nft nft::strings::begin_marker_dft(const nfa::Nfa& begin_marker_dfa, Symbol begi } Nft nft::strings::end_marker_dft(const nfa::Nfa& end_marker_dfa, Symbol end_marker) { - return marker_dft(end_marker_dfa, end_marker); + return marker_nft(end_marker_dfa, end_marker); } diff --git a/tests/nft/strings.cc b/tests/nft/strings.cc index f3ccf325..3fc2c1fb 100644 --- a/tests/nft/strings.cc +++ b/tests/nft/strings.cc @@ -320,136 +320,127 @@ TEST_CASE("nft::reluctant_replacement()") { CHECK(nft::are_equivalent(dft_generic_end_marker, dft_expected)); } - SECTION("nft::begin_marker_dft() regex cb+a+") { - nfa::Nfa dfa_begin_marker{ begin_marker_dfa("cb+a+", &alphabet) }; - nfa::Nfa dfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; - dfa_expected.delta.add(0, 'a', 0); - dfa_expected.delta.add(0, 'b', 0); - dfa_expected.delta.add(1, 'c', 0); - dfa_expected.delta.add(0, 'a', 1); - dfa_expected.delta.add(2, 'b', 1); - dfa_expected.delta.add(1, 'c', 1); - dfa_expected.delta.add(3, 'a', 2); - dfa_expected.delta.add(2, 'b', 2); - dfa_expected.delta.add(1, 'c', 2); - dfa_expected.delta.add(4, EPSILON, 3); - dfa_expected.delta.add(3, 'a', 4); - dfa_expected.delta.add(0, 'b', 4); - dfa_expected.delta.add(1, 'c', 4); - CHECK(nfa::are_equivalent(dfa_begin_marker, dfa_expected)); + SECTION("nft::begin_marker_nft() regex cb+a+") { + nfa::Nfa nfa_begin_marker{ begin_marker_nfa("cb+a+", &alphabet) }; + nfa::Nfa nfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; + nfa_expected.delta.add(0, 'a', 0); + nfa_expected.delta.add(0, 'b', 0); + nfa_expected.delta.add(1, 'c', 0); + nfa_expected.delta.add(0, 'a', 1); + nfa_expected.delta.add(2, 'b', 1); + nfa_expected.delta.add(1, 'c', 1); + nfa_expected.delta.add(3, 'a', 2); + nfa_expected.delta.add(2, 'b', 2); + nfa_expected.delta.add(1, 'c', 2); + nfa_expected.delta.add(4, EPSILON, 3); + nfa_expected.delta.add(3, 'a', 4); + nfa_expected.delta.add(0, 'b', 4); + nfa_expected.delta.add(1, 'c', 4); + CHECK(nfa::are_equivalent(nfa_begin_marker, nfa_expected)); - Nft dft_begin_marker{ begin_marker_dft(dfa_begin_marker, MARKER) }; - Nft dft_expected{}; - dft_expected.initial.insert(0); - dft_expected.final.insert(1); - dft_expected.levels_cnt = 2; - dft_expected.delta.add(0, EPSILON, 1); - dft_expected.delta.add(0, EPSILON, 2); - dft_expected.delta.add(0, EPSILON, 3); - dft_expected.delta.add(0, EPSILON, 5); - dft_expected.delta.add(1, 'a', 6); - dft_expected.delta.add(6, 'a', 1); - dft_expected.delta.add(6, 'a', 2); - dft_expected.delta.add(1, 'b', 7); - dft_expected.delta.add(7, 'b', 1); - dft_expected.delta.add(7, 'b', 5); - dft_expected.delta.add(2, 'c', 8); - dft_expected.delta.add(8, 'c', 2); - dft_expected.delta.add(8, 'c', 1); - dft_expected.delta.add(8, 'c', 3); - dft_expected.delta.add(8, 'c', 5); - dft_expected.delta.add(3, 'b', 9); - dft_expected.delta.add(9, 'b', 3); - dft_expected.delta.add(9, 'b', 2); - dft_expected.delta.add(4, 'a', 10); - 
dft_expected.delta.add(10, 'a', 3); - dft_expected.delta.add(10, 'a', 5); - dft_expected.delta.add(5, EPSILON, 11); - dft_expected.delta.add(11, MARKER, 4); - dft_expected.levels.resize(12); - dft_expected.levels[0] = 0; - dft_expected.levels[1] = 0; - dft_expected.levels[2] = 0; - dft_expected.levels[3] = 0; - dft_expected.levels[4] = 0; - dft_expected.levels[5] = 0; - dft_expected.levels[6] = 1; - dft_expected.levels[7] = 1; - dft_expected.levels[8] = 1; - dft_expected.levels[9] = 1; - dft_expected.levels[10] = 1; - dft_expected.levels[11] = 1; - CHECK(nft::are_equivalent(dft_begin_marker, dft_expected)); + Nft nft_begin_marker{ begin_marker_nft(nfa_begin_marker, MARKER) }; + Nft nft_expected{}; + nft_expected.initial.insert(0); + nft_expected.final.insert(1); + nft_expected.levels_cnt = 2; + nft_expected.delta.add(0, EPSILON, 1); + nft_expected.delta.add(0, EPSILON, 2); + nft_expected.delta.add(0, EPSILON, 3); + nft_expected.delta.add(0, EPSILON, 5); + nft_expected.delta.add(1, 'a', 6); + nft_expected.delta.add(6, 'a', 1); + nft_expected.delta.add(6, 'a', 2); + nft_expected.delta.add(1, 'b', 7); + nft_expected.delta.add(7, 'b', 1); + nft_expected.delta.add(7, 'b', 5); + nft_expected.delta.add(2, 'c', 8); + nft_expected.delta.add(8, 'c', 2); + nft_expected.delta.add(8, 'c', 1); + nft_expected.delta.add(8, 'c', 3); + nft_expected.delta.add(8, 'c', 5); + nft_expected.delta.add(3, 'b', 9); + nft_expected.delta.add(9, 'b', 3); + nft_expected.delta.add(9, 'b', 2); + nft_expected.delta.add(4, 'a', 10); + nft_expected.delta.add(10, 'a', 3); + nft_expected.delta.add(10, 'a', 5); + nft_expected.delta.add(5, EPSILON, 11); + nft_expected.delta.add(11, MARKER, 4); + nft_expected.levels.resize(12); + nft_expected.levels[0] = 0; + nft_expected.levels[1] = 0; + nft_expected.levels[2] = 0; + nft_expected.levels[3] = 0; + nft_expected.levels[4] = 0; + nft_expected.levels[5] = 0; + nft_expected.levels[6] = 1; + nft_expected.levels[7] = 1; + nft_expected.levels[8] = 1; + nft_expected.levels[9] = 1; + nft_expected.levels[10] = 1; + nft_expected.levels[11] = 1; + CHECK(nft::are_equivalent(nft_begin_marker, nft_expected)); } - SECTION("nft::begin_marker_dft() regex ab+a+") { - nfa::Nfa dfa_begin_marker{ begin_marker_dfa("ab+a+", &alphabet) }; - nfa::Nfa dfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; - dfa_expected.delta.add(1, 'a', 0); - dfa_expected.delta.add(0, 'b', 0); - dfa_expected.delta.add(0, 'c', 0); - dfa_expected.delta.add(1, 'a', 1); - dfa_expected.delta.add(2, 'b', 1); - dfa_expected.delta.add(0, 'c', 1); - dfa_expected.delta.add(3, 'a', 2); - dfa_expected.delta.add(2, 'b', 2); - dfa_expected.delta.add(0, 'c', 2); - dfa_expected.delta.add(4, EPSILON, 3); - dfa_expected.delta.add(3, 'a', 4); - dfa_expected.delta.add(2, 'b', 4); - dfa_expected.delta.add(0, 'c', 4); - CHECK(nfa::are_equivalent(dfa_begin_marker, dfa_expected)); + SECTION("nft::begin_marker_nft() regex ab+a+") { + nfa::Nfa nfa_begin_marker{ begin_marker_nfa("ab+a+", &alphabet) }; + nfa::Nfa nfa_expected{ nfa::Delta{}, { 0 }, { 0, 1, 2, 4 }}; + nfa_expected.delta.add(1, 'a', 0); + nfa_expected.delta.add(0, 'b', 0); + nfa_expected.delta.add(0, 'c', 0); + nfa_expected.delta.add(1, 'a', 1); + nfa_expected.delta.add(2, 'b', 1); + nfa_expected.delta.add(0, 'c', 1); + nfa_expected.delta.add(3, 'a', 2); + nfa_expected.delta.add(2, 'b', 2); + nfa_expected.delta.add(0, 'c', 2); + nfa_expected.delta.add(4, EPSILON, 3); + nfa_expected.delta.add(3, 'a', 4); + nfa_expected.delta.add(2, 'b', 4); + nfa_expected.delta.add(0, 'c', 4); + 
CHECK(nfa::are_equivalent(nfa_begin_marker, nfa_expected)); - Nft dft_generic_end_marker{ end_marker_dft(dfa_begin_marker, MARKER) }; - Nft dft_expected{}; - dft_expected.initial.insert(0); - dft_expected.final = { 0, 2, 7, 14}; - dft_expected.levels_cnt = 2; - dft_expected.delta.add(0, 'a', 1); - dft_expected.delta.add(1, 'a', 2); - dft_expected.delta.add(0, 'b', 3); - dft_expected.delta.add(3, 'b', 0); - dft_expected.delta.add(0, 'c', 4); - dft_expected.delta.add(4, 'c', 0); - dft_expected.delta.add(2, 'a', 5); - dft_expected.delta.add(5, 'a', 2); - dft_expected.delta.add(2, 'b', 6); - dft_expected.delta.add(6, 'b', 7); - dft_expected.delta.add(2, 'c', 8); - dft_expected.delta.add(8, 'c', 0); - dft_expected.delta.add(7, 'a', 9); - dft_expected.delta.add(9, 'a', 10); - dft_expected.delta.add(7, 'b', 11); - dft_expected.delta.add(11, 'b', 7); - dft_expected.delta.add(7, 'c', 12); - dft_expected.delta.add(12, 'c', 0); - dft_expected.delta.add(10, EPSILON, 13); - dft_expected.delta.add(13, MARKER, 14); - dft_expected.delta.add(14, 'a', 15); - dft_expected.delta.add(15, 'a', 10); - dft_expected.delta.add(14, 'b', 16); - dft_expected.delta.add(16, 'b', 7); - dft_expected.delta.add(14, 'c', 17); - dft_expected.delta.add(17, 'c', 0); - dft_expected.levels.resize(18); - dft_expected.levels[0] = 0; - dft_expected.levels[1] = 1; - dft_expected.levels[2] = 0; - dft_expected.levels[3] = 1; - dft_expected.levels[4] = 1; - dft_expected.levels[5] = 1; - dft_expected.levels[6] = 1; - dft_expected.levels[7] = 0; - dft_expected.levels[8] = 1; - dft_expected.levels[9] = 1; - dft_expected.levels[10] = 0; - dft_expected.levels[11] = 1; - dft_expected.levels[12] = 1; - dft_expected.levels[13] = 1; - dft_expected.levels[14] = 0; - dft_expected.levels[15] = 1; - dft_expected.levels[16] = 1; - dft_expected.levels[17] = 1; - CHECK(nft::are_equivalent(dft_generic_end_marker, dft_expected)); + Nft nft_begin_marker{ begin_marker_nft(nfa_begin_marker, MARKER) }; + Nft nft_expected{}; + nft_expected.initial.insert(0); + nft_expected.final.insert(1); + nft_expected.levels_cnt = 2; + nft_expected.delta.add(0, EPSILON, 1); + nft_expected.delta.add(0, EPSILON, 2); + nft_expected.delta.add(0, EPSILON, 3); + nft_expected.delta.add(0, EPSILON, 5); + nft_expected.delta.add(1, 'b', 6); + nft_expected.delta.add(6, 'b', 1); + nft_expected.delta.add(1, 'c', 7); + nft_expected.delta.add(7, 'c', 1); + nft_expected.delta.add(7, 'c', 2); + nft_expected.delta.add(7, 'c', 3); + nft_expected.delta.add(7, 'c', 5); + nft_expected.delta.add(2, 'a', 8); + nft_expected.delta.add(8, 'a', 2); + nft_expected.delta.add(8, 'a', 1); + nft_expected.delta.add(3, 'b', 9); + nft_expected.delta.add(9, 'b', 3); + nft_expected.delta.add(9, 'b', 2); + nft_expected.delta.add(9, 'b', 5); + nft_expected.delta.add(4, 'a', 10); + nft_expected.delta.add(10, 'a', 5); + nft_expected.delta.add(10, 'a', 3); + nft_expected.delta.add(5, EPSILON, 11); + nft_expected.delta.add(11, MARKER, 4); + nft_expected.levels.resize(12); + nft_expected.levels[0] = 0; + nft_expected.levels[1] = 0; + nft_expected.levels[2] = 0; + nft_expected.levels[3] = 0; + nft_expected.levels[4] = 0; + nft_expected.levels[5] = 0; + nft_expected.levels[6] = 1; + nft_expected.levels[7] = 1; + nft_expected.levels[8] = 1; + nft_expected.levels[9] = 1; + nft_expected.levels[10] = 1; + nft_expected.levels[11] = 1; + CHECK(nft::are_equivalent(nft_begin_marker, nft_expected)); } } From e75e7f1b9367f5d0635afa98fa2a3036e2be749f Mon Sep 17 00:00:00 2001 From: koniksedy Date: Mon, 19 Feb 2024 18:56:14 +0100 
Subject: [PATCH 18/24] nft::are_quivalent implemented using the method get_one_level_aut --- include/mata/nft/nft.hh | 12 ++-- src/nft/inclusion.cc | 20 ++++--- tests/nft/builder.cc | 2 +- tests/nft/nft.cc | 128 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 147 insertions(+), 15 deletions(-) diff --git a/include/mata/nft/nft.hh b/include/mata/nft/nft.hh index c3ed2f80..e3f6eb9c 100644 --- a/include/mata/nft/nft.hh +++ b/include/mata/nft/nft.hh @@ -62,24 +62,22 @@ public: public: explicit Nft(Delta delta = {}, utils::SparseSet initial_states = {}, - utils::SparseSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, + utils::SparseSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 0, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(std::move(delta), std::move(initial_states), std::move(final_states), alphabet), - levels(std::move(levels)), levels_cnt(levels_cnt) {} - + : mata::nfa::Nfa(std::move(delta), std::move(initial_states), std::move(final_states), alphabet), levels(levels), levels_cnt(levels_cnt) {} /** * @brief Construct a new explicit NFT with num_of_states states and optionally set initial and final states. * * @param[in] num_of_states Number of states for which to preallocate Delta. */ explicit Nft(const unsigned long num_of_states, StateSet initial_states = {}, - StateSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, Alphabet* + StateSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 0, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(num_of_states, std::move(initial_states), std::move(final_states), alphabet), levels(std::move(levels)), levels_cnt(levels_cnt) {} + : mata::nfa::Nfa(num_of_states, std::move(initial_states), std::move(final_states), alphabet), levels(levels), levels_cnt(levels_cnt) {} explicit Nft(const mata::nfa::Nfa& other) : mata::nfa::Nfa(other.delta, other.initial, other.final, other.alphabet), - levels(std::vector(other.num_of_states(), 0)), levels_cnt(1) {} + levels(std::vector()), levels_cnt(0) {} /** * @brief Construct a new explicit NFT from other NFT. diff --git a/src/nft/inclusion.cc b/src/nft/inclusion.cc index 1e8c3980..eda720c3 100644 --- a/src/nft/inclusion.cc +++ b/src/nft/inclusion.cc @@ -271,18 +271,24 @@ bool mata::nft::is_included( bool mata::nft::are_equivalent(const Nft& lhs, const Nft& rhs, const Alphabet *alphabet, const ParameterMap& params) { + if (lhs.levels_cnt != rhs.levels_cnt) { return false; } - //TODO: add comment on what this is doing, what is __func__ ... 
- AlgoType algo{ set_algorithm(std::to_string(__func__), params) }; + if (lhs.levels_cnt == 0) { return nfa::are_equivalent(lhs, rhs, alphabet, params); } - if (params.at("algorithm") == "naive") { - if (alphabet == nullptr) { - const auto computed_alphabet{create_alphabet(lhs, rhs) }; - return compute_equivalence(lhs, rhs, &computed_alphabet, algo); + OrdVector symbols; + if (alphabet == nullptr) { + symbols = create_alphabet(lhs, rhs).get_alphabet_symbols(); + if (symbols.contains(DONT_CARE) && symbols.size() > 1) { + symbols.erase(DONT_CARE); } + } else { + symbols = alphabet->get_alphabet_symbols(); } - return compute_equivalence(lhs, rhs, alphabet, algo); + return nfa::are_equivalent(lhs.get_one_level_aut(symbols), + rhs.get_one_level_aut(symbols), + alphabet, + params); } bool mata::nft::are_equivalent(const Nft& lhs, const Nft& rhs, const ParameterMap& params) { diff --git a/tests/nft/builder.cc b/tests/nft/builder.cc index f93744ee..4e4cb2d9 100644 --- a/tests/nft/builder.cc +++ b/tests/nft/builder.cc @@ -64,7 +64,7 @@ TEST_CASE("nft::parse_from_mata()") { delta.add(0, 0, 0); delta.add(0, 1, 1); delta.add(1, 2, 0); - Nft nft{ delta, { 0 }, { 1 }, { 0 }, 1}; + Nft nft{ delta, { 0 }, { 1 }, { 0, 0 }, 1}; SECTION("from string") { Nft parsed{ mata::nft::builder::parse_from_mata(nft.print_to_mata()) }; diff --git a/tests/nft/nft.cc b/tests/nft/nft.cc index ff41a1d9..f2d39543 100644 --- a/tests/nft/nft.cc +++ b/tests/nft/nft.cc @@ -2997,3 +2997,131 @@ TEST_CASE("mata::nft::Nft::get_words") { CHECK(aut.get_words(5) == std::set{{}, {0}, {1}, {0, 1}, {1, 0}, {0,1,0}, {1,0,1}, {0,1,0,1}, {1,0,1,0}, {0,1,0,1,0}, {1,0,1,0,1}}); } } + +TEST_CASE("mata::nft::Nft::get_one_level_aut") { + #define REPLACE_DONT_CARE(delta, src, trg)\ + delta.add(src, 0, trg);\ + delta.add(src, 1, trg);\ + + #define SPLIT_TRANSITION(delta, src, symbol, inter, trg)\ + ((symbol == DONT_CARE) ? 
(delta.add(src, 0, inter), delta.add(src, 1, inter)) : (delta.add(src, symbol, inter)));\ + REPLACE_DONT_CARE(delta, inter, trg);\ + + SECTION("level_cnt == 1") { + Nft aut(5, {0}, {3, 4}, {0, 0, 0, 0, 0}, 1); + aut.delta.add(0, 0, 1); + aut.delta.add(0, 1, 2); + aut.delta.add(1, 0, 1); + aut.delta.add(1, DONT_CARE, 3); + aut.delta.add(2, DONT_CARE, 2); + aut.delta.add(2, DONT_CARE, 4); + aut.delta.add(3, 0, 1); + aut.delta.add(3, DONT_CARE, 3); + aut.delta.add(4, 1, 2); + aut.delta.add(4, DONT_CARE, 4); + + Nft expected(5, {0}, {3, 4}, {0, 0, 0, 0, 0}, 1); + expected.delta.add(0, 0, 1); + expected.delta.add(0, 1, 2); + expected.delta.add(1, 0, 1); + REPLACE_DONT_CARE(expected.delta, 1, 3); + REPLACE_DONT_CARE(expected.delta, 2, 2); + REPLACE_DONT_CARE(expected.delta, 2, 4); + expected.delta.add(3, 0, 1); + REPLACE_DONT_CARE(expected.delta, 3, 3); + expected.delta.add(4, 1, 2); + REPLACE_DONT_CARE(expected.delta, 4, 4); + + CHECK(nfa::are_equivalent(aut.get_one_level_aut({0, 1}), expected)); + CHECK(nfa::are_equivalent(aut.get_one_level_aut().get_one_level_aut({0, 1}), expected)); + CHECK(nft::are_equivalent(aut, expected)); + } + + SECTION("level_cnt == 2") { + Nft aut(7, {0}, {5, 6}, {0, 1, 1, 0, 0, 0, 0}, 2); + aut.delta.add(0, 0, 1); + aut.delta.add(0, 1, 2); + aut.delta.add(1, DONT_CARE, 3); + aut.delta.add(2, DONT_CARE, 4); + aut.delta.add(3, 0, 3); + aut.delta.add(3, 0, 5); + aut.delta.add(4, DONT_CARE, 4); + aut.delta.add(4, DONT_CARE, 6); + aut.delta.add(5, DONT_CARE, 5); + aut.delta.add(5, 0, 3); + aut.delta.add(6, DONT_CARE, 6); + aut.delta.add(6, 1, 4); + + Nft expected(15, {0}, {5, 6}, {0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1}, 2); + expected.delta.add(0, 0, 1); + expected.delta.add(0, 1, 2); + REPLACE_DONT_CARE(expected.delta, 1, 3); + REPLACE_DONT_CARE(expected.delta, 2, 4); + SPLIT_TRANSITION(expected.delta, 3, 0, 7, 3); + SPLIT_TRANSITION(expected.delta, 3, 0, 8, 5); + SPLIT_TRANSITION(expected.delta, 4, DONT_CARE, 10, 4); + SPLIT_TRANSITION(expected.delta, 4, DONT_CARE, 12, 6); + SPLIT_TRANSITION(expected.delta, 5, DONT_CARE, 13, 5); + SPLIT_TRANSITION(expected.delta, 5, 0, 9, 3); + SPLIT_TRANSITION(expected.delta, 6, DONT_CARE, 14, 6); + SPLIT_TRANSITION(expected.delta, 6, 1, 11, 4); + + CHECK(nfa::are_equivalent(aut.get_one_level_aut({0, 1}), expected)); + CHECK(nfa::are_equivalent(aut.get_one_level_aut().get_one_level_aut({0, 1}), expected)); + CHECK(nft::are_equivalent(aut, expected)); + + } + + SECTION("level_cnt == 4") { + Nft aut(17, {0}, {15, 16}, {0, 1, 1, 3, 3, 0, 0, 2, 2, 0, 0, 1, 1, 2, 2, 0, 0}, 4); + aut.delta.add(0, 0, 1); + aut.delta.add(0, 1, 2); + aut.delta.add(1, 0, 3); + aut.delta.add(2, DONT_CARE, 4); + aut.delta.add(3, 0, 5); + aut.delta.add(4, DONT_CARE, 6); + aut.delta.add(5, 0, 5); + aut.delta.add(5, 0, 7); + aut.delta.add(6, DONT_CARE, 6); + aut.delta.add(6, DONT_CARE, 8); + aut.delta.add(7, 0, 9); + aut.delta.add(8, DONT_CARE, 10); + aut.delta.add(9, 0, 11); + aut.delta.add(10, DONT_CARE, 12); + aut.delta.add(11, 0, 13); + aut.delta.add(12, DONT_CARE, 14); + aut.delta.add(13, 0, 15); + aut.delta.add(14, DONT_CARE, 16); + + Nft expected(31, {0}, {15, 16}, {0, 1, 1, 3, 3, 0, 0, 2, 2, 0, 0, 1, 1, 2, 2, 0, 0, 2, 2, 2, 1, 1, 3, 3, 1, 2, 1, 3, 3, 3, 3}, 4); + expected.delta.add(0, 0, 1); + expected.delta.add(0, 1, 2); + SPLIT_TRANSITION(expected.delta, 1, 0, 17, 3); + SPLIT_TRANSITION(expected.delta, 2, DONT_CARE, 18, 4); + expected.delta.add(3, 0, 5); + REPLACE_DONT_CARE(expected.delta, 4, 6); + expected.delta.add(5, 0, 20); + 
REPLACE_DONT_CARE(expected.delta, 20, 19); + REPLACE_DONT_CARE(expected.delta, 19, 29); + REPLACE_DONT_CARE(expected.delta, 29, 5); + SPLIT_TRANSITION(expected.delta, 5, 0, 21, 7); + REPLACE_DONT_CARE(expected.delta, 6, 24); + REPLACE_DONT_CARE(expected.delta, 24, 25); + REPLACE_DONT_CARE(expected.delta, 25, 30); + REPLACE_DONT_CARE(expected.delta, 30, 6); + SPLIT_TRANSITION(expected.delta, 6, DONT_CARE, 26, 8); + SPLIT_TRANSITION(expected.delta, 7, 0, 22, 9); + SPLIT_TRANSITION(expected.delta, 8, DONT_CARE, 27, 10); + expected.delta.add(9, 0, 11); + REPLACE_DONT_CARE(expected.delta, 10, 12); + expected.delta.add(11, 0, 13); + REPLACE_DONT_CARE(expected.delta, 12, 14); + SPLIT_TRANSITION(expected.delta, 13, 0, 23, 15); + SPLIT_TRANSITION(expected.delta, 14, DONT_CARE, 28, 16); + + CHECK(nfa::are_equivalent(aut.get_one_level_aut({0, 1}), expected)); + CHECK(nfa::are_equivalent(aut.get_one_level_aut().get_one_level_aut({0, 1}), expected)); + CHECK(nft::are_equivalent(aut, expected)); + } + +} From 08ee3999f8307c2c6437181f2857b249555febff Mon Sep 17 00:00:00 2001 From: koniksedy Date: Mon, 19 Feb 2024 19:03:33 +0100 Subject: [PATCH 19/24] nft::algorithms::is_included_antichains implemented using the method get_one_level_aut --- src/nft/inclusion.cc | 195 ++++--------------------------------------- 1 file changed, 14 insertions(+), 181 deletions(-) diff --git a/src/nft/inclusion.cc b/src/nft/inclusion.cc index eda720c3..ec9844e1 100644 --- a/src/nft/inclusion.cc +++ b/src/nft/inclusion.cc @@ -4,6 +4,7 @@ // MATA headers #include "mata/nft/nft.hh" #include "mata/nft/algorithms.hh" +#include "mata/nfa/algorithms.hh" #include "mata/utils/sparse-set.hh" using namespace mata::nft; @@ -35,190 +36,23 @@ bool mata::nft::algorithms::is_included_antichains( const Alphabet* const alphabet, //TODO: this parameter is not used Run* cex) { // {{{ - (void)alphabet; + if (smaller.levels_cnt != bigger.levels_cnt) { return false; } + if (smaller.levels_cnt == 0) { return nfa::algorithms::is_included_antichains(smaller, bigger, alphabet, cex); } - // TODO: Decide what is the best optimization for inclusion. - - using ProdStateType = std::tuple; - using ProdStatesType = std::vector; - // ProcessedType is indexed by states of the smaller nft - // tailored for pure antichain approach ... the simulation-based antichain will not work (without changes). - using ProcessedType = std::vector; - - auto subsumes = [](const ProdStateType& lhs, const ProdStateType& rhs) { - if (std::get<0>(lhs) != std::get<0>(rhs)) { - return false; - } - - const StateSet& lhs_bigger = std::get<1>(lhs); - const StateSet& rhs_bigger = std::get<1>(rhs); - - //TODO: Can this be done faster using more heuristics? E.g., compare the last elements first ... - //TODO: Try BDDs! What about some abstractions? - return lhs_bigger.IsSubsetOf(rhs_bigger); - }; - - - // initialize - ProdStatesType worklist{};//Pairs (q,S) to be processed. It sometimes gives a huge speed-up when they are kept sorted by the size of S, - // worklist.reserve(32); - // so those with smaller popped for processing first. - ProcessedType processed(smaller.num_of_states()); // Allocate to the number of states of the smaller nft. - // The pairs of each state are also kept sorted. It allows slightly faster antichain pruning - no need to test inclusion in sets that have less elements. - - //Is |S| < |S'| for the inut pairs (q,S) and (q',S')? 
- // auto smaller_set = [](const ProdStateType & a, const ProdStateType & b) { return std::get<1>(a).size() < std::get<1>(b).size(); }; - - std::vector distances_smaller = revert(smaller).distances_from_initial(); - std::vector distances_bigger = revert(bigger).distances_from_initial(); - - // auto closer_dist = [&](const ProdStateType & a, const ProdStateType & b) { - // return distances_smaller[a.first] < distances_smaller[b.first]; - // }; - - // auto closer_smaller = [&](const ProdStateType & a, const ProdStateType & b) { - // if (distances_smaller[a.first] != distances_smaller[b.first]) - // return distances_smaller[a.first] < distances_smaller[b.first]; - // else - // return a.second.size() < b.second.size(); - // }; - - // auto smaller_closer = [&](const ProdStateType & a, const ProdStateType & b) { - // if (a.second.size() != b.second.size()) - // return a.second.size() < b.second.size(); - // else - // return distances_smaller[a.first] < distances_smaller[b.first]; - // }; - - auto min_dst = [&](const StateSet& set) { - if (set.empty()) return Limits::max_state; - return distances_bigger[*std::min_element(set.begin(), set.end(), [&](const State a,const State b){return distances_bigger[a] < distances_bigger[b];})]; - }; - - auto lengths_incompatible = [&](const ProdStateType& pair) { - return distances_smaller[std::get<0>(pair)] < std::get<2>(pair); - }; - - auto insert_to_pairs = [&](ProdStatesType & pairs,const ProdStateType & pair) { - // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, smaller_set); - // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, closer_dist); - // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, smaller_closer); - // auto it = std::lower_bound(pairs.begin(), pairs.end(), pair, closer_smaller); - // pairs.insert(it,pair); - pairs.push_back(pair); - // std::sort(pairs.begin(), pairs.end(), smaller_closer); - }; - - // 'paths[s] == t' denotes that state 's' was accessed from state 't', - // 'paths[s] == s' means that 's' is an initial state - std::map> paths; - - // check initial states first // TODO: this would be done in the main loop as the first thing anyway? 
- for (const auto& state : smaller.initial) { - if (smaller.final[state] && - are_disjoint(bigger.initial, bigger.final)) - { - if (cex != nullptr) { cex->word.clear(); } - return false; + OrdVector symbols; + if (alphabet == nullptr) { + symbols = create_alphabet(smaller, bigger).get_alphabet_symbols(); + if (symbols.contains(DONT_CARE) && symbols.size() > 1) { + symbols.erase(DONT_CARE); } - - StateSet bigger_state_set{ bigger.initial }; - const ProdStateType st = std::tuple(state, bigger_state_set, min_dst(bigger_state_set)); - insert_to_pairs(worklist, st); - insert_to_pairs(processed[state],st); - - if (cex != nullptr) - paths.insert({ st, {st, 0}}); + } else { + symbols = alphabet->get_alphabet_symbols(); } - //For synchronised iteration over the set of states - SynchronizedExistentialSymbolPostIterator sync_iterator; - - // We use DFS strategy for the worklist processing - while (!worklist.empty()) { - // get a next product state - ProdStateType prod_state = *worklist.rbegin(); - worklist.pop_back(); - - const State& smaller_state = std::get<0>(prod_state); - const StateSet& bigger_set = std::get<1>(prod_state); - - sync_iterator.reset(); - for (State q: bigger_set) { - mata::utils::push_back(sync_iterator, bigger.delta[q]); - } - - // process transitions leaving smaller_state - for (const auto& smaller_move : smaller.delta[smaller_state]) { - const Symbol& smaller_symbol = smaller_move.symbol; - - StateSet bigger_succ = {}; - if(sync_iterator.synchronize_with(smaller_move)) { - bigger_succ = sync_iterator.unify_targets(); - } - - for (const State& smaller_succ : smaller_move.targets) { - const ProdStateType succ = {smaller_succ, bigger_succ, min_dst(bigger_succ)}; - - if (lengths_incompatible(succ) || (smaller.final[smaller_succ] && - !bigger.final.intersects_with(bigger_succ))) - { - if (cex != nullptr) { - cex->word.clear(); - cex->word.push_back(smaller_symbol); - ProdStateType trav = prod_state; - while (paths[trav].first != trav) - { // go back until initial state - cex->word.push_back(paths[trav].second); - trav = paths[trav].first; - } - - std::reverse(cex->word.begin(), cex->word.end()); - } - - return false; - } - - bool is_subsumed = false; - for (const auto& anti_state : processed[smaller_succ]) - { // trying to find in processed a smaller state than the newly created succ - // if (smaller_set(succ,anti_state)) { - // break; - // } - if (subsumes(anti_state, succ)) { - is_subsumed = true; - break; - } - } - - if (is_subsumed) { - continue; - } - - for (ProdStatesType* ds: {&processed[smaller_succ], &worklist}) { - //Pruning of processed and the worklist. - //Since they are ordered by the size of the sets, we can iterate from back, - //and as soon as we get to sets larger than succ, we can stop (larger sets cannot be subsets). - std::erase_if(*ds, [&](const auto& d){ return subsumes(succ, d); }); - // for (long it = static_cast(ds->size()-1);it>=0;--it) { - // // if (smaller_set((*ds)[static_cast(it)],succ)) - // // break; - // if (subsumes(succ, (*ds)[static_cast(it)])) { - // //Using index it instead of an iterator since erase could invalidate it (?) 
- // ds->erase(ds->begin() + it); - // } - // } - insert_to_pairs(*ds, succ); - } - - if(cex != nullptr) { - // also set that succ was accessed from state - paths[succ] = {prod_state, smaller_symbol}; - } - } - } - } - return true; + return nfa::algorithms::is_included_antichains(smaller.get_one_level_aut(symbols), + bigger.get_one_level_aut(symbols), + alphabet, + cex); } // }}} namespace { @@ -271,7 +105,6 @@ bool mata::nft::is_included( bool mata::nft::are_equivalent(const Nft& lhs, const Nft& rhs, const Alphabet *alphabet, const ParameterMap& params) { - if (lhs.levels_cnt != rhs.levels_cnt) { return false; } if (lhs.levels_cnt == 0) { return nfa::are_equivalent(lhs, rhs, alphabet, params); } From b672b4e2edc2767dcf5f610ffed3cbf26750f960 Mon Sep 17 00:00:00 2001 From: koniksedy Date: Tue, 20 Feb 2024 13:17:25 +0100 Subject: [PATCH 20/24] default level set to 1; levels size synchronized with delta --- include/mata/nft/nft.hh | 14 ++++++++------ src/nft/builder.cc | 4 ++++ src/nft/inclusion.cc | 13 ------------- src/nft/nft.cc | 14 +++++++++++++- tests/nft/nft.cc | 4 ++-- 5 files changed, 27 insertions(+), 22 deletions(-) diff --git a/include/mata/nft/nft.hh b/include/mata/nft/nft.hh index e3f6eb9c..eec4ccfe 100644 --- a/include/mata/nft/nft.hh +++ b/include/mata/nft/nft.hh @@ -51,7 +51,7 @@ struct Nft : public mata::nfa::Nfa { public: /// @brief For state q, levels[q] gives the state a level. std::vector levels{}; - Level levels_cnt = 0; + Level levels_cnt = 1; /// Key value store for additional attributes for the NFT. Keys are attribute names as strings and the value types /// are up to the user. /// For example, we can set up attributes such as "state_dict" for state dictionary attribute mapping states to their @@ -62,22 +62,24 @@ public: public: explicit Nft(Delta delta = {}, utils::SparseSet initial_states = {}, - utils::SparseSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 0, + utils::SparseSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(std::move(delta), std::move(initial_states), std::move(final_states), alphabet), levels(levels), levels_cnt(levels_cnt) {} + : mata::nfa::Nfa(std::move(delta), std::move(initial_states), std::move(final_states), alphabet), + levels(levels.size() ? std::move(levels) : std::vector(delta.num_of_states(), 0)), levels_cnt(levels_cnt) {} /** * @brief Construct a new explicit NFT with num_of_states states and optionally set initial and final states. * * @param[in] num_of_states Number of states for which to preallocate Delta. */ explicit Nft(const unsigned long num_of_states, StateSet initial_states = {}, - StateSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 0, Alphabet* + StateSet final_states = {}, std::vector levels = {}, const Level levels_cnt = 1, Alphabet* alphabet = nullptr) - : mata::nfa::Nfa(num_of_states, std::move(initial_states), std::move(final_states), alphabet), levels(levels), levels_cnt(levels_cnt) {} + : mata::nfa::Nfa(num_of_states, std::move(initial_states), std::move(final_states), alphabet), + levels(levels.size() ? std::move(levels) : std::vector(num_of_states, 0)), levels_cnt(levels_cnt) {} explicit Nft(const mata::nfa::Nfa& other) : mata::nfa::Nfa(other.delta, other.initial, other.final, other.alphabet), - levels(std::vector()), levels_cnt(0) {} + levels(std::vector(other.delta.num_of_states(), 0)), levels_cnt(1) {} /** * @brief Construct a new explicit NFT from other NFT. 
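With the constructor defaults above, an Nft created without an explicit level vector ends up with one level entry per state, all set to 0, and with levels_cnt == 1. A minimal sketch of this behavior follows; the concrete states, levels, and asserted values are invented for illustration, and only the mata::nft interface shown in this patch series is assumed:

    // Sketch only; assumes the mata::nft constructors introduced in this patch.
    #include <cassert>
    #include <vector>
    #include "mata/nft/nft.hh"

    using namespace mata::nft;

    int main() {
        Nft aut(3, { 0 }, { 2 });                            // no level vector supplied
        assert(aut.levels == (std::vector<Level>{ 0, 0, 0 })); // one entry per state, all 0
        assert(aut.levels_cnt == 1);                           // default number of levels

        Nft two_level(4, { 0 }, { 3 }, { 0, 1, 0, 1 }, 2);   // explicit two-level layout
        assert(two_level.levels[1] == 1);
        assert(two_level.levels_cnt == 2);
        return 0;
    }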
diff --git a/src/nft/builder.cc b/src/nft/builder.cc index aebaf392..2d799f9f 100644 --- a/src/nft/builder.cc +++ b/src/nft/builder.cc @@ -340,5 +340,9 @@ Nft builder::create_from_nfa(const mata::nfa::Nfa& nfa, Level level_cnt, const s std::ranges::for_each(nfa.initial, [&](const State nfa_state){ nft.initial.insert(state_mapping[nfa_state]); }); nft.final.reserve(nfa.final.size()); std::ranges::for_each(nfa.final, [&](const State nfa_state){ nft.final.insert(state_mapping[nfa_state]); }); + + // TODO(nft): HACK. Levels do not work if the size of delta differs from the size of the vector level. + nft.levels.resize(nft.delta.num_of_states()); + return nft; } diff --git a/src/nft/inclusion.cc b/src/nft/inclusion.cc index ec9844e1..8d786746 100644 --- a/src/nft/inclusion.cc +++ b/src/nft/inclusion.cc @@ -37,7 +37,6 @@ bool mata::nft::algorithms::is_included_antichains( Run* cex) { // {{{ if (smaller.levels_cnt != bigger.levels_cnt) { return false; } - if (smaller.levels_cnt == 0) { return nfa::algorithms::is_included_antichains(smaller, bigger, alphabet, cex); } OrdVector symbols; if (alphabet == nullptr) { @@ -58,17 +57,6 @@ bool mata::nft::algorithms::is_included_antichains( namespace { using AlgoType = decltype(algorithms::is_included_naive)*; - bool compute_equivalence(const Nft &lhs, const Nft &rhs, const mata::Alphabet *const alphabet, const AlgoType &algo) { - //alphabet should not be needed as input parameter - if (algo(lhs, rhs, alphabet, nullptr)) { - if (algo(rhs, lhs, alphabet, nullptr)) { - return true; - } - } - - return false; - } - AlgoType set_algorithm(const std::string &function_name, const ParameterMap ¶ms) { if (!haskey(params, "algorithm")) { throw std::runtime_error(function_name + @@ -106,7 +94,6 @@ bool mata::nft::is_included( bool mata::nft::are_equivalent(const Nft& lhs, const Nft& rhs, const Alphabet *alphabet, const ParameterMap& params) { if (lhs.levels_cnt != rhs.levels_cnt) { return false; } - if (lhs.levels_cnt == 0) { return nfa::are_equivalent(lhs, rhs, alphabet, params); } OrdVector symbols; if (alphabet == nullptr) { diff --git a/src/nft/nft.cc b/src/nft/nft.cc index fb8b112a..2a0e4977 100644 --- a/src/nft/nft.cc +++ b/src/nft/nft.cc @@ -240,6 +240,13 @@ void Nft::make_one_level_aut(const utils::OrdVector &dcare_replacements) Nft Nft::get_one_level_aut(const utils::OrdVector &dcare_replacements) const { Nft result{ *this }; + + // TODO(nft): Create a class for LEVELS with overloaded getter and setter. + // HACK. Works only for automata without levels. + if (result.levels.size() != result.num_of_states()) { + return result; + } + result.make_one_level_aut(dcare_replacements); return result; } @@ -263,7 +270,12 @@ State Nft::add_state() { } State Nft::add_state(State state) { - levels.push_back(0); + const size_t levels_size = levels.size(); + if (state >= levels_size) { + levels.resize(state + 1); + const size_t begin_idx = (levels_size == 0) ? 
0 : levels_size - 1; + std::fill(levels.begin() + static_cast(begin_idx), levels.end(), 0); + } return mata::nfa::Nfa::add_state(state); } diff --git a/tests/nft/nft.cc b/tests/nft/nft.cc index f2d39543..0c3ddc8a 100644 --- a/tests/nft/nft.cc +++ b/tests/nft/nft.cc @@ -3008,7 +3008,7 @@ TEST_CASE("mata::nft::Nft::get_one_level_aut") { REPLACE_DONT_CARE(delta, inter, trg);\ SECTION("level_cnt == 1") { - Nft aut(5, {0}, {3, 4}, {0, 0, 0, 0, 0}, 1); + Nft aut(5, {0}, {3, 4}); aut.delta.add(0, 0, 1); aut.delta.add(0, 1, 2); aut.delta.add(1, 0, 1); @@ -3020,7 +3020,7 @@ TEST_CASE("mata::nft::Nft::get_one_level_aut") { aut.delta.add(4, 1, 2); aut.delta.add(4, DONT_CARE, 4); - Nft expected(5, {0}, {3, 4}, {0, 0, 0, 0, 0}, 1); + Nft expected(5, {0}, {3, 4}); expected.delta.add(0, 0, 1); expected.delta.add(0, 1, 2); expected.delta.add(1, 0, 1); From b9360f20e266011e03589abfcc0089c98a4190e3 Mon Sep 17 00:00:00 2001 From: koniksedy Date: Tue, 20 Feb 2024 14:27:44 +0100 Subject: [PATCH 21/24] levels initialization fixed --- include/mata/nft/nft.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/mata/nft/nft.hh b/include/mata/nft/nft.hh index eec4ccfe..8fdcfb7e 100644 --- a/include/mata/nft/nft.hh +++ b/include/mata/nft/nft.hh @@ -79,7 +79,7 @@ public: explicit Nft(const mata::nfa::Nfa& other) : mata::nfa::Nfa(other.delta, other.initial, other.final, other.alphabet), - levels(std::vector(other.delta.num_of_states(), 0)), levels_cnt(1) {} + levels(std::vector(other.num_of_states(), 0)), levels_cnt(1) {} /** * @brief Construct a new explicit NFT from other NFT. From 5191aab7bc9cb025cacaf8cd11af88d9ec3c6c55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 21 Feb 2024 09:20:26 +0100 Subject: [PATCH 22/24] Apply resize() levels instead of push_back() only when needed --- include/mata/nft/types.hh | 3 +++ src/nft/nft.cc | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/include/mata/nft/types.hh b/include/mata/nft/types.hh index 2fb38269..5168c4b5 100644 --- a/include/mata/nft/types.hh +++ b/include/mata/nft/types.hh @@ -44,6 +44,9 @@ struct Nft; ///< A non-deterministic finite automaton. constexpr Symbol EPSILON = mata::nfa::EPSILON; constexpr Symbol DONT_CARE = EPSILON - 1; +constexpr Level DEFAULT_LEVEL{ 0 }; +constexpr Level DEFAULT_LEVEL_CNT{ 1 }; + } // namespace mata::nfa. #endif //MATA_TYPES_HH diff --git a/src/nft/nft.cc b/src/nft/nft.cc index 2a0e4977..7c3b524a 100644 --- a/src/nft/nft.cc +++ b/src/nft/nft.cc @@ -265,7 +265,10 @@ Nft& Nft::operator=(Nft&& other) noexcept { } State Nft::add_state() { - levels.push_back(0); + const size_t required_capacity{ num_of_states() + 1 }; + if (levels.size() < required_capacity) { + levels.resize(required_capacity, DEFAULT_LEVEL); + } return mata::nfa::Nfa::add_state(); } From f7223db2fd09560de2afbe66e25fb280802fd1ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 21 Feb 2024 09:56:33 +0100 Subject: [PATCH 23/24] Allow adding new states with levels --- include/mata/nft/nft.hh | 14 +++++++++++++- src/nft/nft.cc | 22 ++++++++++++++++------ tests/nft/nft.cc | 19 +++++++++++++++++++ 3 files changed, 48 insertions(+), 7 deletions(-) diff --git a/include/mata/nft/nft.hh b/include/mata/nft/nft.hh index 8fdcfb7e..f968080e 100644 --- a/include/mata/nft/nft.hh +++ b/include/mata/nft/nft.hh @@ -106,11 +106,23 @@ public: State add_state(); /** - * Add state @p state to @c delta if @p state is not in @c delta yet. 
+ * Add state @p state to @c this if @p state is not in @c this yet. * @return The requested @p state. */ State add_state(State state); + /** + * Add a new (fresh) state to the automaton with level @p level. + * @return The newly created state. + */ + State add_state_with_level(Level level); + + /** + * Add state @p state to @c this with level @p level if @p state is not in @c this yet. + * @return The requested @p state. + */ + State add_state_with_level(State state, Level level); + /** * @brief Clear the underlying NFT to a blank NFT. * diff --git a/src/nft/nft.cc b/src/nft/nft.cc index 7c3b524a..1c8b355f 100644 --- a/src/nft/nft.cc +++ b/src/nft/nft.cc @@ -272,16 +272,26 @@ State Nft::add_state() { return mata::nfa::Nfa::add_state(); } -State Nft::add_state(State state) { - const size_t levels_size = levels.size(); - if (state >= levels_size) { - levels.resize(state + 1); - const size_t begin_idx = (levels_size == 0) ? 0 : levels_size - 1; - std::fill(levels.begin() + static_cast(begin_idx), levels.end(), 0); +State Nft::add_state(const State state) { + const size_t required_capacity{ state + 1 }; + if (levels.size() < required_capacity) { + levels.resize(required_capacity, DEFAULT_LEVEL); } return mata::nfa::Nfa::add_state(state); } +State Nft::add_state_with_level(const Level level) { + const State state{ add_state() }; + levels[state] = level; + return state; +} + +State Nft::add_state_with_level(const State state, const Level level) { + add_state(state); + levels[state] = level; + return state; +} + void Nft::clear() { mata::nfa::Nfa::clear(); levels.clear(); diff --git a/tests/nft/nft.cc b/tests/nft/nft.cc index 0c3ddc8a..bddcba61 100644 --- a/tests/nft/nft.cc +++ b/tests/nft/nft.cc @@ -3125,3 +3125,22 @@ TEST_CASE("mata::nft::Nft::get_one_level_aut") { } } + +TEST_CASE("mata::nft::Nft::add_state()") { + Nft nft{}; + State state{ nft.add_state() }; + CHECK(state == 0); + CHECK(nft.levels[state] == 0); + state = nft.add_state(4); + CHECK(state == 4); + CHECK(nft.levels[state] == 0); + CHECK(nft.num_of_states() == 5); + state = nft.add_state_with_level(3); + CHECK(state == 5); + CHECK(nft.levels[state] == 3); + CHECK(nft.num_of_states() == 6); + state = nft.add_state_with_level(12, 1); + CHECK(state == 12); + CHECK(nft.levels[state] == 1); + CHECK(nft.num_of_states() == 13); +} From 2fd87d3f11663434138f017300a66439d4c0c541 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Chocholat=C3=BD?= Date: Wed, 21 Feb 2024 15:45:00 +0100 Subject: [PATCH 24/24] Remove unused import This also fixes running tests on MacOS in GitHub Actions. --- include/mata/nft/builder.hh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/include/mata/nft/builder.hh b/include/mata/nft/builder.hh index 01677838..9b8bbaa7 100644 --- a/include/mata/nft/builder.hh +++ b/include/mata/nft/builder.hh @@ -3,14 +3,11 @@ #ifndef LIBMATA_NFT_BUILDER_HH #define LIBMATA_NFT_BUILDER_HH -#include "nft.hh" -#include - #include "mata/nfa/builder.hh" +#include "nft.hh" #include - /** * Namespace providing options to build NFAs. */
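As a closing illustration of how the pieces introduced in the later patches fit together (the level-aware constructor defaults, add_state_with_level(), and are_equivalent() implemented via get_one_level_aut()), the sketch below builds the same small two-level transducer in two ways and compares the results. The states and symbols are invented for the example; only the mata::nft API shown in this patch series is assumed:

    // Sketch only; the automata below are invented examples.
    #include "mata/nft/nft.hh"

    using namespace mata::nft;

    int main() {
        // Built incrementally with the level-aware state API (PATCH 23).
        Nft lhs{};
        lhs.levels_cnt = 2;
        const State p{ lhs.add_state_with_level(0) };  // fresh state at level 0
        const State q{ lhs.add_state_with_level(1) };  // fresh state at level 1
        const State r{ lhs.add_state_with_level(0) };
        lhs.initial.insert(p);
        lhs.final.insert(r);
        lhs.delta.add(p, 'a', q);
        lhs.delta.add(q, 'b', r);

        // The same transducer described directly through the constructor (PATCH 20).
        Nft rhs(3, { 0 }, { 2 }, { 0, 1, 0 }, 2);
        rhs.delta.add(0, 'a', 1);
        rhs.delta.add(1, 'b', 2);

        // PATCH 18: equivalence is decided on the one-level automata.
        return mata::nft::are_equivalent(lhs, rhs) ? 0 : 1;
    }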