Example #1
    def model_subset_of_dfa_query(self, dfa: DFA, start_time, timeout):
        """
        Tests whether the model language is a subset of the dfa language by testing random words.
        If not subset returns an example
        """

        # PAC-style sample size for the i-th equivalence query:
        # (1 / epsilon) * (ln(1 / delta) + ln(2) * (i + 1)).
        number_of_rounds = int(
            (1 / self.epsilon) * (np.log(1 / self.delta) + np.log(2) *
                                  (self._num_equivalence_asked + 1)))
        self._num_equivalence_asked += 1

        if isinstance(self.model, RNNLanguageClasifier):
            batch_size = 200
            for _ in range(number_of_rounds // batch_size + 1):
                if time.time() - start_time > timeout:
                    return None
                batch = [
                    random_word(self.model.alphabet) for _ in range(batch_size)
                ]
                # A counterexample to the subset relation is a word the RNN
                # accepts (score > 0.5) but the candidate DFA rejects.
                in_model = self.model.is_words_in_batch(batch) > 0.5
                in_dfa = [dfa.is_word_in(w) for w in batch]
                for x, y, w in zip(in_model, in_dfa, batch):
                    if x and not y:
                        return w
            return None

        else:
            for _ in range(number_of_rounds):
                word = random_word(self.model.alphabet)
                # Same one-directional subset check as in the batch branch.
                if self.model.is_word_in(word) and not dfa.is_word_in(word):
                    return word
            return None
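
Both examples size the random sample with the PAC-style bound (1 / epsilon) * (ln(1 / delta) + ln(2) * (i + 1)), where i counts the equivalence queries asked so far. Below is a minimal standalone sketch of that computation; the helper name pac_round_count is hypothetical and not part of the source.

import numpy as np

def pac_round_count(epsilon: float, delta: float, num_queries_asked: int) -> int:
    """Number of random words to sample for the i-th PAC equivalence query,
    mirroring the bound used in the methods above."""
    return int((1 / epsilon) *
               (np.log(1 / delta) + np.log(2) * (num_queries_asked + 1)))

# With epsilon = delta = 0.001, the first query samples about 7600 words.
print(pac_round_count(0.001, 0.001, 0))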
Example #2
    def equivalence_query(self, dfa: DFA, start_time, timeout):
        """
        Tests whether the dfa is equivalent to the model by testing random words.
        If not equivalent returns an example
        """

        # Same PAC-style sample size as in model_subset_of_dfa_query.
        number_of_rounds = int(
            (1 / self.epsilon) * (np.log(1 / self.delta) + np.log(2) *
                                  (self._num_equivalence_asked + 1)))
        self._num_equivalence_asked += 1

        if self.is_counter_example_in_batches:
            batch_size = 200
            for _ in range(number_of_rounds // batch_size + 1):
                if time.time() - start_time > timeout:
                    return None
                batch = [
                    random_word(self.model.alphabet) for _ in range(batch_size)
                ]
                # Any word on which the model and the candidate DFA disagree
                # is a counterexample to equivalence.
                in_model = self.model.is_words_in_batch(batch)
                in_dfa = [dfa.is_word_in(w) for w in batch]
                for x, y, w in zip(in_model, in_dfa, batch):
                    if x != y:
                        return w
            return None

        else:
            # Sample one word at a time and compare membership directly.
            for _ in range(number_of_rounds):
                word = random_word(self.model.alphabet)
                if self.model.is_word_in(word) != dfa.is_word_in(word):
                    return word
            return None
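
Both queries rely on a random_word helper whose implementation is not shown here. The following is a minimal sketch, assuming it samples letters uniformly from the alphabet with geometrically distributed word length; the stop_probability parameter is an assumption, not part of the source.

import random

def random_word(alphabet, stop_probability=0.05):
    """Sketch of the assumed behavior of random_word: pick letters uniformly
    at random, stopping after each draw with a fixed probability."""
    word = []
    while random.random() > stop_probability:
        word.append(random.choice(list(alphabet)))
    return tuple(word)

# Usage: draw a few random words over a binary alphabet.
for _ in range(3):
    print(random_word({"a", "b"}))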