def random_CDMVLP(nb_features, nb_targets, max_feature_values, max_target_values, algorithm):
    dataset = random_StateTransitionsDataset(100, nb_features, nb_targets, max_feature_values, max_target_values)

    model = CDMVLP(features=dataset.features, targets=dataset.targets)
    model.compile(algorithm=algorithm)
    model.fit(dataset=dataset)

    return model
def test_predict(self):
    print(">> CDMVLP.predict()")

    for i in range(self._nb_tests):
        dataset = random_StateTransitionsDataset( \
            nb_transitions=random.randint(1, self._nb_transitions), \
            nb_features=random.randint(1, self._nb_features), \
            nb_targets=random.randint(1, self._nb_targets), \
            max_feature_values=self._nb_feature_values, \
            max_target_values=self._nb_target_values)

        for algorithm in self._SUPPORTED_ALGORITHMS:
            model = CDMVLP(features=dataset.features, targets=dataset.targets)
            model.compile(algorithm=algorithm)
            model.fit(dataset=dataset)

            feature_states = list(set(tuple(s1) for s1, s2 in dataset.data))

            prediction = model.predict(feature_states)

            # Check each prediction against a direct call to the constrained synchronous semantics
            for state_id, s1 in enumerate(feature_states):
                feature_state_encoded = []
                for var_id, val in enumerate(s1):
                    val_id = model.features[var_id][1].index(str(val))
                    feature_state_encoded.append(val_id)
                #eprint(feature_state_encoded)

                target_states = SynchronousConstrained.next(feature_state_encoded, model.targets, model.rules, model.constraints)

                # Decode expected target states, "?" for missing values
                output = []
                for s in target_states:
                    target_state = []
                    for var_id, val_id in enumerate(s):
                        #eprint(var_id, val_id)
                        if val_id == -1:
                            target_state.append("?")
                        else:
                            target_state.append(model.targets[var_id][1][val_id])
                    output.append(target_state)

                self.assertEqual(prediction[state_id][0], list(s1))
                self.assertEqual(prediction[state_id][1], output)

            # Force missing value: pick one target variable and drop all of its rules
            removed_var_id = random.randint(0, len(model.targets) - 1)
            model.rules = [r for r in model.rules if r.head_variable != removed_var_id]

            prediction = model.predict(feature_states)

            for state_id, s1 in enumerate(feature_states):
                feature_state_encoded = []
                for var_id, val in enumerate(s1):
                    val_id = model.features[var_id][1].index(str(val))
                    feature_state_encoded.append(val_id)
                #eprint(feature_state_encoded)

                target_states = SynchronousConstrained.next(feature_state_encoded, model.targets, model.rules, model.constraints)

                output = []
                for s in target_states:
                    target_state = []
                    for var_id, val_id in enumerate(s):
                        #eprint(var_id, val_id)
                        if val_id == -1:
                            target_state.append("?")
                        else:
                            target_state.append(model.targets[var_id][1][val_id])
                    output.append(target_state)

                self.assertEqual(prediction[state_id][1], output)

            # Exceptions:
            self.assertRaises(TypeError, model.predict, "") # Feature_states bad format: is not a list
            self.assertRaises(TypeError, model.predict, [["0", "1"], 0, 10]) # Feature_states bad format: is not a list of lists
            self.assertRaises(TypeError, model.predict, [["0", "1"], [0, 10]]) # Feature_states bad format: is not a list of lists of strings

            feature_states = [list(s) for s in set(tuple(s1) for s1, s2 in dataset.data)]

            state_id = random.randint(0, len(feature_states) - 1)
            original = feature_states[state_id].copy()

            feature_states[state_id] = feature_states[state_id][:-random.randint(1, len(dataset.features))]
            self.assertRaises(TypeError, model.predict, feature_states) # Feature_states bad format: state smaller than model features
            feature_states[state_id] = original.copy()

            feature_states[state_id].extend(["0" for i in range(random.randint(1, 10))])
            self.assertRaises(TypeError, model.predict, feature_states) # Feature_states bad format: state bigger than model features
            feature_states[state_id] = original.copy()

            var_id = random.randint(0, len(dataset.features) - 1)
            feature_states[state_id][var_id] = "bad_value"
            self.assertRaises(ValueError, model.predict, feature_states) # Feature_states bad format: value out of domain
            feature_states[state_id] = original.copy()
def test_next(self):
    print(">> pylfit.semantics.SynchronousConstrained.next(feature_state, targets, rules, constraints)")

    # Unit tests
    data = [ \
        ([0,0,0],[0,0,1]), \
        ([0,0,0],[1,0,0]), \
        ([1,0,0],[0,0,0]), \
        ([0,1,0],[1,0,1]), \
        ([0,0,1],[0,0,1]), \
        ([1,1,0],[1,0,0]), \
        ([1,0,1],[0,1,0]), \
        ([0,1,1],[1,0,1]), \
        ([1,1,1],[1,1,0])]
    feature_names = ["p_t-1","q_t-1","r_t-1"]
    target_names = ["p_t","q_t","r_t"]

    dataset = pylfit.preprocessing.transitions_dataset_from_array(data=data, feature_names=feature_names, target_names=target_names)

    model = CDMVLP(features=dataset.features, targets=dataset.targets)
    model.compile(algorithm="synchronizer")
    model.fit(dataset=dataset)

    feature_state = Algorithm.encode_state([0,0,0], model.features)
    self.assertEqual(set([tuple(s) for s in SynchronousConstrained.next(feature_state, model.targets, model.rules, model.constraints)]), set([(1,0,0), (0,0,1)]))

    feature_state = Algorithm.encode_state([1,1,1], model.features)
    self.assertEqual(set([tuple(s) for s in SynchronousConstrained.next(feature_state, model.targets, model.rules, model.constraints)]), set([(1,1,0)]))

    feature_state = Algorithm.encode_state([0,1,0], model.features)
    self.assertEqual(set([tuple(s) for s in SynchronousConstrained.next(feature_state, model.targets, model.rules, model.constraints)]), set([(1,0,1)]))

    # Random tests
    for i in range(self._nb_tests):
        # Learn a CDMVLP from a random dataset
        model = random_CDMVLP( \
            nb_features=random.randint(1, self._nb_features), \
            nb_targets=random.randint(1, self._nb_targets), \
            max_feature_values=self._nb_feature_values, \
            max_target_values=self._nb_target_values, \
            algorithm="synchronizer")

        feature_state = random.choice(model.feature_states())
        feature_state = Algorithm.encode_state(feature_state, model.features)

        target_states = SynchronousConstrained.next(feature_state, model.targets, model.rules, model.constraints)

        # Apply synchronous semantics
        candidates = Synchronous.next(feature_state, model.targets, model.rules)

        # Apply constraints: discard candidates matched by a constraint
        expected = []
        for s in candidates:
            valid = True
            for c in model.constraints:
                if c.matches(list(feature_state) + list(s)):
                    valid = False
                    #eprint(c, " matches ", feature_state, ", ", s)
                    break
            if valid:
                expected.append(s)

        for s2 in target_states:
            self.assertTrue(s2 in expected)
        for s2 in expected:
            self.assertTrue(s2 in target_states)
def test_fit(self):
    print(">> CDMVLP.fit(dataset)")

    for i in range(self._nb_tests):
        dataset = random_StateTransitionsDataset( \
            nb_transitions=random.randint(1, self._nb_transitions), \
            nb_features=random.randint(1, self._nb_features), \
            nb_targets=random.randint(1, self._nb_targets), \
            max_feature_values=self._nb_feature_values, \
            max_target_values=self._nb_target_values)

        for algorithm in self._SUPPORTED_ALGORITHMS:
            for verbose in [0, 1]:
                model = CDMVLP(features=dataset.features, targets=dataset.targets)
                model.compile(algorithm=algorithm)

                f = io.StringIO()
                with contextlib.redirect_stderr(f):
                    model.fit(dataset=dataset, verbose=verbose)

                expected_rules, expected_constraints = Synchronizer.fit(dataset, complete=(algorithm == "synchronizer"))

                self.assertEqual(expected_rules, model.rules)
                self.assertEqual(expected_constraints, model.constraints)

                # Exceptions
                #------------
                model = CDMVLP(features=dataset.features, targets=dataset.targets)
                model.compile(algorithm=algorithm)

                self.assertRaises(ValueError, model.fit, [], verbose) # dataset is not of a valid type

                model.algorithm = "bad_value"
                self.assertRaises(ValueError, model.fit, dataset, verbose) # algorithm not supported
                model.algorithm = algorithm

                original = CDMVLP._COMPATIBLE_DATASETS.copy()
                class newdataset(Dataset):
                    def __init__(self, data, features, targets):
                        x = ""
                CDMVLP._COMPATIBLE_DATASETS = [newdataset]
                self.assertRaises(ValueError, model.fit, newdataset([], [], []), verbose) # dataset not supported by the algorithm
                CDMVLP._COMPATIBLE_DATASETS = original

                model.algorithm = "gula"
                original = CDMVLP._ALGORITHMS.copy()
                class newdataset(Dataset):
                    def __init__(self, data, features, targets):
                        x = ""
                CDMVLP._ALGORITHMS = ["gula"]
                self.assertRaises(NotImplementedError, model.fit, dataset, verbose) # algorithm not implemented for this dataset yet
                CDMVLP._ALGORITHMS = original