def __init__(self, context):
    self._UPPER = 'upper'
    self._LOWER = 'lower'
    self._CONCEPT = 'concept'
    self._lattice = {}
    # The starting concept has the full attribute set as its intent and
    # the objects sharing all attributes as its extent.
    init_intent = context._Attributes.supremum
    init_extent = context._Objects.fromint(context.down(init_intent))
    init_concept = Concept(init_extent, init_intent)
    self._lattice[init_concept.get_id()] = {
        self._UPPER: set(),
        self._LOWER: set(),
        self._CONCEPT: init_concept}
    queue = deque((init_concept, ))
    while queue:
        concept = queue.pop()
        for neighbor in self.__calculate_upper_neighbors(context, concept):
            existing_neighbor = self._lattice.get(neighbor.get_id())
            if not existing_neighbor:
                self._lattice[neighbor.get_id()] = {
                    self._UPPER: set(),
                    self._LOWER: set((concept, )),
                    self._CONCEPT: neighbor}
                # Only newly discovered concepts need further processing.
                queue.append(neighbor)
            else:
                existing_neighbor[self._LOWER].add(concept)
            self._lattice[concept.get_id()][self._UPPER].add(neighbor)
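# --- Usage sketch (illustrative, not part of the class above) ---
# A minimal, hypothetical example of building the lattice. It assumes the
# module-level imports this code relies on (`from collections import deque`,
# `from bitsets import bitset`) plus this project's Context, Concept and
# Lattice classes; the toy objects/attributes below are made up.
Objects = bitset('Objects', ('a', 'b'))
Attributes = bitset('Attributes', ('1', '2', '3'))
context = Context(((0, 1, 1), (1, 1, 0)), Objects, Attributes)

lattice = Lattice(context)
# Each internal entry maps a concept id to the concept itself plus the sets
# of its upper and lower neighbours; get_concepts() exposes the concepts.
for concept in lattice.get_concepts():
    print(concept.extent.members(), concept.intent.members())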
def test_concept_id():
    Objects = bitset('Objects', ('a', 'b'))
    Attributes = bitset('Attributes', ('1', '2', '3'))

    concept = Concept(Objects(['a']), Attributes(['3']))

    assert concept.get_id() == 4
def test_lattice(datafiles):
    for lattice in datafiles.listdir():
        lattice_dict = cex_to_list(lattice)

        Objects = bitset('Objects', lattice_dict['context']['objects'])
        Attributes = bitset(
            'Attributes', lattice_dict['context']['attributes'])

        print(Objects.supremum.members())
        print(Attributes.supremum.members())

        context = Context(
            lattice_dict['context']['table'], Objects, Attributes)

        expected_concepts = []
        for intent in lattice_dict['concepts']:
            intent = Attributes.frombools(intent)
            extent = context.down(intent)
            expected_concepts.append(Concept(extent, intent))

        result = Lattice(context)

        assert len(expected_concepts) == len(result.get_concepts())
        assert set(expected_concepts) == set(result.get_concepts())
def test_cohesion_min(similarity_function):
    bools = ((0, 1), (1, 1))
    Objects = bitset('Objects', ('a', 'b'))
    Attributes = bitset('Attributes', ('1', '2'))
    context = Context(bools, Objects, Attributes)

    concept = Concept(Objects(['a', 'b']), Attributes(['2']))
    rows = context.filter_rows_by_extent(concept.extent)

    expected_coh = similarity_function(rows[0], rows[1])

    assert cohesion_min(concept, context, similarity_function) == expected_coh
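# --- Fixture sketch (hypothetical, not part of the tests above) ---
# The cohesion tests receive `similarity_function` as a pytest fixture that
# is not shown here. One plausible definition, assuming the rows returned by
# Context.filter_rows_by_extent behave as attribute bitsets (int subclasses),
# is a Jaccard similarity over the set bits:
import pytest


@pytest.fixture
def similarity_function():
    def jaccard(row_a, row_b):
        union = row_a | row_b
        if not union:
            return 1.0
        return bin(row_a & row_b).count('1') / bin(union).count('1')
    return jaccard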
def __calculate_upper_neighbors(self, context, concept):
    # Candidate generators: every object not already in the concept's extent.
    minimal = ~concept.extent
    for objects in context._Objects.atomic(minimal):
        # Close the extent extended by the candidate object.
        new_intent = context.up(concept.extent | objects)
        new_extent = context.down(new_intent)
        # If the closure pulled in other objects that are still candidates,
        # the generated concept is not an upper neighbor; remove the
        # generating object from the candidate set instead.
        if minimal & (new_extent & ~objects):
            minimal &= ~objects
        else:
            neighbor = Concept(
                context._Objects.fromint(new_extent),
                context._Attributes.fromint(new_intent))
            yield neighbor
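# --- Closure illustration (illustrative, not part of the class above) ---
# The neighbour computation relies on the context's derivation operators:
# `up` maps a set of objects to the attributes they all share, and `down`
# maps a set of attributes to the objects that have all of them. A tiny
# example, assuming the same Context/bitset helpers the tests use:
Objects = bitset('Objects', ('a', 'b', 'c'))
Attributes = bitset('Attributes', ('1', '2', '3'))
context = Context(((0, 1, 1), (1, 1, 0), (0, 1, 1)), Objects, Attributes)

intent = Attributes.fromint(context.up(Objects(['a'])))   # {'2', '3'}
extent = Objects.fromint(context.down(intent))            # {'a', 'c'}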
def test_cohesion_avg_2(similarity_function):
    bools = ((0, 1), (1, 1), (0, 1))
    Objects = bitset('Objects', ('a', 'b', 'c'))
    Attributes = bitset('Attributes', ('1', '2'))
    context = Context(bools, Objects, Attributes)

    concept = Concept(Objects(['a', 'b', 'c']), Attributes(['2']))
    rows = context.filter_rows_by_extent(concept.extent)

    suma = similarity_function(rows[0], rows[1]) + \
        similarity_function(rows[1], rows[2]) + \
        similarity_function(rows[0], rows[2])
    expected_coh = suma / (len(concept.extent) * (len(concept.extent) - 1) / 2)

    assert cohesion_avg(concept, context, similarity_function) == expected_coh
def __init__(self, context, similarity_measure):
    self._UPPER = 'upper'
    self._LOWER = 'lower'
    self._CONCEPT = 'concept'
    self._subset_lattice = {}

    Objects = context._Objects
    Attributes = context._Attributes

    # Bottom concept: full attribute set as intent.
    init_intent = context._Attributes.supremum
    init_extent = context._Objects.fromint(context.down(init_intent))
    init_concept = Concept(init_extent, init_intent)

    # Start from the object concepts (closure of each single object).
    atoms = Objects.supremum.atoms()
    worklist = set(
        Concept(Objects.fromint(context.down(context.up(extent))),
                Attributes.fromint(context.up(extent)))
        for extent in atoms)

    self._subset_lattice[init_concept.get_id()] = {
        self._UPPER: worklist.copy(),
        self._LOWER: set(),
        self._CONCEPT: init_concept}

    # Add the initial worklist concepts to the subset lattice.
    for atom in worklist:
        self._subset_lattice[atom.get_id()] = {
            self._UPPER: set(),
            self._LOWER: set([init_concept]),
            self._CONCEPT: atom}

    while len(worklist) > 1:
        # Pick the pair(s) of concepts whose intents are most similar.
        concept_combinations = tuple(combinations(worklist, 2))
        distances = [1 - similarity_measure(
            concepts[0].intent, concepts[1].intent)
            for concepts in concept_combinations]
        min_distance = min(distances)

        found = set()
        for concept_tuple, distance in zip(concept_combinations, distances):
            if distance == min_distance:
                found.add(concept_tuple[0])
                found.add(concept_tuple[1])
        worklist = worklist.difference(found)

        # Merge the closest concepts into the closure of their joint extent.
        extent = reduce(lambda c1, c2: c1 | c2,
                        map(lambda x: x.extent, found))
        new_intent = context.up(extent)
        new_extent = context.down(new_intent)
        new_concept = Concept(Objects.fromint(new_extent),
                              Attributes.fromint(new_intent))

        existing_neighbor = self._subset_lattice.get(new_concept.get_id())
        for concept in found:
            if not existing_neighbor:
                self._subset_lattice[new_concept.get_id()] = {
                    self._UPPER: set(),
                    self._LOWER: set((concept, )),
                    self._CONCEPT: new_concept}
                # Keep a reference so later iterations extend this entry
                # instead of overwriting it.
                existing_neighbor = self._subset_lattice[new_concept.get_id()]
            else:
                existing_neighbor[self._LOWER].add(concept)
            self._subset_lattice[concept.get_id()][self._UPPER].add(
                new_concept)

        worklist.add(new_concept)
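# --- Usage sketch (hypothetical, not part of the class above) ---
# The constructor above agglomerates concepts bottom-up, always merging the
# pair(s) whose intents have the smallest 1 - similarity distance. The class
# name `SubsetLattice` below is assumed for illustration, as are the
# module-level imports it needs (`from itertools import combinations`,
# `from functools import reduce`). Any measure mapping two intent bitsets to
# [0, 1] works; this reuses the Jaccard idea from the fixture sketch above.
def intent_similarity(intent_a, intent_b):
    union = intent_a | intent_b
    if not union:
        return 1.0
    return bin(intent_a & intent_b).count('1') / bin(union).count('1')


Objects = bitset('Objects', ('a', 'b', 'c'))
Attributes = bitset('Attributes', ('1', '2'))
context = Context(((0, 1), (1, 1), (0, 1)), Objects, Attributes)

subset_lattice = SubsetLattice(context, intent_similarity)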