def destroy_repair(self, solution):
     """Destroy part of a tour and repair it by cheapest insertion.

     A shuffled copy of ``self.mask`` decides which positions are blanked
     out (the first and last city are never destroyed).  Each resulting
     gap ("hole") is then refilled one city at a time, always applying the
     cheapest available insertion.

     :param solution: tour (list of city ids); the input is not mutated
     :return: a new, repaired tour
     """
     s, destroy_mask = deepcopy(solution), deepcopy(self.mask)
     shuffle(destroy_mask)
     # Blank the masked positions, keeping the tour endpoints intact.
     for i, bit in enumerate(destroy_mask):
         if bit and i not in [0, len(s) - 1]:
             s[i] = None
     holes = self._construct_holes(s)
     for hole in holes:
         # hole is indexed as a (start, end) pair; the cities at its two
         # boundaries survived the destruction and seed the repair.
         hole_cities = [s[hole[0]], s[hole[1]]]
         # Insert cities until the segment holds as many distinct cities
         # as the destroyed span had positions.
         while len(set(hole_cities)) <= hole[1] - hole[0]:
             # Candidates: cities neither already placed in this hole nor
             # anywhere in the (partially destroyed) solution.
             cities_id_to_check = list(
                 set(range(self.instance.length)) - set(hole_cities) -
                 set(s))
             # Collect every possible insertion of every candidate city.
             city_insertions = defaultdict(list)
             for city_id in cities_id_to_check:
                 for i, pair in enumerate(pairwise(hole_cities)):
                     insertion: Insertion = Insertion(
                         city_id, i + 1,
                         self._insertion_cost(*pair, city_id))
                     city_insertions[str(city_id)].append(insertion)
             city_insertion_cost = self._map_insertions_on_insertion_costs(
                 city_insertions)
             best_city_insertion: Insertion = min(
                 city_insertion_cost, key=lambda x: x.cost)  # min cost
             hole_cities.insert(best_city_insertion.position_in_solution,
                                best_city_insertion.city_id)
         # Splice the repaired segment back into the solution.
         s[hole[0]:hole[1] + 1] = deepcopy(hole_cities)
     return s
# Example #2
 def get_weight(self, node_a, node_b):
     """
     Compute the relation weight between two nodes of the family graph.
     :param node_a: networkx node, e_1
     :param node_b: networkx node, e_2
     :return: the direct edge weight, the summed weight along the shortest
         path for an indirect relation, 0 for siblings with no path, and
         -1 when no relation exists
     """
     try:
         path = nx.shortest_path(self.family, node_a, node_b)
     except nx.NetworkXNoPath:
         # No path at all: the pair may still be related as siblings.
         for group in self.siblings:
             if node_a in group and node_b in group:
                 return 0
         return -1
     if len(path) == 2:
         # Direct relation: a single edge connects the two nodes.
         return self.family[node_a][node_b]['weight']
     # Indirect relation: accumulate the weight of every hop on the path.
     return sum(self.family[na][nb]['weight'] for na, nb in pairwise(path))
# Example #3
    def make_single_story(self, num_relations=6, num_stories=10):
        """
        In single story mode, there will be only one abstract per story.
        Idea: Select any two nodes, get its longest connected path, form the
        story out of the path, then form the abstract from the shortest
        connected path.
        :param num_relations: path length (number of relations) to consider
        :param num_stories: maximum number of stories to generate
        :return: (stories, abstracts, relation_path) lists of equal length
        """
        stories = []
        abstracts = []
        relation_path = []  # for debugging purposes
        # for all pairs, calculate the min paths and max paths
        all_pairs, _ = self.calc_all_pairs(num_relations=num_relations)
        if len(all_pairs) == 0:
            # Bug fix: keep the return arity consistent with the normal
            # path (three lists, not two).
            return [], [], []
        # create story-abstract pairs
        for path_pairs in all_pairs:
            node_a, node_b, max_path, min_path = path_pairs
            story = []
            path_str = ''
            for pi, (na, nb) in enumerate(pairwise(max_path)):
                text = self.stringify(na, nb)
                story.append(text)
                if pi == 0:
                    # the first node's attributes are emitted only once,
                    # at the start of the story
                    story.extend(
                        self._get_attributes(
                            self.connected_family.node[na]['data']))
                story.extend(
                    self._get_attributes(
                        self.connected_family.node[nb]['data']))
                weight = self.inv_rel_type[self.connected_family[na][nb]
                                           ['weight']]
                # edges walked against their stored direction become
                # inverse relations, except for symmetric ones
                if not self.connected_forward.has_edge(
                        na, nb) and weight not in ['sibling', 'SO']:
                    weight = 'inv-' + weight
                path_str += ' -- <{}> -- '.format(weight)
            abstract = self.stringify(node_a, node_b) + '.'
            fw = self.inv_rel_type[self.connected_family[node_a][node_b]
                                   ['weight']]
            path_str += ' ===> {}'.format(fw)
            stories.append(story)
            abstracts.append(abstract)
            relation_path.append(path_str)
            # Bug fix: decrement the remaining-story budget instead of
            # clobbering it with -1, which made the break unreachable and
            # ignored the num_stories parameter entirely.
            num_stories -= 1
            if num_stories == 0:
                break
        return stories, abstracts, relation_path
    def _solve(self, first_city_id=0):
        """Build a tour by repeated cheapest insertion.

        Starts from a degenerate two-city loop on ``first_city_id`` and
        grows it until ``self.goal_length`` distinct cities are placed.

        :param first_city_id: id of the city the tour starts and ends at
        :return: (solution, cost) tuple
        """
        tour = [first_city_id, first_city_id]
        while len(set(tour)) < self.goal_length:

            # Cities that are not yet part of the tour.
            candidates = list(set(range(self.instance.length)) - set(tour))

            # Evaluate every candidate at every insertion point.
            insertions_by_city = defaultdict(list)
            for candidate in candidates:
                for position, edge in enumerate(pairwise(tour), start=1):
                    cost = self._insertion_cost(*edge, candidate)
                    insertions_by_city[str(candidate)].append(
                        Insertion(candidate, position, cost))

            insertion_costs = self._map_insertions_on_insertion_costs(
                insertions_by_city)

            # Greedily apply the globally cheapest insertion.
            cheapest: Insertion = min(insertion_costs,
                                      key=lambda ins: ins.cost)
            tour.insert(cheapest.position_in_solution, cheapest.city_id)
        return tour, self._get_solution_cost(tour)
# Example #5
 def _get_solution_cost(self, solution) -> int:
     # if we dont want random solutions, change it to yielding before generated list
     return sum([
         self.instance.adjacency_matrix[id_source, id_destination]
         for id_source, id_destination in pairwise(solution)
     ])
        l.split('\t') for l in open(inputConceptPath[0]).read().splitlines()
    ], inputConceptPath[1]


if __name__ == "__main__":
    # CLI: evaluate a trained concept-pair classifier on annotated files.
    parser = argparse.ArgumentParser(
        description=
        'Predict concept pair class according to a trained classifier')
    parser.add_argument("vocFilePath", help='voc file')
    parser.add_argument("trainedClfPath", help='trained classifier file')
    parser.add_argument("inputConceptPairPathAndClassList",
                        nargs='+',
                        help='concept pair file list followed by class name')
    parser.add_argument("--compose",
                        help='try to compose concept',
                        action='store_false')
    args = parser.parse_args()

    vocFilePath = args.vocFilePath
    trainedClfPath = args.trainedClfPath
    inputConceptPairPathAndClassList = args.inputConceptPairPathAndClassList
    # NOTE(review): if `pairwise` here is itertools.pairwise, the pairs
    # overlap for more than two arguments; (path, class) chunking would
    # need a non-overlapping grouper — verify against the project's
    # `pairwise` implementation.
    annotedConceptPairStrList = [
        extractAnnotedConceptPairStr(f)
        for f in pairwise(args.inputConceptPairPathAndClassList)
    ]
    strict = args.compose

    # Bug fix: dill pickles are binary — the file must be opened in 'rb'
    # (text mode fails on Python 3) and closed deterministically.
    with open(trainedClfPath, 'rb') as clf_file:
        trainedClf = dill.load(clf_file)
    detailConceptPairClfError(db.DB(vocFilePath), trainedClf,
                              annotedConceptPairStrList, strict)
 def get_average_hamming_distance(ciphertext: str,
                                  chunk_size: int) -> float:
     """Average normalized Hamming distance between consecutive
     ``chunk_size``-sized partitions of ``ciphertext``.

     :param ciphertext: text to split into equal-sized chunks
     :param chunk_size: size of each chunk
     :return: summed pairwise distance divided by the partition count
     """
     chunks = partition_string(ciphertext, chunk_size)
     total_distance = sum(
         RepeatingKeyXorEstimator.get_normalized_hamming_distance(left,
                                                                  right)
         for left, right in pairwise(chunks))
     return total_distance / get_number_partitions(ciphertext, chunk_size)