def __init__(self, graphs: Graphs, feature_pairs, split=1):
    """Build the linear context over all graphs' node features.

    :param graphs: Graphs container providing feature matrices and node counts.
    :param feature_pairs: feature-pair spec, forwarded to the base class.
    :param split: number of chunks the graph sequence is divided into;
        ``_interval`` is the resulting chunk length (integer division).
    """
    self._interval = int(graphs.number_of_graphs() / split)
    # One big matrix covering every graph's node features.
    self._all_features = graphs.features_matrix_by_index(for_all=True)
    self._nodes_for_graph = graphs.nodes_count_list()
    # Cumulative row offsets into _all_features, one entry per graph plus a
    # leading 0 and a trailing total (len(nodes) + 1 entries overall).
    # np.cumsum is O(n), replacing the original O(n^2) repeated-np.sum loop,
    # and keeps the first entry an int (np.sum([]) yielded a float 0.0).
    self._all_ftr_graph_index = np.cumsum([0] + self._nodes_for_graph).tolist()
    super(LinearContext, self).__init__(graphs, feature_pairs)
# Beispiel #2 (second example variant of the same constructor)
    def __init__(self, graphs: Graphs, feature_pairs, split=1):
        """Assemble one feature matrix spanning every graph's nodes.

        :param graphs: Graphs container providing per-graph feature matrices.
        :param feature_pairs: feature-pair spec, forwarded to the base class.
        :param split: number of chunks the graph sequence is divided into;
            ``_interval`` is the resulting chunk length (integer division).
        """
        self._interval = int(graphs.number_of_graphs() / split)

        # Collect each graph's feature matrix, padding with 0.001-valued rows
        # for any nodes the matrix is missing, then stack everything into one
        # big matrix (rows: nodes, columns: features).
        parts = []
        for name in graphs.graph_names():
            mx = graphs.features_matrix(name)
            expected_rows = graphs.nodes_for_graph(graphs.name_to_index(name))
            parts.append(mx)
            parts.append(np.full((expected_rows - mx.shape[0], mx.shape[1]), 0.001))
        self._all_features = np.concatenate(parts)

        # all_ftr_graph_index - [ .... last_row_index_for_graph_i ... ]
        self._all_ftr_graph_index = np.cumsum([0] + graphs.nodes_count_list()).tolist()
        super(LinearContext, self).__init__(graphs, feature_pairs)