def _get_graph_kernel(dataset, kernel_options):
	"""Build a graph-kernel instance configured for *dataset*.

	The kernel class is selected by ``kernel_options['name']`` and is
	wired with the dataset's node/edge labels and attributes plus the
	'directed' flag from the dataset infos.
	"""
	from gklearn.utils.utils import get_graph_kernel_by_name
	return get_graph_kernel_by_name(
		kernel_options['name'],
		node_labels=dataset.node_labels,
		edge_labels=dataset.edge_labels,
		node_attrs=dataset.node_attrs,
		edge_attrs=dataset.edge_attrs,
		ds_infos=dataset.get_dataset_infos(keys=['directed']),
		kernel_options=kernel_options)
def _compute_gram_matrix_unnorm(dataset, kernel_options):
	"""Compute the unnormalized Gram matrix of *dataset*.

	A graph kernel is instantiated according to ``kernel_options`` and
	run over ``dataset.graphs``.

	Returns
	-------
	tuple
		``(gram_matrix_unnorm, run_time)`` as exposed by the kernel.
	"""
	from gklearn.utils.utils import get_graph_kernel_by_name
	kernel = get_graph_kernel_by_name(
		kernel_options['name'],
		node_labels=dataset.node_labels,
		edge_labels=dataset.edge_labels,
		node_attrs=dataset.node_attrs,
		edge_attrs=dataset.edge_attrs,
		ds_infos=dataset.get_dataset_infos(keys=['directed']),
		kernel_options=kernel_options)

	_gram_normalized, run_time = kernel.compute(dataset.graphs, **kernel_options)
	return kernel.gram_matrix_unnorm, run_time
# --- Example #3 ---
def __compute_kernel_distances(dataset, kernel_options, trainset=None):
	"""Compute the pairwise kernel-distance matrix of *dataset*.

	Parameters
	----------
	dataset
		A gklearn ``Dataset`` providing graphs and label/attribute names.
	kernel_options : dict
		Kernel configuration; ``kernel_options['name']`` selects the kernel.
	trainset : optional
		Currently unused; kept for backward compatibility with callers.

	Returns
	-------
	dis_mat
		The distance matrix derived from the computed Gram matrix.
	"""
	# Import locally, consistent with the sibling helpers in this module;
	# the original body used the name without a visible import.
	from gklearn.utils.utils import get_graph_kernel_by_name
	graph_kernel = get_graph_kernel_by_name(
		kernel_options['name'],
		node_labels=dataset.node_labels,
		edge_labels=dataset.edge_labels,
		node_attrs=dataset.node_attrs,
		edge_attrs=dataset.edge_attrs,
		ds_infos=dataset.get_dataset_infos(keys=['directed']),
		kernel_options=kernel_options)

	gram_matrix, run_time = graph_kernel.compute(dataset.graphs, **kernel_options)

	dis_mat, _, _, _ = graph_kernel.compute_distance_matrix()

	# NOTE(review): the original read graph_kernel.gram_matrix_unnorm into a
	# discarded local when trainset was given; that dead code was removed.
	# *trainset* is retained in the signature for interface stability.
	return dis_mat
# --- Example #4 ---
	def run(self):
		"""Generate a pre-image by perturbing the k nearest neighbors of
		the target point phi in kernel space.

		Workflow: (1) compute or reuse the (unnormalized) Gram matrix of
		the dataset; (2) rank all dataset graphs by their kernel distance
		to phi (a convex combination of dataset graphs weighted by
		``self.__alphas``); (3) iteratively modify the nearest neighbors,
		keeping any candidate that reduces the distance, until the
		termination criterion is met.

		Results (pre-image, distances, runtimes, iteration counters) are
		stored on the instance; nothing is returned.
		"""
		self._graph_kernel = get_graph_kernel_by_name(self._kernel_options['name'], 
						  node_labels=self._dataset.node_labels,
						  edge_labels=self._dataset.edge_labels, 
						  node_attrs=self._dataset.node_attrs,
						  edge_attrs=self._dataset.edge_attrs,
						  ds_infos=self._dataset.get_dataset_infos(keys=['directed']),
						  kernel_options=self._kernel_options)
		
		# record start time.
		start = time.time()
		
		# 1. precompute gram matrix.
		if self.__gram_matrix_unnorm is None:
			gram_matrix, run_time = self._graph_kernel.compute(self._dataset.graphs, **self._kernel_options)
			self.__gram_matrix_unnorm = self._graph_kernel.gram_matrix_unnorm
			end_precompute_gm = time.time()
			self.__runtime_precompute_gm = end_precompute_gm - start
		else:
			# A pre-computed Gram matrix was supplied; its externally
			# measured computation time must be supplied too.
			if self.__runtime_precompute_gm is None:
				raise Exception('Parameter "runtime_precompute_gm" must be given when using pre-computed Gram matrix.')
			self._graph_kernel.gram_matrix_unnorm = self.__gram_matrix_unnorm
			if self._kernel_options['normalize']:
				self._graph_kernel.gram_matrix = self._graph_kernel.normalize_gm(np.copy(self.__gram_matrix_unnorm))
			else:
				self._graph_kernel.gram_matrix = np.copy(self.__gram_matrix_unnorm)
			end_precompute_gm = time.time()
			# Shift the start time back so the reported total includes
			# the external Gram-matrix precomputation time.
			start -= self.__runtime_precompute_gm
			
		# 2. compute k nearest neighbors of phi in D_N.
		if self._verbose >= 2:
			print('\nstart computing k nearest neighbors of phi in D_N...\n')
		D_N = self._dataset.graphs
		if self.__alphas is None:
			# Default: phi is the uniform mean of all dataset graphs.
			self.__alphas = [1 / len(D_N)] * len(D_N)
		k_dis_list = [] # distance between g_star and each graph.
		# term3 = sum_{i,j} a_i * a_j * K(g_i, g_j): the constant part of
		# the squared kernel distance to phi, computed once and reused.
		term3 = 0
		for i1, a1 in enumerate(self.__alphas):
			for i2, a2 in enumerate(self.__alphas):
				term3 += a1 * a2 * self._graph_kernel.gram_matrix[i1, i2]
		for idx in range(len(D_N)):
			k_dis_list.append(compute_k_dis(idx, range(0, len(D_N)), self.__alphas, self._graph_kernel.gram_matrix, term3=term3, withterm3=True))
			
		# sort.
		sort_idx = np.argsort(k_dis_list)
		dis_gs = [k_dis_list[idis] for idis in sort_idx[0:self.__k]] # the k shortest distances.
		# NOTE(review): the comparison below relies on dis_gs[0] being a
		# NumPy scalar so the reflected == broadcasts over the list; with
		# plain Python floats it would collapse to a single bool -- TODO
		# confirm compute_k_dis returns NumPy scalars.
		nb_best = len(np.argwhere(dis_gs == dis_gs[0]).flatten().tolist())
		g0hat_list = [D_N[idx].copy() for idx in sort_idx[0:nb_best]] # the nearest neighbors of phi in D_N
		self.__best_from_dataset = g0hat_list[0] # get the first best graph if there are muitlple.
		self.__k_dis_dataset = dis_gs[0]
		
		if self.__k_dis_dataset == 0: # get the exact pre-image.
			end_generate_preimage = time.time()
			self.__runtime_generate_preimage = end_generate_preimage - end_precompute_gm
			self.__runtime_total = end_generate_preimage - start
			self.__preimage = self.__best_from_dataset.copy()	
			self.__k_dis_preimage = self.__k_dis_dataset
			if self._verbose:
				print()
				print('=============================================================================')
				print('The exact pre-image is found from the input dataset.')
				print('-----------------------------------------------------------------------------')
				print('Distance in kernel space for the best graph from dataset and for preimage:', self.__k_dis_dataset)
				print('Time to pre-compute Gram matrix:', self.__runtime_precompute_gm)
				print('Time to generate pre-images:', self.__runtime_generate_preimage)
				print('Total time:', self.__runtime_total)
				print('=============================================================================')
				print()
			return
		
		dhat = dis_gs[0] # the nearest distance
		Gk = [D_N[ig].copy() for ig in sort_idx[0:self.__k]] # the k nearest neighbors
		Gs_nearest = [nx.convert_node_labels_to_integers(g) for g in Gk] # [g.copy() for g in Gk]
		
		# 3. start iterations.
		if self._verbose >= 2:
			print('starting iterations...')
		gihat_list = []
		dihat_list = []
		r = 0
		dis_of_each_itr = [dhat]
		if self.__parallel:
			# Disable the kernel's internal parallel scheme during the
			# iteration phase.
			self._kernel_options['parallel'] = None
		self.__itrs = 0
		self.__num_updates = 0
		timer = Timer(self.__time_limit_in_sec)
		while not self.__termination_criterion_met(timer, self.__itrs, r):
			print('\n- r =', r)
			found = False
			dis_bests = dis_gs + dihat_list
			
			# compute numbers of edges to be inserted/deleted.
			# @todo what if the log is negetive? how to choose alpha (scalar)?
			fdgs_list = np.array(dis_bests)
			if np.min(fdgs_list) < 1: # in case the log is negetive.
				fdgs_list /= np.min(fdgs_list)
			fdgs_list = [int(item) for item in np.ceil(np.log(fdgs_list))]
			if np.min(fdgs_list) < 1: # in case the log is smaller than 1.
				fdgs_list = np.array(fdgs_list) + 1
			# expand the number of modifications to increase the possiblity.
			nb_vpairs_list = [nx.number_of_nodes(g) * (nx.number_of_nodes(g) - 1) for g in (Gs_nearest + gihat_list)]
			nb_vpairs_min = np.min(nb_vpairs_list)
			idx_fdgs_max = np.argmax(fdgs_list)
			fdgs_max_old = fdgs_list[idx_fdgs_max]
			fdgs_max = fdgs_max_old
			# nb_modif accumulates the number of possible modification
			# choices (a binomial-coefficient-style product) -- presumably
			# C(nb_vpairs_min, fdgs_max); verify against the paper.
			nb_modif = 1
			for idx, nb in enumerate(range(nb_vpairs_min, nb_vpairs_min - fdgs_max, -1)):
				nb_modif *= nb / (fdgs_max - idx)
			# Grow fdgs_max while the number of possible modifications
			# stays below the sample budget self.__l.
			while fdgs_max < nb_vpairs_min and nb_modif < self.__l:
				fdgs_max += 1
				nb_modif *= (nb_vpairs_min - fdgs_max + 1) / fdgs_max
			nb_increase = int(fdgs_max - fdgs_max_old)
			if nb_increase > 0:
				# NOTE(review): fdgs_list is a plain list unless the
				# "log smaller than 1" branch above converted it to an
				# ndarray; `list += 1` raises TypeError -- verify.
				fdgs_list += 1
				
				
			for ig, gs in enumerate(Gs_nearest + gihat_list):
				if self._verbose >= 2:
					print('-- computing', ig + 1, 'graphs out of', len(Gs_nearest) + len(gihat_list))
				gnew, dhat, found = self.__generate_l_graphs(gs, fdgs_list[ig], dhat, ig, found, term3)
						  
			if found:
				# A better candidate was found: reset the stagnation
				# counter and keep only the new best graph/distance.
				r = 0
				gihat_list = [gnew]
				dihat_list = [dhat]
			else:
				r += 1
				
			dis_of_each_itr.append(dhat)
			self.__itrs += 1
			if self._verbose >= 2:
				print('Total number of iterations is', self.__itrs, '.')
				print('The preimage is updated', self.__num_updates, 'times.')
				print('The shortest distances for previous iterations are', dis_of_each_itr, '.')
			
			
		# get results and print.
		end_generate_preimage = time.time()
		self.__runtime_generate_preimage = end_generate_preimage - end_precompute_gm
		self.__runtime_total = end_generate_preimage - start
		self.__preimage = (g0hat_list[0] if len(gihat_list) == 0 else gihat_list[0])
		self.__k_dis_preimage = dhat
		if self._verbose:
			print()
			print('=============================================================================')
			print('Finished generation of preimages.')
			print('-----------------------------------------------------------------------------')
			print('Distance in kernel space for the best graph from dataset:', self.__k_dis_dataset)
			print('Distance in kernel space for the preimage:', self.__k_dis_preimage)
			print('Total number of iterations for optimizing:', self.__itrs)
			print('Total number of updating preimage:', self.__num_updates)
			print('Time to pre-compute Gram matrix:', self.__runtime_precompute_gm)
			print('Time to generate pre-images:', self.__runtime_generate_preimage)
			print('Total time:', self.__runtime_total)
			print('=============================================================================')
			print()	
# --- Example #5 ---
	def run(self):
		"""Generate a median pre-image via edit-cost optimization.

		Workflow: (1) compute or reuse the (unnormalized) Gram matrix;
		(2) optimize the graph-edit-distance cost constants; (3) compute
		the set median and generalized median with the optimized costs;
		(4) compute kernel distances to the true median; (5) report.

		Results and timings are stored on the instance; nothing is
		returned.
		"""
		self._graph_kernel = get_graph_kernel_by_name(self._kernel_options['name'], 
						  node_labels=self._dataset.node_labels,
						  edge_labels=self._dataset.edge_labels, 
						  node_attrs=self._dataset.node_attrs,
						  edge_attrs=self._dataset.edge_attrs,
						  ds_infos=self._dataset.get_dataset_infos(keys=['directed']),
						  kernel_options=self._kernel_options)
		
		# record start time.
		start = time.time()
		
		# 1. precompute gram matrix.
		if self.__gram_matrix_unnorm is None:
			gram_matrix, run_time = self._graph_kernel.compute(self._dataset.graphs, **self._kernel_options)
			self.__gram_matrix_unnorm = self._graph_kernel.gram_matrix_unnorm
			end_precompute_gm = time.time()
			self.__runtime_precompute_gm = end_precompute_gm - start
		else:
			# A pre-computed Gram matrix was supplied; its externally
			# measured computation time must be supplied too.
			if self.__runtime_precompute_gm is None:
				raise Exception('Parameter "runtime_precompute_gm" must be given when using pre-computed Gram matrix.')
			self._graph_kernel.gram_matrix_unnorm = self.__gram_matrix_unnorm
			if self._kernel_options['normalize']:
				self._graph_kernel.gram_matrix = self._graph_kernel.normalize_gm(np.copy(self.__gram_matrix_unnorm))
			else:
				self._graph_kernel.gram_matrix = np.copy(self.__gram_matrix_unnorm)
			end_precompute_gm = time.time()
			# Shift the start time back so the reported total includes
			# the external Gram-matrix precomputation time.
			start -= self.__runtime_precompute_gm
			
		if self.__fit_method != 'k-graphs' and self.__fit_method != 'whole-dataset':
			# Fitting does not use the Gram matrix here, so exclude its
			# computation time from the reported totals.
			start = time.time()
			self.__runtime_precompute_gm = 0
			end_precompute_gm = start
		
		# 2. optimize edit cost constants. 
		self.__optimize_edit_cost_constants()
		end_optimize_ec = time.time()
		self.__runtime_optimize_ec = end_optimize_ec - end_precompute_gm
		
		# 3. compute set median and gen median using optimized edit costs.
		if self._verbose >= 2:
			print('\nstart computing set median and gen median using optimized edit costs...\n')
		self.__gmg_bcu()
		end_generate_preimage = time.time()
		self.__runtime_generate_preimage = end_generate_preimage - end_optimize_ec
		self.__runtime_total = end_generate_preimage - start
		if self._verbose >= 2:
			print('medians computed.')
			print('SOD of the set median: ', self.__sod_set_median)
			print('SOD of the generalized median: ', self.__sod_gen_median)
			
		# 4. compute kernel distances to the true median.
		if self._verbose >= 2:
			print('\nstart computing distances to true median....\n')
		self.__compute_distances_to_true_median()

		# 5. print out results.
		if self._verbose:
			print()
			print('================================================================================')
			print('Finished generation of preimages.')
			print('--------------------------------------------------------------------------------')
			print('The optimized edit cost constants:', self.__edit_cost_constants)
			print('SOD of the set median:', self.__sod_set_median)
			print('SOD of the generalized median:', self.__sod_gen_median)
			print('Distance in kernel space for set median:', self.__k_dis_set_median)
			print('Distance in kernel space for generalized median:', self.__k_dis_gen_median)
			print('Minimum distance in kernel space for each graph in median set:', self.__k_dis_dataset)
			print('Time to pre-compute Gram matrix:', self.__runtime_precompute_gm)
			print('Time to optimize edit costs:', self.__runtime_optimize_ec)
			print('Time to generate pre-images:', self.__runtime_generate_preimage)
			print('Total time:', self.__runtime_total)
			print('Total number of iterations for optimizing:', self.__itrs)
			print('Total number of updating edit costs:', self.__num_updates_ecc)
			print('Is optimization of edit costs converged:', self.__converged)
			print('================================================================================')
			print()
def xp_simple_preimage():
    """End-to-end demo: compute a median pre-image on the MAO dataset.

    Steps: load and split the predefined "MAO" dataset, configure the
    preimage / kernel / GED / median-estimator options, compute the Gram
    and distance matrices, pick the graph whose image is closest to the
    mean of its k nearest neighbors' images, run MedianPreimageGenerator
    on those neighbors, and display the results.

    NOTE(review): relies on ``draw_graph`` being available at module
    level (not visible in this excerpt), and assumes the candidate
    search loop finds a finite minimum so that ``G_cand`` and
    ``G_neighbors`` are bound -- verify.
    """
    import numpy as np
    """**1.   Get dataset.**"""

    from gklearn.utils import Dataset, split_dataset_by_target

    # Predefined dataset name, use dataset "MAO".
    ds_name = 'MAO'
    # The node/edge labels that will not be used in the computation.
    irrelevant_labels = {
        'node_attrs': ['x', 'y', 'z'],
        'edge_labels': ['bond_stereo']
    }

    # Initialize a Dataset.
    dataset_all = Dataset()
    # Load predefined dataset "MAO".
    dataset_all.load_predefined_dataset(ds_name)
    # Remove irrelevant labels.
    dataset_all.remove_labels(**irrelevant_labels)
    # Split the whole dataset according to the classification targets.
    datasets = split_dataset_by_target(dataset_all)
    # Get the first class of graphs, whose median preimage will be computed.
    dataset = datasets[0]
    len(dataset.graphs)
    """**2.  Set parameters.**"""

    import multiprocessing

    # Parameters for MedianPreimageGenerator (our method).
    mpg_options = {
        'fit_method':
        'k-graphs',  # how to fit edit costs. "k-graphs" means use all graphs in median set when fitting.
        'init_ecc': [4, 4, 2, 1, 1, 1],  # initial edit costs.
        'ds_name': ds_name,  # name of the dataset.
        'parallel': True,  # whether the parallel scheme is to be used.
        'time_limit_in_sec':
        0,  # maximum time limit to compute the preimage. If set to 0 then no limit.
        'max_itrs':
        10,  # maximum iteration limit to optimize edit costs. If set to 0 then no limit.
        'max_itrs_without_update':
        3,  # If the times that edit costs is not update is more than this number, then the optimization stops.
        'epsilon_residual':
        0.01,  # In optimization, the residual is only considered changed if the change is bigger than this number.
        'epsilon_ec':
        0.1,  # In optimization, the edit costs are only considered changed if the changes are bigger than this number.
        'verbose': 2  # whether to print out results.
    }
    # Parameters for graph kernel computation.
    kernel_options = {
        'name': 'PathUpToH',  # use path kernel up to length h.
        'depth': 9,
        'k_func': 'MinMax',
        'compute_method': 'trie',
        'parallel': 'imap_unordered',  # or None
        'n_jobs': multiprocessing.cpu_count(),
        'normalize':
        True,  # whether to use normalized Gram matrix to optimize edit costs.
        'verbose': 2  # whether to print out results.
    }
    # Parameters for GED computation.
    ged_options = {
        'method': 'IPFP',  # use IPFP huristic.
        'initialization_method': 'RANDOM',  # or 'NODE', etc.
        'initial_solutions':
        10,  # when bigger than 1, then the method is considered mIPFP.
        'edit_cost': 'CONSTANT',  # use CONSTANT cost.
        'attr_distance':
        'euclidean',  # the distance between non-symbolic node/edge labels is computed by euclidean distance.
        'ratio_runs_from_initial_solutions': 1,
        'threads': multiprocessing.cpu_count(
        ),  # parallel threads. Do not work if mpg_options['parallel'] = False.
        'init_option': 'EAGER_WITHOUT_SHUFFLED_COPIES'
    }
    # Parameters for MedianGraphEstimator (Boria's method).
    mge_options = {
        'init_type':
        'MEDOID',  # how to initial median (compute set-median). "MEDOID" is to use the graph with smallest SOD.
        'random_inits':
        10,  # number of random initialization when 'init_type' = 'RANDOM'.
        'time_limit':
        600,  # maximum time limit to compute the generalized median. If set to 0 then no limit.
        'verbose': 2,  # whether to print out results.
        'refine': False  # whether to refine the final SODs or not.
    }
    print('done.')
    """**3.   Compute the Gram matrix and distance matrix.**"""

    from gklearn.utils.utils import get_graph_kernel_by_name

    # Get a graph kernel instance.
    graph_kernel = get_graph_kernel_by_name(
        kernel_options['name'],
        node_labels=dataset.node_labels,
        edge_labels=dataset.edge_labels,
        node_attrs=dataset.node_attrs,
        edge_attrs=dataset.edge_attrs,
        ds_infos=dataset.get_dataset_infos(keys=['directed']),
        kernel_options=kernel_options)
    # Compute Gram matrix.
    gram_matrix, run_time = graph_kernel.compute(dataset.graphs,
                                                 **kernel_options)

    # Compute distance matrix.
    from gklearn.utils import compute_distance_matrix
    dis_mat, _, _, _ = compute_distance_matrix(gram_matrix)

    print('done.')
    """**4.   Find the candidate graph.**"""

    from gklearn.preimage.utils import compute_k_dis

    # Number of the nearest neighbors.
    k_neighbors = 10

    # For each graph G in dataset, compute the distance between its image \Phi(G) and the mean of its neighbors' images.
    dis_min = np.inf  # the minimum distance between possible \Phi(G) and the mean of its neighbors.
    for idx, G in enumerate(dataset.graphs):
        # Find the k nearest neighbors of G.
        dis_list = dis_mat[
            idx]  # distance between \Phi(G) and image of each graphs.
        idx_sort = np.argsort(
            dis_list)  # sort distances and get the sorted indices.
        idx_nearest = idx_sort[1:k_neighbors +
                               1]  # indices of the k-nearest neighbors.
        dis_k_nearest = [dis_list[i] for i in idx_nearest
                         ]  # k-nearest distances, except the 0.
        G_k_nearest = [dataset.graphs[i]
                       for i in idx_nearest]  # k-nearest neighbors.

        # Compute the distance between \Phi(G) and the mean of its neighbors.
        dis_tmp = compute_k_dis(
            idx,  # the index of G in Gram matrix.
            idx_nearest,  # the indices of the neighbors
            [1 / k_neighbors] * k_neighbors,  # coefficients for neighbors. 
            gram_matrix,
            withterm3=False)
        # Check if the new distance is smallers.
        if dis_tmp < dis_min:
            dis_min = dis_tmp
            G_cand = G
            G_neighbors = G_k_nearest

    print('The minimum distance is', dis_min)
    """**5.   Run median preimage generator.**"""

    from gklearn.preimage import MedianPreimageGenerator

    # Set the dataset as the k-nearest neighbors.
    dataset.load_graphs(G_neighbors)

    # Create median preimage generator instance.
    mpg = MedianPreimageGenerator()
    # Add dataset.
    mpg.dataset = dataset
    # Set parameters.
    mpg.set_options(**mpg_options.copy())
    mpg.kernel_options = kernel_options.copy()
    mpg.ged_options = ged_options.copy()
    mpg.mge_options = mge_options.copy()
    # Run.
    mpg.run()
    """**4. Get results.**"""

    # Get results.
    import pprint
    pp = pprint.PrettyPrinter(indent=4)  # pretty print
    results = mpg.get_results()
    pp.pprint(results)

    # NOTE(review): draw_graph is not defined or imported in this excerpt.
    draw_graph(mpg.set_median)
    draw_graph(mpg.gen_median)
    draw_graph(G_cand)