def som_load(som_pointer, weights, award, capture_objects):
    """!
    @brief Load dump of the network to SOM.
    @details Initialize SOM using existed weights, amount of captured objects by each neuron,
              captured objects by each neuron. Initialization is not performed if weights are empty.

    @param[in] som_pointer (POINTER): pointer to object of self-organized map.
    @param[in] weights (list): weights that should be assigned to neurons.
    @param[in] award (list): amount of captured objects by each neuron.
    @param[in] capture_objects (list): captured objects by each neuron.

    """
    # Empty weights mean "nothing to restore" -- leave the network untouched.
    if len(weights) == 0:
        return

    ccore = ccore_library.get()

    package_weights = package_builder(weights, c_double).create()
    package_award = package_builder(award, c_size_t).create()
    package_capture_objects = package_builder(capture_objects, c_size_t).create()

    ccore.som_load(som_pointer, package_weights, package_award, package_capture_objects)
def antcolony_tsp_process(cities, params, citiesDistRepresent=CITIES_DISTANCE_SET_BY_LIST_OF_COORDINATES):
    """!
    @brief Runs the ant colony TSP algorithm in the C core and returns the extracted result.

    @param[in] cities: cities as coordinates or as a distance matrix, per `citiesDistRepresent`.
    @param[in] params: algorithm parameters (converted via `get_algo_params`).
    @param[in] citiesDistRepresent: how `cities` is represented (coordinates list or matrix).

    @return Result extracted from the C-core package.

    """
    algorithm_params = get_algo_params(params)
    library = load_core()

    # Both representations are packed the same way; only the C entry point differs.
    cities_package = package_builder(cities, c_double).create()

    if citiesDistRepresent == CITIES_DISTANCE_SET_BY_MATRIX:
        processor = library.antcolony_tsp_process_by_matrix
    else:
        processor = library.antcolony_tsp_process

    processor.restype = POINTER(pyclustering_package)
    result_package = processor(cities_package, algorithm_params)

    result = package_extractor(result_package).extract()
    library.free_pyclustering_package(result_package)

    return result
def __call__(self, point1, point2):
    """!
    @brief Computes the distance between two points using the C-core metric object.

    @param[in] point1 (list): first point.
    @param[in] point2 (list): second point.

    @return (double) Distance between the two points.

    """
    first_package = package_builder(point1, c_double).create()
    second_package = package_builder(point2, c_double).create()

    library = ccore_library.get()
    library.metric_calculate.restype = c_double

    return library.metric_calculate(self.__pointer, first_package, second_package)
def silhoeutte(sample, clusters, pointer_metric):
    """!
    @brief Runs the silhouette algorithm in the C core and returns the extracted result.
    @details The misspelled name is preserved because it is the function's public interface.

    """
    data_package = package_builder(sample, c_double).create()
    clusters_package = package_builder(clusters, c_size_t).create()

    library = ccore_library.get()
    library.silhouette_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.silhouette_algorithm(data_package, clusters_package, pointer_metric)
    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def kmedians(sample, centers, tolerance, itermax, metric_pointer):
    """!
    @brief Runs K-Medians in the C core and returns (clusters, medians) from the result package.

    """
    data_package = package_builder(sample, c_double).create()
    centers_package = package_builder(centers, c_double).create()

    library = ccore_library.get()
    library.kmedians_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.kmedians_algorithm(
        data_package, centers_package, c_double(tolerance), c_size_t(itermax), metric_pointer)

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1]
def fcm_algorithm(sample, centers, m, tolerance, itermax):
    """!
    @brief Runs Fuzzy C-Means in the C core and returns the extracted result package.

    """
    data_package = package_builder(sample, c_double).create()
    centers_package = package_builder(centers, c_double).create()

    library = ccore_library.get()
    library.fcm_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.fcm_algorithm(
        data_package, centers_package, c_double(m), c_double(tolerance), c_size_t(itermax))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def kmedoids(sample, medoids, tolerance):
    """!
    @brief Runs K-Medoids in the C core and returns the extracted result package.

    """
    data_package = package_builder(sample, c_double).create()
    medoids_indexes = package_builder(medoids, c_size_t).create()

    library = load_core()
    library.kmedoids_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.kmedoids_algorithm(data_package, medoids_indexes, c_double(tolerance))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def xmeans(sample, centers, kmax, tolerance, criterion):
    """!
    @brief Runs X-Means in the C core and returns (clusters, centers) from the result package.

    """
    data_package = package_builder(sample, c_double).create()
    centers_package = package_builder(centers, c_double).create()

    library = ccore_library.get()
    library.xmeans_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.xmeans_algorithm(
        data_package, centers_package, c_size_t(kmax), c_double(tolerance), c_uint(criterion))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1]
def silhoeutte(sample, clusters, pointer_metric):
    """!
    @brief Runs the silhouette algorithm in the C core and returns the extracted result.
    @details The misspelled name is preserved because it is the function's public interface.

    """
    library = ccore_library.get()
    library.silhouette_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.silhouette_algorithm(
        package_builder(sample, c_double).create(),
        package_builder(clusters, c_size_t).create(),
        pointer_metric)

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def kmeans(sample, centers, tolerance, observe):
    """!
    @brief Runs K-Means in the C core and returns the extracted result package.

    """
    data_package = package_builder(sample, c_double).create()
    centers_package = package_builder(centers, c_double).create()

    library = ccore_library.get()
    library.kmeans_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.kmeans_algorithm(
        data_package, centers_package, c_double(tolerance), c_bool(observe))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def kmedians(sample, centers, tolerance, metric_pointer):
    """!
    @brief Runs K-Medians in the C core and returns the extracted result package.

    """
    library = ccore_library.get()
    library.kmedians_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.kmedians_algorithm(
        package_builder(sample, c_double).create(),
        package_builder(centers, c_double).create(),
        c_double(tolerance),
        metric_pointer)

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def xmeans(sample, centers, kmax, tolerance, criterion):
    """!
    @brief Runs X-Means in the C core and returns (clusters, centers) from the result package.

    """
    library = ccore_library.get()
    library.xmeans_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.xmeans_algorithm(
        package_builder(sample, c_double).create(),
        package_builder(centers, c_double).create(),
        c_size_t(kmax),
        c_double(tolerance),
        c_uint(criterion))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1]
def kmedoids(sample, medoids, tolerance, itermax, metric_pointer, data_type):
    """!
    @brief Runs K-Medoids in the C core and returns (clusters, medoids) from the result package.

    """
    data_package = package_builder(sample, c_double).create()
    medoids_indexes = package_builder(medoids, c_size_t).create()
    data_type_code = convert_data_type(data_type)

    library = ccore_library.get()
    library.kmedoids_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.kmedoids_algorithm(
        data_package, medoids_indexes, c_double(tolerance), c_size_t(itermax),
        metric_pointer, data_type_code)

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1]
def fcm_algorithm(sample, centers, m, tolerance, itermax):
    """!
    @brief Runs Fuzzy C-Means in the C core and returns the extracted result package.

    """
    library = ccore_library.get()
    library.fcm_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.fcm_algorithm(
        package_builder(sample, c_double).create(),
        package_builder(centers, c_double).create(),
        c_double(m),
        c_double(tolerance),
        c_size_t(itermax))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def kmedoids(sample, medoids, tolerance, itermax, metric_pointer, data_type):
    """!
    @brief Runs K-Medoids in the C core.

    @return Tuple: (result[0], result[1], result[2][0], result[3][0]) from the extracted
             result package -- the last two entries are unwrapped single-element lists.

    """
    library = ccore_library.get()
    library.kmedoids_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.kmedoids_algorithm(
        package_builder(sample, c_double).create(),
        package_builder(medoids, c_size_t).create(),
        c_double(tolerance),
        c_size_t(itermax),
        metric_pointer,
        convert_data_type(data_type))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1], extracted[2][0], extracted[3][0]
def elbow(sample, kmin, kmax, kstep, initializer, random_state):
    """!
    @brief Runs the elbow method in the C core.
    @details Fix: the previous `random_state = random_state or -1` mapped the valid seed 0
              to -1 because 0 is falsy; an explicit `is None` check keeps 0 usable.

    @param[in] sample (list): input data points.
    @param[in] kmin, kmax, kstep: range of cluster amounts to evaluate.
    @param[in] initializer: center initializer type (`elbow_center_initializer`).
    @param[in] random_state: seed for random generation, or None for "not set" (-1 in C core).

    @return Tuple: (optimal amount of clusters, list of within-cluster errors).

    @raise ValueError: if `initializer` is not a supported center initializer.
    @raise RuntimeError: if the C core returned an error message instead of results.

    """
    random_state = -1 if random_state is None else random_state
    pointer_data = package_builder(sample, c_double).create()
    ccore = ccore_library.get()

    if initializer == elbow_center_initializer.KMEANS_PLUS_PLUS:
        ccore.elbow_method_ikpp.restype = POINTER(pyclustering_package)
        package = ccore.elbow_method_ikpp(pointer_data, c_size_t(kmin), c_size_t(kmax),
                                          c_size_t(kstep), c_longlong(random_state))
    elif initializer == elbow_center_initializer.RANDOM:
        ccore.elbow_method_irnd.restype = POINTER(pyclustering_package)
        package = ccore.elbow_method_irnd(pointer_data, c_size_t(kmin), c_size_t(kmax),
                                          c_size_t(kstep), c_longlong(random_state))
    else:
        raise ValueError("Not supported type of center initializer '" + str(initializer) + "'.")

    results = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    # A bytes payload is an error message from the C core.
    if isinstance(results, bytes):
        raise RuntimeError(results.decode('utf-8'))

    return (results[elbow_package_indexer.ELBOW_PACKAGE_INDEX_AMOUNT][0],
            results[elbow_package_indexer.ELBOW_PACKAGE_INDEX_WCE])
def legion_simulate(legion_network_pointer, steps, time, solver, collect_dynamic, stimulus):
    """!
    @brief Runs LEGION simulation in the C core and returns a pointer to the dynamic.

    """
    library = ccore_library.get()
    stimulus_package = package_builder(stimulus, c_double).create()

    library.legion_simulate.restype = POINTER(c_void_p)
    return library.legion_simulate(legion_network_pointer, c_uint(steps), c_double(time),
                                   c_uint(solver), c_uint(collect_dynamic), stimulus_package)
def rock(sample, eps, number_clusters, threshold):
    """!
    @brief Clustering algorithm ROCK returns allocated clusters; calculation is performed
            via the C core (C/C++ part of pyclustering).

    @param[in] sample: input data - list of points where each point is a list of coordinates.
    @param[in] eps: connectivity radius (similarity threshold) - points are neighbors if
                the distance between them is less than it.
    @param[in] number_clusters: amount of clusters that should be allocated.
    @param[in] threshold: degree of normalization that influences choice of clusters for merging.

    @return List of allocated clusters; each cluster contains indexes of objects in the data.

    """
    data_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.rock_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.rock_algorithm(
        data_package, c_double(eps), c_size_t(number_clusters), c_double(threshold))

    clusters = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return clusters
def legion_simulate(legion_network_pointer, steps, time, solver, collect_dynamic, stimulus):
    """!
    @brief Runs LEGION simulation in the C core and returns a pointer to the dynamic.

    """
    stimulus_package = package_builder(stimulus, c_double).create()

    library = load_core()
    library.legion_simulate.restype = POINTER(c_void_p)

    return library.legion_simulate(legion_network_pointer, c_uint(steps), c_double(time),
                                   c_uint(solver), c_uint(collect_dynamic), stimulus_package)
def rock(sample, eps, number_clusters, threshold):
    """!
    @brief Clustering algorithm ROCK returns allocated clusters; calculation is performed
            via the C core (C/C++ part of pyclustering).

    @param[in] sample: input data - list of points where each point is a list of coordinates.
    @param[in] eps: connectivity radius (similarity threshold) - points are neighbors if
                the distance between them is less than it.
    @param[in] number_clusters: amount of clusters that should be allocated.
    @param[in] threshold: degree of normalization that influences choice of clusters for merging.

    @return List of allocated clusters; each cluster contains indexes of objects in the data.

    """
    library = load_core()
    library.rock_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.rock_algorithm(
        package_builder(sample, c_double).create(),
        c_double(eps),
        c_size_t(number_clusters),
        c_double(threshold))

    clusters = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return clusters
def optics(sample, radius, minimum_neighbors, amount_clusters, data_type):
    """!
    @brief Runs OPTICS in the C core and returns the unpacked result fields.

    @return Tuple: (clusters, noise, ordering, radius, optics-object indexes,
             core distances, reachability distances).

    """
    # The C core expects 0 when no particular amount of clusters is requested.
    requested_amount = 0 if amount_clusters is None else amount_clusters

    data_package = package_builder(sample, c_double).create()
    data_type_code = convert_data_type(data_type)

    library = ccore_library.get()
    library.optics_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.optics_algorithm(
        data_package, c_double(radius), c_size_t(minimum_neighbors),
        c_size_t(requested_amount), data_type_code)

    results = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return (results[optics_package_indexer.OPTICS_PACKAGE_INDEX_CLUSTERS],
            results[optics_package_indexer.OPTICS_PACKAGE_INDEX_NOISE],
            results[optics_package_indexer.OPTICS_PACKAGE_INDEX_ORDERING],
            results[optics_package_indexer.OPTICS_PACKAGE_INDEX_RADIUS][0],
            results[optics_package_indexer.OPTICS_PACKAGE_INDEX_OPTICS_OBJECTS_INDEX],
            results[optics_package_indexer.OPTICS_PACKAGE_INDEX_OPTICS_OBJECTS_CORE_DISTANCE],
            results[optics_package_indexer.OPTICS_PACKAGE_INDEX_OPTICS_OBJECTS_REACHABILITY_DISTANCE])
def syncnet_create_network(sample, radius, initial_phases, enable_conn_weight):
    """!
    @brief Creates a SyncNet oscillatory network in the C core and returns an opaque pointer.

    """
    data_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.syncnet_create_network.restype = POINTER(c_void_p)

    return library.syncnet_create_network(data_package, c_double(radius),
                                          c_bool(enable_conn_weight), c_uint(initial_phases))
def hsyncnet_create_network(sample, number_clusters, initial_phases, initial_neighbors, increase_persent):
    """!
    @brief Creates a HSyncNet network in the C core and returns an opaque pointer to it.
    @details The parameter name `increase_persent` (sic) is kept - it is public interface.

    """
    sample_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.hsyncnet_create_network.restype = POINTER(c_void_p)

    network_pointer = library.hsyncnet_create_network(
        sample_package, c_uint(number_clusters), c_uint(initial_phases),
        c_uint(initial_neighbors), c_double(increase_persent))

    return network_pointer
def xmeans(sample, centers, kmax, tolerance, criterion, repeat, random_state):
    """!
    @brief Runs X-Means in the C core and returns the extracted result package.
    @details Fix: the previous `random_state = random_state or -1` mapped the valid seed 0
              to -1 because 0 is falsy; an explicit `is None` check keeps 0 usable.

    @param[in] sample (list): input data points.
    @param[in] centers (list): initial centers.
    @param[in] kmax: maximum amount of clusters.
    @param[in] tolerance: stop-condition tolerance.
    @param[in] criterion: splitting criterion code.
    @param[in] repeat: how many times candidate splitting is repeated.
    @param[in] random_state: seed for random generation, or None for "not set" (-1 in C core).

    @return Extracted result package.

    """
    random_state = -1 if random_state is None else random_state

    pointer_data = package_builder(sample, c_double).create()
    pointer_centers = package_builder(centers, c_double).create()

    ccore = ccore_library.get()
    ccore.xmeans_algorithm.restype = POINTER(pyclustering_package)

    package = ccore.xmeans_algorithm(pointer_data, pointer_centers, c_size_t(kmax),
                                     c_double(tolerance), c_uint(criterion),
                                     c_size_t(repeat), c_longlong(random_state))

    result = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    return result
def hhn_simulate(hhn_network_pointer, steps, time, solution, stimulus, ccore_hhn_dynamic_pointer):
    """!
    @brief Runs HHN simulation in the C core; results are written into the provided
            dynamic object (no return value).

    """
    stimulus_package = package_builder(stimulus, c_double).create()

    library = ccore_library.get()
    library.hhn_simulate(hhn_network_pointer, c_size_t(steps), c_double(time),
                         c_size_t(solution), stimulus_package, ccore_hhn_dynamic_pointer)
def cure_algorithm(sample, number_clusters, number_represent_points, compression):
    """!
    @brief Runs CURE in the C core and returns an opaque pointer to the CURE data.

    """
    data_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.cure_algorithm.restype = POINTER(c_void_p)

    return library.cure_algorithm(data_package, c_size_t(number_clusters),
                                  c_size_t(number_represent_points), c_double(compression))
def syncnet_create_network(sample, radius, initial_phases, enable_conn_weight):
    # Creates a SyncNet oscillatory network in the C core and returns an opaque pointer to it.
    #
    # NOTE(review): the trailing ctypes arguments are passed as
    # (c_uint(initial_phases), c_bool(enable_conn_weight)) here, while a sibling wrapper in
    # this file passes them in the opposite order (c_bool(enable_conn_weight) first).
    # One of the two orders must mismatch the C signature of syncnet_create_network --
    # verify against the current C core header before relying on this variant.
    package_data = package_builder(sample, c_double).create();
    ccore = load_core();
    ccore.syncnet_create_network.restype = POINTER(c_void_p);
    pointer_network = ccore.syncnet_create_network(package_data, c_double(radius), c_uint(initial_phases), c_bool(enable_conn_weight));
    return pointer_network;
def templatePackUnpack(self, dataset, c_type_data=None):
    """!
    @brief Packs `dataset` into a C-core package, unpacks it back and asserts that the
            round-trip preserved the content.

    """
    packed = package_builder(dataset, c_type_data).create()
    roundtripped = package_extractor(packed).extract()

    expected = dataset
    if isinstance(expected, numpy.matrix):
        # numpy matrices are compared through their plain nested-list form.
        expected = dataset.tolist()

    assert self.compare_containers(expected, roundtripped)
def templatePackUnpack(self, dataset, c_type_data=None):
    """!
    @brief Packs `dataset` into a C-core package, unpacks it back and asserts that the
            round-trip preserved the content.

    """
    package_pointer = package_builder(dataset, c_type_data).create()
    unpacked = package_extractor(package_pointer).extract()

    # numpy matrices are compared through their plain nested-list form.
    reference = dataset.tolist() if isinstance(dataset, numpy.matrix) else dataset

    assert self.compare_containers(reference, unpacked)
def legion_simulate(legion_network_pointer, steps, time, solver, collect_dynamic, stimulus):
    """!
    @brief Runs LEGION simulation in the C core and returns a pointer to the dynamic.

    """
    stimulus_package = package_builder(stimulus, c_double).create()

    library = ccore_library.get()
    library.legion_simulate.restype = POINTER(c_void_p)

    return library.legion_simulate(legion_network_pointer, c_uint(steps), c_double(time),
                                   c_uint(solver), c_bool(collect_dynamic), stimulus_package)
def __init__(self, type_metric, arguments, func):
    """!
    @brief Creates a C-core metric object that wraps a user-defined Python distance function.
    @details Fix: the ctypes callback object produced by `metric_callback(...)` was a
              temporary; ctypes requires the callback object to stay alive as long as the
              C side may call it, otherwise the C core ends up holding a pointer to a
              garbage-collected function.  It is now stored on the instance.

    @param[in] type_metric: metric type code passed to the C core.
    @param[in] arguments (list): metric arguments packed for the C core.
    @param[in] func (callable): user distance function taking two extracted points.

    """
    # Adapter: unpack the C packages into Python objects before calling the user function.
    self.__func = lambda p1, p2: func(package_extractor(p1).extract(),
                                      package_extractor(p2).extract())

    package_arguments = package_builder(arguments, c_double).create()

    ccore = ccore_library.get()
    ccore.metric_create.restype = POINTER(c_void_p)

    # Keep a reference to the ctypes callback for the lifetime of this object.
    self.__callback = metric_callback(self.__func)
    self.__pointer = ccore.metric_create(c_size_t(type_metric), package_arguments, self.__callback)
def hhn_simulate(hhn_network_pointer, steps, time, solution, stimulus, ccore_hhn_dynamic_pointer):
    """!
    @brief Runs HHN simulation in the C core; results are written into the provided
            dynamic object (no return value).

    """
    library = ccore_library.get()
    packed_stimulus = package_builder(stimulus, c_double).create()

    library.hhn_simulate(hhn_network_pointer, c_size_t(steps), c_double(time),
                         c_size_t(solution), packed_stimulus, ccore_hhn_dynamic_pointer)
def agglomerative_algorithm(data, number_clusters, link):
    """!
    @brief Runs agglomerative clustering in the C core and returns the extracted result.

    """
    data_package = package_builder(data, c_double).create()

    library = ccore_library.get()
    library.agglomerative_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.agglomerative_algorithm(
        data_package, c_size_t(number_clusters), c_size_t(link))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted
def templatePackUnpack(self, dataset, c_type_data=None):
    """!
    @brief Packs `dataset` into a C-core package, unpacks it back and asserts that the
            round-trip preserved the content (strings compared directly, numpy arrays
            through their nested-list form).

    """
    packed = package_builder(dataset, c_type_data).create()
    roundtripped = package_extractor(packed).extract()

    reference = dataset
    if isinstance(reference, numpy.ndarray):
        reference = dataset.tolist()

    if isinstance(reference, str):
        self.assertEqual(dataset, roundtripped)
    else:
        self.assertTrue(self.compare_containers(reference, roundtripped))
def ttsas(sample, threshold1, threshold2, metric_pointer):
    """!
    @brief Runs TTSAS in the C core and returns (result[0], result[1]) from the package.

    """
    data_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.ttsas_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.ttsas_algorithm(
        data_package, c_double(threshold1), c_double(threshold2), metric_pointer)

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1]
def bsas(sample, amount, threshold, metric_pointer):
    """!
    @brief Runs BSAS in the C core and returns (result[0], result[1]) from the package.

    """
    library = ccore_library.get()
    library.bsas_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.bsas_algorithm(
        package_builder(sample, c_double).create(),
        c_size_t(amount),
        c_double(threshold),
        metric_pointer)

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1]
def silhoeutte_ksearch(sample, kmin, kmax, allocator):
    """!
    @brief Runs silhouette K-search in the C core.
    @details The misspelled name is preserved because it is the function's public interface.

    @return Tuple: (amount, score, scores) unpacked from the result package.

    """
    data_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.silhouette_ksearch_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.silhouette_ksearch_algorithm(
        data_package, c_size_t(kmin), c_size_t(kmax), c_size_t(allocator))

    results = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return (results[silhouette_ksearch_package_indexer.SILHOUETTE_KSEARCH_PACKAGE_INDEX_AMOUNT][0],
            results[silhouette_ksearch_package_indexer.SILHOUETTE_KSEARCH_PACKAGE_INDEX_SCORE][0],
            results[silhouette_ksearch_package_indexer.SILHOUETTE_KSEARCH_PACKAGE_INDEX_SCORES])
def gmeans(sample, amount, tolerance, repeat):
    """!
    @brief Runs G-Means in the C core.

    @return Tuple: (result[0], result[1], result[2][0]) from the extracted result package.

    """
    data_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.gmeans_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.gmeans_algorithm(
        data_package, c_size_t(amount), c_double(tolerance), c_size_t(repeat))

    extracted = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    return extracted[0], extracted[1], extracted[2][0]
def silhoeutte_ksearch(sample, kmin, kmax, allocator):
    """!
    @brief Runs silhouette K-search in the C core.
    @details The misspelled name is preserved because it is the function's public interface.

    @return Tuple: (amount, score, scores) unpacked from the result package.

    """
    library = ccore_library.get()
    library.silhouette_ksearch_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.silhouette_ksearch_algorithm(
        package_builder(sample, c_double).create(),
        c_size_t(kmin),
        c_size_t(kmax),
        c_size_t(allocator))

    results = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    indexer = silhouette_ksearch_package_indexer
    return (results[indexer.SILHOUETTE_KSEARCH_PACKAGE_INDEX_AMOUNT][0],
            results[indexer.SILHOUETTE_KSEARCH_PACKAGE_INDEX_SCORE][0],
            results[indexer.SILHOUETTE_KSEARCH_PACKAGE_INDEX_SCORES])
def dbscan(sample, eps, min_neighbors, return_noise=False):
    """!
    @brief Runs DBSCAN in the C core and returns (clusters, noise).
    @details Fix: the noise block (last element of the extracted list) is detached with
              `pop()`.  The previous `remove(noise)` removed the *first* list equal to the
              noise list, which could delete a cluster whose content happened to match the
              noise.  NOTE(review): `return_noise` is accepted but never used (kept for
              backward compatibility) -- noise is always returned.

    @param[in] sample: input data - list of points.
    @param[in] eps: connectivity radius.
    @param[in] min_neighbors: minimum amount of neighbors for a core point.
    @param[in] return_noise: unused; preserved for interface compatibility.

    @return Tuple: (list of clusters, noise) where each entry contains point indexes.

    """
    pointer_data = package_builder(sample, c_double).create()

    ccore = load_core()
    ccore.dbscan_algorithm.restype = POINTER(pyclustering_package)

    package = ccore.dbscan_algorithm(pointer_data, c_double(eps), c_size_t(min_neighbors))

    list_of_clusters = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    noise = list_of_clusters.pop()
    return (list_of_clusters, noise)
def dbscan(sample, eps, min_neighbors, data_type):
    """!
    @brief Runs DBSCAN in the C core and returns (clusters, noise).
    @details Fix: the noise block (last element of the extracted list) is detached with
              `pop()`.  The previous `remove(noise)` removed the *first* list equal to the
              noise list, which could delete a cluster whose content happened to match the
              noise.

    @param[in] sample: input data.
    @param[in] eps: connectivity radius.
    @param[in] min_neighbors: minimum amount of neighbors for a core point.
    @param[in] data_type: representation of `sample`, converted via `convert_data_type`.

    @return Tuple: (list of clusters, noise) where each entry contains point indexes.

    """
    pointer_data = package_builder(sample, c_double).create()
    c_data_type = convert_data_type(data_type)

    ccore = ccore_library.get()
    ccore.dbscan_algorithm.restype = POINTER(pyclustering_package)

    package = ccore.dbscan_algorithm(pointer_data, c_double(eps),
                                     c_size_t(min_neighbors), c_data_type)

    list_of_clusters = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    noise = list_of_clusters.pop()
    return list_of_clusters, noise
def dbscan(sample, eps, min_neighbors, data_type):
    """!
    @brief Runs DBSCAN in the C core and returns (clusters, noise).
    @details Fix: the noise block (last element of the extracted list) is detached with
              `pop()`.  The previous `remove(noise)` removed the *first* list equal to the
              noise list, which could delete a cluster whose content happened to match the
              noise.

    @param[in] sample: input data.
    @param[in] eps: connectivity radius.
    @param[in] min_neighbors: minimum amount of neighbors for a core point.
    @param[in] data_type: representation of `sample`, converted via `convert_data_type`.

    @return Tuple: (list of clusters, noise) where each entry contains point indexes.

    """
    pointer_data = package_builder(sample, c_double).create()
    c_data_type = convert_data_type(data_type)

    ccore = ccore_library.get()
    ccore.dbscan_algorithm.restype = POINTER(pyclustering_package)

    package = ccore.dbscan_algorithm(pointer_data, c_double(eps),
                                     c_size_t(min_neighbors), c_data_type)

    list_of_clusters = package_extractor(package).extract()
    ccore.free_pyclustering_package(package)

    noise = list_of_clusters.pop()
    return list_of_clusters, noise
def clique(sample, intervals, threshold):
    """!
    @brief Runs CLIQUE in the C core.

    @return Tuple: (clusters, noise, logical locations, max corners, min corners,
             block points) unpacked from the result package.

    """
    data_package = package_builder(sample, c_double).create()

    library = ccore_library.get()
    library.clique_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.clique_algorithm(data_package, c_size_t(intervals), c_size_t(threshold))

    results = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    indexer = clique_package_indexer
    return (results[indexer.CLIQUE_PACKAGE_INDEX_CLUSTERS],
            results[indexer.CLIQUE_PACKAGE_INDEX_NOISE],
            results[indexer.CLIQUE_PACKAGE_INDEX_LOGICAL_LOCATION],
            results[indexer.CLIQUE_PACKAGE_INDEX_MAX_CORNER],
            results[indexer.CLIQUE_PACKAGE_INDEX_MIN_CORNER],
            results[indexer.CLIQUE_PACKAGE_INDEX_BLOCK_POINTS])
def optics(sample, radius, minimum_neighbors, amount_clusters, data_type):
    """!
    @brief Runs OPTICS in the C core and returns the unpacked result fields.

    @return Tuple: (clusters, noise, ordering, radius, optics-object indexes,
             core distances, reachability distances).

    """
    # The C core expects 0 when no particular amount of clusters is requested.
    requested_amount = amount_clusters if amount_clusters is not None else 0

    library = ccore_library.get()
    library.optics_algorithm.restype = POINTER(pyclustering_package)

    raw_package = library.optics_algorithm(
        package_builder(sample, c_double).create(),
        c_double(radius),
        c_size_t(minimum_neighbors),
        c_size_t(requested_amount),
        convert_data_type(data_type))

    results = package_extractor(raw_package).extract()
    library.free_pyclustering_package(raw_package)

    indexer = optics_package_indexer
    return (results[indexer.OPTICS_PACKAGE_INDEX_CLUSTERS],
            results[indexer.OPTICS_PACKAGE_INDEX_NOISE],
            results[indexer.OPTICS_PACKAGE_INDEX_ORDERING],
            results[indexer.OPTICS_PACKAGE_INDEX_RADIUS][0],
            results[indexer.OPTICS_PACKAGE_INDEX_OPTICS_OBJECTS_INDEX],
            results[indexer.OPTICS_PACKAGE_INDEX_OPTICS_OBJECTS_CORE_DISTANCE],
            results[indexer.OPTICS_PACKAGE_INDEX_OPTICS_OBJECTS_REACHABILITY_DISTANCE])
def pack_pattern(pattern):
    """!
    @brief Packs a pattern (list of integers) into a C-core package of c_int values.

    """
    packed = package_builder(pattern, c_int).create()
    return packed
def syncpr_train(pointer_network, patterns):
    """!
    @brief Trains a SyncPR network in the C core with the given patterns (no return value).

    """
    patterns_package = package_builder(patterns, c_int).create()

    library = ccore_library.get()
    library.syncpr_train(pointer_network, patterns_package)
def pcnn_simulate(network_pointer, steps, stimulus):
    """!
    @brief Runs PCNN simulation in the C core and returns a pointer to the dynamic.

    """
    stimulus_package = package_builder(stimulus, c_double).create()

    library = ccore_library.get()
    library.pcnn_simulate.restype = POINTER(c_void_p)

    return library.pcnn_simulate(network_pointer, c_uint(steps), stimulus_package)