def create_counter_types_matrix(locs, types, retriever_o, pars_ret):
    """Create network of types.

    Parameters
    ----------
    locs: array_like, shape (n, n_dim)
        the spatial information of the retrievable elements.
    types: array_like or list
        the types codes.
    retriever_o: pst.BaseRetriever
        the retriever class (not instantiated yet).
    pars_ret: dict
        the parameters of the retriever.

    Returns
    -------
    counts: np.ndarray or scipy.sparse
        the counts matrix.
    types_u: array_like or list
        the types codes in the same order of the counts matrix.

    """
    ## 0. Prepare Retriever
    # Instantiate the retriever over the element locations.
    ret = retriever_o(locs, **pars_ret)
    counterdesc = CountDescriptor()
    nulldesc = NullPhantomDescriptor()
    ## 1. Prepare variables
    # np.unique(..., return_inverse=True) yields the sorted unique codes and,
    # in one vectorized pass, the index of each element's code in types_u.
    # This replaces the O(n*k) per-element np.where scan with one O(n log n)
    # call and produces identical values.
    types_u, types_tr = np.unique(types, return_inverse=True)
    ## 2. Prepare features
    # Implicit features carry the translated type codes; the phantom features
    # act as a placeholder second feature set of shape (n, 1).
    feats = ImplicitFeatures(types_tr, descriptormodel=counterdesc)
    feats_ph = PhantomFeatures((len(locs), 1), descriptormodel=nulldesc)
    ## 3. Prepare builders
    # One output slot per input element.
    vals = np.arange(len(types))
    feats_ret = FeaturesManager([feats, feats_ph],
                                selectors=[(0, 1), (0, 0), (1, 0)],
                                maps_vals_i=vals)
    counter = SpatialDescriptorModel(ret, feats_ret)
    ## 4. Compute counter
    counts = counter.compute()
    return counts, types_u
# EM-style (k-means-like) clustering example built on pySpatialTools.
# NOTE(review): assumes `nclusters`, `points`, `n_steps` and the pySpatialTools
# names (KRetriever, FeaturesManager, AvgPosition, AvgJoinerPosition,
# SpatialDescriptorModel, _output_map_mindist_filter) are defined earlier in
# this file — confirm against the full script.

# Initial centroids drawn uniformly in [-5, 5)^2.
centroids = (np.random.random((nclusters, 2))-.5)*10
selectors = ((0, 0), (0, 1), (0, 0))
# Average-position descriptor over the cluster labels.
avgdesc = AvgPosition()
avgdesc.set_global_info(np.arange(nclusters))
feats = ImplicitFeatures(np.arange(nclusters), descriptormodel=avgdesc)

## Performing algorithm
for i in range(n_steps):
    ## Expectation (assignation of the points)
    # Each point retrieves its nearest centroid (min-distance output map
    # filters the nclusters candidates down to the closest one — presumably;
    # verify _output_map_mindist_filter's semantics).
    ret0 = KRetriever(locs=centroids, autolocs=points, info_ret=nclusters,
                      ifdistance=True,
                      output_map=_output_map_mindist_filter)
    feats_ret = FeaturesManager([points, feats],
                                maps_vals_i=np.zeros(len(points)),
                                selectors=selectors,
                                descriptormodels=AvgJoinerPosition())
    spdesc = SpatialDescriptorModel(ret0, feats_ret)
    # Measure aggregates point positions per assigned centroid; the
    # maximization (centroid update) step presumably follows past this chunk.
    measure = spdesc.compute()

#### Case of different axis variance
## Initialization of the points
# Parameters
nclusters = 3
# Random clustered points
# Three Gaussian blobs with deliberately anisotropic spreads (tight on one
# axis, wide on the other) to exercise the different-axis-variance case.
x = np.concatenate([np.random.normal(5, 0.2, size=500),
                    np.random.normal(-4, 2.0, size=500),
                    np.random.normal(-3, 2.4, size=1000)])
y = np.concatenate([np.random.normal(4, 5, size=500),
                    np.random.normal(-3, 0.4, size=500),
                    np.random.normal(0, 0.2, size=1000)])
# Random initialization
# Initial x-coordinates for the new centroids, uniform in [-5, 5);
# the matching y0 initialization presumably continues past this chunk.
x0 = (np.random.random(nclusters)-0.5)*10