def test_point_correctness():
    import itertools
    stencil = [-1, 0, 1]
    ndim = 3
    n = 2000
    stencil = itertools.product(*[stencil] * ndim)
    stencil = np.array(list(stencil)).astype(np.int32)
    points = (np.random.rand(n, ndim) * [1, 2, 3]).astype(np.float32)
    scale = 0.1
    spec = GridSpec(points, float(scale))
    offsets = spec.stencil(stencil).astype(np.int32)
    grid = PointGrid(spec, points, offsets)
    pairs = grid.pairs()

    from scipy.spatial import cKDTree
    tree = cKDTree(points)
    tree_pairs = tree.query_pairs(scale, output_type='ndarray')
    print(tree_pairs)
    print(pairs)
    # np.alltrue was removed in numpy 2.0; np.all is the supported spelling
    assert np.all(npi.unique(tree_pairs) == npi.unique(np.sort(pairs, axis=1)))
def create_pre_compute():
    with open('./Data/Trajectories.txt', 'r') as f:
        # trajectory_num = []
        trajectories_list = []
        for line in f:
            lst = re.split(r'[ ]', line)
            lst.pop(-1)
            tmp = [float(i) for i in lst]
            # trajectory_num.append(tmp.pop(-1))
            trajectories_list.append(tmp)

    trajectories = np.zeros([len(trajectories_list), 24])
    for i, trajectory in enumerate(trajectories_list):
        for j, stop_area in enumerate(trajectory):
            trajectories[i, j] = int(stop_area)

    unique_trajectories = npi.unique(trajectories)
    label_uniques = npi.indices(unique_trajectories, trajectories)
    np.save('./Data/label_uniques.npy', label_uniques)
    np.save('./Data/trajectories_uniques.npy', unique_trajectories)
    np.save('./Data/np_trajectories.npy', trajectories)
    # trajectories = np.load('./Data/np_trajectories.npy')
    # unique_trajectories = np.load('./Data/trajectories_uniques.npy')
    # label_uniques = np.load('./Data/label_uniques.npy')

    dist_pre_compute = np.zeros([len(unique_trajectories), len(unique_trajectories)])
    for i in range(0, len(unique_trajectories) - 1):
        for j in range(i + 1, len(unique_trajectories)):
            if j % 2000 == 0:
                print([i, j])
            dist_pre_compute[i, j] = dist_pre_compute[j, i] = levenshtein(
                unique_trajectories[i, :], unique_trajectories[j, :])
            # dist_pre_compute[i, j] = dist_pre_compute[j, i] = dtw(
            #     unique_trajectories[i, :], unique_trajectories[j, :])
    # note: the file name says dtw, but the active metric above is levenshtein
    np.save('./Data/pre_compute_dtw', dist_pre_compute)
def find_squares(img):
    yuv = cv.cvtColor(img, cv.COLOR_BGR2YUV)
    y, u, v = cv.split(yuv)
    # img = cv.GaussianBlur(img, (5, 5), 0)
    squares = []
    # for gray in cv.split(img):
    _retval, bw = cv.threshold(y, 0, 255, cv.THRESH_OTSU)
    bw = cv.bitwise_not(bw)
    kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (7, 7))
    opened = cv.morphologyEx(bw, cv.MORPH_OPEN, kernel)
    # OpenCV 4 returns (contours, hierarchy); OpenCV 3 returned (image, contours, hierarchy)
    contours, _hierarchy = cv.findContours(opened, cv.RETR_LIST, cv.CHAIN_APPROX_SIMPLE)
    contours = sorted(contours, key=cv.contourArea, reverse=True)
    topN = 3
    maxLoop = min(topN, len(contours))
    for i in range(maxLoop):
        cnt = contours[i]
        cnt_len = cv.arcLength(cnt, True)
        cnt = cv.approxPolyDP(cnt, 0.02 * cnt_len, True)
        if len(cnt) == 4 and cv.contourArea(cnt) > 1000 and cv.isContourConvex(cnt):
            cnt = cnt.reshape(-1, 2)
            max_cos = np.max([
                angle_cos(cnt[k], cnt[(k + 1) % 4], cnt[(k + 2) % 4])
                for k in range(4)
            ])
            if max_cos < 0.1:
                squares.append(cnt)
    squares = npi.unique(squares)
    return squares
def merge(self, other):
    vertices = np.concatenate([self.vertices, other.vertices], axis=0)
    faces = np.concatenate([self.faces, other.faces + len(self.vertices)], axis=0)
    # deduplicate shared vertex rows and remap the faces onto the unique set
    _, _idx, _inv = npi.unique(vertices, return_index=True, return_inverse=True)
    return type(self)(vertices[_idx], _inv[faces])
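# A minimal, hedged sketch of the deduplication idiom used in merge() above:
# npi.unique with return_index/return_inverse collapses duplicate vertex rows
# and remaps face indices. The arrays below are illustrative, not from a real mesh.
import numpy as np
import numpy_indexed as npi

vertices = np.array([[0., 0., 0.], [1., 0., 0.], [0., 0., 0.], [0., 1., 0.]])  # rows 0 and 2 coincide
faces = np.array([[0, 1, 2], [2, 3, 1]])
_, idx, inv = npi.unique(vertices, return_index=True, return_inverse=True)
merged_vertices = vertices[idx]  # the 3 distinct rows
remapped_faces = inv[faces]      # faces now index the deduplicated rows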
def compute_face_incidence(self):
    unsorted_edges = self.edges().reshape(-1, 2)
    sorted_edges = np.sort(unsorted_edges, axis=-1)
    unique_edges, edge_indices = npi.unique(sorted_edges, return_inverse=True)
    face_indices = np.arange(self.faces.size) // 3
    orientation = sorted_edges[:, 0] == unsorted_edges[:, 0]
    incidence = scipy.sparse.csr_matrix(
        (orientation * 2 - 1, (edge_indices, face_indices)))
    return incidence, unique_edges
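# Hedged usage sketch for the incidence matrix above: an edge row with a single
# nonzero entry is referenced by exactly one face, i.e. it lies on the boundary.
# `mesh` is a hypothetical instance of the class defining compute_face_incidence.
incidence, unique_edges = mesh.compute_face_incidence()
faces_per_edge = np.asarray(abs(incidence).sum(axis=1)).ravel()  # nonzeros per edge row
boundary_edges = unique_edges[faces_per_edge == 1]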
def create_np():
    names_dict, label_list = points_dict()
    for file_tmp in os.listdir("./Data/CoordinatesInput"):
        if file_tmp.endswith(".txt"):
            str_file = file_tmp.title()
            str_file_pre = './Data/CoordinatesInput/%s.txt' % (str_file[:-4])
            str_file_np = './Data/npData/%s' % (str_file[:-4])
            with open(str_file_pre, 'r') as f:
                print(str_file)
                all_coordinates = []
                for line in f:
                    line = line.strip('\n')
                    tmp = ''
                    coordinates_list = []
                    for sign in line:
                        if sign not in ('[', ']', "'", '"', ' '):
                            if sign == ',':
                                coordinates_list.append(tmp)
                                tmp = ''
                            else:
                                tmp += sign
                    coordinates_list.append(tmp)
                    all_coordinates.append(coordinates_list)
            if len(all_coordinates) > 0:
                data = np.zeros([len(all_coordinates), 6])
                for idx, item1 in enumerate(all_coordinates):
                    for idy, item2 in enumerate(item1):
                        data[idx][idy] = float(item2)
                tmp = data[:, :2]
                unique_coordinate = npi.unique(tmp)
                label = npi.indices(unique_coordinate, tmp)
                max_cluster = np.max(label) + 1
                count = np.zeros(max_cluster)
                for i in label:
                    count[i] += 1
                # copy, so that zeroing entries while ranking does not clobber count
                temp = count.copy()
                idc = np.zeros(max_cluster)
                for i in range(max_cluster):
                    idx = np.argmax(temp)
                    idc[idx] = max_cluster - (i + 1)
                    temp[idx] = 0
                for idx, count_label in enumerate(idc):
                    if count_label < max_cluster - math.floor(max_cluster * 0.1):
                        label[np.where(label == idx)] = max_cluster
                j = 0
                for i in range(max_cluster + 1):
                    tmp = np.where(label == i)
                    if len(tmp[0]) > 0:
                        data[np.where(label == i), 5] = label_list[names_dict[str_file[:-4]]][j]
                        j += 1
                np.save(str_file_np, data)
def load_stl(filename):
    # binary STL layout: 80-byte header, uint32 triangle count, then one record per triangle
    dtype = [('normal', '<f4', (3,)),
             ('vertex', '<f4', (3, 3)),
             ('abc', '<u2')]
    with open(filename, 'rb') as fh:
        header = np.fromfile(fh, '<c', 80)
        triangles = np.fromfile(fh, '<u4', 1)[0]
        data = np.fromfile(fh, dtype, triangles)
    # deduplicate vertices; the inverse indices become the triangle list
    vertices, triangles = npi.unique(data['vertex'].reshape(-1, 3), return_inverse=True)
    return Mesh(vertices, triangles.reshape(-1, 3))
def compute_vertex_incidence(self):
    unsorted_edges = self.edges().reshape(-1, 2)
    sorted_edges = np.sort(unsorted_edges, axis=-1)
    vertex_indices = npi.unique(sorted_edges)
    edge_indices = np.arange(vertex_indices.size) // 2
    orientation = vertex_indices == vertex_indices[:, 0:1]
    incidence = scipy.sparse.csr_matrix(
        ((orientation * 2 - 1).flatten(),
         (edge_indices, vertex_indices.flatten())))
    return incidence
def minimize_objective(x, y, cv_splits, dim, dimension_bounds):
    # Possible to leave out and integrate in Max_EI
    temp = npi.unique(x, return_index=True)
    x = temp[0]
    y = y[temp[1]]
    model_weights = build_ensemble(x, y, cv_splits, sklearn.metrics.mean_absolute_error)
    trained_models = train_models(x, y)
    new_points_prediction = max_EI(trained_models, model_weights, x,
                                   y.reshape(-1, 1), dim, dimension_bounds)
    return trained_models, model_weights, new_points_prediction
def remove_disconnected_elems_from_mesh(mesh, split_facets, C_elems):
    """Elements which are completely disconnected from the rest of the mesh
    can safely be removed"""
    components = list(nx.connected_components(C_elems))
    print("Number of components after facet disconnection =", len(components))
    if len(components) == 1:
        # Everything connected so nothing to be done
        return mesh, split_facets, C_elems

    # Some elems are disconnected by split facets so remove them from the mesh
    lc_num = np.argmax([len(c) for c in components])  # Largest component
    connected_elems = np.array(list(components[lc_num]))
    isolated_elems = np.setdiff1d(range(len(mesh.elems)), connected_elems)
    print("Number elements removed =", len(isolated_elems))
    assert isolated_elems.any()

    # update split facets (the original sorted twice along axis=1; once suffices)
    removed_facets = npi.unique(
        np.sort(np.array([
            np.roll(mesh.elems[isolated_elems], i, axis=1).flatten()
            for i in range(mesh.edim - 1)
        ]).transpose(), axis=1))
    split_facets = split_facets[np.logical_not(
        npi.in_(split_facets, np.array(removed_facets)))]

    # Update the connectivity graph
    C_elems.remove_nodes_from(isolated_elems)
    ediff = np.zeros(len(mesh.elems), dtype=int)
    for enum in isolated_elems:
        ediff[enum:] += 1
    elem_connections = np.array(C_elems.edges())
    elem_connections -= ediff[elem_connections]
    C_elems = nx.Graph(elem_connections.tolist())

    # Renumber and update mesh
    is_elem_removed = np.zeros(len(mesh.elems))
    is_elem_removed[isolated_elems] = 1
    mesh, vdiff = renumber_mesh(mesh, is_elem_removed, return_vdiff=True)

    # Renumber the split vertex numbers
    split_facets -= vdiff[split_facets]
    return mesh, split_facets, C_elems
def validate(self, *, population: np.ndarray, **kwargs) -> np.ndarray:
    """Removes duplicate individuals from the population

    Args:
        population (np.ndarray): the population to validate
        **kwargs: keyword arguments for plugins

    Returns:
        np.ndarray: same width as population, likely has fewer rows
    """
    # the first part eliminates individuals with duplicate genes
    # the second part eliminates duplicate individuals
    population_sorted = np.sort(population, axis=-1)
    population = population[
        (population_sorted[..., 1:] != population_sorted[..., :-1]).all(-1)]
    return unique(np.sort(population, axis=1))
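# A self-contained, hedged re-statement of the two-step filter above as a free
# function, so its behavior can be checked without the surrounding class.
import numpy as np
from numpy_indexed import unique

def dedupe_population(population):
    s = np.sort(population, axis=-1)
    population = population[(s[..., 1:] != s[..., :-1]).all(-1)]  # drop rows with a repeated gene
    return unique(np.sort(population, axis=1))                    # drop rows equal up to permutation

pop = np.array([[1, 2, 3], [3, 2, 1], [1, 1, 4], [4, 5, 6]])
print(dedupe_population(pop))  # [[1 2 3] [4 5 6]]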
def generate_vertices(self, group):
    """instantiate a full sphere by repeating the transformed fundamental domain

    Returns
    -------
    ndarray, [n, 3], float
        all points in the geometry, on a unit sphere
    """
    # np.float was removed in numpy 1.24; plain float is equivalent here
    points = np.empty((group.index, group.order, self.topology.P0, 3), float)
    PP = self.decomposed
    for i, B in enumerate(group.basis):
        for t, b in enumerate(B.reshape(-1, 3, 3)):
            b = util.normalize(b.T).T   # now every row is a normalized vertex
            P = np.dot(b, PP.T).T       # go from decomposed coords to local coordinate system
            points[i, t] = P
    # make single unique point list
    return npi.unique(points.reshape(-1, 3))
def refine_sphere(sphere):
    """given a spherical mesh, insert a new vertex on every edge

    Parameters
    ----------
    sphere : skcg.Mesh instance

    Returns
    -------
    skcg.Mesh instance
    """
    vertices = sphere.vertices
    edges = npi.unique(sphere.ordered_edges())
    new_vertices = vertices[edges].mean(axis=1)
    new_vertices /= np.linalg.norm(new_vertices, axis=1, keepdims=True)
    sphere = triangulate_convex(np.concatenate((vertices, new_vertices)))
    # reverse faces whose normal points inward, so all faces end up outward-facing
    direction = collision.mymath.dot(sphere.face_normals(), sphere.face_centroids()) > 0
    faces = np.where(direction[:, None], sphere.faces[:, ::+1], sphere.faces[:, ::-1])
    sphere = Mesh(sphere.vertices, faces)
    return sphere
def list_equivalent_rules(rules, preferences='none'):
    """
    :param rules: dictionary containing rule_names (key) and rule_values (boolean array)
    :param preferences: ordering strategy within an equivalence class; only 'none' is implemented
    :return: list of lists. each inner list contains the rule_names with the same rule_value, ordered
    """
    rule_names, rule_values = zip(*rules.items())
    rule_values = np.vstack(rule_values)
    equivalent_values, duplicate_idx = npi.unique(rule_values, return_inverse=True)
    n_equivalent = len(equivalent_values)

    # create list of rules containing equivalence classes
    if preferences == 'none':
        equivalent_rules = [[] for _ in range(n_equivalent)]
        for j in range(n_equivalent):
            rule_idx = np.flatnonzero(j == duplicate_idx)
            equivalent_rules[j] = [rule_names[k] for k in rule_idx]
    else:
        # order rules from first to last
        raise NotImplementedError()

    return equivalent_rules, equivalent_values
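# Hedged toy input for list_equivalent_rules: rules 'A' and 'B' share a truth
# table, so they land in the same equivalence class, while 'C' stands alone.
rules = {
    'A': np.array([True, False, True]),
    'B': np.array([True, False, True]),
    'C': np.array([False, False, True]),
}
classes, values = list_equivalent_rules(rules)
# classes -> [['C'], ['A', 'B']]; class order follows npi.unique's sort order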
def get_facets(self):
    facets = np.vstack([
        np.roll(self.elems, i, axis=1).flatten()
        for i in range(self.edim - 1)
    ]).transpose()
    return npi.unique(np.sort(facets, axis=1))
def unique(ar, return_index=False, return_inverse=False, return_counts=False):
    # thin wrapper over numpy_indexed.unique; the positional None is the axis
    # argument, which flattens the input the way np.unique does by default
    return numpy_indexed.unique(ar, None, return_index, return_inverse, return_counts)
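# Quick hedged check of the wrapper above: for 1-D input it should agree with
# np.unique's keyword behavior.
import numpy as np

a = np.array([3, 1, 3, 2, 1])
u, counts = unique(a, return_counts=True)
# u -> [1 2 3], counts -> [2 1 2], matching np.unique(a, return_counts=True)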
# Drop all triangles with vertex on one side of split line
keep = []
for tri in fileMesh.vectors:
    if not any([x[minDim] >= split for x in tri]):
        keep.append(tri)
keep = np.array(keep)
fileMesh.vectors = keep
# plotMesh(fileMesh)

goodDir1 = (minDim + 1) % 3
goodDir2 = (minDim + 2) % 3
flatTriangles = keep[:, :, [goodDir1, goodDir2]]

# Drop lines that appear in two triangles - part of interior of part
# Sort the 2 points in each edge to ensure collision. Use 2 sort dirs to break ties
edges = [np.array([sorted([x[0, :], x[1, :]], key=lambda v: v[0] + 0.0001 * v[1]),
                   sorted([x[1, :], x[2, :]], key=lambda v: v[0] + 0.0001 * v[1]),
                   sorted([x[2, :], x[0, :]], key=lambda v: v[0] + 0.0001 * v[1])])
         for x in flatTriangles]
edges = np.reshape(edges, (-1, 4))
edges, counts = npi.unique(np.around(edges, decimals=point_accuracy), return_count=True)
edges = np.reshape(edges[counts == 1, :], (-1, 2, 2))

# Write to svg
svg = svgwrite.Drawing(fName.split('.stl')[0] + '.svg', profile='full',
                       size=('1000mm', '1000mm'), viewBox=('0 0 1000 1000'))
for e in edges:
    svg.add(svg.line(tuple(e[0, :].tolist()), tuple(e[1, :].tolist()),
                     stroke=svgwrite.rgb(0, 0, 0, '%')))
svg.save()
print("Saved " + str(edges.shape[0]) + " edges.")