def final_data_flattening(model_name):
    '''
    Saves a flattened version of the brush data, duplicating the data
    for each point of the polyline.
    '''
    final_data = common.load_json(
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data.json")
    flattened_data = []
    print("Flattening model %s" % model_name[0])
    for step in final_data:
        print("%s / %d" % (step, len(final_data) - 1))
        step_data = final_data[step]["brush_data"]
        if step_data["valid"]:
            for idx, point in enumerate(step_data["paths"][0]):
                single_data = {
                    "step": int(step),
                    "projected_size": step_data["size"][0][0],
                    "unprojected_size": step_data["size"][0][1],
                    "position": point,
                    "pressure": step_data["pressure"][0][idx],
                    "mode": step_data["mode"][0],
                }
                flattened_data.append(single_data)
    save_path = ("/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/flattened/" +
                 model_name[0] + "/")
    common.make_dirs(save_path)
    common.save_json(flattened_data, save_path + "flattened_data.json",
                     compressed=False)
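# Usage sketch: model entries in this codebase are ["name", max_step] pairs
# (see the 'models' list further below), and only model_name[0] is used here.
#   final_data_flattening(["alien", 2216])
# Each record written to flattened_data.json then has the shape:
#   {"step": <int>, "projected_size": <float>, "unprojected_size": <float>,
#    "position": <path point>, "pressure": <float>, "mode": <str>}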
def add_brush_category_to_final(model_name):
    final_data = common.load_json(
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_3.json")
    on_surface = [
        'BLOB', 'CLAY', 'CLAY_STRIPS', 'CREASE', 'DRAW', 'FLATTEN',
        'INFLATE', 'LAYER', 'PINCH', 'SCRAPE', 'SMOOTH'
    ]
    global_brush = ['GRAB', 'SNAKE_HOOK']
    mask_brush = ['MASK']
    for step in final_data:
        print("%s / %d" % (step, len(final_data) - 1))
        brush_type = final_data[step]["brush_data"]["brush_type"]
        if brush_type in on_surface:
            final_data[step]["brush_data"]["brush_category"] = "ON_SURFACE"
        elif brush_type in global_brush:
            final_data[step]["brush_data"]["brush_category"] = "GLOBAL"
        elif brush_type in mask_brush:
            final_data[step]["brush_data"]["brush_category"] = "MASK"
    common.save_json(
        final_data,
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_3.json",
        compressed=False)
def add_brush_type_to_final(model_name):
    final_data = common.load_json(
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_3.json")
    brush_type_data = common.load_json("../steps/" + model_name[0] +
                                       "/brush_type_new.json")
    for step in final_data:
        print("%s / %d" % (step, len(final_data) - 1))
        final_data[step]["brush_data"]["brush_type"] = brush_type_data[step]
    common.save_json(
        final_data,
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_3.json",
        compressed=False)
def add_brush_2d_angle_to_final(model_name):
    final_data = common.load_json(
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_3.json")
    for step in final_data:
        print("%s / %d" % (step, len(final_data) - 1))
        if final_data[step]["brush_data"]["valid"]:
            brush_data = final_data[step]["brush_data"]
            brush_data["brush_2d_angles"] = get_2d_angles(brush_data)
            brush_data["brush_2d_angles_filtered"] = get_2d_angles(brush_data, True)
    common.save_json(
        final_data,
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_3.json",
        compressed=False)
def add_brush_2d_pos_to_final(model_name):
    final_data = common.load_json(
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_2.json")
    brush_2d_pos_data = common.load_json("../steps/" + model_name[0] +
                                         "/brush_2d_pos.json")
    for step in final_data:
        print("%s / %d" % (step, len(final_data) - 1))
        if step in brush_2d_pos_data:
            final_data[step]["brush_data"]["brush_2d_pos"] = \
                brush_2d_pos_data[step]["path"]
            final_data[step]["brush_data"]["lenght_2d"] = \
                brush_2d_pos_data[step]["lenght"]
        elif final_data[step]["brush_data"]["valid"]:
            # A valid brush step should always have 2D position data.
            print("############# ERROR: no 2D position data for valid "
                  "step %s #############" % step)
    common.save_json(
        final_data,
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" +
        model_name[0] + "/final_data_3.json",
        compressed=False)
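# The four enrichment passes above form a pipeline: add_brush_2d_pos_to_final
# reads final_data_2.json and writes final_data_3.json; the other three read
# and rewrite final_data_3.json in place, and brush_category depends on
# brush_type being present. An assumed driver order (inferred from the file
# names, not stated in the source) would be:
#
#   add_brush_2d_pos_to_final(model_name)    # final_data_2 -> final_data_3
#   add_brush_type_to_final(model_name)      # needs ../steps/<m>/brush_type_new.json
#   add_brush_category_to_final(model_name)  # needs brush_type
#   add_brush_2d_angle_to_final(model_name)  # needs valid brush_data + get_2d_angles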
def diff_compressing(self):
    print("Compressing diff data for " + self.model_name + ": ")
    # Numeric summary
    added_vertices = []
    deleted_vertices = []
    added_normals = []
    deleted_normals = []
    added_faces = []
    deleted_faces = []
    # Geometric properties
    diff_added_bbox = []
    diff_added_centroids = []
    diff_deleted_bbox = []
    diff_deleted_centroids = []
    # Basic statistics of added/deleted points (mean, variance, skewness, kurtosis)
    added_mean = []
    added_variance = []
    added_skewness = []
    added_curtosis = []
    deleted_mean = []
    deleted_variance = []
    deleted_skewness = []
    deleted_curtosis = []

    serialized = False
    if os.path.isfile(self.diff_root_path + self.model_name +
                      "/step_1/serialized.txt"):
        serialized = True
        print("SERIALIZED")
    else:
        print("NOT SERIALIZED")

    for diff_no in range(self.end_step):
        print("step %d/%d" % (diff_no, self.end_step))
        data_av_c = []
        data_dv_c = []
        if not serialized:
            data_temp = common.load_pickle(self.diff_root_path +
                                           self.model_name + "/step_1/diff_" +
                                           str(diff_no))
            if not data_temp["valid"]:
                data_c = {"valid": False}
            else:
                # Bug fix: data_c was used here before being initialized, and
                # the del_* counts (read further below) were never filled in.
                data_c = {"valid": True}
                data_c['new_verts'] = len(data_temp['new_verts'])
                data_c['del_verts'] = len(data_temp['del_verts'])
                data_c['new_normals'] = len(data_temp['new_normals'])
                data_c['del_normals'] = len(data_temp['del_normals'])
                data_c['new_faces'] = len(data_temp['new_faces'])
                data_c['del_faces'] = len(data_temp['del_faces'])
                data_c['verts_no'] = int(data_temp['verts_no'])
                data_c['normals_no'] = int(data_temp['normals_no'])
                data_c['faces_no'] = int(data_temp['faces_no'])
                data_av_c = data_temp["new_verts"]
                data_dv_c = data_temp["del_verts"]
        else:
            data_c = common.load_pickle(self.diff_root_path + self.model_name +
                                        "/step_1/diff_" + str(diff_no) +
                                        "/diff_head")
            if data_c["valid"]:
                data_av_c = common.load_pickle(self.diff_root_path +
                                               self.model_name +
                                               "/step_1/diff_" +
                                               str(diff_no) + "/new_verts")
                data_dv_c = common.load_pickle(self.diff_root_path +
                                               self.model_name +
                                               "/step_1/diff_" +
                                               str(diff_no) + "/del_verts")

        if not data_c['valid']:
            added_vertices.append(0)
            deleted_vertices.append(0)
            added_normals.append(0)
            deleted_normals.append(0)
            added_faces.append(0)
            deleted_faces.append(0)
            diff_added_bbox.append([])
            diff_added_centroids.append([])
            diff_deleted_bbox.append([])
            diff_deleted_centroids.append([])
            added_mean.append([None, None, None])
            added_variance.append([None, None, None])
            added_skewness.append([None, None, None])
            added_curtosis.append([None, None, None])
            deleted_mean.append([None, None, None])
            deleted_variance.append([None, None, None])
            deleted_skewness.append([None, None, None])
            deleted_curtosis.append([None, None, None])
        else:
            # adding numeric features
            added_vertices.append(data_c["new_verts"])
            deleted_vertices.append(data_c["del_verts"])
            added_normals.append(data_c["new_normals"])
            deleted_normals.append(data_c["del_normals"])
            added_faces.append(data_c["new_faces"])
            deleted_faces.append(data_c["del_faces"])

            added_vertices_pos = []
            for key, data in data_av_c:
                added_vertices_pos.append([float(el) for el in data])
            deleted_vertices_pos = []
            for key, data in data_dv_c:
                deleted_vertices_pos.append([float(el) for el in data])

            # adding diff geometric features (bbox, centroids, curvature)
            if len(added_vertices_pos) > 0:
                diff_added_centroids.append(get_centroid(added_vertices_pos))
                obb_points, bbox_pos, m_ext, r, u, f = get_bbox(added_vertices_pos)
                diff_added_bbox.append([bbox_pos, m_ext])
            else:
                diff_added_bbox.append([])
                diff_added_centroids.append([])
            if len(deleted_vertices_pos) > 0:
                diff_deleted_centroids.append(get_centroid(deleted_vertices_pos))
                obb_points, bbox_pos, m_ext, r, u, f = get_bbox(deleted_vertices_pos)
                diff_deleted_bbox.append([bbox_pos, m_ext])
            else:
                diff_deleted_bbox.append([])
                diff_deleted_centroids.append([])
            # TODO: compute curvature

            # adding statistics
            if len(added_vertices_pos) > 0:
                numpy_arr_av = numpy.array(added_vertices_pos)
                added_mean.append(numpy.mean(numpy_arr_av, axis=0))
                added_variance.append(numpy.var(numpy_arr_av, axis=0))
                added_skewness.append(scs.skew(numpy_arr_av, axis=0))
                added_curtosis.append(scs.kurtosis(numpy_arr_av, axis=0))
            else:
                added_mean.append([None, None, None])
                added_variance.append([None, None, None])
                added_skewness.append([None, None, None])
                added_curtosis.append([None, None, None])
            if len(deleted_vertices_pos) > 0:
                numpy_arr_dv = numpy.array(deleted_vertices_pos)
                deleted_mean.append(numpy.mean(numpy_arr_dv, axis=0))
                deleted_variance.append(numpy.var(numpy_arr_dv, axis=0))
                deleted_skewness.append(scs.skew(numpy_arr_dv, axis=0))
                deleted_curtosis.append(scs.kurtosis(numpy_arr_dv, axis=0))
            else:
                deleted_mean.append([None, None, None])
                deleted_variance.append([None, None, None])
                deleted_skewness.append([None, None, None])
                deleted_curtosis.append([None, None, None])

    final_data = {}
    final_data["added_vertices"] = added_vertices
    final_data["deleted_vertices"] = deleted_vertices
    final_data["added_normals"] = added_normals
    final_data["deleted_normals"] = deleted_normals
    final_data["added_faces"] = added_faces
    final_data["deleted_faces"] = deleted_faces
    final_data["diff_added_centroids"] = diff_added_centroids
    final_data["diff_added_bbox"] = diff_added_bbox
    final_data["diff_deleted_centroids"] = diff_deleted_centroids
    final_data["diff_deleted_bbox"] = diff_deleted_bbox
    final_data["added_mean"] = numpy2list(added_mean)
    final_data["added_variance"] = numpy2list(added_variance)
    final_data["added_skewness"] = numpy2list(added_skewness)
    final_data["added_curtosis"] = numpy2list(added_curtosis)
    final_data["deleted_mean"] = numpy2list(deleted_mean)
    final_data["deleted_variance"] = numpy2list(deleted_variance)
    final_data["deleted_skewness"] = numpy2list(deleted_skewness)
    final_data["deleted_curtosis"] = numpy2list(deleted_curtosis)
    common.save_json(final_data,
                     "../steps/" + self.model_name + "/diff_plot_data.json",
                     compressed=False)
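# numpy2list is referenced above but not defined in this file. A minimal
# sketch of the assumed helper (skip if it is defined elsewhere): it converts
# the per-step numpy vectors produced by numpy.mean/var and scs.skew/kurtosis,
# or the [None, None, None] placeholders, into JSON-serializable lists.
def numpy2list(series):
    out = []
    for entry in series:
        if isinstance(entry, numpy.ndarray):
            out.append(entry.tolist())
        else:
            out.append(list(entry))
    return out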
def clusterize(self, prefix=""):
    clusters = {}
    times = {}
    for obj in self.data:
        if not obj.clustered:
            X = self.retrieve_neighbours(obj)
            if len(X) < self.min_pts:
                obj.clabel = self.noise
                self.noised.append(obj.id)
            else:
                self.cluster_label += 1
                cl_times = []
                print("found clust %s" % self.cluster_label)
                obj.clabel = self.cluster_label
                obj.clustered = True
                clusters[self.cluster_label] = [obj.id]
                cl_times.append(obj.time)
                for idx_X_obj in X:
                    if self.data[idx_X_obj].clustered:
                        continue
                    self.data[idx_X_obj].clabel = self.cluster_label
                    self.data[idx_X_obj].clustered = True
                    clusters[self.cluster_label].append(self.data[idx_X_obj].id)
                    cl_times.append(self.data[idx_X_obj].time)
                stack = X[:]
                while len(stack) > 0:
                    stack_obj_idx = stack.pop(0)
                    stack_X = self.retrieve_neighbours(self.data[stack_obj_idx])
                    if len(stack_X) >= self.min_pts:
                        for stack_X_obj_idx in stack_X:
                            neighbour = self.data[stack_X_obj_idx]
                            # Bug fix: expand onto points that are still
                            # unclustered or currently marked as noise. The
                            # original condition inverted the noise test,
                            # which could steal points from other clusters.
                            if not neighbour.clustered or neighbour.clabel == self.noise:
                                if neighbour.clabel != self.cluster_label:
                                    neighbour.clabel = self.cluster_label
                                    neighbour.clustered = True
                                    cl_times.append(neighbour.time)
                                    clusters[self.cluster_label].append(neighbour.id)
                                    if stack_X_obj_idx not in stack:
                                        stack.append(stack_X_obj_idx)
                print(cl_times)
                times[self.cluster_label] = cl_times

    acc = 0
    labels = [None, ] * len(self.data)
    centroid_pos = [None, ] * len(self.data)
    raw_data = [None, ] * len(self.data)
    for c in clusters:
        print("Cluster no %s [len = %d]" % (c, len(clusters[c])))
        acc += len(clusters[c])
        for el in clusters[c]:
            labels[el] = c
    noise = [obj for obj in self.data if obj.clabel == self.noise]
    print("Noised [len = %d]" % (len(noise)))
    acc += len(noise)
    print("Total instances: %d" % acc)
    # Points left unlabelled (noise) get the sentinel label len(clusters) + 1.
    for idx in range(len(labels)):
        if not labels[idx]:
            labels[idx] = len(clusters) + 1
    for obj in self.data:
        centroid_pos[obj.id] = [obj.x, obj.y, obj.z]
        raw_data[obj.id] = obj.data[:]
    centroid_pos = np.array(centroid_pos)
    labels = np.array(labels)
    raw_data = np.array(raw_data)

    # saving clustering data
    save_dir = ("/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/clustering/" +
                self.model_name + "/")
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    with open(save_dir + prefix + "_centroid_pos", 'wb') as fh:
        np.save(fh, centroid_pos)
    with open(save_dir + prefix + "_labels", 'wb') as fh:
        np.save(fh, labels)
    with open(save_dir + prefix + "_raw_data", 'wb') as fh:
        np.save(fh, raw_data)
    common.save_json(times, save_dir + prefix + "_times.json",
                     compressed=False)
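# clusterize() assumes each element of self.data exposes: id (coinciding with
# its index in self.data, per the bookkeeping above), x, y, z, time, a feature
# vector in .data, and the clabel/clustered flags; the class also provides
# min_pts, noise, cluster_label, noised, and retrieve_neighbours(). A minimal
# method sketch of the assumed neighbour query, using a Euclidean eps-ball as
# in standard DBSCAN (self.eps is an assumption, not in the source):
def retrieve_neighbours(self, obj):
    neighbours = []
    for idx, other in enumerate(self.data):
        if other.id == obj.id:
            continue
        dist = ((other.x - obj.x) ** 2 +
                (other.y - obj.y) ** 2 +
                (other.z - obj.z) ** 2) ** 0.5
        if dist <= self.eps:
            # return indices, since clusterize() indexes self.data with them
            neighbours.append(idx)
    return neighbours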
            else:
                brush_types[final_data[step]["brush_data"]["brush_type"]] = 1
    data[model_name]["brush_no"] = brush_no
    camera_data = common.load_json(data_dir + model_name +
                                   "/camera_movements.json")
    camera_mov = 0
    for step in camera_data:
        camera_mov += len(camera_data[step])
    data[model_name]["camera_movements"] = camera_mov
    # data[model_name]["model_name"] = model_name

for line in data:
    print(line, " ")
    print(data[line])

common.save_json(
    data,
    "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/ipython/data/introduction_table.json",
    compressed=False)
common.save_json(
    brush_types,
    "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/ipython/data/all_brushes_type.json",
    compressed=False)

a = pd.DataFrame(data)
print(a.T)
print(brush_types)
"type": bd_json["brush_type"] }) else: final_list.append({ "pos": [9999.0, 9999.0, 9999.0, 9999.0, 9999.0, 9999.0], "line": [0, 1], "category": "ON_SURFACE", "type": "CLAY" }) final_list_flipped.append({ "pos": [9999.0, 9999.0, 9999.0, 9999.0, 9999.0, 9999.0], "line": [0, 1], "category": "ON_SURFACE", "type": "CLAY" }) common.save_json(final_list, brush_dir + model_name + "/" + model_name + "_flattened_strokes.json", compressed=False) common.save_json(final_list_flipped, brush_dir + model_name + "/" + model_name + "_flattened_strokes_flipped.json", compressed=False)
models = [
    ["alien", 2216], ["elder", 3119], ["elf", 4307], ["engineer", 987],
    ["explorer", 1858], ["fighter", 1608], ["gargoyle", 1058],
    ["gorilla", 2719], ["man", 1580], ["merman", 2619], ["monster", 967],
    ["ogre", 1720], ["sage", 2136]
]

'''
for model_name, max_step in models:
    print("saving data for " + model_name)
    bd = BrushData(model_name, max_step)
    type_json = {}
    for k in range(max_step + 1):
        type_json[str(k)] = bd.load_brush_type(k)
    common.save_json(type_json,
                     "../steps/" + model_name + "/brush_type_new.json")
'''

for model_name, step in models:
    bt = common.load_json("../steps/" + model_name + "/brush_type_new.json")
    bd = BrushData(model_name, step)
    brush_type = bd.load_brush_type(step)  # renamed to avoid shadowing the builtin 'type'
    bt[str(step)] = brush_type
    common.save_json(bt, "../steps/" + model_name + "/brush_type_new.json")
def brush_flattening(model_names):
    '''
    Saves a flattened version of the brush data (for Weka analysis and such)
    '''
    feature_vectors = {}
    for model_name in model_names:
        brush_data = common.load_json("../steps/" + model_name[0] +
                                      "/brush_data.json")
        feature_vectors[model_name[0]] = []
        for step_idx in brush_data:
            print("Model %s | Step %s" % (model_name[0], step_idx))
            data = brush_data[str(step_idx)]
            if data["valid"]:
                sizes = float(data["size"][0])
                unp_sizes = float(data["size"][1])
                modes = data["mode"]
                b_number = data["brush_number"]
                path_lenghts = 0
                # Note: the break below means only the first stroke's path
                # features are used, even when several strokes are recorded.
                for i in range(b_number + 1):
                    path_lenghts = float(data["lenghts"][i])
                    path_centroids = data["centroids"][i]
                    obb_center = [
                        data["obboxes"][i]["bbox_center"][0],
                        data["obboxes"][i]["bbox_center"][1],
                        data["obboxes"][i]["bbox_center"][2],
                    ]
                    obb_dimensions = [
                        data["obboxes"][i]["bbox_ext"][0],
                        data["obboxes"][i]["bbox_ext"][1],
                        data["obboxes"][i]["bbox_ext"][2],
                    ]
                    break
                pressure_mean = data["pressure_mean"]
                pressure_variance = data["pressure_variance"]
                pressure_skewness = data["pressure_skewness"]
                pressure_curtosis = data["pressure_curtosis"]
                path_mean = data["path_mean"]
                path_variance = data["path_variance"]
                path_skewness = data["path_skewness"]
                path_curtosis = data["path_curtosis"]
                feature_vectors[model_name[0]].append([
                    sizes, unp_sizes, modes[0], path_lenghts,
                    path_centroids[0], path_centroids[1], path_centroids[2],
                    obb_center[0], obb_center[1], obb_center[2],
                    obb_dimensions[0], obb_dimensions[1], obb_dimensions[2],
                    pressure_mean, pressure_variance, pressure_skewness,
                    pressure_curtosis, path_mean, path_variance,
                    path_skewness, path_curtosis,
                    int(step_idx)
                ])
        common.save_json(feature_vectors[model_name[0]],
                         "../steps/" + model_name[0] + "/feature_vector.json",
                         compressed=False)
        out = open("../steps/" + model_name[0] + "/feature_vector.csv", "w")
        out.write('size,unp_size,mode,lenght,' +
                  'centroid_x,centroid_y,centroid_z,' +
                  'obb_cen_x,obb_cen_y,obb_cen_z,' +
                  'obb_dim_x,obb_dim_y,obb_dim_z,' +
                  'pressure_mean,pressure_variance,pressure_skewness,pressure_curtosis,' +
                  'path_mean,path_variance,path_skewness,path_curtosis,' +
                  'step\n')
        for line in feature_vectors[model_name[0]]:
            out.write(','.join([str(el) for el in line]) + '\n')
        out.close()
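# The CSV written above can be sanity-checked before a Weka import with
# pandas (a usage sketch, not part of the original pipeline; the path
# assumes the "alien" model has already been flattened):
#   import pandas as pd
#   df = pd.read_csv("../steps/alien/feature_vector.csv")
#   print(df.describe())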
def distance_compressing(model_name, single_file=False):
    '''
    Produces the JSON for the data on mesh distances between steps.
    For every step, it saves:
        - distance mean;
        - distance variance;
        - distance skewness;
        - distance curtosis;
    '''
    if single_file:
        file_name = "../steps/" + model_name[0] + "/distance_data.txt"
        fh = open(file_name, 'r')
        i = 0
        distances = {}
        idx = 0
        for line in fh:
            if i % 2 == 0:
                idx = int(line)
            else:
                data = line.split(' ')
                # Keep only the odd columns (the distance values); the loop
                # variable is renamed so it does not shadow the step index 'idx'.
                data = [float(el) for k, el in enumerate(data) if k % 2 == 1]
                if data:
                    np_data = numpy.array(data)
                    distances[idx] = {
                        "distance_mean": numpy.mean(np_data, axis=0),
                        "distance_variance": numpy.var(np_data, axis=0),
                        "distance_skewness": scs.skew(np_data, axis=0),
                        "distance_curtosis": scs.kurtosis(np_data, axis=0),
                    }
                else:
                    distances[idx] = {
                        "distance_mean": None,
                        "distance_variance": None,
                        "distance_skewness": None,
                        "distance_curtosis": None,
                    }
            i += 1
        fh.close()
        common.save_json(distances,
                         "../steps/" + model_name[0] + "/distance_data.json",
                         compressed=False)
    else:
        dir_name = "../steps/" + model_name[0] + "/dist_data/"
        files = common.get_files_from_directory(dir_name)
        distances = {}
        for path, filename in files:
            step = int(filename[4:])
            fh = open(path, 'r')
            dists = []
            for line in fh:
                data = line.split(' ')
                dists.append(float(data[1].strip()))
            fh.close()
            if dists:
                distances[step] = {
                    "distance_mean": numpy.mean(dists, axis=0),
                    "distance_variance": numpy.var(dists, axis=0),
                    "distance_skewness": scs.skew(dists, axis=0),
                    "distance_curtosis": scs.kurtosis(dists, axis=0),
                }
            else:
                distances[step] = {
                    "distance_mean": None,
                    "distance_variance": None,
                    "distance_skewness": None,
                    "distance_curtosis": None,
                }
        common.save_json(distances,
                         "../steps/" + model_name[0] + "/distance_data.json",
                         compressed=False)
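# Input formats assumed by distance_compressing (inferred from the parsing
# above, not from a spec):
#   distance_data.txt   -- alternating lines: a step index, then a
#                          whitespace-separated row where every odd column
#                          holds a distance value;
#   dist_data/<prefix>N -- one "<id> <distance>" pair per line for step N;
#                          the step is parsed as int(filename[4:]), i.e. a
#                          four-character filename prefix is skipped.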
def generate_final_data(model_names):
    """
    Produces the final JSON, with all the data extracted from brush strokes
    and diffs. For each step, it saves:

    final_data = {
        diff_data = {
            "added_vertices", "deleted_vertices",
            "added_faces", "deleted_faces",
            "diff_added_centroids", "diff_added_bbox",
            "diff_deleted_centroids", "diff_deleted_bbox",
            "added_mean", "added_variance",
            "added_skewness", "added_curtosis",
            "deleted_mean", "deleted_variance",
            "deleted_skewness", "deleted_curtosis"
        }
        brush_data = {
            "valid", "size", "mode", "brush_number", "paths",
            "centroid", "obboxes", "aabboxes", "lenghts", "pressure"
        }
        distance_data = {
            "distance_mean", "distance_variance",
            "distance_skewness", "distance_curtosis"
        }
    }
    """
    for model_name in model_names:
        print("Creating final data for " + model_name[0])
        final_data = {}
        brush_data = common.load_json("../steps/" + model_name[0] +
                                      "/brush_data.json")
        diff_data = common.load_json("../steps/" + model_name[0] +
                                     "/diff_plot_data.json")
        distance_data = common.load_json("../steps/" + model_name[0] +
                                         "/distance_data.json")
        # Step 0 has no preceding step, so its diff and distance data are null.
        final_data[0] = {
            "step_number": 0,
            "valid": brush_data['0']["valid"],
            "brush_data": sanitize_brush_data(brush_data['0']),
            "diff_data": null_diff_data(),
            "distance_data": null_distance_data()
        }
        for step_idx in range(1, len(brush_data)):
            print(str(step_idx) + " ")
            final_data[step_idx] = {
                "step_number": step_idx,
                "valid": brush_data[str(step_idx)]["valid"],
                "brush_data": sanitize_brush_data(brush_data[str(step_idx)]),
                "diff_data": get_diff_data_step(diff_data, step_idx - 1),
                "distance_data": get_distance_data_step(distance_data,
                                                        str(step_idx)),
            }
        common.save_json(final_data,
                         "../final_data/" + model_name[0] + "/final_data.json",
                         compressed=False)
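# null_diff_data() and null_distance_data() are referenced above but not
# defined in this file. Minimal sketches consistent with the key lists in the
# docstring and with the invalid-step placeholders used by diff_compressing
# (assumed, not the original implementations; skip if defined elsewhere):
def null_distance_data():
    return {
        "distance_mean": None,
        "distance_variance": None,
        "distance_skewness": None,
        "distance_curtosis": None,
    }

def null_diff_data():
    null_stats = [None, None, None]
    return {
        "added_vertices": 0, "deleted_vertices": 0,
        "added_normals": 0, "deleted_normals": 0,
        "added_faces": 0, "deleted_faces": 0,
        "diff_added_centroids": [], "diff_added_bbox": [],
        "diff_deleted_centroids": [], "diff_deleted_bbox": [],
        "added_mean": null_stats, "added_variance": null_stats,
        "added_skewness": null_stats, "added_curtosis": null_stats,
        "deleted_mean": null_stats, "deleted_variance": null_stats,
        "deleted_skewness": null_stats, "deleted_curtosis": null_stats,
    }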
            angles[model].append(0.0)
        ang2 = get_2d_angles(bd)
        if ang2:
            angles_2d[model].append(np.mean(ang2))
        else:
            angles_2d[model].append(0.0)
        ang2f = get_2d_angles(bd, True)
        if ang2f:
            angles_2d_f[model].append(np.mean(ang2f))
        else:
            angles_2d_f[model].append(0.0)

common.save_json(
    lengths,
    "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/ipython/data/scatter_lengths.json",
    compressed=False)
common.save_json(
    sizes,
    "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/ipython/data/scatter_sizes.json",
    compressed=False)
common.save_json(
    pressures,
    "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/ipython/data/scatter_pressures.json",
    compressed=False)
common.save_json(
    distances,
    "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/ipython/data/scatter_distances.json",
    compressed=False)
common.save_json(
    angles,