def add_brush_category_to_final(model_name):
    """Annotate every step's brush_data with a coarse "brush_category".

    Categories: ON_SURFACE for surface-deforming brushes, GLOBAL for
    whole-mesh brushes (grab / snake hook), MASK for masking. Steps whose
    brush_type matches none of the known types are left untouched.
    The final_data_3.json file is rewritten in place.
    """
    json_path = ("/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/"
                 + model_name[0] + "/final_data_3.json")
    final_data = common.load_json(json_path)
    # Map each known brush type directly to its category.
    category_of = {}
    for brush_type in ['BLOB', 'CLAY', 'CLAY_STRIPS', 'CREASE', 'DRAW',
                       'FLATTEN', 'INFLATE', 'LAYER', 'PINCH', 'SCRAPE',
                       'SMOOTH']:
        category_of[brush_type] = "ON_SURFACE"
    for brush_type in ['GRAB', 'SNAKE_HOOK']:
        category_of[brush_type] = "GLOBAL"
    category_of['MASK'] = "MASK"
    last_step = len(final_data) - 1
    for step in final_data:
        print("%s / %d" % (step, last_step), )
        brush = final_data[step]["brush_data"]
        if brush["brush_type"] in category_of:
            brush["brush_category"] = category_of[brush["brush_type"]]
    common.save_json(final_data, json_path, compressed=False)
def add_brush_type_to_final(model_name):
    """Copy the per-step brush type from brush_type_new.json into the
    complete final_data_3.json, rewriting that file in place.

    Assumes brush_type_new.json has an entry for every step key present
    in the final data (a missing key raises KeyError).
    """
    json_path = ("/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/"
                 + model_name[0] + "/final_data_3.json")
    final_data = common.load_json(json_path)
    brush_type_data = common.load_json("../steps/" + model_name[0]
                                       + "/brush_type_new.json")
    last_step = len(final_data) - 1
    for step in final_data:
        print("%s / %d" % (step, last_step), )
        final_data[step]["brush_data"]["brush_type"] = brush_type_data[step]
    common.save_json(final_data, json_path, compressed=False)
def final_data_flattening(model_name):
    """Flatten the brush data into one record per polyline point.

    Each output record repeats the stroke-level attributes (projected and
    unprojected size, mode, step number) next to a single path point and
    its pressure sample. Only steps flagged "valid" contribute, and only
    the first path of each step is used. The result is written to
    .../flattened/<model>/flattened_data.json.
    """
    final_data = common.load_json(
        "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/"
        + model_name[0] + "/final_data.json")
    records = []
    last_step = len(final_data) - 1
    print("Flattening model %s" % model_name[0])
    for step in final_data:
        print("%s / %d" % (step, last_step), )
        brush = final_data[step]["brush_data"]
        if not brush["valid"]:
            continue
        for idx, point in enumerate(brush["paths"][0]):
            records.append({
                "step": int(step),
                "projected_size": brush["size"][0][0],
                "unprojected_size": brush["size"][0][1],
                "position": point,
                "pressure": brush["pressure"][0][idx],
                "mode": brush["mode"][0],
            })
    save_path = ("/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/flattened/"
                 + model_name[0] + "/")
    common.make_dirs(save_path)
    common.save_json(records, save_path + "flattened_data.json",
                     compressed=False)
def add_brush_2d_pos_to_final(model_name):
    """Merge the screen-space brush path and its 2D length into the final
    data: reads final_data_2.json, writes final_data_3.json.

    A step that is marked valid but has no 2D-position entry is reported
    with an ERROR banner (processing continues regardless).
    """
    base = ("/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/"
            + model_name[0])
    final_data = common.load_json(base + "/final_data_2.json")
    brush_2d_pos_data = common.load_json("../steps/" + model_name[0]
                                         + "/brush_2d_pos.json")
    last_step = len(final_data) - 1
    for step in final_data:
        print("%s / %d" % (step, last_step), )
        brush = final_data[step]["brush_data"]
        if step in brush_2d_pos_data:
            brush["brush_2d_pos"] = brush_2d_pos_data[step]["path"]
            # NB: "lenght" spelling matches the upstream JSON keys.
            brush["lenght_2d"] = brush_2d_pos_data[step]["lenght"]
        elif brush["valid"]:
            print("############# ERROR #############")
    common.save_json(final_data, base + "/final_data_3.json",
                     compressed=False)
def generate_script(self, diff_no):
    """Write a MeshLab filter script computing the Hausdorff distance
    between layer 0 (target) and layer 1 (sampled) for step `diff_no`.

    The sample count scales with the step's vertex count
    (self.sample_perc) and the max search distance is 5% of the model's
    bounding-box diagonal. The script is written to self.script_path.
    """
    # e.g.: diff_no = 10 -> obj_1 = 9, obj_2 = 10
    bbox_diag = {
        "alien": 4.88411, "elder": 18.1198, "elf": 5.62413,
        "engineer": 2.03795, "explorer": 17.2768, "fighter": 2.0678,
        "gargoyle": 10.5912, "gorilla": 12.3074, "man": 4.38392,
        "merman": 4.90317, "monster": 6.87775, "ogre": 20.0907,
        "sage": 4.91853
    }
    # Mesh data supplies the vertex count used to size the sampling.
    md = common.load_json("../steps/" + self.model_name + "/mesh_data.json")
    vertices_no = int(md[str(diff_no)]['vertices_no'])
    diag = bbox_diag[self.model_name]
    sample_num = str(int(vertices_no * self.sample_perc))
    max_dist = str((diag / 100.0) * 5.0)
    pieces = [
        "<!DOCTYPE FilterScript>\n",
        "<FilterScript>\n",
        "<filter name=\"Hausdorff Distance\">\n",
        "<Param type=\"RichMesh\" name=\"SampledMesh\" value=\"1\" />\n",
        "<Param type=\"RichMesh\" name=\"TargetMesh\" value=\"0\" />\n",
        "<Param type=\"RichBool\" name=\"SaveSample\" value=\"true\" />\n",
        "<Param type=\"RichBool\" name=\"SampleVert\" value=\"true\" />\n",
        "<Param type=\"RichBool\" name=\"SampleEdge\" value=\"true\" />\n",
        "<Param type=\"RichBool\" name=\"SampleFauxEdge\" value=\"false\"/>\n",
        "<Param type=\"RichBool\" name=\"SampleFace\" value=\"true\" />\n",
        "<Param type=\"RichInt\" name=\"SampleNum\" value=\"" + sample_num + "\" />\n",
        "<Param type=\"RichAbsPerc\" name=\"MaxDist\" value=\"" + max_dist
        + "\" min=\"0\" max=\"" + str(diag) + "\"/>\n",
        "</filter>\n",
        "<filter name=\"Select None\">\n",
        "<Param type=\"RichBool\" name=\"allFaces\" value=\"true\" />\n",
        "<Param type=\"RichBool\" name=\"allVerts\" value=\"true\" />\n",
        "</filter>\n",
        "<filter name=\"Change the current layer\">\n",
        "<Param type=\"RichMesh\" name=\"mesh\" value=\"1\" />\n",
        "</filter>\n",
        "</FilterScript>\n",
    ]
    with open(self.script_path, 'w') as script_fh:
        script_fh.write("".join(pieces))
def load_data(filename, is_brush=True):
    """Load a final_data JSON file into an ordered list of Data objects.

    Parameters:
        filename: path to a final_data JSON file produced by the pipeline.
        is_brush: when True wrap each step's "brush_data" with kind "b";
                  otherwise wrap its "diff_data" with kind "d".

    Returns:
        Data objects sorted by step_number. Steps whose payload raises
        TypeError during construction (e.g. None fields) are skipped.
    """
    final_data = common.load_json(filename)
    # Pre-size by step_number so the result comes out in step order even
    # though JSON dict iteration order is arbitrary.
    loaded_data = [None] * len(final_data)
    data_id = 0  # renamed from `id`, which shadowed the builtin
    for step in final_data:
        step_data = final_data[step]
        try:
            step_number = int(step_data["step_number"])
            loaded_data[step_number] = Data(
                data_id,
                step_number,
                "b" if is_brush else "d",
                step_data["brush_data" if is_brush else "diff_data"])
            data_id += 1
        except TypeError:
            # Malformed step entries are deliberately ignored.
            continue
    # Drop the slots that were never filled.
    return [el for el in loaded_data if el]
def add_brush_2d_angle_to_final(model_name):
    """Compute the 2D stroke angles (raw and filtered) for every valid
    step and store them in final_data_3.json, rewriting it in place."""
    json_path = ("/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/"
                 + model_name[0] + "/final_data_3.json")
    final_data = common.load_json(json_path)
    last_step = len(final_data) - 1
    for step in final_data:
        print("%s / %d" % (step, last_step), )
        brush = final_data[step]["brush_data"]
        if brush["valid"]:
            brush["brush_2d_angles"] = get_2d_angles(brush)
            brush["brush_2d_angles_filtered"] = get_2d_angles(brush, True)
    common.save_json(final_data, json_path, compressed=False)
def visualize(self, show=False, save_image=True, prefix=""):
    """Plot per-cluster stroke-time histograms and a clustering scatter.

    Loads the numpy arrays (centroid positions, labels, raw data) and the
    cluster timing JSON previously saved under
    .../clustering/<model>/<prefix>_*, draws one histogram per cluster
    plus one for noise, optionally saves the figure, then delegates the
    spatial view to plot_clustering_scatter.

    show: call plt.show() at the end.
    save_image: save the histogram figure (and is forwarded to the
        scatter helper).
    prefix: filename prefix the arrays/JSON were saved with.
    """
    load_dir = "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/clustering/" + self.model_name + "/"
    fh = open(load_dir + prefix + "_centroid_pos", 'rb')
    centroid_pos = np.load(fh)
    fh.close()
    fh = open(load_dir + prefix + "_labels", 'rb')
    labels = np.load(fh)
    fh.close()
    fh = open(load_dir + prefix + "_raw_data", 'rb')
    raw_data = np.load(fh)
    fh.close()
    cluster_timings = common.load_json(load_dir + prefix + "_times.json")
    # Start from all stroke times; anything that appears in a cluster is
    # removed below, so what remains is the noise set.
    noise = []
    for obj in self.data:
        noise.append(obj.time)
    max_time = -1
    for c in cluster_timings:
        for el in cluster_timings[c]:
            max_time = max(max_time, el)
            if el in noise:
                noise.remove(el)  # removes one occurrence per clustered time
    for el in noise:
        max_time = max(max_time, el)
    fig2 = plt.figure(figsize=(22, 10), facecolor="white")
    # Grid sized to fit every cluster plus one extra subplot for noise.
    cols = int(math.sqrt(len(cluster_timings) + 1))
    rows = ((len(cluster_timings) + 1) // cols) + 1
    for c_idx in cluster_timings:
        cl_time = np.array(cluster_timings[c_idx])
        print("%d%d%d" % (rows, cols, int(c_idx)))
        # NOTE(review): subplot indices are 1-based; this assumes cluster
        # keys start at 1 — an index of 0 would fail. Confirm upstream.
        ax = fig2.add_subplot(rows, cols, int(c_idx))
        # NOTE(review): plt.cm.spectral was removed in matplotlib >= 2.2
        # (renamed nipy_spectral); this code presumes an older matplotlib.
        n, bin, patches = ax.hist(cl_time, bins=30, range=(0, max_time), color=plt.cm.spectral(float(int(c_idx)) / (len(cluster_timings) + 1)))
        plt.title("C%s [%d, m=%.2f, v=%.2f]" % (c_idx, len(cl_time), np.mean(cl_time), np.std(cl_time)) )
    # Last subplot: histogram of the noise (unclustered) times.
    ax = fig2.add_subplot(rows, cols, len(cluster_timings) + 1)
    n, bin, patches = ax.hist(noise, bins=30, range=(0, max_time), color=plt.cm.spectral(1.0))
    fig2.tight_layout()
    if save_image:
        root_images = "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/results/"
        common.make_dirs(root_images)
        plt.savefig(root_images + self.model_name + "_st-dbscan-" + prefix + "_times.png")
    fig = plt.figure(figsize=(22, 10), facecolor="white")
    plot_clustering_scatter(centroid_pos, labels, raw_data, title="[%s] st-dbscan" % self.model_name, fig=fig, subplot_idx=111, model_name=self.model_name, save_image=save_image, prefix=prefix)
    if show:
        plt.show()
"artist": "jw" }, "ogre": { "technique": "subd", "artist": "jw" }, "sage": { "technique": "subd", "artist": "jw" } } brush_types = {} for model_name in data: mesh_data = common.load_json(data_dir + model_name + "/mesh_data.json") max_step_num = str(len(mesh_data.keys()) - 1) data[model_name]["faces_no"] = mesh_data[max_step_num]["faces_no"] final_data = common.load_json(final_data_dir + model_name + "/final_data_3.json") brush_no = 0 for step in final_data: if final_data[step]["brush_data"]["valid"]: brush_no += final_data[step]["brush_data"]["brush_number"] if final_data[step]["brush_data"]["brush_type"] in brush_types: brush_types[final_data[step]["brush_data"]["brush_type"]] += 1 else: brush_types[final_data[step]["brush_data"]["brush_type"]] = 1 data[model_name]["brush_no"] = brush_no
ax.scatter(X_red[:, force_idx[0]], -1.0 * X_red[:, force_idx[2]], X_red[:, force_idx[1]], c=labels, cmap=plt.cm.cool, s=c * 1000) ax.view_init(elev=0, azim=-90) plt.axis('equal') if title is not None: plt.title(title, size=17) # Loading brush data model_name = "monster" json_array = common.load_json("../steps/" + model_name + "/feature_vector.json") modes = {} for idx_l in range(len(json_array)): for idx_e in range(len(json_array[idx_l])): if idx_e != 2: json_array[idx_l][idx_e] = float(json_array[idx_l][idx_e]) elif idx_e == 2: if json_array[idx_l][idx_e] in modes: json_array[idx_l][idx_e] = modes[json_array[idx_l][idx_e]] else: print("Adding %d for %s" % (len(modes), json_array[idx_l][idx_e])) modes[json_array[idx_l][idx_e]] = len(modes) json_array[idx_l][idx_e] = modes[json_array[idx_l][idx_e]]
labels = [] k = 0 for step_idx in range(len(final_data)): if final_data[str(step_idx)]["valid"]: for i in range(final_data[str(step_idx)]["brush_data"]["brush_number"]): if time_serialized: ret_list.append([step_idx, final_data[str(step_idx)]["brush_data"][attribute][i]]) else: ret_list.append(final_data[str(step_idx)]["brush_data"][attribute][i]) labels.append(k) k += 1 return ret_list, labels # Loading brush data model_name = "ogre" json_array = common.load_json("../steps/" + model_name + "/final_data.json") centroid, centroid_label = filter_brush_attribute(json_array, "centroids", False) centroid_dist = [] for point_idx in range(1, len(centroid)): dist = np.linalg.norm(np.array(centroid[point_idx]) - np.array(centroid[point_idx - 1])) centroid_dist.append(dist) for attr_name in ["lenghts", "size", "unp_size", "centroid_dist"]: if attr_name == "size" or attr_name == "unp_size": attr_data, labels = filter_brush_attribute(json_array, "size", False) elif attr_name == "centroid_dist": attr_data, labels = centroid_dist, centroid_label else: attr_data, labels = filter_brush_attribute(json_array, attr_name, False)
model_names = [ "alien", "elder", "elf", "engineer", "explorer", "fighter", "gargoyle", "gorilla", "man", "merman", "monster", "ogre", "sage" ] on_surface = [ 'BLOB', 'CLAY', 'CLAY_STRIPS', 'CREASE', 'DRAW', 'FLATTEN', 'INFLATE', 'LAYER', 'PINCH', 'SCRAPE', 'SMOOTH', 'MASK' ] global_brush = ['GRAB', 'SNAKE_HOOK'] for model_name in model_names: print("Analyzing model " + model_name) fd_json = common.load_json(brush_dir + model_name + "/final_data_3.json") final_list = [] final_list_flipped = [] for k in range(len(fd_json)): print(str(k) + "/" + str(len(fd_json)), ) bd_json = fd_json[str(k)]["brush_data"] if bd_json["valid"]: points = bd_json["paths"][0] flattened_points = [] flattened_points_flipped = [] flattened_indices = [] for idx, p in enumerate(points): flattened_points += p flattened_points_flipped += [-p[0], p[1], p[2]] if idx < len(points) - 1:
# Model names paired with their last step index.
models = [
    ["alien", 2216], ["elder", 3119], ["elf", 4307], ["engineer", 987],
    ["explorer", 1858], ["fighter", 1608], ["gargoyle", 1058],
    ["gorilla", 2719], ["man", 1580], ["merman", 2619], ["monster", 967],
    ["ogre", 1720], ["sage", 2136]
]
# Disabled bulk pass: regenerates brush_type_new.json for every step of
# every model (kept for reference).
'''
for model_name, max_step in models:
    print("saving data for " + model_name)
    bd = BrushData(model_name, max_step)
    type_json = {}
    for k in range(max_step + 1):
        type_json[str(k)] = bd.load_brush_type(k)
    common.save_json(type_json, "../steps/" + model_name + "/brush_type_new.json")
'''
# Incremental pass: refresh only the entry for each model's listed step.
for model_name, step in models:
    bt = common.load_json("../steps/" + model_name + "/brush_type_new.json")
    bd = BrushData(model_name, step)
    # NOTE(review): `type` shadows the builtin; harmless here but worth renaming.
    type = bd.load_brush_type(step)
    bt[str(step)] = type
    common.save_json(bt, "../steps/" + model_name + "/brush_type_new.json")
"sage": { 0: [-1, 0], 80: [0, 1], 1267: [2, 3], 1525: [3, 4], 1798: [4, 3], 1802: [3, 4], 558: [1, 2] }, "man": { 0: [-1, 0], 16: [3, 2], 658: [2, 3], 1174: [3, 4], 9: [0, 1], 10: [1, 2], 11: [2, 3] } } for model_name in subd_steps: bd = common.load_json( "/Users/christian/Desktop/Ph.D./sculptAnalysis_final_data/complete/" + model_name + "/final_data_3.json") for step in subd_steps[model_name]: for k in [-1, 0, 1]: if str(step + k) in bd: print(model_name, step + k, bd[str(step + k)]["distance_data"]["distance_mean"]) print() print("--------")
def brush_flattening(model_names):
    '''
    Saves a flattened version of the brush data (for Weka analysis and such).

    For each model, builds one feature vector per valid step (sizes, mode,
    first-brush path stats, pressure/path moments, step index) and writes
    them both as JSON (feature_vector.json) and CSV (feature_vector.csv)
    under ../steps/<model>/.
    '''
    feature_vectors = {}
    for model_name in model_names:
        brush_data = common.load_json("../steps/" + model_name[0] + "/brush_data.json")
        feature_vectors[model_name[0]] = []
        for step_idx in brush_data:
            print("Model %s | Step %s" % (model_name[0], step_idx))
            data = brush_data[str(step_idx)]
            if data["valid"]:
                sizes = float(data["size"][0])
                unp_sizes = float(data["size"][1])
                modes = data["mode"]
                b_number = data["brush_number"]
                path_lenghts = 0
                # NOTE(review): the `break` means only i == 0 (the first
                # brush) is ever read, despite the range over b_number —
                # presumably intentional, but verify.
                for i in range(b_number + 1):
                    path_lenghts = float(data["lenghts"][i])
                    path_centroids = data["centroids"][i]
                    obb_center = [None, None, None]
                    obb_center[0] = data["obboxes"][i]["bbox_center"][0]
                    obb_center[1] = data["obboxes"][i]["bbox_center"][1]
                    obb_center[2] = data["obboxes"][i]["bbox_center"][2]
                    obb_dimensions = [None, None, None]
                    obb_dimensions[0] = data["obboxes"][i]["bbox_ext"][0]
                    obb_dimensions[1] = data["obboxes"][i]["bbox_ext"][1]
                    obb_dimensions[2] = data["obboxes"][i]["bbox_ext"][2]
                    break
                # Distribution moments precomputed upstream.
                pressure_mean = data["pressure_mean"]
                pressure_variance = data["pressure_variance"]
                pressure_skewness = data["pressure_skewness"]
                pressure_curtosis = data["pressure_curtosis"]
                path_mean = data["path_mean"]
                path_variance = data["path_variance"]
                path_skewness = data["path_skewness"]
                path_curtosis = data["path_curtosis"]
                # Column order must match the CSV header written below.
                feature_vectors[model_name[0]].append([
                    sizes, unp_sizes, modes[0], path_lenghts,
                    path_centroids[0], path_centroids[1], path_centroids[2],
                    obb_center[0], obb_center[1], obb_center[2],
                    obb_dimensions[0], obb_dimensions[1], obb_dimensions[2],
                    pressure_mean, pressure_variance, pressure_skewness,
                    pressure_curtosis, path_mean, path_variance,
                    path_skewness, path_curtosis,
                    int(step_idx)
                ])
        common.save_json(feature_vectors[model_name[0]],
                         "../steps/" + model_name[0] + "/feature_vector.json",
                         compressed=False)
        out = open("../steps/" + model_name[0] + "/feature_vector.csv", "w")
        out.write('size,unp_size,mode,lenght,' + \
                  'centroid_x,centroid_y,centroid_z,' + \
                  'obb_cen_x,obb_cen_y,obb_cen_z,'+ \
                  'obb_dim_x,obb_dim_y,obb_dim_z,'+ \
                  'pressure_mean,pressure_variance,pressure_skewness,pressure_curtosis,'+ \
                  'path_mean,path_variance,path_skewness,path_curtosis,'+ \
                  'step\n')
        for line in feature_vectors[model_name[0]]:
            l = ','.join([str(el) for el in line])
            out.write(l + '\n')
        out.close()
def generate_final_data(model_names):
    """Produce the final JSON combining brush, diff, and distance data.

    For each model, merges ../steps/<model>/{brush_data,diff_plot_data,
    distance_data}.json into ../final_data/<model>/final_data.json.
    Each step entry holds:

    final_data = {
        diff_data = {
            "added_vertices", "deleted_vertices",
            "added_faces", "deleted_faces",
            "diff_added_centroids", "diff_added_bbox",
            "diff_deleted_centroids", "diff_deleted_bbox",
            "added_mean", "added_variance", "added_skewness",
            "added_curtosis",
            "deleted_mean", "deleted_variance", "deleted_skewness",
            "deleted_curtosis"
        }
        brush_data = {
            "valid", "size", "mode", "brush_number", "paths",
            "centroid", "obboxes", "aabboxes", "lenghts", "pressure"
        }
        distance_data = {
            "distance_mean", "distance_variance",
            "distance_skewness", "distance_curtosis"
        }
    }
    """
    for model_name in model_names:
        # Fixed typo in log message ("fina" -> "final").
        print("Creating final data for " + model_name[0])
        final_data = {}
        brush_data = common.load_json("../steps/" + model_name[0]
                                      + "/brush_data.json")
        diff_data = common.load_json("../steps/" + model_name[0]
                                     + "/diff_plot_data.json")
        distance_data = common.load_json("../steps/" + model_name[0]
                                         + "/distance_data.json")
        # Step 0 has no predecessor, so diff and distance data are null.
        final_data[0] = {
            "step_number": 0,
            "valid": brush_data['0']["valid"],
            "brush_data": sanitize_brush_data(brush_data['0']),
            "diff_data": null_diff_data(),
            "distance_data": null_distance_data()
        }
        for step_idx in range(1, len(brush_data)):
            print(str(step_idx) + " ", )
            # Diff entries are offset by one: diff i describes the change
            # from step i to step i + 1.
            final_data[step_idx] = {
                "step_number": step_idx,
                "valid": brush_data[str(step_idx)]["valid"],
                "brush_data": sanitize_brush_data(brush_data[str(step_idx)]),
                "diff_data": get_diff_data_step(diff_data, step_idx - 1),
                "distance_data": get_distance_data_step(distance_data,
                                                        str(step_idx))
            }
        common.save_json(final_data,
                         "../final_data/" + model_name[0] + "/final_data.json",
                         compressed=False)