def WriteParameter(fc_file, spreadsheet, alias, par_write, write):
    cell_out = None
    Fname = None  # so the finally clause is safe if F.open() fails

    #___________________GET FC FILE
    try:
        F.open(fc_file)
        Fname = bpy.path.display_name_from_filepath(fc_file)
        F.setActiveDocument(Fname)

        #___________________SEARCH FOR SKETCHES
        for obj in F.ActiveDocument.Objects:
            if obj.Label == spreadsheet:
                if alias in obj.PropertiesList:
                    cell = obj.getCellFromAlias(alias)
                    if write:
                        obj.set(cell, str(par_write))
                        F.ActiveDocument.recompute()
                        F.getDocument(Fname).save()
                    cell_out = obj.get(cell)
                    break
    except:
        info('WriteParameter error')
    finally:
        if Fname:
            F.closeDocument(Fname)

    return cell_out
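# A minimal usage sketch for WriteParameter above - not part of the original
# code. It assumes `F` is the FreeCAD module (import FreeCAD as F), that the
# script runs inside Blender (bpy must be importable), and that the
# hypothetical file 'example.FCStd' contains a spreadsheet labelled
# 'Spreadsheet' with a cell aliased 'width'.
import FreeCAD as F

# write 42 into the aliased cell (recomputes and saves), then read it back
value = WriteParameter('/tmp/example.FCStd', 'Spreadsheet', 'width', 42, write=True)

# read-only query of the same cell
current = WriteParameter('/tmp/example.FCStd', 'Spreadsheet', 'width', None, write=False)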
def draw_add_node_operator(layout, nodetype, label=None, icon_name=None, params=None):
    """
    Draw node adding operator button.
    This is to be used both in Shift-A menu and in T panel.
    """
    default_context = bpy.app.translations.contexts.default

    node_class = get_node_class_reference(nodetype)
    if node_class is None:
        info("cannot locate node class: %s", nodetype)
        return
    node_rna = node_class.bl_rna

    if label is None:
        if hasattr(node_rna, 'bl_label'):
            label = node_rna.bl_label
        elif nodetype == "NodeReroute":
            label = "Reroute"
        else:
            label = node_rna.name

    if params is None:
        params = dict(text=label)
    params['text_ctxt'] = default_context
    if icon_name is not None:
        params.update(**icon(icon_name))
    else:
        params.update(**node_icon(node_rna))

    add = layout.operator("node.sv_add_" + get_node_idname_for_operator(nodetype), **params)
    add.type = nodetype
    add.use_transform = True
    return add
def check_category(directory):
    dir_name = basename(directory)
    bad_files = []
    known = []
    for module_path in glob(join(directory, "*.py")):
        module_file = basename(module_path)
        if module_file == "__init__.py":
            continue
        module_name, ext = splitext(module_file)
        doc_name = module_name + ".rst"
        doc_path = join(docs_dir, dir_name, doc_name)
        if not exists(doc_path):
            if module_file in known_problems:
                known.append(module_file)
            else:
                bad_files.append(module_file)
    if known:
        info("Category %s: Tolerating nonexistence of the documentation for the following nodes for now:\n%s",
             dir_name, "\n".join(known))
    if bad_files:
        self.fail("Not all nodes of category `{}' have corresponding documentation; missing are:\n{}"
                  .format(dir_name, "\n".join(bad_files)))
def check_category(directory):
    dir_name = basename(directory)
    bad_files = []
    known = []
    for module_path in glob(join(directory, "*.py")):
        module_file = basename(module_path)
        if module_file == "__init__.py":
            continue
        module_name, ext = splitext(module_file)
        doc_name = module_name + ".rst"
        doc_path = join(docs_dir, dir_name, doc_name)
        if not exists(doc_path):
            if module_file in known_problems:
                known.append(module_file)
            else:
                bad_files.append(module_file)

    category = dir_name
    if known:
        explicitly_missing = "\n".join(known)
        info(f"{category=}: Tolerating missing documentation for the following nodes for now:\n{explicitly_missing=}")
    if bad_files:
        missing = "\n".join(bad_files)
        self.fail(f"Not all nodes of {category=} have corresponding documentation; \n{missing=}")
def make_categories():
    original_categories = make_node_cats()

    node_cats = juggle_and_join(original_categories)
    node_categories = []
    node_count = 0
    for category, nodes in node_cats.items():
        name_big = "SVERCHOK_" + category.replace(' ', '_')
        node_items = []
        for item in nodes:
            nodetype = item[0]
            rna = get_node_class_reference(nodetype)
            if not rna and not nodetype == 'separator':
                info("Node `%s' is not available (probably due to missing dependencies).", nodetype)
            else:
                node_item = SverchNodeItem.new(nodetype)
                node_items.append(node_item)
        if node_items:
            node_categories.append(SverchNodeCategory(name_big, category, items=node_items))
            node_count += len(nodes)
    node_categories.append(SverchNodeCategory("SVERCHOK_GROUPS", "Groups", items=sv_group_items))

    return node_categories, node_count, original_categories
def apply_core_props(node, node_ref):
    params = node_ref['params']
    if 'cls_dict' in params:
        return

    param_names = params.keys()
    if hasattr(node, "force_param_order_iojson"):
        param_names = node.force_param_order_iojson
        info(f"iojson - Forcing param order, {param_names}")

    for p in param_names:
        # print(f"    param {p}")
        val = params[p]
        try:
            setattr(node, p, val)
        except Exception as e:
            # FIXME: this is ugly, need to find better approach
            error_message = repr(e)  # for reasons
            error(error_message)
            msg = 'failed to assign value to the node'
            debug("`%s': %s = %s: %s", node.name, p, val, msg)
            if "val: expected sequence items of type boolean, not int" in error_message:
                debug("going to convert a list of ints to a list of bools and assign that instead")
                setattr(node, p, [bool(i) for i in val])
def LabelReader(self, context):
    labels = [('', '', '')]

    tree = bpy.data.node_groups[self.tree_name]
    node = tree.nodes[self.node_name]
    fc_file_list = node.inputs['File Path'].sv_get()[0]

    obj_mask = []
    if node.read_features: obj_mask.append('PartDesign')
    if node.read_part: obj_mask.append('Part')
    if node.read_body: obj_mask.append('PartDesign::Body')

    for f in fc_file_list:
        doc, Fname = None, None  # so except/finally are safe if F.open() fails
        try:
            doc = F.open(f)
            Fname = doc.Name or bpy.path.display_name_from_filepath(f)
            for obj in doc.Objects:
                if obj.Module in obj_mask or obj.TypeId in obj_mask:
                    labels.append((obj.Label, obj.Label, obj.Label))
        except Exception as err:
            info(f'FCStd label read error: {Fname=}')
            info(err)
        finally:
            # del doc
            if doc:
                F.closeDocument(doc.Name)

    return labels
def LabelReader(operator):
    tree = bpy.data.node_groups[operator.tree_name]
    node = tree.nodes[operator.node_name]

    module_filter = []
    # \/ does not appear to be available from the items= func
    # node = self.get_node(context)
    if node.read_features: module_filter.append('PartDesign')
    if node.read_part: module_filter.append('Part')
    if node.read_body: module_filter.append('PartDesign::Body')
    if node.merge_linked: module_filter.append('App::Link')

    labels = [('', '', '')]
    fc_file_list = node.inputs['File Path'].sv_get()[0]
    for fc_file in fc_file_list:
        doc = None  # so the finally clause is safe if F.open() fails
        try:
            doc = F.open(fc_file)
            for obj in doc.Objects:
                if obj.Module in module_filter or obj.TypeId in module_filter:
                    labels.append((obj.Label, obj.Label, obj.Label))
        except:
            info('FCStd label read error')
        finally:
            # del doc
            if doc:
                F.closeDocument(doc.Name)

    return labels
def execute(self, context):
    if not self.id_tree:
        msg = "Node tree is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}
    if not self.preset_name:
        msg = "Preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    ng = bpy.data.node_groups[self.id_tree]
    nodes = list(filter(lambda n: n.select, ng.nodes))
    if not len(nodes):
        msg = "There are no selected nodes to export"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    layout_dict = JSONExporter.get_nodes_structure([n for n in ng.nodes if n.select])
    preset = SvPreset(name=self.preset_name, category=self.category)
    preset.make_add_operator()
    destination_path = preset.path
    with open(destination_path, 'w') as fp:  # close the file promptly
        json.dump(layout_dict, fp, sort_keys=True, indent=2)
    msg = 'exported to: ' + destination_path
    self.report({"INFO"}, msg)
    info(msg)
    return {'FINISHED'}
def execute(self, context):
    if not self.preset_name:
        msg = "Preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    path = get_preset_path(self.preset_name)

    gist_filename = self.preset_name + ".json"
    gist_description = self.preset_name
    with open(path, 'rb') as jsonfile:
        gist_body = jsonfile.read().decode('utf8')

    try:
        gist_url = sv_gist_tools.main_upload_function(gist_filename, gist_description, gist_body, show_browser=False)
        context.window_manager.clipboard = gist_url   # full destination url
        info(gist_url)
        self.report({'WARNING'}, "Copied gist URL to clipboard")
        sv_gist_tools.write_or_append_datafiles(gist_url, gist_filename)
    except Exception as err:
        exception(err)
        self.report({'ERROR'}, "Error 222: net connection or github login failed!")
        return {'CANCELLED'}
    # a `finally: return {'FINISHED'}` here would swallow the CANCELLED
    # result above, so the success return is kept outside the try block
    return {'FINISHED'}
def changable_sockets(node, inputsocketname, outputsocketname):
    '''
    arguments: node, name of the socket to follow, list of sockets to change
    '''
    if inputsocketname not in node.inputs:
        # - node not initialized in sv_init yet,
        # - or socket name incorrect
        info("changable_socket was called on node (%s) with a socket named \"%s\"; this socket does not exist" % (node.name, inputsocketname))
        return

    in_socket = node.inputs[inputsocketname]
    ng = node.id_data
    if in_socket.links:
        in_other = get_other_socket(in_socket)
        if not in_other:
            return
        outputs = node.outputs
        s_type = in_other.bl_idname
        if s_type == 'SvDummySocket':
            return
        if outputs[outputsocketname[0]].bl_idname != s_type:
            node.id_data.freeze(hard=True)
            to_links = {}
            for n in outputsocketname:
                out_socket = outputs[n]
                to_links[n] = [l.to_socket for l in out_socket.links]
                outputs.remove(outputs[n])
            for n in outputsocketname:
                new_out_socket = outputs.new(s_type, n)
                for to_socket in to_links[n]:
                    ng.links.new(to_socket, new_out_socket)
            node.id_data.unfreeze(hard=True)
def execute(self, context):
    global is_currently_enabled
    is_currently_enabled = not is_currently_enabled
    info("Profiling is set to %s", is_currently_enabled)
    return {'FINISHED'}
def execute(self, context):
    if not self.preset_name:
        msg = "Preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}
    if not self.gist_id:
        msg = "Gist ID is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    gist_data = sv_IO_panel_tools.load_json_from_gist(self.gist_id, self)
    target_path = get_preset_path(self.preset_name)
    if os.path.exists(target_path):
        msg = "Preset named `{}' already exists. Refusing to rewrite existing preset.".format(self.preset_name)
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    with open(target_path, 'wb') as jsonfile:
        gist_data = json.dumps(gist_data, sort_keys=True, indent=2).encode('utf8')
        jsonfile.write(gist_data)

    msg = "Imported `{}' as `{}'".format(self.gist_id, self.preset_name)
    info(msg)
    self.report({'INFO'}, msg)
    return {'FINISHED'}
def execute(self, context):
    ng = context.space_data.node_tree
    gist_filename = ng.name
    gist_description = 'to do later? 2018'
    layout_dict = create_dict_of_tree(ng, skip_set={}, selected=self.selected_only)

    try:
        gist_body = json.dumps(layout_dict, sort_keys=True, indent=2)
    except Exception as err:
        if 'not JSON serializable' in repr(err):
            error(layout_dict)
        else:
            exception(err)
        self.report({'WARNING'}, "See terminal/Command prompt for printout of error")
        return {'CANCELLED'}

    try:
        gist_url = sv_gist_tools.main_upload_function(gist_filename, gist_description, gist_body, show_browser=False)
        context.window_manager.clipboard = gist_url   # full destination url
        info(gist_url)
        self.report({'WARNING'}, "Copied gist URL to clipboard")
        sv_gist_tools.write_or_append_datafiles(gist_url, gist_filename)
        return {'FINISHED'}
    except Exception as err:
        info(err)
        self.report({'ERROR'}, "Error 222: net connection or github login failed!")
        return {'CANCELLED'}
def execute(self, context):
    if not self.id_tree:
        msg = "Node tree is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}
    if not self.preset_name:
        msg = "Preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    ng = bpy.data.node_groups[self.id_tree]
    nodes = list(filter(lambda n: n.select, ng.nodes))
    if not len(nodes):
        msg = "There are no selected nodes to export"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    layout_dict = create_dict_of_tree(ng, selected=True)
    preset = SvPreset(name=self.preset_name)
    preset.make_add_operator()
    destination_path = preset.path
    write_json(layout_dict, destination_path)
    msg = 'exported to: ' + destination_path
    self.report({"INFO"}, msg)
    info(msg)
    return {'FINISHED'}
def LabelReader(self, context):
    labels = [('', '', '')]

    tree = bpy.data.node_groups[self.tree_name]
    node = tree.nodes[self.node_name]
    fc_file_list = node.inputs['File Path'].sv_get()[0]

    Fname = None  # so the finally clause is safe if F.open() fails
    try:
        for f in fc_file_list:
            F.open(f)
            Fname = bpy.path.display_name_from_filepath(f)
            F.setActiveDocument(Fname)
            for obj in F.ActiveDocument.Objects:
                if obj.Label == node.selected_sheet:
                    props = obj.PropertiesList
                    for label in props:
                        alias = obj.getCellFromAlias(label)
                        if alias:
                            labels.append((label, label, label))
    except:
        info('Label reader read cell error')
    finally:
        if Fname:
            F.closeDocument(Fname)

    return labels
def report_log_result(self):
    """Print the failures if there were any, or report that none happened"""
    if self.has_fails:
        warning('During import the following failures happened:')
        print(self.fail_message)
    else:
        info('Import done with no failures')
def run_all_tests(pattern=None):
    """
    Run all existing test cases.
    Test cases are looked up under the tests/ directory.
    """
    if pattern is None:
        pattern = "*_tests.py"

    tests_path = get_tests_path()
    log_handler = logging.FileHandler(join(tests_path, "sverchok_tests.log"), mode='w')
    logging.getLogger().addHandler(log_handler)
    try:
        loader = unittest.TestLoader()
        suite = loader.discover(start_dir=tests_path, pattern=pattern)
        buffer = StringIO()
        runner = unittest.TextTestRunner(stream=buffer, verbosity=2)
        old_nodes.register_all()
        with coverage_report():
            result = runner.run(suite)
            info("Test cases result:\n%s", buffer.getvalue())
            return result
    finally:
        logging.getLogger().removeHandler(log_handler)
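# Usage sketch for run_all_tests(pattern=...) above: run a subset of the test
# modules and inspect the unittest result object. The file name pattern here
# is hypothetical; this assumes the sverchok test environment is set up.
result = run_all_tests(pattern="*mesh*_tests.py")
if not result.wasSuccessful():
    print("failures:", len(result.failures), "errors:", len(result.errors))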
def changable_sockets(node, inputsocketname, outputsocketname):
    '''
    arguments: node, name of the socket to follow, list of sockets to change
    '''
    if inputsocketname not in node.inputs:
        # - node not initialized in sv_init yet,
        # - or socket name incorrect
        info(f"changable_socket was called on {node.name} with a socket named {inputsocketname}; this socket does not exist")
        return

    in_socket = node.inputs[inputsocketname]
    ng = node.id_data
    if in_socket.links:
        in_other = get_other_socket(in_socket)
        if not in_other:
            return
        outputs = node.outputs
        s_type = in_other.bl_idname
        if s_type == 'SvDummySocket':
            return
        if outputs[outputsocketname[0]].bl_idname != s_type:
            node.id_data.freeze(hard=True)
            to_links = {}
            for n in outputsocketname:
                out_socket = outputs[n]
                to_links[n] = [l.to_socket for l in out_socket.links]
                outputs.remove(outputs[n])
            for n in outputsocketname:
                new_out_socket = outputs.new(s_type, n)
                for to_socket in to_links[n]:
                    ng.links.new(to_socket, new_out_socket)
            node.id_data.unfreeze(hard=True)
def generate_layout(fullpath, nodes_json):
    # it may be necessary to store monads as dicts instead of string/json
    # this will handle both scenarios
    if isinstance(nodes_json, str):
        nodes_json = json.loads(nodes_json)

    debug('==== loading monad ====')
    info(('#' * 12) + nodes_json['export_version'])

    # create all nodes and groups
    # NOTE: `ng` and `name_remap`, used below, are only defined inside this
    # commented-out block, so the function cannot run as written.
    '''
    update_lists = nodes_json['update_lists']
    nodes_to_import = nodes_json['nodes']
    if center is not None:
        center_nodes(nodes_to_import, center)
    groups_to_import = nodes_json.get('groups', {})

    add_groups(groups_to_import)  # this return is not used yet
    name_remap = add_nodes(ng, nodes_to_import, nodes, create_texts)

    # now connect them / prevent unnecessary updates
    ng.freeze(hard=True)
    make_links(update_lists, name_remap)

    # set frame parents
    '''
    place_frames(ng, nodes_json, name_remap)

    # clean up
    old_nodes.scan_for_old(ng)
    ng.unfreeze(hard=True)
    ng.update()
    ng.update_tag()
def update_entry(self, context):
    if hasattr(context, 'node'):
        updateNode(context.node, context)
    else:
        info("Node is not defined in this context, so will not update the node.")
def concatenate_curves(curves, scale_to_unit=False, allow_generic=True):
    """
    Concatenate a list of curves.
    When possible, use the `concatenate` method of the curves to make a
    "native" concatenation - for example, make one NURBS out of several NURBS.

    inputs:
    * curves: list of SvCurve
    * scale_to_unit: if True, reparametrize each curve to [0; 1] before concatenation.
    * allow_generic: what to do if it is not possible to concatenate the curves natively:
        True - use the generic SvConcatCurve
        False - raise an Exception.

    output: SvCurve.
    """
    if not curves:
        raise Exception("List of curves must be not empty")
    result = [curves[0]]
    some_native = False
    exceptions = []
    for idx, curve in enumerate(curves[1:]):
        new_curve = None
        ok = False
        if hasattr(result[-1], 'concatenate'):
            try:
                if scale_to_unit:
                    # P.1: try to join with the rescaled curve
                    new_curve = result[-1].concatenate(reparametrize_curve(curve))
                else:
                    new_curve = result[-1].concatenate(curve)
                some_native = True
                ok = True
            except UnsupportedCurveTypeException as e:
                exceptions.append(e)
                # the "concatenate" method can't work with this type of curve
                info("Can't natively join curve #%s (%s), will use generic method: %s", idx+1, curve, e)
                # P.2: if some curves were already joined natively,
                # then we have to rescale each of the other curves separately
                if some_native and scale_to_unit:
                    curve = reparametrize_curve(curve)

        #print(f"C: {curve}, prev: {result[-1]}, ok: {ok}, new: {new_curve}")
        if ok:
            result[-1] = new_curve
        else:
            result.append(curve)

    if len(result) == 1:
        return result[0]
    else:
        if allow_generic:
            # if any of the curves were scaled while joining natively (at P.1),
            # then all the others were scaled at P.2;
            # if no successful joins were made, then we can rescale all curves at once.
            return SvConcatCurve(result, scale_to_unit and not some_native)
        else:
            err_msg = "\n".join([str(e) for e in exceptions])
            raise Exception(f"Could not join some curves natively. Result is: {result}.\nErrors were:\n{err_msg}")
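# A self-contained sketch of the pattern concatenate_curves above relies on:
# try a "native" concatenate first, and fall back to a generic wrapper when a
# concrete type refuses. The classes here are toy stand-ins, not Sverchok API.
class UnsupportedCurveTypeException(Exception):
    pass

class Segment:
    def __init__(self, points):
        self.points = points

    def concatenate(self, other):
        if not isinstance(other, Segment):
            # a native join is only defined between segments
            raise UnsupportedCurveTypeException("can't join natively")
        return Segment(self.points + other.points)

class GenericConcat:
    """Generic fallback: just keeps the pieces side by side."""
    def __init__(self, pieces):
        self.pieces = pieces

def join(curves):
    result = [curves[0]]
    for curve in curves[1:]:
        try:
            result[-1] = result[-1].concatenate(curve)  # native join
        except UnsupportedCurveTypeException:
            result.append(curve)                        # keep as a separate piece
    return result[0] if len(result) == 1 else GenericConcat(result)

joined = join([Segment([(0, 0), (1, 0)]), Segment([(1, 0), (1, 1)])])
print(type(joined).__name__, joined.points)  # Segment [(0, 0), (1, 0), (1, 0), (1, 1)]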
def execute(self, context):
    if not self.old_node_name:
        self.report({'ERROR'}, "Old node name is not provided")
        return {'CANCELLED'}
    if not self.new_bl_idname:
        self.report({'ERROR'}, "New node bl_idname is not provided")
        return {'CANCELLED'}

    tree = context.space_data.edit_tree
    old_node = tree.nodes[self.old_node_name]
    new_node = tree.nodes.new(self.new_bl_idname)

    # Copy UI properties
    ui_props = ['location', 'height', 'width', 'label', 'hide']
    for prop_name in ui_props:
        setattr(new_node, prop_name, getattr(old_node, prop_name))

    # Copy ID properties
    for prop_name, prop_value in old_node.items():
        new_node[prop_name] = old_node[prop_name]

    # Copy incoming / outgoing links
    old_in_links = [link for link in tree.links if link.to_node == old_node]
    old_out_links = [link for link in tree.links if link.from_node == old_node]

    for old_link in old_in_links:
        new_target_socket_name = self.get_new_input_name(old_link.to_socket.name)
        if new_target_socket_name in new_node.inputs:
            new_target_socket = new_node.inputs[new_target_socket_name]
            new_link = tree.links.new(old_link.from_socket, new_target_socket)
        else:
            debug("New node %s has no input named %s, skipping", new_node.name, new_target_socket_name)
        tree.links.remove(old_link)

    for old_link in old_out_links:
        new_source_socket_name = self.get_new_output_name(old_link.from_socket.name)
        # We have to remove the old link before creating the new one:
        # Blender would not allow two links pointing to the same target socket.
        old_target_socket = old_link.to_socket
        tree.links.remove(old_link)
        if new_source_socket_name in new_node.outputs:
            new_source_socket = new_node.outputs[new_source_socket_name]
            new_link = tree.links.new(new_source_socket, old_target_socket)
        else:
            debug("New node %s has no output named %s, skipping", new_node.name, new_source_socket_name)

    if hasattr(new_node, "migrate_from"):
        # Allow the new node to copy what the generic code could not.
        new_node.migrate_from(old_node)

    msg = "Node `{}' ({}) has been replaced with new node `{}' ({})".format(
        old_node.name, old_node.bl_idname, new_node.name, new_node.bl_idname)
    info(msg)
    self.report({'INFO'}, msg)

    tree.nodes.remove(old_node)
    return {'FINISHED'}
def test_adaptive_sockets(self):
    """
    Test for nodes that allow arbitrary data at input.
    """
    tested_nodes = {
        'SvListDecomposeNode': ["data"],
        'ListJoinNode': ["data", "data 1"],
        'ListLevelsNode': ["data"],
        'ZipNode': ["data", "data 1"],
        'MaskListNode': ["data"],
        'ListFlipNode': ["data"],
        'SvListItemNode': ["Data"],
        'ListRepeaterNode': ["Data"],
        'ListReverseNode': ["data"],
        'ListSliceNode': ["Data"],
        'ShiftNodeMK2': ["data"],
        'ListShuffleNode': ['data'],
        'SvListSortNode': ['data'],
        'SvListSplitNode': ['Data'],
        'ListFLNode': ['Data'],
        'SvFormulaNodeMk5': ["x", "y"],
        'SvSetDataObjectNodeMK2': ["Objects"]
    }

    info("starting socket conversion tests")

    for bl_idname in tested_nodes.keys():
        with self.subTest(bl_idname=bl_idname):
            # info(f"creating SvNGonNode and {bl_idname}")
            ngon = create_node("SvNGonNode")
            node = create_node(bl_idname)
            if bl_idname == "SvSetDataObjectNodeMK2":
                node.formula = "__str__()"
            for input_name in tested_nodes[bl_idname]:
                # info(f"Linking {ngon.name}'s vertex output ----> ({bl_idname}).inputs[{input_name}]")
                self.tree.links.new(ngon.outputs["Vertices"], node.inputs[input_name])

            # Trigger processing of the NGon node,
            # so that there will be some data at the input
            # of the tested node.
            ngon.process()
            try:
                for input_name in tested_nodes[bl_idname]:
                    with self.subTest(input_name=input_name):
                        # Read the data from the input.
                        # We do not actually care about the data
                        # itself, it is only important that there
                        # was no exception.
                        data = node.inputs[input_name].sv_get()
            except ImplicitConversionProhibited as e:
                raise e
            except Exception as e:
                info(e)
            finally:
                self.tree.nodes.remove(node)
                self.tree.nodes.remove(ngon)
def link_text_block(reference_blend_path, block_name):
    """
    Link a text block from the specified .blend file.
    """
    with bpy.data.libraries.load(reference_blend_path, link=True) as (data_src, data_dst):
        info(f"---- Linked text block: {basename(reference_blend_path)}")
        data_dst.texts = [block_name]
def execute(self, context):
    ng = context.space_data.node_tree

    is_tree_exportable, msg = self.can_be_exported(ng)
    if not is_tree_exportable:
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    gist_filename = ng.name

    app_version = bpy.app.version_string.replace(" ", "")
    time_stamp = strftime("%Y.%m.%d | %H:%M", localtime())
    license = 'license: CC BY-SA'
    gist_description = f"Sverchok.{version_and_sha} | Blender.{app_version} | {ng.name} | {time_stamp} | {license}"

    # layout_dict = create_dict_of_tree(ng, skip_set={}, selected=self.selected_only)
    if self.selected_only:
        layout_dict = JSONExporter.get_nodes_structure([node for node in ng.nodes if node.select])
    else:
        layout_dict = JSONExporter.get_tree_structure(ng)

    try:
        gist_body = json.dumps(layout_dict, sort_keys=True, indent=2)
    except Exception as err:
        if 'not JSON serializable' in repr(err):
            error(layout_dict)
        exception(err)
        self.report({'WARNING'}, "See terminal/Command prompt for printout of error")
        return {'CANCELLED'}

    try:
        gist_url = sv_gist_tools.main_upload_function(gist_filename, gist_description, gist_body, show_browser=False)
        if not gist_url:
            self.report({'ERROR'},
                "You have not specified GitHub API access token, which is " +
                "required to create gists from Sverchok. Please see " +
                TOKEN_HELP_URL +
                " for more information.")
            return {'CANCELLED'}

        context.window_manager.clipboard = gist_url   # full destination url
        info(gist_url)
        self.report({'WARNING'}, "Copied gist URL to clipboard")
        sv_gist_tools.write_or_append_datafiles(gist_url, gist_filename)
        return {'FINISHED'}
    except Exception as err:
        exception(err)
        self.report({'ERROR'}, "Error 222: net connection or github login failed!")
        return {'CANCELLED'}
def get(cls, surface, implementation=NATIVE):
    if isinstance(surface, SvNurbsSurface):
        return surface
    if hasattr(surface, 'to_nurbs'):
        try:
            return surface.to_nurbs(implementation=implementation)
        except UnsupportedSurfaceTypeException as e:
            info("Can't convert %s to NURBS: %s", surface, e)
    return None
def execute(self, context):
    ntree = context.space_data.node_tree
    for node in ntree.nodes:
        if old_nodes.is_old(node):
            info("Deprecated node: `%s' (%s)", node.name, node.bl_idname)
    self.report({'INFO'}, "See logs")
    return {'FINISHED'}
def execute(self, context):
    ntree = context.space_data.node_tree
    for node in ntree.nodes:
        if is_old(node):
            info("Deprecated node: `%s' (%s)", node.name, node.bl_idname)
    self.report({'INFO'}, "See logs")
    return {'FINISHED'}
def add_nodes_to_sv():
    index = nodes_index()
    for _, items in index:
        for item in items:
            nodetype = item[1]
            rna = get_node_class_reference(nodetype)
            if not rna and nodetype != 'separator':
                info("Node `%s' is not available (probably due to missing dependencies).", nodetype)
            else:
                SverchNodeItem.new(nodetype)
def save(self):
    if self._data is None:
        debug("Preset `%s': no data was loaded, nothing to save.", self.name)
        return

    data = json.dumps(self.data, sort_keys=True, indent=2).encode('utf8')
    with open(self.path, 'wb') as jsonfile:
        jsonfile.write(data)
    info("Saved preset `%s'", self.name)
def link_node_tree(reference_blend_path, tree_name=None):
    """
    Link a node tree from the specified .blend file.
    """
    if tree_name is None:
        tree_name = "TestingTree"
    if tree_name in bpy.data.node_groups:
        raise Exception("Tree named `{}' already exists in current scene".format(tree_name))
    with bpy.data.libraries.load(reference_blend_path, link=True) as (data_src, data_dst):
        info(f"---- Linked node tree: {basename(reference_blend_path)}")
        data_dst.node_groups = [tree_name]
def remove_doubles(vertices, faces, d, face_data=None, find_doubles=False):
    if faces:
        EdgeMode = (len(faces[0]) == 2)

    bm = bmesh.new()
    bm_verts = [bm.verts.new(v) for v in vertices]
    if faces:
        if EdgeMode:
            for edge in faces:
                bm.edges.new([bm_verts[i] for i in edge])
        else:
            for face in faces:
                bm.faces.new([bm_verts[i] for i in face])

    if face_data:
        bm.faces.ensure_lookup_table()
        layer = bm.faces.layers.int.new("initial_index")
        for idx, face in enumerate(bm.faces):
            face[layer] = idx

    if find_doubles:
        res = bmesh.ops.find_doubles(bm, verts=bm_verts, dist=d)
        doubles = [vert.co[:] for vert in res['targetmap'].keys()]
    else:
        doubles = []

    bmesh.ops.remove_doubles(bm, verts=bm_verts, dist=d)

    edges = []
    faces = []
    face_data_out = []
    bm.verts.index_update()
    verts = [vert.co[:] for vert in bm.verts[:]]
    bm.edges.index_update()
    bm.faces.index_update()
    for edge in bm.edges[:]:
        edges.append([v.index for v in edge.verts[:]])
    for face in bm.faces:
        faces.append([v.index for v in face.verts[:]])
    if face_data:
        for face in bm.faces:
            initial_face_index = face[layer]
            if 0 <= initial_face_index < len(face_data):
                face_data_o = face_data[initial_face_index]
            else:
                info("No face data for face #%s", initial_face_index)
                face_data_o = None
            face_data_out.append(face_data_o)
    bm.clear()
    bm.free()
    return (verts, edges, faces, face_data_out, doubles)
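# Usage sketch for remove_doubles above. It runs only inside Blender (bmesh is
# required) and assumes `info` resolves to a logging function. Two triangles
# sharing an edge, with the shared vertices duplicated:
verts_in = [(0, 0, 0), (1, 0, 0), (0, 1, 0),
            (1, 0, 0), (0, 1, 0), (1, 1, 0)]  # indices 3 and 4 coincide with 1 and 2
faces_in = [[0, 1, 2], [3, 5, 4]]
verts, edges, faces, face_data, doubles = remove_doubles(verts_in, faces_in, 1e-6, find_doubles=True)
print(len(verts_in), "->", len(verts))  # 6 -> 4 after welding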
def save_stats(path):
    """
    Dump profiling statistics to a file in cProfile's binary format.
    Such a file can be parsed, for example, by the gprof2dot utility.
    """
    profile = get_global_profile()
    if not profile.getstats():
        info("There are no profiling results yet")
        return

    stats = pstats.Stats(profile)
    stats.dump_stats(path)
    info("Profiling statistics saved to %s.", path)
def test_projection_4(self):
    p1 = (1, 0, 0)
    p2 = (0, 1, 0)
    p3 = (0, 0, 1)
    plane = PlaneEquation.from_three_points(p1, p2, p3)
    point1 = (-3, -3, -3)
    point2 = (2, 1, 1)
    point3 = (1, 1, 2)
    point4 = (1, 2, 1)
    result = plane.projection_of_points([point1, point2, point3, point4])
    expected = np.array([[0.3333, 0.3333, 0.3333],
                         [1, 0, 0],
                         [0, 0, 1],
                         [0, 1, 0]])
    info(result)
    self.assert_numpy_arrays_equal(result, expected, precision=4)
def test_adaptive_sockets(self):
    """
    Test for nodes that allow arbitrary data at input.
    """
    tested_nodes = {
        'SvListDecomposeNode': ["data"],
        'ListJoinNode': ["data", "data 1"],
        'ListLevelsNode': ["data"],
        'ZipNode': ["data", "data 1"],
        'MaskListNode': ["data"],
        'ListFlipNode': ["data"],
        'ListItem2Node': ["Data"],
        'ListRepeaterNode': ["Data"],
        'ListReverseNode': ["data"],
        'ListSliceNode': ["Data"],
        'ShiftNodeMK2': ["data"],
        'ListShuffleNode': ['data'],
        'ListSortNodeMK2': ['data'],
        'SvListSplitNode': ['Data'],
        'ListFLNode': ['Data'],
        'Formula2Node': ["X", "n[0]"],
        'SvSetDataObjectNodeMK2': ["Objects"]
    }

    for bl_idname in tested_nodes.keys():
        with self.subTest(bl_idname=bl_idname):
            # Create NGon node and tested node
            ngon = create_node("SvNGonNode")
            node = create_node(bl_idname)
            # Link NGon node to tested inputs
            for input_name in tested_nodes[bl_idname]:
                self.tree.links.new(ngon.outputs["Vertices"], node.inputs[input_name])

            # Trigger processing of the NGon node,
            # so that there will be some data at the input
            # of the tested node.
            ngon.process()
            try:
                for input_name in tested_nodes[bl_idname]:
                    with self.subTest(input_name=input_name):
                        # Read the data from the input.
                        # We do not actually care about the data
                        # itself, it is only important that there
                        # was no exception.
                        data = node.inputs[input_name].sv_get()
            except ImplicitConversionProhibited as e:
                raise e
            except Exception as e:
                info(e)
            finally:
                self.tree.nodes.remove(node)
                self.tree.nodes.remove(ngon)
def execute(self, context):
    if not self.preset_name:
        msg = "Preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    path = get_preset_path(self.preset_name)
    os.remove(path)
    info("Removed `%s'", path)
    self.report({'INFO'}, "Removed `{}'".format(self.preset_name))
    return {'FINISHED'}
def add_prop_from(self, socket):
    """
    Add a property if possible.
    """
    other = socket.other
    cls = get_node_class_reference(self.cls_bl_idname)
    cls_dict = cls.__dict__ if cls else {}

    if other.prop_name:
        prop_name = other.prop_name
        prop_func, prop_dict = getattr(other.node.rna_type, prop_name, ("", {}))
        if prop_func.__name__ == "FloatProperty":
            self.get_current_as_default(prop_dict, other.node, prop_name)
            prop_settings = self.float_props.add()
        elif prop_func.__name__ == "IntProperty":
            self.get_current_as_default(prop_dict, other.node, prop_name)
            prop_settings = self.int_props.add()
        elif prop_func.__name__ == "FloatVectorProperty":
            info("FloatVectorProperty ignored (normal behaviour since day one). prop_func: %s, prop_dict: %s.", prop_func, prop_dict)
            return None  # for now etc
        else:
            # no way to handle it
            return None

        # print('dict')
        # pprint.pprint(prop_dict)

        new_name = generate_name(prop_name, cls_dict)
        prop_settings.prop_name = new_name
        prop_settings.set_settings(prop_dict)
        socket.prop_name = new_name
        return new_name

    elif hasattr(other, "prop_type"):
        if "float" in other.prop_type:
            prop_settings = self.float_props.add()
        elif "int" in other.prop_type:
            prop_settings = self.int_props.add()
        else:
            return None

        new_name = generate_name(make_valid_identifier(other.name), cls_dict)
        prop_settings.prop_name = new_name
        prop_settings.set_settings({"name": other.name})
        socket.prop_name = new_name
        return new_name

    return None
def write_json(layout_dict, destination_path):
    try:
        m = json.dumps(layout_dict, sort_keys=True, indent=2)
    except Exception as err:
        error(repr(err))
        info(layout_dict)
        return  # nothing serializable to write

    # optional post processing step
    post_processing = False
    if post_processing:
        flatten = lambda match: r' {}'.format(match.group(1), m)
        m = re.sub(r'\s\s+(\d+)', flatten, m)

    with open(destination_path, 'w') as node_tree:
        node_tree.writelines(m)
def perform_scripted_node_inject(node, node_ref):
    '''
    Scripted Node will no longer create alternative versions of a file.
    If a scripted node wants to make a file called 'inverse.py' and the
    current .blend already contains such a file, then for simplicity the
    importer will not try to create 'inverse.001.py' and reference that.
    It will instead do nothing and assume the existing python file is
    functionally the same.

    If you have files that work differently but have the same name, stop.
    '''
    texts = bpy.data.texts
    params = node_ref.get('params')
    if params:

        script_name = params.get('script_name')
        script_content = params.get('script_str')

        if script_name and not (script_name in texts):
            new_text = texts.new(script_name)
            new_text.from_string(script_content)
        elif script_name and (script_name in texts):
            # This was added to fix existing texts with the same name but no / different content.
            if texts[script_name].as_string() == script_content:
                debug("SN skipping text named `%s' - their content is the same", script_name)
            else:
                info("SN text named `%s' already found in current, but content differs", script_name)
                new_text = texts.new(script_name)
                new_text.from_string(script_content)
                script_name = new_text.name
                info('SN text replaced with %s', script_name)

        node.script_name = script_name
        node.script_str = script_content

    if node.bl_idname == 'SvScriptNode':
        node.user_name = "templates"               # best would be in the node.
        node.files_popup = "sv_lang_template.sn"   # import to reset easy fix
        node.load()
    elif node.bl_idname == 'SvScriptNodeLite':
        node.load()
        # node.storage_set_data(node_ref)
    else:
        node.files_popup = node.avail_templates(None)[0][0]
        node.load()
def execute(self, context):
    bpy.ops.text.select_line()
    bpy.ops.text.copy()
    copied_text = bpy.data.window_managers[0].clipboard
    if "def sv_main(" not in copied_text:
        self.report({'INFO'}, "ERROR - LOOK CONSOLE")
        error(sv_error_message)
        return {'CANCELLED'}

    answer = converted(copied_text)
    if answer:
        info(answer)
        bpy.data.window_managers[0].clipboard = answer
        bpy.ops.text.move(type='LINE_BEGIN')
        bpy.ops.text.move(type='NEXT_LINE')
        bpy.ops.text.paste()
    return {'FINISHED'}
def intersect_with_plane(self, plane2):
    """
    Return the intersection of this plane with another one.

    input: PlaneEquation
    output: LineEquation, or None in case the two planes are parallel.
    """
    if self.is_parallel(plane2):
        debug("{} is parallel to {}".format(self, plane2))
        return None

    # We need an arbitrary point on this plane and two vectors.
    # Draw two lines in this plane and look for their intersection
    # with the other plane.
    p0 = self.nearest_point_to_origin()
    v1, v2 = self.two_vectors()
    # it might be that p0 belongs to plane2; in that case we choose
    # another point in the same plane
    if plane2.check(p0):
        # Since v1 and v2 are orthogonal, they can not both be
        # parallel to plane2.
        if not plane2.is_parallel(v1):
            p0 = p0 + v1
        else:
            p0 = p0 + v2
    line1 = LineEquation.from_direction_and_point(v1, p0)
    line2 = LineEquation.from_direction_and_point(v2, p0)

    # it might be that one of the vectors we chose is parallel to plane2
    # (since we are choosing them arbitrarily); but from the way
    # we are choosing v1 and v2, we know they are orthogonal.
    # So if we just rotate them by pi/4, they will no longer be
    # parallel to plane2.
    if plane2.is_parallel(line1) or plane2.is_parallel(line2):
        v1_new = v1 + v2
        v2_new = v1 - v2
        info("{}, {} => {}, {}".format(v1, v2, v1_new, v2_new))
        line1 = LineEquation.from_direction_and_point(v1_new, p0)
        line2 = LineEquation.from_direction_and_point(v2_new, p0)

    p1 = plane2.intersect_with_line(line1)
    p2 = plane2.intersect_with_line(line2)
    return LineEquation.from_two_points(p1, p2)
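# Usage sketch for intersect_with_plane above, assuming sverchok is importable
# (PlaneEquation lives in sverchok.utils.geom in the Sverchok code base):
from sverchok.utils.geom import PlaneEquation

xy = PlaneEquation.from_three_points((0, 0, 0), (1, 0, 0), (0, 1, 0))  # z = 0
xz = PlaneEquation.from_three_points((0, 0, 0), (1, 0, 0), (0, 0, 1))  # y = 0
line = xy.intersect_with_plane(xz)  # the X axis, as a LineEquation
print(line)

shifted = PlaneEquation.from_three_points((0, 0, 1), (1, 0, 1), (0, 1, 1))  # z = 1
print(xy.intersect_with_plane(shifted))  # None - the planes are parallel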
def run_all_tests():
    """
    Run all existing test cases.
    Test cases are looked up under the tests/ directory.
    """
    tests_path = get_tests_path()
    log_handler = logging.FileHandler(join(tests_path, "sverchok_tests.log"), mode='w')
    logging.getLogger().addHandler(log_handler)
    try:
        loader = unittest.TestLoader()
        suite = loader.discover(start_dir=tests_path, pattern="*_tests.py")
        buffer = StringIO()
        runner = unittest.TextTestRunner(stream=buffer, verbosity=2)
        result = runner.run(suite)
        info("Test cases result:\n%s", buffer.getvalue())
        return result
    finally:
        logging.getLogger().removeHandler(log_handler)
def execute(self, context):
    if not self.preset_name:
        msg = "Preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}
    if not self.filepath:
        msg = "Source file path is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    target_path = get_preset_path(self.preset_name)
    shutil.copy(self.filepath, target_path)
    msg = "Imported `{}' as `{}'".format(self.filepath, self.preset_name)
    info(msg)
    self.report({'INFO'}, msg)
    return {'FINISHED'}
def perform_svtextin_node_object(node, node_ref):
    '''
    as it's a beta service, old IO json may not be compatible - in the
    interest of neat code we assume it finds everything.
    '''
    texts = bpy.data.texts
    params = node_ref.get('params')

    # the original textin used 'current_text', textin+ uses 'text'
    current_text = params.get('current_text', params.get('text'))

    # it's not clear from the exporter code why the textmode parameter isn't stored
    # in params.. for now this lets us look in both places. ugly but whatever.
    textmode = params.get('textmode')
    if not textmode:
        textmode = node_ref.get('textmode')
    node.textmode = textmode

    if not current_text:
        info("`%s' doesn't store a current_text in params", node.name)
    elif current_text not in texts:
        new_text = texts.new(current_text)
        text_line_entry = node_ref['text_lines']

        if node.textmode == 'JSON':
            if isinstance(text_line_entry, str):
                debug('loading old text json content / backward compatibility mode')
            elif isinstance(text_line_entry, dict):
                text_line_entry = json.dumps(text_line_entry['stored_as_json'])

        new_text.from_string(text_line_entry)
    else:
        # reaches here if (current_text) and (current_text in texts)
        # can probably skip this..
        # texts[current_text].from_string(node_ref['text_lines'])
        debug('%s seems to reuse a text block loaded by another node - skipping', node.name)
def execute(self, context):
    ng = bpy.data.node_groups[self.id_tree]

    destination_path = self.filepath
    if not destination_path.lower().endswith('.json'):
        destination_path += '.json'

    # future: should check if filepath is a folder or ends in \
    layout_dict = create_dict_of_tree(ng)
    if not layout_dict:
        msg = 'no update list found - didn\'t export'
        self.report({"WARNING"}, msg)
        warning(msg)
        return {'CANCELLED'}

    write_json(layout_dict, destination_path)
    msg = 'exported to: ' + destination_path
    self.report({"INFO"}, msg)
    info(msg)

    if self.compress:
        comp_mode = zipfile.ZIP_DEFLATED

        # destination path = /a../b../c../somename.json
        base = basename(destination_path)      # somename.json
        basedir = dirname(destination_path)    # /a../b../c../

        # somename.zip
        final_archivename = base.replace('.json', '') + '.zip'

        # /a../b../c../somename.zip
        fullpath = os.path.join(basedir, final_archivename)

        with zipfile.ZipFile(fullpath, 'w', compression=comp_mode) as myzip:
            myzip.write(destination_path, arcname=base)
            info('wrote: %s', final_archivename)

    return {'FINISHED'}
def execute(self, context):
    if not self.old_name:
        msg = "Old preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}
    if not self.new_name:
        msg = "New preset name is not specified"
        error(msg)
        self.report({'ERROR'}, msg)
        return {'CANCELLED'}

    preset = SvPreset(name=self.old_name)
    preset.meta['description'] = self.description
    preset.meta['author'] = self.author
    preset.meta['license'] = self.license
    preset.save()

    if self.new_name != self.old_name:
        old_path = get_preset_path(self.old_name)
        new_path = get_preset_path(self.new_name)

        if os.path.exists(new_path):
            msg = "Preset named `{}' already exists. Refusing to rewrite existing preset.".format(self.new_name)
            error(msg)
            self.report({'ERROR'}, msg)
            return {'CANCELLED'}

        os.rename(old_path, new_path)
        preset.name = self.new_name
        info("Renamed `%s' to `%s'", old_path, new_path)
        self.report({'INFO'}, "Renamed `{}' to `{}'".format(self.old_name, self.new_name))

        bpy.utils.unregister_class(preset_add_operators[self.old_name])
        del preset_add_operators[self.old_name]
        preset.make_add_operator()

    return {'FINISHED'}
def migrate_from(self, old_node):
    if old_node.bl_idname == 'Formula2Node':
        formula = old_node.formula
        # The older formula node allowed only a fixed set of
        # variables, with the names "x", "n[0]" .. "n[100]".
        # Other names could not be considered valid.
        k = -1
        for socket in old_node.inputs:
            name = socket.name
            if k == -1:
                # The first socket name was "x"
                new_name = name
            else:
                # The other names were "n[k]", which is syntactically not
                # a valid python variable name.
                # So we replace all occurrences of "n[0]" in the formula
                # with "n0", and so on.
                new_name = "n" + str(k)
                logging.info("Replacing %s with %s", name, new_name)
                formula = formula.replace(name, new_name)
            k += 1

        self.formula1 = formula
        self.wrap = True
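# A standalone illustration of the renaming migrate_from above performs:
# socket names "n[k]" are rewritten into valid identifiers "nk" by plain
# string replacement. The formula and socket names here are made up. Note
# that every name ends with "]", so "n[1]" cannot partially match "n[10]".
formula = "x + n[0] * n[1]"
socket_names = ["x", "n[0]", "n[1]"]

k = -1
for name in socket_names:
    if k >= 0:  # the first socket, "x", keeps its name
        formula = formula.replace(name, "n" + str(k))
    k += 1

print(formula)  # x + n0 * n1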
def execute(self, context):
    trees = bpy.data.node_groups

    for T in trees:
        if T.bl_rna.name in ['Shader Node Tree']:
            continue
        if trees[T.name].users > 1 and T.use_fake_user:
            info('Layout ' + str(T.name) + ' protected by fake user.')
        if trees[T.name].users >= 1 and self.do_clear and not T.use_fake_user:
            info('cleaning user: ' + str(T.name))
            trees[T.name].user_clear()
        if trees[T.name].users == 0:
            info('removing layout: ' + str(T.name) + ' | ' + str(T.bl_rna.name))
            bpy.data.node_groups.remove(T)

    return {'FINISHED'}
def dump_stats(sort="tottime", strip_dirs=False):
    """
    Dump profiling statistics to the log.
    """
    profile = get_global_profile()
    if not profile.getstats():
        info("There are no profiling results yet")
        return

    stream = StringIO()
    stats = pstats.Stats(profile, stream=stream)
    if strip_dirs:
        stats.strip_dirs()
    stats = stats.sort_stats(sort)
    stats.print_stats()
    info("Profiling results:\n" + stream.getvalue())
    info("---------------------------")
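# A self-contained, standard-library sketch of the dump pattern used by
# dump_stats above, with cProfile standing in for the global profiler:
import cProfile
import pstats
from io import StringIO

profile = cProfile.Profile()
profile.enable()
sum(i * i for i in range(100000))  # some work to measure
profile.disable()

stream = StringIO()
stats = pstats.Stats(profile, stream=stream)
stats.strip_dirs().sort_stats("tottime").print_stats(5)  # top 5 entries
print("Profiling results:\n" + stream.getvalue())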
def execute(self, context):
    info(bpy.context.space_data.node_tree.name)
    return {'FINISHED'}
def complete_msg(self, blend_archive_path):
    msg = 'saved current .blend as archive at ' + blend_archive_path
    self.report({'INFO'}, msg)
    info(msg)
def update_log_level(self, context):
    logging.info("Setting log level to %s", self.log_level)
    logging.setLevel(self.log_level)