def getAllCacheNodes(self):
    parentPath = hou.pwd().parent().path()
    cacheNodes = []
    for x in hou.node(parentPath).recursiveGlob('*'):
        if x.type().name() == 'dh_h14_bakegeo':
            cacheNodes.append(x)
    return cacheNodes
class Control(object):
    userOrig = '${USER}'
    shotOrig = '${SHOTNAME}'
    userCustom = 'igor-si'
    shotCustom = 'CWS6340'
    n = hou.pwd()  #?

    def getAllCacheNodes(self):
        parentPath = hou.pwd().parent().path()
        cacheNodes = []
        for x in hou.node(parentPath).recursiveGlob('*'):
            if x.type().name() == 'dh_h14_bakegeo':
                cacheNodes.append(x)
        return cacheNodes

    def cacheNode(self, mode):
        if mode == 0:
            for x in self.getAllCacheNodes():
                x.parm("user").set(self.userOrig)
                x.parm("shot").set(self.shotOrig)
        if mode == 1:
            for x in self.getAllCacheNodes():
                x.parm("user").set(self.userCustom)
                x.parm("shot").set(self.shotCustom)
def __new__(cls, *args, **kwargs):
    """
    Constructor for a new HouNode object.

    If a HouNode instance was previously constructed for the node passed to
    the constructor, the same instance is returned. Otherwise a new instance
    is constructed and returned.
    """
    # work on a mutable copy so the positional node argument can be popped
    args = list(args)
    if args:
        node = cls.get_sesi_node(args.pop())
    elif 'node' in kwargs:
        node = cls.get_sesi_node(kwargs.pop('node'))
    else:
        node = hou.pwd()

    if not node:
        raise Exception('No Houdini node could be identified'
                        ' in the HouNode constructor.')

    new_instance = kwargs.pop('new_instance', False)
    if not new_instance:
        # if an instance has already been created for the current node,
        # return that cached instance
        for instance in cls._HOU_NODE_INSTANCES:
            if instance._sesi_node == node:
                return instance

    new_inst = object.__new__(cls)

    # supported in Houdini 11.0 and later
    #node.addEventCallback(hou.nodeEventType.BeingDeleted,
    #                      new_inst.on_node_deleted)

    cls._HOU_NODE_INSTANCES.append(new_inst)
    return new_inst
def exportCsv(*args, **kwargs):
    node = hou.pwd()
    file_name = node.parm("SetName").eval()
    # NOTE: dataFolder is assumed to be defined elsewhere in the module
    data_folder = os.path.normpath(dataFolder)
    data_node = node.node("Data")
    data_geo = data_node.geometry()
    data_headers = ["Pos", "Rot", "scale", "geoPath"]
    data_dict = {}
    data_list = []

    for pt in data_geo.points():
        # print(dir(pt))
        PosValue = pt.attribValue("P")
        RotValue = pt.attribValue("rot_data")
        ScaleValue = pt.attribValue("scaleValue")
        geoPath = pt.attribValue("unreal_instance")
        data_dict["Pos"] = PosValue
        data_dict["Rot"] = RotValue
        data_dict["scale"] = ScaleValue
        data_dict["geoPath"] = geoPath
        data_list.append(data_dict)
        data_dict = {}

    if not os.path.exists(data_folder):
        os.makedirs(data_folder)

    dataFile = os.path.join(data_folder, "%s.csv" % file_name)
    with open(dataFile, "w+") as csvFile:
        f_csv = csv.DictWriter(csvFile, data_headers)
        f_csv.writeheader()
        f_csv.writerows(data_list)
def set_profile(self, node=None, reset=False):
    """Apply the selected profile in the session.

    :param hou.Node node: The node being acted upon.
    :param bool reset: When True, reset the predefined parms to their
        defaults. Includes the TK_RESET_PARM_NAMES parms.
    """
    if not node:
        node = hou.pwd()

    output_profile = self._get_output_profile(node)

    self._app.log_debug("Applying tk arnold node profile: %s" %
                        (output_profile["name"],))

    # reset some parameters if need be
    if reset:
        for parm_name in self.TK_RESET_PARM_NAMES:
            parm = node.parm(parm_name)
            if parm:
                parm.revertToDefaults()
        node.setColor(hou.Color([.8, .8, .8]))

    # apply the supplied settings to the node
    settings = output_profile["settings"]
    if settings:
        self._app.log_debug("Populating format settings: %s" % (settings,))
        node.setParms(settings)

    self.reset_render_path(node)
def fillKernelCodePythonSop():
    """
    This function does all of the parsing and sets up the kernel parm in the
    descendant OpenCL node.
    """
    start_time = time.time()

    me = hou.pwd()
    geo = me.geometry()
    kernels_parm = me.parm("vft_kernels")

    # find an OpenCL node downstream
    cl_node = NodeUtils.getOutputNodeByTypeName(me, "opencl")

    # init a GenerateKernel object and init member var vft_kernels
    kernel = GenerateKernel()
    kernel.loadKernelsFileFromParm(kernels_parm)

    # get set of incoming fractals
    detail_attribs = geo.globalAttribs()

    # do the parsing
    kernel.parseKernelsFile(detail_attribs)

    # set vft_kernels_parsed to the kernelcode parm in the OpenCL node if it has changed
    cl_node_parm = cl_node.parm("kernelcode")
    old_cl_code = cl_node_parm.eval()

    if old_cl_code != kernel.vft_kernels_parsed:
        cl_node_parm.set(kernel.vft_kernels_parsed)
        log.debug("Kernel in OpenCL node updated")
    else:
        log.debug("Kernel in OpenCL node is up to date")

    log.debug("Python SOP evaluated in {0:.8f} seconds \n\n".format(
        time.time() - start_time))
def wrangleType():
    node = hou.pwd()
    mode = node.parm("mode").eval()
    wtype = node.parm("type")
    context = node.type().category().name()

    if context == "Sop":
        if mode == 0:
            iterclass = node.parm("class").eval()
            if iterclass == 0: wtype.set("sop.detail")
            if iterclass == 1: wtype.set("sop.prim")
            if iterclass == 2: wtype.set("sop.point")
            if iterclass == 3: wtype.set("sop.vertex")
            if iterclass == 4: wtype.set("sop.number")
        if mode == 1: wtype.set("sop.volume")
        if mode == 2: wtype.set("sop.deform")

    if context == "Dop":
        if mode == 0: wtype.set("dop.field")
        if mode == 1:
            iterclass = node.parm("bindclass").eval()
            if iterclass == 0: wtype.set("dop.geo.detail")
            if iterclass == 1: wtype.set("dop.geo.prim")
            if iterclass == 2: wtype.set("dop.geo.point")
            if iterclass == 3: wtype.set("dop.geo.vertex")
            if iterclass == 4: wtype.set("dop.geo.number")
        if mode == 2: wtype.set("dop.pop")
def startPoints(specPoints):
    node = hou.pwd()
    geo = node.geometry()
    colors = [(1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, 1.0)]
    activeTuple = ()
    locs = specPoints.split()

    if len(locs) > 0:
        i = 0
        for loc in locs:
            _initializePoint(geo, int(loc), colors, i)
            activeTuple = activeTuple + (int(loc),)
            i = i + 1
    else:
        for i in range(len(colors)):
            rand = random.randrange(0, len(geo.prims()))
            _initializePoint(geo, rand, colors, i)
            activeTuple = activeTuple + (rand,)

    primTuple = geo.findGlobalAttrib('ActivePrims')
    primTuple.setSize(len(activeTuple))
    geo.setGlobalAttribValue('ActivePrims', activeTuple)
def expandPathParm(parm):
    """
    Returns processed and expanded path from input parameter
    parm should be hou.Parm object
    """
    in_path = hou.pwd().path()
    hou.cd(parm.node().path())

    if len(parm.keyframes()) == 0:
        expr = parm.unexpandedString()
    else:
        expr = parm.eval()

    expr = expr.replace("$ACTIVETAKE", "<Channel>")
    expr = expr.replace("${ACTIVETAKE}", "<Channel>")
    expr = expr.replace("$WEDGE", "<Wedge>")
    expr = expr.replace("${WEDGE}", "<Wedge>")
    expr = expr.replace("${AOV}.", "<ValueVar @AOV.>")
    expr = expr.replace("${AOV}/", "<ValueVar @AOV/>")
    expr = expr.replace("${AOV}_", "<ValueVar @AOV_>")
    expr = expr.replace("$AOV.", "<ValueVar @AOV.>")
    expr = expr.replace("$AOV/", "<ValueVar @AOV/>")
    expr = expr.replace("$AOV_", "<ValueVar @AOV_>")

    path = hou.expandString(expr)
    # @$AOV is required for the output setting at render time,
    # but expandString would remove it.
    expr = expr.replace("@AOV", "@$AOV")

    hou.cd(in_path)
    return path
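# Hedged usage sketch for expandPathParm() above. The node path and parm name
# are illustrative assumptions (a Mantra ROP at /out/mantra1), not part of the
# original snippet.
import hou

rop = hou.node("/out/mantra1")           # assumed node; adjust to your scene
if rop is not None:
    pic_parm = rop.parm("vm_picture")    # output picture parm on a Mantra ROP
    if pic_parm is not None:
        # prints the expanded path with <Wedge>/<Channel> style tokens kept
        print(expandPathParm(pic_parm))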
def clear_curve(*args, **kwargs):
    container = hou.pwd().node("Curve_container")
    for pend_node in container.children():
        # print(pend_node)
        if pend_node.type().name() == "curve":
            pend_node.destroy()
    hou.ui.displayMessage("All curves cleared!")
def update_parms(self, node=None):
    """Update a set of predefined parameters as the render path changes.

    :param hou.Node node: The node being acted upon.
    """
    if not node:
        node = hou.pwd()

    # copies the value of one parm to another
    copy_parm = lambda p1, p2: \
        node.parm(p2).set(node.parm(p1).unexpandedString())

    # copy the default update parms
    for parm1, parm2 in self.TK_DEFAULT_UPDATE_PARM_MAPPING.items():
        copy_parm(parm1, parm2)

    # handle additional planes
    plane_numbers = _get_extra_plane_numbers(node)
    for plane_number in plane_numbers:
        parm1 = "sgtk_vm_filename_plane" + str(plane_number)
        parm2 = "vm_filename_plane" + str(plane_number)
        copy_parm(parm1, parm2)
def __init__(self, fromnode=True, parent=None):
    super(maya_scene, self).__init__(parent)
    self.process_maya = None
    self.fromnode = fromnode
    self.node = None
    self.maya_env()
    self.scene = hou.expandString("$PERFORCE")
    try:
        self.scene = hou.pwd().parm("scene").eval()
    except:
        pass
    self.exportPath = hou.expandString("$MDATA")
    self.abc = "0"
    self.light = "1"
    self.cam = "1"
    self.start = hou.expandString("$RFSTART")
    self.end = hou.expandString("$RFEND")
    self.mode = "empty"
    self.abcFile = "empty"
    self.selstr = "empty"
    self.smooth = "empty"
    self.smoothIter = "empty"
    self.checkMblur = "empty"
    self.step = "empty"
    if fromnode:
        self.initNode()
def saveCurve(*args, **kwargs):
    node = hou.pwd()
    node_name = node.type().name()
    node_path = node.type().definition().libraryFilePath()
    data_folder = os.path.dirname(node_path) + "/%s_data" % node_name
    if os.path.exists(data_folder):
        for file_del in os.listdir(data_folder):
            os.remove(os.path.join(data_folder, file_del))
    else:
        os.mkdir(data_folder)

    # get curve nodes
    curve_node_list = []
    for pend_node in node.children():
        # print(pend_node)
        if pend_node.type().name() == "curve":
            curve_node_list.append(pend_node)

    # save every node and every parm
    for curve_node in curve_node_list:
        name = curve_node.name()
        curve_dic = {}
        for parm in curve_node.parms():
            curve_dic[parm.name()] = parm.eval()
            # print(parm.name())
            # print(parm.eval())
        data_file = os.path.join(data_folder, "%s.json" % name)
        if not curve_dic.get("width"):
            curve_dic["width"] = 10
        with open(data_file, "w+") as outfile:
            json.dump(curve_dic, outfile)

    hou.ui.displayMessage("Saved!")
def cache_path(mode='path'):
    node = hou.pwd()
    name = node.evalParm('cache_name')

    if node.evalParm('enable_version'):
        ver = '_v{0:0>3}'.format(node.evalParm('version'))
    else:
        ver = ''

    if node.evalParm('trange') > 0:
        frame = '.{0:0>4}'.format(int(hou.frame()))
    else:
        frame = ''

    ext = node.evalParm('ext')
    full_name = '{name}{ver}{frame}{ext}'.format(name=name, ver=ver,
                                                 frame=frame, ext=ext)
    path = util.fix_path(os.path.join(node.evalParm('cache_dir'), full_name))

    if mode == 'path':
        return path
    elif mode == 'name':
        return full_name
    else:
        return
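# Hedged usage sketch for cache_path() above, e.g. evaluated from a Python
# expression on a file parm of the same HDA. The parm names ('cache_name',
# 'cache_dir', etc.) are the snippet's own assumptions; any other mode value
# returns None by design.
print(cache_path('path'))   # full path, directory + file name
print(cache_path('name'))   # file name only, e.g. "mycache_v001.0001.bgeo.sc"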
def copy_path_to_clipboard(self):
    render_path = self._get_render_path(hou.pwd())
    render_path = render_path.replace('/', os.sep)
    hou.ui.copyTextToClipboard(render_path)
    self._app.log_debug(
        "Copied render path to clipboard: %s" % (render_path,))
def sim():
    current_node = hou.pwd()

    # refresh python
    Code = current_node.node("Code")
    Code.cook()

    # refresh output
    result_node = current_node.node("Output")
    result_node.cook()
    print("Done")

    # save csv
    csvFile = current_node.parm("csvFile").evalAsString()
    if not csvFile.endswith(".csv"):
        print("not csv file path")
        return

    data_geo = result_node.geometry()
    print(data_geo)
    pos_list = []
    for point in data_geo.points():
        pos = []
        pos.append(point.attribValue("P")[0] * 100)
        pos.append(point.attribValue("P")[2] * 100)
        pos.append(point.attribValue("P")[1] * 100)
        pos_list.append(pos)

    with open(csvFile, 'wb') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                            quoting=csv.QUOTE_MINIMAL)
        for pos in pos_list:
            writer.writerow(pos)
def setNodeNameColor():
    n = hou.pwd()
    presets = {
        "DISPLAY": [hou.Color((0.0, 0.4, 1.0)), "Display"],
        "RENDER": [hou.Color((0.4, 0.2, 0.6)), "Render"],
        "OUT": [hou.Color((0.1, 0.1, 0.1)), ""],
        "NULL": [hou.Color((0.2, 0.2, 0.2)), "Both"],
        "TO_REN_": [hou.Color((0.4, 0.2, 0.6)), ""],
        "TO_DOP_": [hou.Color((0.4, 0.2, 0.6)), ""],
    }

    prefix = n.parm('output_prefix').eval()
    rentype = n.parm('output_ren_type').eval()
    doptype = n.parm('output_dop_type').eval()
    label = n.parm('output_label').eval().replace(' ', '_')
    arg = presets[prefix]

    name = ""
    if prefix in ("DISPLAY", "RENDER", "OUT", "NULL"):
        name = prefix
        if len(label) > 0:
            name += '_%s' % label
    elif prefix == "TO_REN_":
        name = '%s_%s_%s' % (prefix, rentype, label)
    elif prefix == "TO_DOP_":
        name = '%s_%s_%s' % (prefix, doptype, label)

    n.setName(name)
    n.setColor(arg[0])

    if arg[1] == "Display":
        n.setDisplayFlag(True)
    elif arg[1] == "Render":
        n.setRenderFlag(True)
    elif arg[1] == "Both":
        n.setDisplayFlag(True)
        n.setRenderFlag(True)
def __init__(self, parent=None):
    self.node = hou.pwd()
    self.mode = self.node.parm("mode").eval()
    self.code = self.node.parm("snippet%s" % (int(self.mode) + 1)).eval()
    self.replace = self.node.parm("replace").eval()
    self.menutype = self.node.parm("menutype").eval()
    self.snippets = {}
def getCacheList(self):
    ## Init variable
    current_cache_nodes = []
    all_nodes = hou.pwd().allSubChildren()

    for node in all_nodes:
        for item in Define.CACHE_NODES:
            node_type = item.get("name")
            rwtype = item.get("rwtype")

            if node.type().name().lower() == node_type:
                eachNode_dict = {}
                node_path = node.path()
                node_type = node.type().name().lower()
                node_cat = node.type().category().name()
                cache_path = self.getUnexpandedStringPath(node_path, node_type, node_cat)
                evalCachePath = self.getEvalStringPath(node_path, node_type)

                eachNode_dict["name"] = node.name()
                eachNode_dict["node_path"] = node_path
                eachNode_dict["cache_path"] = cache_path
                #eachNode_dict["env"] = self.analizeValiables(cache_path)
                eachNode_dict["expanded_path"] = evalCachePath
                eachNode_dict["color"] = node.color().rgb()
                eachNode_dict["rwtype"] = self.setIoType(node_path, rwtype, node_cat)
                eachNode_dict["editable"] = self.isEditable(node_path)
                eachNode_dict["status"] = self.setStatus(node, node_cat)

                current_cache_nodes.append(eachNode_dict)

    return current_cache_nodes
def saveCurve(*args, **kwargs):
    node = hou.pwd()
    node_name = node.type().name()
    node_path = node.type().definition().libraryFilePath()
    data_folder = os.path.dirname(node_path) + "/%s_data" % node_name
    data_file = os.path.join(data_folder, "%s.json" % node_name)

    if os.path.exists(data_folder):
        for file_del in os.listdir(data_folder):
            os.remove(os.path.join(data_folder, file_del))
    else:
        os.mkdir(data_folder)

    # get curve points from the OUT node
    tar_node = node.node("OUT")
    info_dict = {}
    dic = []
    for pt in tar_node.geometry().points():
        info_dict = {}
        pos = pt.attribValue("P")
        new_pos = []
        new_pos.append(pos[0] * 100)
        new_pos.append(pos[2] * 100)
        new_pos.append(pos[1] * 100)
        info_dict["position"] = new_pos

        index = pt.number()
        if index % 2 == 0:
            info_dict["TID"] = 0
        else:
            info_dict["TID"] = 1
        info_dict["extra_info"] = pt.attribValue("extra_info")
        dic.append(info_dict)

    with open(data_file, "w+") as outfile:
        info = json.dumps(dic, indent=4)
        outfile.write(info)
    print("Saved!")
def find_camera(oppattern, path=None):
    """Finds a camera OBJ within nested subnets and returns its full path."""
    r = ''
    try:
        root = hou.pwd()
        if path:
            root = hou.node(path)
        root = root.glob(oppattern)
        if len(root) > 0:
            cam = [n.path() for n in root[0].allSubChildren()
                   if n.type().name() == 'cam']
            if len(cam) > 0:
                r = cam[0]
            else:
                pass  # no camera found
        else:
            pass  # no root node found
    except:
        pass  # TODO: error handling
    return r
def import_btn():
    node = hou.pwd()
    parm = node.parm('wedge_select')
    parm_val = parm.eval()
    load_value = node.parm('wedge_list').eval().split('\n')

    value_list = []
    wedge_parms = node.parm('wedge_parm').multiParmInstances()
    for i in wedge_parms:
        if 'chs' in i.rawValue():
            value_list.append(i.rawValue()[6:-3])
        else:
            pass

    import_value_list = []
    for l in load_value:
        import_value_list.append(l.split(' - ')[-1])

    import_value = import_value_list[parm_val][1:-1]
    import_parm = node.parm('import_value')
    import_parm.lock(False)
    import_parm.set(import_value)
    import_parm.lock(True)

    set_value = import_value_list[parm_val][1:-1].split(', ')
    for v, s in zip(value_list, set_value):
        hou.parm(v).set(float(s))
def simAll():
    current_node = hou.pwd()
    currentDate = time.asctime().replace(" ", "_").replace(":", "_")
    logPath = os.path.join(os.path.expanduser("~"), "%s.log" % currentDate)
    logging.basicConfig(filename=logPath, level=logging.INFO)

    # get config dir
    configDir = current_node.parm("ConfigRoot").eval()
    csvDir = current_node.parm("CsvRoot").eval()
    jsonFiles = [jsfile for jsfile in os.listdir(configDir)
                 if jsfile.endswith(".json")]
    if not jsonFiles:
        logging.info("No Json Files in selected Dir")
        return

    # loop
    for jsonfile in jsonFiles:
        jsonPath = os.path.join(configDir, jsonfile)
        # read config file
        LoadConfig(jsonPath)
        csvFile = jsonfile.replace(".json", ".csv")
        csvPath = os.path.join(configDir, csvFile)
        print(csvPath)
        # export csv
        sim(csvPath)

    hou.ui.displayMessage("Done")
    return
def loadCurve(*args, **kwargs):
    node = hou.pwd()
    node_path = node.type().definition().libraryFilePath()
    node_name = node.type().name()
    data_folder = os.path.dirname(node_path) + "/%s_data" % node_name
    if not os.path.exists(data_folder):
        return

    # delete all child nodes
    for c_node in node.children():
        c_node.destroy()

    # read data
    for curve_data in os.listdir(data_folder):
        full_data_path = os.path.join(data_folder, curve_data)
        curve_name = os.path.splitext(curve_data)[0]

        # create curve node
        target_node = node.createNode("curve", curve_name)
        parm_dic = {}
        with open(full_data_path, "r") as outfile:
            all_info = outfile.read()
            parm_dic = json.loads(all_info)

        # add width parm to interface
        current_tmpparm = target_node.parmTemplateGroup()
        current_tmpparm.append(hou.StringParmTemplate("Style", "Style", 1))
        current_tmpparm.append(hou.FloatParmTemplate("width", "Width", 1))
        target_node.setParmTemplateGroup(current_tmpparm)
        target_node.setParms({"Style": "HRS_Default"})
        target_node.setParms(parm_dic)

    hou.ui.displayMessage("Loaded!")
def read_dataset_from_current_frame(working_dir, sop_name, prediction=False):
    frame_id = hou.intFrame()

    # Find the name of the current animation
    anim_type = hou.parm(sop_name + '/anim_types').evalAsString()
    clip_name = hou.parm(sop_name + '/' + anim_type).evalAsString()
    clip_name = clip_name.replace('.bclip', '')

    if prediction:
        clip_path = os.path.join(working_dir, 'prediction', clip_name + '.npz')
    else:
        clip_path = os.path.join(working_dir, 'dataset', clip_name + '.npz')

    if os.path.exists(clip_path):
        npzfile = np.load(clip_path)
        #bones = npzfile['bones']
        #base_skinnings = npzfile['bases']
        if prediction:
            smooth_skinnings = npzfile['predicted_smooths']
        else:
            smooth_skinnings = npzfile['smooths']
            #smooth_skinnings = npzfile['bases']

        #bone = bones[frame_id-1]
        #base_skinning = base_skinnings[frame_id-1]
        smooth_skinning = smooth_skinnings[frame_id - 1]

        node = hou.pwd()
        geo = node.geometry()
        for i, point in enumerate(geo.points()):
            point.setPosition(smooth_skinning[i])
    else:
        print('the file ', clip_path, " doesn't exist")
def _checkRenderParameters(parms):
    """Check the values of the render-specific parameters.

    Return True if the values are valid and False otherwise.
    """
    if parms["max_hosts_per_job"] < parms["min_hosts_per_job"]:
        hqrop.displayError(
            "Max. Hosts Per Job must be greater than or equal to "
            "Min. Hosts Per Job.")
        return False

    # Check IFD file path.
    if parms["make_ifds"] or not parms["use_output_driver"]:
        if not parms["ifd_path"]:
            if parms["make_ifds"]:
                ifd_path_parm = hou.parm("hq_outputifd")
            elif not parms["use_output_driver"]:
                ifd_path_parm = hou.parm("hq_input_ifd")
            ifd_parm_label = ifd_path_parm.parmTemplate().label()
            hqrop.displayError(" ".join([
                "The value of the", ifd_parm_label, "parameter in\n",
                hou.pwd().path(), "\n"
                "must not be blank."
            ]))
            return False

    return True
def updateCurve(*args, **kwargs):
    resetNode()
    node = hou.pwd()
    curve_data = node.node("Curve_data")
    parent = node.parent()
    data_node = parent.node("Railway_Art")

    curve_node_list = []
    curve_list = []
    for pend_node in data_node.children():
        # print(pend_node)
        if pend_node.type().name() == "curve":
            curve_node_list.append(pend_node)

    # create_mergeNode
    curve_merge = curve_data.createNode("object_merge")
    curve_merge.parm("numobj").set(len(curve_node_list))
    count = 1
    for curve_node in curve_node_list:
        curve_path = curve_node.path()
        curve_merge.parm("objpath%s" % count).set(curve_path)
        count += 1

    output = curve_data.createNode("output")
    output.setInput(0, curve_merge, 0)
    hou.ui.displayMessage("Done!")
def getFBXFileNames():
    chunkNames = getChunkNames()
    objectName = hou.pwd().parm('objectName').eval()
    return ["{0}_{1}".format(objectName, chunkName) for chunkName in chunkNames]
def read_point_cloud():
    """
    Read Open3D point clouds and convert them to Houdini geometry
    Based on http://www.open3d.org/docs/tutorial/Basic/working_with_numpy.html
    """
    node = hou.pwd()
    node_geo = node.geometry()
    path = node.parm("path").eval()

    pcd_load = open3d.read_point_cloud(path)

    if not pcd_load.has_points():
        raise hou.NodeWarning("Geometry does not contain any points.")

    # create numpy arrays
    np_pos = np.asarray(pcd_load.points)
    np_n = np.asarray(pcd_load.normals)
    np_cd = np.asarray(pcd_load.colors)

    # position
    node_geo.createPoints(np_pos)

    # normals
    if pcd_load.has_normals():
        node_geo.addAttrib(hou.attribType.Point, "N",
                           default_value=(0.0, 0.0, 0.0),
                           transform_as_normal=True,
                           create_local_variable=False)
        node_geo.setPointFloatAttribValuesFromString(
            "N", np_n, float_type=hou.numericData.Float64)

    # colors
    if pcd_load.has_colors():
        node_geo.addAttrib(hou.attribType.Point, "Cd",
                           default_value=(0.0, 0.0, 0.0),
                           transform_as_normal=False,
                           create_local_variable=False)
        node_geo.setPointFloatAttribValuesFromString(
            "Cd", np_cd, float_type=hou.numericData.Float64)
def fast_global_registration():
    """
    Execute fast global registration
    Based on http://www.open3d.org/docs/tutorial/Advanced/fast_global_registration.html
    """
    node = hou.pwd()
    node_geo = node.geometry()
    node_geo_target = node.inputs()[1].geometry()
    voxel_size = node.parm("voxel_size").eval()
    transform = node.parm("transform").eval()

    has_fpfh_source = bool(node_geo.findPointAttrib("fpfh"))
    has_fpfh_target = bool(node_geo_target.findPointAttrib("fpfh"))

    if not has_fpfh_source or not has_fpfh_target:
        raise hou.NodeError("One of the inputs does not have 'fpfh' attribute.")

    # to numpy
    np_pos_str_source = node_geo.pointFloatAttribValuesAsString(
        "P", float_type=hou.numericData.Float32)
    np_pos_source = np.fromstring(np_pos_str_source, dtype=np.float32).reshape(-1, 3)

    np_fpfh_str_source = node_geo.pointFloatAttribValuesAsString(
        "fpfh", float_type=hou.numericData.Float32)
    np_fpfh_size = node_geo.findPointAttrib("fpfh").size()
    np_fpfh_source = np.fromstring(np_fpfh_str_source, dtype=np.float32).reshape(-1, np_fpfh_size)
    np_fpfh_source = np.swapaxes(np_fpfh_source, 1, 0)

    np_pos_str_target = node_geo_target.pointFloatAttribValuesAsString(
        "P", float_type=hou.numericData.Float32)
    np_pos_target = np.fromstring(np_pos_str_target, dtype=np.float32).reshape(-1, 3)

    np_fpfh_str_target = node_geo_target.pointFloatAttribValuesAsString(
        "fpfh", float_type=hou.numericData.Float32)
    np_fpfh_target = np.fromstring(np_fpfh_str_target, dtype=np.float32).reshape(-1, np_fpfh_size)
    np_fpfh_target = np.swapaxes(np_fpfh_target, 1, 0)

    # to open3d
    source = open3d.PointCloud()
    source.points = open3d.Vector3dVector(np_pos_source.astype(np.float64))
    source_fpfh = open3d.registration.Feature()
    source_fpfh.resize(np_fpfh_source.shape[0], np_fpfh_source.shape[1])
    source_fpfh.data = np_fpfh_source.astype(np.float64)

    target = open3d.PointCloud()
    target.points = open3d.Vector3dVector(np_pos_target.astype(np.float64))
    target_fpfh = open3d.registration.Feature()
    target_fpfh.resize(np_fpfh_source.shape[0], np_fpfh_source.shape[1])
    target_fpfh.data = np_fpfh_target.astype(np.float64)

    # registration
    registration = open3d.registration_fast_based_on_feature_matching(
        source, target, source_fpfh, target_fpfh,
        open3d.FastGlobalRegistrationOption(
            maximum_correspondence_distance=voxel_size * 0.5))

    registration_xform = hou.Matrix4(registration.transformation)
    registration_xform = registration_xform.transposed()

    # to houdini
    if transform:
        node_geo.transform(registration_xform)

    node_geo.addAttrib(hou.attribType.Global, "xform",
                       default_value=(0.0,) * 16,
                       create_local_variable=False)
    node_geo.setGlobalAttribValue("xform", registration_xform.asTuple())
def loadCurve(*args, **kwargs):
    node = hou.pwd()
    node_path = node.type().definition().libraryFilePath()
    node_name = node.type().name()
    data_folder = os.path.dirname(node_path) + "/%s_data" % node_name

    # load data
    pf.loadCurvesData(data_folder, node)
    print("Loaded!")
def add_curve(*args, **kwargs):
    node = hou.pwd()
    curve_count = node.parm("curve_count").eval()
    if curve_count:
        for i in range(curve_count):
            # add curve node
            node.createNode("curve")
        node.layoutChildren()
def show_in_fs(self):
    # retrieve the calling node
    current_node = hou.pwd()
    if not current_node:
        return

    render_dir = None

    # first, try to just use the current cached path:
    render_path = self._get_render_path(current_node)
    if render_path:
        # the above method returns houdini style slashes, so ensure these
        # are pointing correctly
        render_path = render_path.replace("/", os.path.sep)

        dir_name = os.path.dirname(render_path)
        if os.path.exists(dir_name):
            render_dir = dir_name

    if not render_dir:
        # render directory doesn't exist so try using location
        # of rendered frames instead:
        rendered_files = self._get_rendered_files(current_node)
        if not rendered_files:
            msg = ("Unable to find rendered files for node '%s'."
                   % (current_node,))
            self._app.log_error(msg)
            hou.ui.displayMessage(msg)
            return
        else:
            render_dir = os.path.dirname(rendered_files[0])

    # if we have a valid render path then show it:
    if render_dir:
        # TODO: move to utility method in core
        system = sys.platform

        # run the app
        if system == "linux2":
            cmd = "xdg-open \"%s\"" % render_dir
        elif system == "darwin":
            cmd = "open '%s'" % render_dir
        elif system == "win32":
            cmd = "cmd.exe /C start \"Folder\" \"%s\"" % render_dir
        else:
            msg = "Platform '%s' is not supported." % (system,)
            self._app.log_error(msg)
            hou.ui.displayMessage(msg)

        self._app.log_debug("Executing command:\n '%s'" % (cmd,))
        exit_code = os.system(cmd)
        if exit_code != 0:
            msg = "Failed to launch '%s'!" % (cmd,)
            hou.ui.displayMessage(msg)
def copy_path_to_clipboard(self):
    render_path = self._get_render_path(hou.pwd())

    # use Qt to copy the path to the clipboard:
    from sgtk.platform.qt import QtGui
    QtGui.QApplication.clipboard().setText(render_path)

    self._app.log_debug(
        "Copied render path to clipboard: %s" % (render_path,))
def getCamList(self):
    camList = []
    nodes = hou.pwd().allSubChildren()
    for node in nodes:
        node_type = node.type().name().lower()
        if node_type == "cam":
            camList.append(self.createNodeInfo(node, node_type))
    return camList
def _get_output_profile(self, node=None):
    if not node:
        node = hou.pwd()

    output_profile_parm = node.parm(self.TK_OUTPUT_PROFILE_PARM)
    output_profile_name = \
        output_profile_parm.menuLabels()[output_profile_parm.eval()]
    output_profile = self._output_profiles[output_profile_name]

    return output_profile
def promote_blendshapes_to_ui():
    """
    This function is called by the button on the blend node; it promotes the
    blend shapes to the upper level.
    """
    main_otl = hou.pwd().parent().parent().parent().parent()
    hou_rig.utils.ui_folder(main_otl, ["Rig Parms",
                                       hou.pwd().parent().parent().name(),
                                       hou.pwd().name()])
    hou.pwd().parm("updatechannels").pressButton()

    for i in hou.pwd().parms():
        try:
            if i.name().startswith("blend") and "0" not in i.name():
                hou_rig.utils.promote_parm_to_ui(
                    i, main_otl,
                    ["Rig Parms",
                     hou.pwd().parent().parent().name(),
                     hou.pwd().name()],
                    parm_name=i.alias())
        except:
            continue

    hou.pwd().parent().bypass(0)
def on_copy_path_to_clipboard_button_callback(self):
    """
    Callback from the node whenever the 'Copy path to clipboard'
    button is pressed.

    Used by parms: sgtk__copypath_button
    """
    node = hou.pwd()

    # get the path depending if in full or proxy mode:
    render_path = self.__get_render_path(node)

    # use Qt to copy the path to the clipboard:
    from sgtk.platform.qt import QtGui
    QtGui.QApplication.clipboard().setText(render_path)
def create_alembic_node(self):
    current_node = hou.pwd()
    output_path_parm = current_node.parm(self.NODE_OUTPUT_PATH_PARM)
    alembic_node_name = 'alembic_' + current_node.name()

    # create the alembic node and set the filename parm
    alembic_node = current_node.parent().createNode(self.HOU_SOP_ALEMBIC_TYPE)
    alembic_node.parm(self.NODE_OUTPUT_PATH_PARM).set(
        output_path_parm.menuLabels()[output_path_parm.eval()])
    alembic_node.setName(alembic_node_name, unique_name=True)

    # move it away from the origin
    alembic_node.moveToGoodPosition()
def get_output_path_menu_items(self):
    menu = ["sgtk"]
    current_node = hou.pwd()

    # attempt to compute the output path and add it as an item in the menu
    try:
        menu.append(self._compute_output_path(current_node))
    except sgtk.TankError as e:
        error_msg = ("Unable to construct the output path menu items: "
                     "%s - %s" % (current_node.name(), e))
        self._app.log_error(error_msg)
        menu.append("ERROR: %s" % (error_msg,))

    return menu
def startRenderPyroCache():
    if not _checkHip():
        return

    curNodeLs = hou.pwd()
    curParm = curNodeLs.parm("sopoutput").eval()
    pyroNode = hou.node(curNodeLs.path().rsplit('/', 1)[0])
    if not curParm == "PIPECACHE":
        return

    hip = hou.hipFile.name()
    name = hip.split(os.sep)[-1].split('.')[0]

    import pipe
    obj_asset = pipe.Projects().GetAssetByInfo(hip)
    path = os.path.join(obj_asset.GetDataPath(), 'geo', name)
    dir_render, padd_dir, padd_int = filesys.get_next_version_dir(path)
    dir_render += os.sep + pyroNode.name()
    if not os.path.exists(dir_render):
        os.makedirs(dir_render)

    dir_render += os.sep + pyroNode.name() + '.$F4.bgeo'
    pyroNode.parm("file").set(dir_render)
    print(dir_render)
def createAttributes(radius, singleThreaded=True):
    geo = hou.pwd().geometry()
    prims = geo.prims()

    # Initialize attributes
    if geo.findPointAttrib('Active') is None:
        geo.addAttrib(hou.attribType.Point, 'Active', 0)
    if geo.findPrimAttrib('Flow') is None:
        geo.addAttrib(hou.attribType.Prim, 'Flow', 0)
    if geo.findPrimAttrib('EndNeighbors') is None:
        geo.addAttrib(hou.attribType.Prim, 'EndNeighbors', "")
    if geo.findPrimAttrib('StartNeighbors') is None:
        geo.addAttrib(hou.attribType.Prim, 'StartNeighbors', "")
    if geo.findPrimAttrib('CurrentPoint') is None:
        geo.addAttrib(hou.attribType.Prim, 'CurrentPoint', -1)
    if geo.findGlobalAttrib('ActivePrims') is None:
        # the default must be a tuple so the attribute is an int array;
        # (0) would just be the int 0
        geo.addAttrib(hou.attribType.Global, 'ActivePrims', (0,))

    # Iterate through the streamlines and create a dict from id to start and end points
    end_points = {}
    i = 0
    for prim in geo.prims():
        end_points[str(i)] = {
            'start': (0, prim.vertices()[0].point().position()),
            'end': (len(prim.vertices()) - 1,
                    prim.vertices()[len(prim.vertices()) - 1].point().position()),
        }
        if hou.updateProgressAndCheckForInterrupt():
            break
        i = i + 1

    # Using start and end dicts, create a new dict of stream # to start points
    # and end # points within the radius of a sphere
    if singleThreaded:
        _threadCreateAttribute(geo.prims(), geo, end_points, radius)
    else:
        primitives = chunks(geo.prims(), len(geo.prims()) / 3)
        threads = []
        for i in range(3):
            t = threading.Thread(target=_threadCreateAttribute,
                                 args=(primitives.next(), geo, end_points, radius,))
            threads.append(t)
            t.start()
            #t.join()
        #for t in threads:
        #    t.join()

    groupStartEndPoints(geo)
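# createAttributes() above calls a chunks() helper that is not defined in this
# snippet. A minimal sketch of what such a generator might look like is below
# (an assumption, not the original implementation). The caller uses
# primitives.next(), which implies Python 2; int(size) also guards against a
# float chunk size.
def chunks(seq, size):
    """Yield successive slices of seq that are at most size items long."""
    size = max(1, int(size))
    for i in range(0, len(seq), size):
        yield seq[i:i + size]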
def getPointsFromTrack(filename):
    geo = hou.pwd().geometry()

    # Read in stream data
    streams, hdr = trackvis.read(filename)
    streamlines = [s[0] for s in streams]

    # For each streamline add a curve to the geometry
    j = 0
    for stream in streamlines:
        i = 0
        curve = geo.createNURBSCurve(len(stream))
        if hou.updateProgressAndCheckForInterrupt(
                int(float(j) / float(len(streamlines)) * 100)):
            break
        for vertex in curve.vertices():
            vertex.point().setPosition((float(stream[i][0]),
                                        float(stream[i][1]),
                                        float(stream[i][2])))
            i = i + 1
            if hou.updateProgressAndCheckForInterrupt():
                break
        j = j + 1
def solverStep():
    geo = hou.pwd().geometry()
    activePrims = geo.intListAttribValue('ActivePrims')

    # For each prim, look up current point and set next point based on flow direction.
    i = 0
    for index in activePrims:
        prim = geo.prims()[index]
        if hou.updateProgressAndCheckForInterrupt(
                int(float(i) / float(len(geo.prims())) * 100)):
            break
        flowDir = prim.attribValue('Flow')
        if flowDir == 0:
            continue
        sizeOfPrim = len(prim.vertices())
        currentPoint = prim.attribValue('CurrentPoint')
        rgb1 = prim.vertices()[currentPoint].point().attribValue('Cd')
        nextPoint = currentPoint + flowDir

        # For all valid points that are not the end points of a stream
        if (currentPoint > 0 and currentPoint < sizeOfPrim - 1) or \
                (currentPoint == 0 and flowDir == 1) or \
                (currentPoint == sizeOfPrim - 1 and flowDir == -1):
            rgb2 = prim.vertices()[nextPoint].point().attribValue('Cd')
            rgbSum = (sumColors(rgb1, rgb2))
            prim.vertices()[nextPoint].point().setAttribValue('Cd', rgbSum)
            prim.vertices()[nextPoint].point().setAttribValue('Active', 1)
            prim.vertices()[nextPoint].point().setAttribValue('Age', 1.0)
            prim.vertices()[currentPoint].point().setAttribValue('Active', 0)
            prim.setAttribValue('CurrentPoint', nextPoint)
        # Jump to all the start points
        elif currentPoint == 0 and flowDir == -1:
            _checkForEndPoint(0, rgb1, prim, flowDir, geo)
            prim.vertices()[currentPoint].point().setAttribValue('Active', 0)
        # Jump to all the end points
        elif currentPoint == sizeOfPrim - 1 and flowDir == 1:
            _checkForEndPoint(sizeOfPrim - 1, rgb1, prim, flowDir, geo)
            prim.vertices()[currentPoint].point().setAttribValue('Active', 0)
        i = i + 1
def run():
    if len(hou.selectedNodes()) == 0:
        nodes = hou.pwd().allSubChildren()
    else:
        nodes = hou.selectedNodes()

    for node in nodes:
        node_type = node.type()
        node_type_name = node_type.name()
        type_category_name = node_type.category().name().lower()

        node_color = None
        if node_type.isManager():
            node_color = manager_color
        elif node_type_name in common_type_colors:
            node_color = common_type_colors[node_type_name]

        if node_color is not None:
            node.setColor(hou.Color(node_color))
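# run() above relies on module-level manager_color and common_type_colors,
# which are not shown in this snippet. A minimal sketch of what they might
# look like follows; the specific RGB values and node type names are
# assumptions for illustration only.
manager_color = (0.29, 0.565, 0.886)      # color for manager nodes (obj, out, ...)
common_type_colors = {
    "geo": (0.451, 0.369, 0.796),         # object-level geometry containers
    "cam": (0.094, 0.369, 0.690),         # cameras
    "merge": (0.996, 0.682, 0.682),       # SOP merges
}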
def on_show_in_fs_button_callback(self):
    """
    Shows the location of the node in the file system.
    This is a callback which is executed when the show in fs
    button is pressed on the node.

    Used by parms: sgtk__showinfs_button
    """
    node = hou.pwd()
    if not node:
        return

    render_dir = None

    # first, try to just use the current cached path:
    render_path = self.__get_render_path(node)
    if render_path:
        # the above method returns houdini style slashes, so ensure these
        # are pointing correctly
        render_path = render_path.replace("/", os.path.sep)

        dir_name = os.path.dirname(render_path)
        if os.path.exists(dir_name):
            render_dir = dir_name

    if not render_dir:
        # render directory doesn't exist so try using location
        # of rendered frames instead:
        try:
            files = self.get_files_on_disk(node)
            if len(files) == 0:
                msg = ("There are no renders for this node yet!\n"
                       "When you render, the files will be written to "
                       "the following location:\n\n%s" % render_path)
                hou.ui.displayMessage(msg)
            else:
                render_dir = os.path.dirname(files[0])
        except Exception as e:
            msg = "Unable to jump to file system:\n\n%s" % e
            hou.ui.displayMessage(msg)
def set_profile(self, node=None):
    if not node:
        node = hou.pwd()

    output_profile = self._get_output_profile(node)

    self._app.log_debug("Applying tk alembic node profile: %s" %
                        (output_profile["name"],))

    # apply the supplied settings to the node
    settings = output_profile["settings"]
    if settings:
        self._app.log_debug('Populating format settings: %s' % (settings,))
        node.setParms(settings)

    # set the node color
    color = output_profile["color"]
    if color:
        node.setColor(hou.Color(color))

    self.refresh_output_path(node)
def __save_hip_path_to_user_data(path=None, node=None):
    path = path if path else hou.hipFile.path()
    node = node if node else hou.pwd()
    node.parm('sgtk__hip_path').set(path)
def geometry2ass(path, name, min_pixel_width, mode, export_type, export_motion,
                 export_color, render_type, double_sided=True,
                 invert_normals=False, **kwargs):
    """exports geometry to ass format
    """
    ass_path = path
    start_time = time.time()

    parts = os.path.splitext(ass_path)
    extension = parts[1]

    use_gzip = False
    if extension == '.gz':
        use_gzip = True
        basename = os.path.splitext(parts[0])[0]
    else:
        basename = parts[0]

    asstoc_path = '%s.asstoc' % basename
    node = hou.pwd()

    file_handler = open
    if use_gzip:
        file_handler = gzip.open

    # normalize path
    ass_path = os.path.normpath(ass_path)
    try:
        os.makedirs(os.path.dirname(ass_path))
    except OSError:
        # path exists
        pass

    data = ''
    if export_type == 0:
        data = curves2ass(node, name, min_pixel_width, mode, export_motion)
    elif export_type == 1:
        data = polygon2ass(
            node,
            name,
            export_motion,
            export_color,
            double_sided,
            invert_normals,
        )
    elif export_type == 2:
        data = particle2ass(node, name, export_motion, export_color, render_type)

    write_start = time.time()
    ass_file = file_handler(ass_path, 'w')
    ass_file.write(data)
    ass_file.close()
    write_end = time.time()
    print('Writing to file : %3.3f' % (write_end - write_start))

    bounding_min = node.geometry().attribValue("bound_min")
    bounding_max = node.geometry().attribValue("bound_max")

    bounding_box_info = 'bounds %s %s %s %s %s %s' % (
        bounding_min[0], bounding_min[1], bounding_min[2],
        bounding_max[0], bounding_max[1], bounding_max[2]
    )

    with open(asstoc_path, 'w') as asstoc_file:
        asstoc_file.write(bounding_box_info)

    end_time = time.time()
    print('All Conversion took : %3.3f sec' % (end_time - start_time))
    print('******************************************************************')
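# Hedged usage sketch for geometry2ass() above, e.g. from a button callback on
# the Python SOP that contains it. The 'ass_path' parm name is an assumption,
# not part of the original snippet; export_type=1 selects the polygon branch.
import hou

node = hou.pwd()
geometry2ass(
    path=node.evalParm("ass_path"),   # assumed string parm holding the output path
    name=node.name(),
    min_pixel_width=0.5,
    mode=0,
    export_type=1,                    # 0=curves, 1=polygons, 2=particles per the branches above
    export_motion=False,
    export_color=True,
    render_type=0,
)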
def run_import_morphs():
    """
    creates a folder in the UI subnet
    """
    import_morphs(
        hou.ui.selectFile(title="Select the folder",
                          file_type=hou.fileType.Directory),
        hou.pwd().parent(),
        hou.pwd(),
        out_connection=hou.pwd().parent().node("blendshapes_from_houdini"))
def __load_hip_path_from_user_data(node=None):
    node = node if node else hou.pwd()
    return node.parm('sgtk__hip_path').eval()
import sys
import hou

hou.hipFile.load('D:/WORK/HOUDINI_16_playground/hython_test.hiplc')
# hou.setFrame(101)
hou.cd('/out')

octaneRops = []
for child in hou.pwd().children():
    if child.type().name() == 'Octane_ROP':
        octaneRops.append(child)

print "octaneRops : "
for i, rop in enumerate(octaneRops):
    print "\t", i, " : ", rop, "\n"

userInput = raw_input("choose an Octane ROP Driver:")
print userInput

# index into the listed Octane ROPs, not all children of /out
rop = octaneRops[int(userInput)]
rTarget = hou.node(rop.parm("HO_renderTarget").evalAsString())
rTarget.parm("maxsamples").set(500)

# disable mplay rendering
rop.parm("HO_renderToMPlay").set(1)
def render():
    """Evaluate and package the HDA parameters and submit a job to HQueue."""
    # Build a dictionary of base parameters and add the HQueue Render-specific
    # ones.
    parms = hqrop.getBaseParameters()

    use_cloud = (hou.ch("hq_use_cloud1")
                 if hou.parm("hq_use_cloud1") is not None else 0)
    num_cloud_machines = (hou.ch("hq_num_cloud_machines")
                          if hou.parm("hq_num_cloud_machines") is not None else 0)
    machine_type = (hou.ch("hq_cloud_machine_type")
                    if hou.parm("hq_cloud_machine_type") is not None else "")
    use_output_driver = bool(use_cloud) or parms["hip_action"] != "use_ifd"

    # validate the machine type
    if machine_type not in ['c1.medium', 'c1.xlarge', 'm1.small', 'm1.large',
                            'm1.xlarge']:
        machine_type = 'c1.xlarge'

    parms.update({
        "assign_ifdgen_to": hou.parm("hq_assign_ifdgen_to").evalAsString(),
        "ifdgen_clients": hou.ch("hq_ifdgen_clients").strip(),
        "ifdgen_client_groups": hou.ch("hq_ifdgen_client_groups").strip(),
        "batch_all_frames": hou.ch("hq_batch_all_frames"),
        "frames_per_job": hou.ch("hq_framesperjob"),
        "render_frame_order": hou.parm("hq_render_frame_order").evalAsString(),
        "make_ifds": hou.ch("hq_makeifds"),
        "max_hosts_per_job": hou.ch("hq_max_hosts"),
        "min_hosts_per_job": hou.ch("hq_min_hosts"),
        "is_CPU_number_set": bool(hou.ch("hq_is_CPU_number_set")),
        "CPUs_to_use": hou.ch("hq_CPUs_to_use"),
        "output_ifd": hou.parm("hq_outputifd").unexpandedString().strip(),
        "use_output_driver": use_output_driver,
        "use_cloud": use_cloud,
        "num_cloud_machines": num_cloud_machines,
        "cloud_machine_type": machine_type,
        "use_render_tracker": hou.ch("hq_use_render_tracker"),
        "delete_ifds": hou.ch("hq_delete_ifds"),
        "render_single_tile": bool(hou.ch("hq_render_single_tile")),
    })

    if use_output_driver:
        # Convert output_driver path to an absolute path.
        parms["output_driver"] = hou.ch("hq_driver").strip()
        rop_node = hou.pwd().node(parms["output_driver"])
        if rop_node:
            parms["output_driver"] = rop_node.path()

        parms["ifd_path"] = hou.parm("hq_outputifd").unexpandedString().strip()

        output_driver = hqrop.getOutputDriver(hou.pwd())

        # Turn "off" Mantra-specific parameters if there is an output driver
        # and it is not a Mantra ROP.
        if output_driver and output_driver.type().name() != "ifd":
            parms["make_ifds"] = False
            parms["min_hosts_per_job"] = 1
            parms["max_hosts_per_job"] = 1
    else:
        parms.update({
            "ifd_path": hou.parm("hq_input_ifd").unexpandedString().strip(),
            "start_frame": hou.ch("hq_frame_range_1"),
            "end_frame": hou.ch("hq_frame_range_2"),
            "frame_skip": hou.ch("hq_frame_range_3"),
            # If we are not using an output driver we are using IFDs and so
            # we won't be making them
            "make_ifds": False,
        })
        if parms["frame_skip"] <= 0:
            parms["frame_skip"] = 1

    # We stop if we cannot establish a connection with the server
    if (not parms["use_cloud"]
            and not hqrop.doesHQServerExists(parms["hq_server"])):
        return None

    if "ifd_path" in parms and not parms["use_cloud"]:
        expand_frame_variables = False
        parms["ifd_path"] = hqrop.substituteWithHQROOT(
            parms["hq_server"], parms["ifd_path"], expand_frame_variables)

    # Validate parameter values.
    if (not hqrop.checkBaseParameters(parms)
            or not _checkRenderParameters(parms)):
        return

    if use_output_driver and parms["hip_action"] == "use_current_hip":
        if not hqrop.checkOutputDriver(parms["output_driver"]):
            return
        if hqrop.checkForRecursiveChain(hou.pwd()):
            hqrop.displayError(("Cannot submit HQueue job because"
                                " %s is in the input chain of %s.")
                               % (hou.pwd().path(), parms["output_driver"]))
            return

    # If we're not supposed to run this job on the cloud, submit the job.
    # Otherwise, we'll display the file dependency dialog.
    if parms["use_cloud"]:
        # We don't want to keep the interrupt dialog open, so we exit this
        # soho script so the dialog closes and schedule an event to run the
        # code to display the dialog.
        import soho
        rop_node = hou.node(soho.getOutputDriver().getName())
        cloud.selectProjectParmsForCloudRender(
            rop_node, parms["num_cloud_machines"], parms["cloud_machine_type"])
        return

    # Automatically save changes to the .hip file,
    # or at least warn the user about unsaved changes.
    should_continue = hqrop.warnOrAutoSaveHipFile(parms)
    if not should_continue:
        return

    hqrop.submitJob(parms, _byu_troubleshoot_hq)
# -*- coding: utf-8 -*-

import hou
import soho

import afanasy

current_afnode = hou.pwd()

submit_afnode_parm = current_afnode.parm('submit_afnode')
if submit_afnode_parm is None:
    soho.error(
        'Can\'t find "submit_afnode" parameter on "%s"'
        % current_afnode.path()
    )

submit_afnode_path = submit_afnode_parm.eval()
if submit_afnode_path is None:
    soho.error(
        'Can\'t eval "submit_afnode" parameter on "%s"'
        % current_afnode.path()
    )

if submit_afnode_path == '':
    soho.error(
        'Empty "submit_afnode" parameter on "%s"'
        % current_afnode.path()
    )

submit_afnode = hou.node(submit_afnode_path)
if submit_afnode is None:
    soho.error(
        'Can\'t find "%s" node specified in "%s"'
        % (submit_afnode_path, current_afnode.path())
    )