def startFlipbook(self):
    """Validate UI inputs, run the flipbook, slate it through Nuke,
    upload the result to Shotgun and save a new scene version.

    Progress is reported via a hou.InterruptableOperation dialog; any
    failure is logged through self.app.logger and the method returns
    without re-raising.
    """
    inputSettings = {}
    outputPath = self.flipbook.getOutputPath()
    description = self.validateDescription()
    # BUG FIX: validate the frame range once and reuse it — the original
    # called validateFrameRange() three separate times.
    frameRange = self.validateFrameRange()

    # create submitter class
    submit = SubmitVersion(
        self.app,
        outputPath["finFile"],
        int(frameRange[0]),
        int(frameRange[1]),
        description,
    )

    # validation of inputs
    inputSettings["frameRange"] = frameRange
    inputSettings["resolution"] = self.validateResolution()
    inputSettings["mplay"] = self.validateMplay()
    inputSettings["beautyPass"] = self.validateBeauty()
    inputSettings["motionBlur"] = self.validateMotionBlur()
    inputSettings["output"] = outputPath["writeTempFile"]
    inputSettings["sessionLabel"] = outputPath["finFile"]

    self.app.logger.debug("Using the following settings, %s" % (inputSettings))

    # retrieve full settings object
    settings = self.flipbook.getFlipbookSettings(inputSettings)

    # run the actual flipbook
    try:
        with hou.InterruptableOperation(
            "Flipbooking",
            long_operation_name="Creating a flipbook",
            open_interrupt_dialog=True,
        ) as operation:
            operation.updateLongProgress(0, "Starting Flipbook")
            self.flipbook.runFlipbook(settings)

            operation.updateLongProgress(
                0.25, "Rendering to Nuke, please sit tight.")
            self.slate.runSlate(
                outputPath["inputTempFile"],
                outputPath["finFile"],
                inputSettings,
            )

            operation.updateLongProgress(0.5, "Uploading to Shotgun")
            submit.submit_version()

            operation.updateLongProgress(0.75, "Saving")
            self.saveNewVersion()

            operation.updateLongProgress(1, "Done, closing window.")
            self.closeWindow()

            self.app.logger.info("Flipbook successful")
    except Exception as e:
        # Deliberate broad catch: this is a top-level UI action, so we log
        # and bail rather than let the exception escape into the UI loop.
        self.app.logger.error("Oops, something went wrong!")
        self.app.logger.error(e)
        return
def destroy_archive(self):
    """Remove the archives of the disk and its dependencies.

    Asks for user confirmation, tears down any delayed-read-archive
    SHOP (and the geo OBJ pointing at it), then deletes the cached
    archive file(s) for every frame in the range (or the single file
    when no time range is set).
    """
    if not self.saveto_parm.eval():
        return
    user_confirm = hou.ui.displayConfirmation(
        'Do you want to remove cached archive(s)?',
        severity=hou.severityType.Warning,
        title='pxrlightarrays')
    if not user_confirm:
        return
    # Destroy the delayed-read-archive SHOP and its dependent geo OBJ.
    delayed_archive = self.find_node(parent=hou.node('/shop'),
                                     node_type='pxrdelayedreadarchive::22')
    if delayed_archive:
        geo_archive = self.find_node(parent=hou.node('/obj'),
                                     node_type='geo',
                                     archive=delayed_archive)
        if geo_archive:
            geo_archive.destroy()
        delayed_archive.destroy()
    with hou.InterruptableOperation(
            'Performing', 'Removing archive(s)',
            open_interrupt_dialog=True) as operation:
        if self.time_range:
            total = float(len(self.frame_range))
            for index, current_frame in enumerate(self.frame_range, 1):
                # BUG FIX: progress must be the fraction of frames done,
                # not current_frame / len(frame_range) — the raw frame
                # number overshoots 1.0 for ranges not starting near 1.
                operation.updateLongProgress(index / total,
                                             'Frame: %s' % current_frame)
                saveto_path = self.saveto_parm.evalAtFrame(current_frame)
                if os.path.exists(saveto_path):
                    os.remove(saveto_path)
        else:
            operation.updateLongProgress(1, 'Frame: %s' % hou.frame())
            saveto_path = self.saveto_parm.eval()
            if os.path.exists(saveto_path):
                os.remove(saveto_path)
def generate_description(self, points, point_attrib_names):
    """Yield one formatted light-archive entry per pointcloud point.

    Lazily walks *points*, reporting progress on an interruptable
    operation, and yields the archive template rendered with each
    point's attribute description.
    """
    total_points = float(len(points))
    with hou.InterruptableOperation(
            'Generating lightarrays archive',
            open_interrupt_dialog=True) as sub_operation:
        for pt in points:
            sub_operation.updateProgress(pt.number() / total_points)
            description = self.point_attr_description(pt, point_attrib_names)
            yield archive_template.light.format(**description)
def generate(self):
    """Create light node(s) for each point of the pointcloud.

    Builds a subnet next to the controller node containing one light
    per pointcloud point (sized for the densest frame when a time
    range is active), then enables the lights.
    """
    start_time = time.time()
    if not self.node.evalParm('ptcPath'):
        select_pointcloud(self.node)
        return
    init_attributes(self.node, self.pointcloud.geometry())
    time_range_parm = self.node.evalParm('trange')
    python_sop_node = self.node.parm('python_path').evalAsNode()
    max_num_points = python_sop_node.evalParm('current_total_points')
    if time_range_parm:
        # Animated pointcloud: size the network for the frame with the
        # largest point count.
        point_counts_list = []
        for current_frame in self.frame_range:
            pointcloud_geo = self.pointcloud.geometryAtFrame(current_frame)
            if not pointcloud_geo:
                continue
            init_attributes(self.node, pointcloud_geo)
            point_counts_list.append(len(pointcloud_geo.iterPoints()))
        # BUG FIX: max() raises ValueError on an empty sequence (every
        # frame skipped above); fall back to 0 so the guard below fires.
        max_num_points = max(point_counts_list) if point_counts_list else 0
    if not max_num_points:
        select_pointcloud(self.node)
        return
    self.node.setUserData('total_number_lights', str(max_num_points))
    # Rebuild the subnet from scratch next to the controller node.
    self.destroy_subnet()
    obj_path = self.node.parent()
    subnet_node = obj_path.createNode('subnet', self.node_name + '_network')
    subnet_node.setUserData('pxrlightarrays', self.node_name)
    subnet_node.setColor(hou.Color(0.85, 0.8, 0.5))
    subnet_node.setPosition(self.node.position())
    subnet_node.move([0, -1.1])
    subnet_node.setNextInput(self.node)
    with hou.InterruptableOperation(
            'Performing', 'Generating lightarrays',
            open_interrupt_dialog=True) as operation:
        for point in range(max_num_points):
            percent = point / float(max_num_points)
            operation.updateLongProgress(
                percent, 'Generating lightarrays: %s' % point)
            self.create_light(subnet_node, point)
    subnet_node.layoutChildren()
    self.node.parm('light_enable').set(1)
    if time_range_parm:
        self.node.setUserData('current_number_lights', str(0))
        python_sop_node.cook(force=True)
    logger.info('Lightarrays generated in: {}s'.format(time.time() - start_time))
def loadFromFile(self, filename):
    """Parse a .pcache file.

    Reads the ascii header (magic line, format, element count, property
    declarations, comments) until ``end_header``, then slurps the rest
    of the file into ``self.propertyData`` and derives the item count
    from the accumulated per-item stride.

    Raises:
        hou.Error: on a missing magic number, unknown format, or a
            malformed property line.

    NOTE(review): the file is opened in "rb" but lines are compared to
    str literals — this assumes Python 2 string semantics; confirm
    before porting to Python 3.
    """
    # BUG FIX: the original opened the file twice ("file = open(...)"
    # immediately followed by "with open(...)"), leaking one handle.
    with open(filename, "rb") as file:
        magic = file.readline()
        if magic != "pcache\n":
            raise hou.Error("Invalid file header: expected pcache magic number : {}".format(magic))
        self.clear()
        done = False
        while not done:
            with hou.InterruptableOperation("Loading PCACHE Header",
                                            open_interrupt_dialog=False) as operation:
                line = file.readline().replace("\n", "")
                words = line.split(" ")
                kw = words[0]
                if kw == "end_header":
                    done = True
                elif kw == "format":
                    if words[1] == "ascii":
                        self.fileType = 'a'
                    elif words[1] == "binary":
                        self.fileType = 'b'
                    else:
                        raise hou.Error("Invalid format: {}".format(words[1]))
                elif kw == "elements":
                    count = int(words[1])
                    self.itemcount = count
                elif kw == "property":
                    # Expected shape: "property <type> <name>".
                    if len(words) != 3:
                        raise hou.Error("Invalid property description: {}".format(words))
                    if words[1] == "float":
                        self.propertyTypes.append("float")
                        self.propertyNames.append(words[2])
                        self.itemstride += 4  # 4 bytes per float
                    elif words[1] == "int":
                        self.propertyTypes.append("int")
                        self.propertyNames.append(words[2])
                        self.itemstride += 4  # 4 bytes per int
                elif kw == "comment":
                    print(' '.join(words).replace("comment ", ""))
        # Everything after the header is raw item data.
        self.propertyData = bytearray(file.read())
    print("Item Stride is {} bytes".format(self.itemstride))
    length = len(self.propertyData)
    # Floor division keeps integer semantics on both Python 2 and 3.
    self.itemcount = length // self.itemstride
    print("Found {} bytes of data, corresponding to {} items".format(length, self.itemcount))
def generate(self):
    """Generate delayed archive.

    Spawns a daemon writer thread that drains (archive, path) pairs
    from a queue so disk writes overlap with archive generation, then
    produces one archive per frame (or a single archive at the current
    frame) and finally hooks up the delayed-read archive.
    """
    start_time = time.time()
    successed = super(InProcessArchive, self).init_archive()
    if not successed:
        return
    # Background writer thread; daemon so it never blocks shutdown.
    write_queue = Queue(maxsize=0)
    write_thread = Thread(target=self.queue_loop, args=(write_queue, ))
    write_thread.daemon = True
    write_thread.start()
    with hou.InterruptableOperation(
            'Writing to disk...', 'Frame',
            open_interrupt_dialog=True) as operation:
        if self.time_range:
            total = float(len(self.frame_range))
            for index, current_frame in enumerate(self.frame_range, 1):
                # BUG FIX: progress is the fraction of frames processed,
                # not current_frame / len(frame_range) — the raw frame
                # number overshoots 1.0 for ranges not starting near 1.
                operation.updateLongProgress(index / total,
                                             'Frame: %s' % current_frame)
                archive = super(InProcessArchive, self).make_archive(current_frame)
                if not archive:
                    continue
                saveto_path = self.saveto_parm.evalAtFrame(current_frame)
                write_queue.put((archive, saveto_path))
            # Wait for the writer thread to flush every queued archive.
            write_queue.join()
        else:
            current_frame = hou.frame()
            operation.updateLongProgress(1, 'Frame: %s' % current_frame)
            archive = super(InProcessArchive, self).make_archive(current_frame)
            if not archive:
                return
            saveto_path = self.saveto_parm.evalAtFrame(current_frame)
            super(InProcessArchive, self).write_archive_to_disk(archive, saveto_path)
    super(InProcessArchive, self).read_delayed_archive()
    logger.info('Archive(s) generated in: {}sec'.format(time.time() - start_time))
def __init__(self, name, long_name=None, open_dialog=False):
    """Wrap a hou.InterruptableOperation with the given labels.

    Args:
        name: short operation name shown in the progress UI.
        long_name: optional long operation name.
        open_dialog: whether to open the interrupt dialog immediately.
    """
    self._op = hou.InterruptableOperation(
        name,
        long_operation_name=long_name,
        open_interrupt_dialog=open_dialog,
    )
dist_attr = geo.addAttrib(hou.attribType.Prim, "dist", -1) # create the groups line_grp = geo.createPrimGroup("lines") base_grp = geo.createPrimGroup("base") for pr in geo.prims(): for vert in pr.vertices(): vert.point().setAttribValue(clr_attr, (1, 1, 1)) base_grp.add(pr) curve_pts = get_curve.iterPoints() counter = 0.0 numPrims = float(len(geo.prims())) with hou.InterruptableOperation("Creating primitives..") as oper: for prim in base_grp.prims(): centroid = prim.positionAtInterior(0.5, 0.5, 0) intersect, dist = point_line_intersection(centroid, curve_pts[0], curve_pts[1]) poly = geo.createPolygon() poly.setIsClosed(False) counter += 1 percent = counter / numPrims for pos in [centroid, intersect]: point = geo.createPoint() point.setPosition(pos) poly.addVertex(point) line_grp.add(poly) oper.updateProgress(percent)
def create_hda(ui, name, min_inputs=1, max_inputs=1, major=0, minor=1):
    """Turn the currently selected subnet into a TBA-namespaced HDA
    saved in the user's local otls directory.

    Args:
        ui: window object closed when the asset has been created.
        name: base asset name; any leading TBA_/tba_ prefix is stripped.
        min_inputs: minimum number of inputs for the new asset.
        max_inputs: maximum number of inputs for the new asset.
        major, minor: accepted for API compatibility; the version is
            currently hard-coded to 0.1.
    """
    print('TBA :: local_hda')
    selected = hou.selectedNodes()
    if not selected:
        print('First select the subnet you want to turn into an HDA')
        return
    user_dir = hou.getenv('HOUDINI_USER_PREF_DIR')
    if not user_dir:
        print('Could not find user directory at: {0}'.format(user_dir))
        return
    # one at a time..
    node = selected[0]
    nodeType = node.type()
    if nodeType.name() != 'subnet':
        print('Selection is not a subnet')
        return
    # try and enforce name
    name = name.replace('TBA_', '')
    name = name.replace('tba_', '')
    # name of node when created in houdini
    label = 'TBA_{}'.format(name)
    # spaces can exist in the label but not any other names
    name = name.replace('_', '')
    # hda name with versioning namespace
    hda_name = 'tba_{}::0.1'.format(name.lower())
    # filename on disk
    filename = 'tba_{}.hda'.format(name.lower())
    local_hda_dir = os.path.join(user_dir, 'otls')
    # BUG FIX: the original tested "if not local_hda_dir" — a non-empty
    # string, so the folder was never created when missing. Check the
    # filesystem instead.
    if not os.path.exists(local_hda_dir):
        print('Creating local otls folder: {0}'.format(local_hda_dir))
        os.mkdir(local_hda_dir)
    hdaPath = os.path.join(local_hda_dir, filename)
    if not node.canCreateDigitalAsset():
        print('Not able to create digital asset')
        return
    print('Create hda at: {0}'.format(hdaPath))
    num_tasks = 3
    percent = 1.0 / num_tasks
    # BUG FIX: use the context manager rather than calling __enter__ /
    # __exit__ by hand, so the progress dialog is also closed when an
    # exception is raised mid-way.
    with hou.InterruptableOperation(
            'TBA :: Creating HDA',
            long_operation_name='Creating HDA',
            open_interrupt_dialog=True) as operation:
        # rename subnet. This will be used for the asset label
        node.setName(label)
        hda = node.createDigitalAsset(name=hda_name,
                                      hda_file_name=hdaPath,
                                      min_num_inputs=min_inputs,
                                      max_num_inputs=max_inputs,
                                      version='0.1')
        # update progress
        operation.updateLongProgress(percent, 'Creating HDA')
        # Record who built the asset.
        user = hou.getenv('USER')
        hda.setUserData('user', user)
        # get hda definition
        hdaDefinition = hda.type().definition()
        # get all parameters
        template_group = hda.parmTemplateGroup()
        # copy to hda
        hdaDefinition.setParmTemplateGroup(template_group)
        # set tool sub menu to TBA
        tool = '''<?xml version="1.0" encoding="UTF-8"?>
<shelfDocument>
<tool name="$HDA_DEFAULT_TOOL" label="$HDA_LABEL" icon="$HDA_ICON">
<toolMenuContext name="viewer">
<contextNetType>OBJ</contextNetType>
</toolMenuContext>
<toolMenuContext name="network">
<contextOpType>$HDA_TABLE_AND_NAME</contextOpType>
</toolMenuContext>
<toolSubmenu>TBA</toolSubmenu>
<script scriptType="python"><![CDATA[import objecttoolutils
objecttoolutils.genericTool(kwargs, '$HDA_NAME')]]></script>
</tool>
</shelfDocument>'''
        hdaDefinition.addSection('Tools.shelf', tool)
        # update progress
        operation.updateLongProgress(percent, 'Finished')
    # open file permissions
    os.chmod(hdaPath, 0o700)
    # close ui
    ui.close()
def validateCache(): # geo缓存路径若是有缓存路径就得单独查找,待实现; 待增加其他的格式查找 ''' 写一个可以查找当前houdini节点使用的缓存路径,即清理缓存 最好是有个ui,标出哪段缓存是无效缓存,由用户来决定哪一个要删除 可以整合到hqTool,查找未使用,并做清理 输出log: 区分有效缓存和无效缓存 询问是不是需要把不使用的文件放到temp dir里面,输出log,大小,帧数 有效缓存标出是哪个节点在使用 + 绝对路径 + 序列缓存大小 + 序列帧数 先打开hip文件在执行脚本 ''' dir = hou.expandString( hou.ui.selectFile(title="select directory", collapse_sequences=1, file_type=hou.fileType.Directory)) new_dir = dir + "temp_dir" if not os.path.exists(new_dir): os.makedirs(new_dir) pattern_list = [] node_list = [] pattern_to_node_index_list = [] root = hou.root() for node in root.allNodes(): if not node.type().name() in ["filecache", "file", "dopio"]: continue else: parm = node.parm("file") if not parm: continue file_path = parm.unexpandedString() basename = os.path.basename(file_path) if ("$F" or "$F4") in basename: #序列的情况 pattern = basename.split("$F")[0] else: #单帧的情况 pattern = basename.split(".bgeo.sc")[0] # pattern = basename # 有$OS的情况需要全部把名字解释出来, $OS、`$OS` if "$OS" in file_path: OS = node.name() if "`$OS`" in file_path: pattern = pattern.replace("`$OS`", OS) else: pattern = pattern.replace("$OS", OS) # houdini默认的节点设置除了OS,还会有其他` if "`" in pattern: continue pattern_list.append(pattern) node_list.append(node.path()) found = "" invalid_count = 0 try: files = os.listdir(dir) except: return file_count = len(files) count = 0 with hou.InterruptableOperation("searching...", open_interrupt_dialog=1) as operation: for file in files: for pattern in pattern_list: found = re.findall(pattern + ".*", file, re.S) if found: index = pattern_list.index(pattern) if index not in pattern_to_node_index_list: pattern_to_node_index_list.append(index) break if not found: # print file file_path = dir + file shutil.move(file_path, new_dir) invalid_count += 1 count += 1 percent = float(count) / float(file_count) operation.updateProgress(percent) print "%d invalid files have been removed" % ( invalid_count - 1), pattern_to_node_index_list for idx in pattern_to_node_index_list: print pattern_list[idx], 
"\t", node_list[idx], "\n"