def focusThisNode(self, treeItem):
    """Make the node referenced by *treeItem* the current (selected) node.

    Returns False when no node path can be resolved for the item;
    otherwise returns None after selecting the node.
    """
    node_path = self.getNodePath(treeItem)
    if not node_path:
        return False
    hou.node(node_path).setCurrent(on=True, clear_all_selected=True)
def geo( self ) :
    """Return /obj/geo1, creating it (without init scripts) when absent."""
    container = hou.node( "/obj/geo1" )
    if container is None :
        container = hou.node( "/obj" ).createNode( "geo", run_init_scripts=False )
    return container
def insertOBJ_main(self, **connections):
    """Import a geometry file into a new /obj geo network.

    Builds tmp_geo -> file (hard locked) -> group (destroy all groups)
    -> attribpromote (promote point N to detail).

    Keyword connections: "geofile" (path to geometry on disk) and an
    optional "comment" placed on the file SOP.
    Returns 1 on success, 0 when geofile is missing or not a file.
    """
    # dict.get replaces the original bare try/except lookups.
    geofile = connections.get("geofile", "")
    comment = connections.get("comment", "")
    # Guard clause instead of `if isfile: pass else: return 0`.
    if not os.path.isfile(geofile):
        return 0
    gn = hou.node("/obj").createNode("geo", "tmp_geo")
    gf = hou.node(gn.path() + "/file1")
    gf.parm("file").set(geofile)
    gf.setComment(comment)
    gf.setHardLocked(1)
    gr = gf.createOutputNode("group")
    gr.parm("destroyname").set("*")
    at = gr.createOutputNode("attribpromote")
    at.setDisplayFlag(1)
    at.setRenderFlag(1)
    at.parm("inname").set("N")
    at.parm("outclass").set(3)
    return 1
def keyParentTransformAnim_main(self, **connections):
    """Bake a parent transform into the translate keyframes of a node.

    For every keyframe on tx/ty/tz of *nodeToKey*, re-sets the value to
    key_value * parent_scale + parent_translate, evaluated at the key's
    frame (the frame is changed as a side effect, as in the original).
    Returns 1 on success, 0 on any failure.
    """
    nodeToKey = str(connections.get("nodeToKey", ""))
    TMatrixNode = str(connections.get("TMatrixNode", ""))
    try:
        ntc = hou.node(nodeToKey)
        tn = hou.node(TMatrixNode)
        # One loop per axis replaces three copy-pasted loops.
        for axis in "xyz":
            t_parm = ntc.parm("t" + axis)
            for key in t_parm.keyframes():
                hou.setFrame(key.frame())
                t_parm.set(key.value() * tn.parm("s" + axis).eval()
                           + tn.parm("t" + axis).eval())
        return 1
    except Exception:
        # Best-effort contract: any invalid node/parm reports failure.
        return 0
def _convertNode(self, node):
    """Resolve *node* into hou.Node object(s).

    Accepts a path string, a hou.Node, or an iterable of either.
    Raises InvalidNode when a path does not resolve or the value is of
    an unsupported type.
    """
    if isinstance(node, str):
        resolved = hou.node(node)
        if resolved is None:
            raise InvalidNode(node)
        return resolved
    if hasattr(node, "__iter__"):
        resolved_list = []
        for item in node:
            if isinstance(item, str):
                found = hou.node(item)
                if found is None:
                    raise InvalidNode(item)
                resolved_list.append(found)
            elif isinstance(item, hou.Node):
                resolved_list.append(item)
            # NOTE(review): items that are neither str nor hou.Node are
            # silently dropped here, unlike the scalar path which raises
            # InvalidNode -- confirm this asymmetry is intentional.
        return resolved_list
    if not isinstance(node, hou.Node):
        raise InvalidNode(str(node))
    return node
def testReadMesh( self ) :
    """Verts read from box1 must stay in local space despite the parent xform."""
    scene = self.buildScene()
    hou.node( "/obj/sub1" ).parmTuple( "t" ).set( [ 1, 2, 3 ] )
    hou.node( "/obj/sub1" ).parmTuple( "r" ).set( [ 10, 20, 30 ] )
    box1 = scene.child( "sub1" ).child( "box1" )
    mesh = box1.readObject( 0 )
    self.failUnless( isinstance( mesh, IECore.MeshPrimitive ) )
    vertList = list( mesh["P"].data )
    self.assertEqual( len( vertList ), 8 )
    # check the verts are in local space: each unit-cube corner appears once
    for x in ( -0.5, 0.5 ) :
        for y in ( -0.5, 0.5 ) :
            for z in ( -0.5, 0.5 ) :
                self.assertEqual( vertList.count( IECore.V3f( x, y, z ) ), 1 )
    # check read primvars
    self.assertEqual( mesh["P"], box1.readObjectPrimitiveVariables( [ "P" ], 0 )["P"] )
def revertChanges():
    """Revert the selected digital asset to the copy in the global OTL folder.

    Only applies when exactly one unlocked digital-asset node is selected,
    the OTL is tracked in the globals folder, and it is checked out by the
    current user.
    """
    updateDB()
    node = getSelectedNode()
    if node is None:
        hou.ui.displayMessage("Select EXACTLY one node.")
        return
    if not isDigitalAsset(node):
        hou.ui.displayMessage("Not a Digital Asset.")
        return
    libraryPath = node.type().definition().libraryFilePath()
    filename = os.path.basename(libraryPath)
    info = getFileInfo(filename)
    if info is None:
        hou.ui.displayMessage("OTL not in globals folder. Can not revert.")
    elif info[2]:
        # info[2]: checked-out flag; info[3]: user holding the checkout.
        if not node.isLocked() and info[3] == USERNAME:
            newfilepath = os.path.join(OTLDIR, filename)
            oldfilepath = os.path.join(USERDIR, filename)
            switchOPLibraries(oldfilepath, newfilepath)
            os.remove(oldfilepath)
            # Recreate the node from the reverted definition.
            createMe = node.type().name()
            node.destroy()
            hou.node('/obj').createNode(createMe)
            unlockOTL(filename)
            hou.ui.displayMessage("Revert Successful!")
def testIndirectInputs( self ) :
    """Scene hierarchy must follow subnet indirect inputs correctly."""
    scene = self.buildScene()
    hou.node( "/obj/sub1/torus1" ).setInput( 0, hou.node( "/obj/sub1" ).indirectInputs()[0] )
    self.assertEqual( sorted( scene.childNames() ), [ "box2", "sub1", "sub2" ] )
    sub1 = scene.child( "sub1" )
    self.assertEqual( sorted( sub1.childNames() ), [ "box1", "torus1" ] )
    torus1 = sub1.child( "torus1" )
    self.assertEqual( sorted( torus1.childNames() ), [ "torus2" ] )
    self.assertEqual( sorted( torus1.child( "torus2" ).childNames() ), [] )
    box1 = sub1.child( "box1" )
    self.assertEqual( sorted( box1.childNames() ), [ "gap" ] )
    gap = box1.child( "gap" )
    self.assertEqual( sorted( gap.childNames() ), [ "torus" ] )
    self.assertEqual( gap.child( "torus" ).childNames(), [] )
    # siblings of sub1 have no children
    self.assertEqual( scene.child( "box2" ).childNames(), [] )
    self.assertEqual( scene.child( "sub2" ).childNames(), [] )
def getAssetDbProperties_main(self, **connections):
    """Query a database property from an asset node under /obj.

    "oas_output" selects the property: "dbPath" (default), "dbType", or
    "dbName" (the text after the last ':' in dbPath).
    Returns the value, 0 on any lookup failure, and None for an
    unrecognised oas_output (preserving the original fall-through).
    """
    assetName = connections.get("assetName", "")
    oas_output = connections.get("oas_output", "dbPath")
    node_path = "/obj/" + str(assetName)
    # dbPath and dbType are the same lookup on different parms; dedupe.
    if oas_output in ("dbPath", "dbType"):
        try:
            return hou.node(node_path).parm(oas_output).eval()
        except Exception:
            return 0
    if oas_output == "dbName":
        try:
            path = hou.node(node_path).parm("dbPath").eval()
            # IndexError (no ':' in the path) also falls into the except.
            return path.rsplit(":", 1)[1]
        except Exception:
            return 0
def testSaveLoad(self):
    """Op holder parameters and results must survive a hip save/load cycle."""
    hou.hipFile.clear(suppress_save_prompt=True)
    save_file = "test/opHolder_testData/opSave_test.hip"
    self.testWireTogether()
    # save scene, wipe it, then reload it
    hou.hipFile.save(save_file)
    hou.hipFile.clear(suppress_save_prompt=True)
    hou.hipFile.load(save_file)
    # check some parameters are ok
    for node_name, expected in (("vector1", (1, 2, 3)), ("vector2", (4, 5, 6))):
        vec = hou.node("/obj/geo1/" + node_name)
        self.assertEqual( vec.parm("parm_size").eval(), 3 )
        self.assertEqual( vec.parmTuple("parm_value").eval(), expected )
    # check the result of our last opHolder
    n = hou.node("/obj/geo1/print_values")
    n.cook()
    fn = IECoreHoudini.FnOpHolder(n)
    result = fn.getParameterised().resultParameter().getValue()
    self.assertEqual( result, IECore.V3fVectorData( [IECore.V3f(5,7,9),IECore.V3f(5,7,9),IECore.V3f(5,7,9)] ) )
def testRenderRmanInject(self):
    """Rendering the rmanInject setup must emit one rib per frame containing the procedural."""
    if hou.isApprentice():
        return
    rib_file = "test/cortexRmanInject_testData/testrman.$F4.rib"
    hou.hipFile.clear(suppress_save_prompt=True)
    self.testCreateRmanInject()
    # create a camera
    camera = hou.node("/obj").createNode("cam", node_name="cam1")
    self.assert_( camera )
    # create a rib rop and point it at the camera
    rman = hou.node("/out").createNode("rib", node_name="rman_out")
    self.assert_( rman )
    rman.parm("camera").set(camera.path())
    # write ribs to disk rather than launching a render
    rman.parm("rib_outputmode").set(True)
    rman.parm("soho_diskfile").set(rib_file)
    rman.render(frame_range=(1, 10))
    # check ribs made it
    ribs = glob.glob("test/cortexRmanInject_testData/testrman.????.rib")
    self.assertEqual( len(ribs), 10 )
    # make sure the procedurals got in there
    procs = []
    for rib in ribs:
        for line in open(rib):
            if 'Procedural "DynamicLoad"' in line:
                procs.append(line.strip())
    self.assertEqual( len(procs), 10 )
    self.failUnless( "iePython" in procs[0] )
    self.failUnless( "['-radius', '1', '-theta', '360']" in procs[0] )
    self.failUnless( "['-radius', '2', '-theta', '360']" in procs[1] )
    self.failUnless( "['-radius', '3', '-theta', '360']" in procs[2] )
    self.failUnless( "[-1 1 -1 1 -1 1]" in procs[0] )
def getBundles(): getclip = pyperclip.paste() try: dict = ast.literal_eval(getclip) bdl_names = dict['bundles'].keys() # Add node (if exist) in new scene to bundle for x in range(0, len(bdl_names)): name = bdl_names[x] nodes = dict['bundles'][name] if hou.nodeBundle(name): bdl = hou.nodeBundle(name) else: bdl = hou.addNodeBundle(name) # Add Same Nodes if exist for nd in nodes: if hou.node(nd): bdl.addNode(hou.node(nd)) else: pass # Set pattterns pattern = dict['bundle_pattern'][x] bdl.setPattern(pattern) # TODO: Add filter support # Set filters # filter = dict['bundle_filter'][x] # bdl.setFilter(filter) except: print 'Buffer filled not a bundle list data type, try copy to clipbord again'
def writeCamerasToAlembic(node): sequence = node.name().split("_")[2][0] children = node.children() for c in children: name = c.name() if "shot" in name: shot = name.split("_")[1] camDir = os.path.join(os.environ["SHOTS_DIR"], sequence + shot, "camera") abcName = sequence + shot + "_camera" + ".abc" sFrame, eFrame = hou.playbar.playbackRange() sSize = hou.playbar.frameIncrement() abcFilePath = writeToAlembic( camDir, abcName, node, objects=os.path.join(c.path(), "cam1"), trange="normal" # , startFrame=sFrame # , endFrame=eFrame , stepSize=sSize, ) mayaFilePath = os.path.join(camDir, sequence + shot + "_camera" + ".mb") if os.path.exists(mayaFilePath): os.remove(mayaFilePath) amu.mayaImportAlembicFile(mayaFilePath, abcFilePath) print hou.node(os.path.join(c.path(), "cam1")).evalParm("focal") amu.setFocalLengthMaya(mayaFilePath, hou.node(os.path.join(c.path(), "cam1")).evalParm("focal"))
def pbStartUp():
    """Populate a fresh scene with the standard pipeline nodes and a test mantra."""
    # (type, name) pairs in a LIST: the original dict had 'geo' twice, so
    # the '_FX' entry was silently overwritten by 'RENDER_' and the dict's
    # key order (py2) made node placement non-deterministic.
    startup_nodes = [('cam', 'RenderCam'),
                     ('ropnet', 'ROP'),
                     ('shopnet', 'SHOP'),
                     ('cop2net', 'COP'),
                     ('geo', '_FX'),
                     ('geo', 'RENDER_')]
    # startup with some obj node types by default
    for op, (nodetype, nodename) in enumerate(startup_nodes):
        objnode = hou.node('/obj').createNode(nodetype, nodename)
        objnode.setPosition(hou.Vector2((6 - op * 2, 5)))
    # create mantra on startup and set camera to RenderCam
    mantra = hou.node('/obj/ROP').createNode('ifd', 'test_render')
    mantra.parm('camera').set('../../RenderCam')
    # create delayed load on startup
    hou.node('/obj/SHOP').createNode('vm_geo_file', 'delayed_load_')
    # prepare render geo: empty it and give it a placeholder null
    render = hou.node('/obj/RENDER_')
    for child in render.children():
        child.destroy()
    render.createNode('null', 'NOTHING')
    render.parm('shop_geometrypath').set('../SHOP/delayed_load_')
    # load 123.cmd
    hou.hscript('$HFS/houdini/scripts/123.cmd')
def newContainer(hpath):
    """Create a new container asset OTL from the containerTemplate node.

    Prompts for a label, copies the template definition to a new .otl in
    OTLDIR, installs it, and instances it under *hpath*. Refuses to
    clobber an existing asset of the same name.
    """
    templateNode = hou.node(hpath).createNode("containerTemplate")
    templateNode.hide(True)
    # resp = ui.inputWindow("Enter the New Operator Label", wtitle="OTL Label")
    response = hou.ui.readInput("Enter the New Operator Label", buttons=("Ok", "Cancel"), title="OTL Label")
    name = response[1] if response[0] == 0 else None
    if name is not None and name.strip() != "":
        name = formatName(name)
        filename = name.replace(" ", "_")
        newfilepath = os.path.join(OTLDIR, filename + ".otl")
        if not os.path.exists(newfilepath):
            # create file heirarchy if container asset
            amu.createNewAssetFolders(ASSETSDIR, filename)
            templateNode.type().definition().copyToHDAFile(newfilepath, new_name=filename, new_menu_name=name)
            hou.hda.installFile(newfilepath, change_oplibraries_file=True)
            fileutil.clobberPermissions(newfilepath)
            newnode = hou.node(hpath).createNode(filename)
        else:
            hou.ui.displayMessage(
                "Asset by that name already exists. Cannot create asset.",
                title="Asset Name",
                severity=hou.severityType.Error,
            )
    # clean up
    templateNode.destroy()
def __init__(self):
    """Fetch /obj/uv_light, creating it disabled if missing, then gather scene lights."""
    self.uvLight = hou.node("/obj/uv_light")
    if self.uvLight is None:
        self.uvLight = hou.node("/obj").createNode("envlight", 'uv_light')
        # freshly created light starts disabled
        self.uvLight.parm("light_enable").set(0)
    self.sceneLights = []
    self.initSceneLights()
def getCameraNode():
    """Return the /obj/uv_cam camera, creating a 256x256 one if missing."""
    cam = hou.node("/obj/uv_cam")
    if cam is None:
        cam = hou.node("/obj").createNode("cam", "uv_cam")
        # resx/resy are integer parms; set them with ints rather than the
        # original string values.
        cam.parm("resx").set(256)
        cam.parm("resy").set(256)
    return cam
def createFileLoader(sel_cache):
    """Create a delayed-load file node for *sel_cache* in the version-appropriate context."""
    if HOUDINI_MAJOR_VERSION <= 15:
        # pre-H16 keeps materials under /shop
        loader = hou.node('/shop').createNode('vm_geo_file')
    else:
        loader = hou.node('/mat').createNode('file')
    loader.setParms({'file': sel_cache})
    return loader
def getMantraNode():
    """Return the cached /out/mantra_uv ROP, creating and configuring it on first use."""
    mn = hou.node("/out/mantra_uv")
    if mn is None:
        # Name the node 'mantra_uv' so the lookup above finds it next time.
        # The original appended a uuid suffix, so the /out/mantra_uv check
        # never matched and a brand new ROP was created on every call.
        mn = hou.node("/out").createNode("ifd", "mantra_uv")
        # initialize with parms
        mn.parm("vm_renderengine").set("pbrmicropoly")
        mn.parm("camera").set(getCameraNode().path())
    return mn
def setParm(self, treeItem, cache_path):
    """Set the cache-file parm of the node behind *treeItem* to *cache_path*.

    The parm name is looked up per node type in Define.CACHE_NODES.
    """
    # Resolve the node once; the original called hou.node() twice.
    node = hou.node(self.getNodePath(treeItem))
    node_type = node.type().name().lower()
    for defNode in Define.CACHE_NODES:
        if node_type == defNode.get("name"):
            node.setParms({defNode.get("parmName"): cache_path})
            break  # first match wins; no need to keep scanning
def unique_name(base):
    """Sanitise *base* into a node path and return it, or base_N for the
    first integer N whose node path is unused."""
    base = re.sub(r'[^\w/]+', '_', base).strip('_')
    if not hou.node(base):
        return base
    for suffix in itertools.count(1):
        candidate = '%s_%d' % (base, suffix)
        if not hou.node(candidate):
            return candidate
def test_is_child_with_name_box_and_box1_selected(self):
    """children('box box1') must return exactly /obj/box and /obj/box1."""
    base = "/obj/box"
    expected = [hou.node(base), hou.node(base + "1")]
    sel = sQuery.sQuery().children("box box1")
    self.assertListEqual(sel._data, expected)
def condition(referencedGeo, primReference):
    """Return True when primReference's bounding-box centre lies inside referencedGeo.

    Casts rays from the centre against every polygon of the referenced
    geometry and counts hits: an odd count means the centre is inside the
    volume ('point in polygon' extended to 3D, i.e. point-in-volume).
    """
    # Hoist the geometry lookup and the centre computation: the original
    # recomputed primBoundingBox(...).center() on every loop iteration.
    geometry = hou.node(referencedGeo).geometry()
    centre = primBoundingBox(primReference).center()
    if not geometry.boundingBox().contains(centre):
        return False
    counter = 0
    for prim in geometry.prims():
        if rayIntersect(centre, prim):
            counter += 1
    return counter % 2 != 0
def pbFindMe(): logpath = hou.hscriptExpression('$HIP')+'/pathlog.log' found = 0 F = 'Find Me' R = 'Replace By' user_input = hou.ui.readMultiInput('Enter words to replace', (F, R), buttons=('Find', 'Replace', 'Cancle'), close_choice=2, title='pbFindMe') find = user_input[1][0] replace = user_input[1][1] if user_input[0] == 0: foundlist = '%s in %s\n\n\n' %(find, hou.hipFile.path()) + 'NODE' + ' '*46 + 'PARM' + '\n' for node in hou.node('/').allSubChildren(): for parm in node.parms(): template = parm.parmTemplate() if (template.type() == hou.parmTemplateType.String and template.stringType() == hou.stringParmType.FileReference and len(parm.keyframes()) == 0 and parm.unexpandedString().find(find) >= 0): path = parm.eval() if path: found += 1 foundlist += node.path() + ' '*(50 - len(node.path())) + parm.name()+'\n' if found: hou.ui.displayMessage(find + ' Found '+str(found)+' times !!!'+'\n\nLog file saved on: '+ logpath) logfile = open(logpath, 'w') logfile.write(foundlist) logfile.close() subprocess.Popen(['gedit',logpath]) else: hou.ui.displayMessage('Can not find %s !!!' % find) if user_input[0] == 1: def fixFilePrefixes(node, from_prefix, to_prefix): for parm in node.parms(): template = parm.parmTemplate() if (template.type() == hou.parmTemplateType.String and template.stringType() == hou.stringParmType.FileReference and len(parm.keyframes()) == 0 and parm.unexpandedString().find(from_prefix) >= 0): print 'Replacing %s to %s from:' %(find, replace), parm.path() parm.set(parm.eval().replace(from_prefix, to_prefix)) nodes = hou.selectedNodes() if nodes: for node in nodes: fixFilePrefixes(node, find, replace) for child in node.allSubChildren(): fixFilePrefixes(child, find, replace) else: for node in hou.node('/').allSubChildren(): fixFilePrefixes(node, find, replace)
def testCreateRmanInject(self):
    """A cortexRmanInject SHOP must round-trip the procedural path."""
    procedural = IECoreHoudini.FnProceduralHolder.create("cortex_sphere", "sphereProcedural", 1)
    procedural.parm("parm_radius").setExpression("$F")
    self.assert_( procedural )
    rmaninject = hou.node("/shop").createNode("cortexRmanInject")
    self.assert_( rmaninject )
    rmaninject.parm("procedural").set(procedural.path())
    parent = procedural.parent()
    parent.parm("shop_materialpath").set(rmaninject.path())
    self.assertEqual( parent.evalParm("shop_materialpath"), rmaninject.path() )
    # the material path must resolve back to the inject node ...
    shop = hou.node(parent.evalParm("shop_materialpath"))
    self.assert_( shop )
    # ... which in turn points back at the procedural
    self.assertEqual( shop.evalParm("procedural"), procedural.path() )
def startLocalRender_main(self, **connections):
    """Render the ROP at connections['ropPath'].

    Returns 1 on success, 0 when the node is missing or the render fails.
    """
    # dict.get replaces the original bare try/except lookup.
    ropPath = str(connections.get("ropPath", ""))
    try:
        hou.node(ropPath).render()
        return 1
    except Exception:
        # invalid path (hou.node -> None) or a render error -> failure
        return 0
def create_Mantra_ROP(mantra, pos = None):
    """ Create a Mantra ROP in Houdini. """
    rop = hou.node('/out/{0}'.format(mantra.name))
    if not rop:
        # only a freshly created ROP is moved to *pos*
        rop = hou.node('/out/').createNode('ifd', '{0}'.format(mantra.name))
        if pos:
            rop.move(pos)
    return rop
def reader( self ) :
    """Build (or reuse) /obj/geo1 and return a file SOP reading the test cob file."""
    geo = hou.node( "/obj/geo1" )
    if geo is None :
        geo = hou.node( "/obj" ).createNode( "geo", run_init_scripts=False )
    node = geo.createNode( "file" )
    node.parm( "file" ).set( TestCobIOTranslator.__testFile )
    node.parm( "filemode" ).set( 1 )  # read mode
    return node
def _init(self, initValue):
    """Seed self._data from a context name string, a hou.Node, or None (-> /obj)."""
    contexts = ["obj", "shop", "out", "mat"]
    msg = "sQuery is initialized in: "
    if isinstance(initValue, str) and initValue.lower() in contexts:
        self._data = [hou.node("/" + initValue)]
        print(msg + initValue.title() + " Context")
    elif isinstance(initValue, hou.Node):
        self._data = [initValue]
        print(msg + initValue.path())
    elif initValue is None:
        self._data = [hou.node("/obj")]
        print(msg + "Obj Context")
    # NOTE(review): any other initValue leaves self._data unset -- confirm
    # callers never pass anything outside these three cases.
def loadAlembic( cache, globalScale = False ):
    """load alembic from cacheFile, connect to transform to manage global scale"""
    geo = hou.node( '/obj' ).createNode( 'geo' )
    geo.setName( cache.name + '_geo' )
    # drop the default file SOP created inside a fresh geo node
    geo.children()[0].destroy()
    abc = geo.createNode( 'alembic' )
    abc.setName( cache.name + '_abc' )
    abc.parm( 'fileName' ).set( cache.path )
    if globalScale:
        # share a single /obj/globalScale null across all loaded caches
        gScale = hou.node( '/obj/globalScale' )
        if not gScale:
            gScale = hou.node( '/obj' ).createNode( 'null', 'globalScale' )
        geo.setInput(0, gScale)
def test_2_checkfoo(self):
    """Exactly one node must exist under /out."""
    assert len(hou.node("/out").children()) == 1
def importLightFromClipboard():
    """Rebuild an exported light (and optional target) from clipboard text.

    Expects the '#light_export,<fps>,<range>' header produced by the
    matching export tool, followed by 'parm_name,value' lines. Creates or
    reuses the light node, applies static values and keyframed tuples,
    and reports the number of parms that failed to apply.

    NOTE(review): reconstructed from whitespace-collapsed source; the
    nesting of a few statements is inferred -- verify against the
    original tool before relying on edge-case behaviour.
    """
    obj = hou.node('/obj')
    light = None
    light_name = ''
    light_type = ''
    light_target = None
    is_tuple = False
    clipboard = QtWidgets.QApplication.clipboard()
    lines = clipboard.text().splitlines()
    error_count = 0
    if not lines[0].startswith('#light_export'):
        print('cannot apply clipboad values, wrong type!')
        return
    ls = lines[0].split(',', 2)
    fps = ls[1]
    range = ls[2]
    if fps != str(hou.fps()):
        print('warning: fps differs from export')
    if range != str(hou.playbar.timelineRange()):
        print('warning: animation range differs from export')
    for line in lines[1:]:
        ls = line.split(',', 1)
        if len(ls) != 2:
            continue
        parm_name = ls[0]
        parm_val = ls[1]
        if parm_val.startswith('\'') and parm_val.endswith('\''):
            # quoted values are plain strings
            parm_val = parm_val[1:-1]
        else:
            parm_val = eval(parm_val)
            is_tuple = isinstance(parm_val, tuple)
        if parm_name == 'name':
            light_name = parm_val
        elif line.startswith('type'):
            light_type = parm_val
            light = obj.node(light_name)
            if light == None:
                # build the light from scratch
                light = obj.createNode(light_type)
                light.setName(light_name)
                light.setColor(hou.Color(1, 0.898039, 0))
                light.setUserData('nodeshape', 'light')
                light.moveToGoodPosition()
                out_node = None
                for n in light.children():
                    if n.isGenericFlagSet(hou.nodeFlag.Render) == True:
                        out_node = n
                color = out_node.createOutputNode('color')
                color.parm('colorr').set(1)
                color.parm('colorg').set(0.898039)
                color.parm('colorb').set(0)
                color.setDisplayFlag(True)
                if light_type == 'VRayNodeLightSphere':
                    light.node('sphere1').parm('type').set(4)
                    light.node('sphere1').parm('imperfect').set(0)
                if light_type == 'VRayNodeLightRectangle':
                    light.node('line1').parm('dist').setExpression(
                        '(ch("../u_size") + ch("../v_size")) * 0.333')
                    light.node('grid1').parm('type').set(2)
                    light.node('grid1').parm('orderu').set(2)
                    light.node('grid1').parm('orderv').set(2)
                    switch = light.node('grid1').createOutputNode('switch')
                    switch.parm('input').setExpression('ch("../is_disc")')
                    circle = light.createNode('circle')
                    circle.parm('type').set(2)
                    circle.parm('radx').setExpression('ch("../u_size") / 2')
                    circle.parm('rady').setExpression('ch("../v_size") / 2')
                    # light.parm('v_size').setExpression('ch("u_size")')
                    switch.setNextInput(circle)
                    light.node('merge1').setInput(0, switch)
                    # light.layoutChildren()
                if light_type == 'VRayNodeSunLight':
                    '''
                    light.node('transform1').parm('sx').setExpression('ch("../size_multiplier")')
                    light.node('transform1').parm('sy').setExpression('ch("../size_multiplier")')
                    light.node('transform1').parm('sz').setExpression('ch("../size_multiplier")')
                    '''
                    light_target = obj.node(light_name + '_target')
                    if light_target == None:
                        light_target = createLightTarget(obj, light)
                    else:
                        for p in (light_target.parms()):
                            p.deleteAllKeyframes()
                null = light.createNode('null')
                null.setRenderFlag(True)
                # null.moveToGoodPosition()
                light.layoutChildren()
            else:
                # reuse the existing light: retype if needed, clear old anim
                if light.type().name() != light_type:
                    light.changeNodeType(light_type)
                for p in (light.parms()):
                    p.deleteAllKeyframes()
                    p.revertToDefaults()
                light.parm('constraints_on').set(1)
                light.parm('constraints_path').set('constraints')
        elif line.startswith('target_'):
            light_target = obj.node(light_name + '_target')
            if light_target == None:
                light_target = createLightTarget(obj, light)
            else:
                for p in (light_target.parms()):
                    p.deleteAllKeyframes()
            if is_tuple:
                # tuple values are (frame, value) keys
                for k in parm_val:
                    setKey = hou.Keyframe()
                    setKey.setFrame(k[0])
                    setKey.setValue(k[1])
                    light_target.parm(parm_name[7:]).setKeyframe(setKey)
            else:
                light_target.parm(parm_name[7:]).set(parm_val)
        else:
            try:
                if is_tuple:
                    for k in parm_val:
                        setKey = hou.Keyframe()
                        setKey.setFrame(k[0])
                        setKey.setValue(k[1])
                        light.parm(parm_name).setKeyframe(setKey)
                else:
                    light.parm(parm_name).set(parm_val)
            except:
                print('cannot setting parameter: ' + parm_name)
                error_count += 1
    if error_count == 0:
        print('light successfully imported')
    else:
        print('light imported with ' + str(error_count) + " errors")
def testAllOpenPolygonsConvertedAsLinearCurves(self):
    """Open polygon curves must convert via the CurvesConverter as linear curves."""
    obj = hou.node("/obj")
    parent = obj.createNode("geo", run_init_scripts=False)
    # three open polygon curves built from the same settings
    coord_strings = ["0, 0, 0 0, 1, 0 1, 1, 0", "0,0,0 0,0,1 1,0,1", "0,0,0 1,0,0"]
    curves = []
    for coord in coord_strings:
        curve = parent.createNode("curve")
        curve.parm("type").set(0)  # polygon
        curve.parm("close").set(False)
        curve.parm("coords").set(coord)
        curves.append(curve)
    merge = parent.createNode("merge")
    for i, curve in enumerate(curves):
        merge.setInput(i, curve)
    # Use the base FromHoudiniGeometryConverter.create to verify we create a
    # CurvesConverter for this open polygon detail
    converter = IECoreHoudini.FromHoudiniGeometryConverter.create(merge)
    self.assertTrue(converter.isInstanceOf(IECore.TypeId(IECoreHoudini.TypeId.FromHoudiniCurvesConverter)))
    actualCurvesPrimitive = converter.convert()
    self.assertTrue(actualCurvesPrimitive.isInstanceOf(IECoreScene.CurvesPrimitive))
    self.assertTrue("P" in actualCurvesPrimitive)
    self.assertEqual(actualCurvesPrimitive.verticesPerCurve(), IECore.IntVectorData([3, 3, 2]))
    self.assertEqual(actualCurvesPrimitive.basis().standardBasis(), IECore.StandardCubicBasis.Linear)
    expectedPoints = [
        imath.V3f(0, 0, 0), imath.V3f(0, 1, 0), imath.V3f(1, 1, 0),
        imath.V3f(0, 0, 0), imath.V3f(0, 0, 1), imath.V3f(1, 0, 1),
        imath.V3f(0, 0, 0), imath.V3f(1, 0, 0),
    ]
    self.assertEqual(
        actualCurvesPrimitive["P"].data,
        IECore.V3fVectorData(expectedPoints, IECore.GeometricData.Interpretation.Point))
    # Now we close one of the polygons: conversion must switch to polygons
    curves[2].parm("close").set(True)
    converter = IECoreHoudini.FromHoudiniGeometryConverter.create(merge)
    self.assertTrue(converter.isInstanceOf(IECore.TypeId(IECoreHoudini.TypeId.FromHoudiniPolygonsConverter)))
def test_exclude_extra_uploads_in_other_jobs(self):
    """Uploads belonging to other jobs must not appear in the scan result."""
    scanned = dependency_scan.fetch(self.node, self.seq, 3)
    self.assertNotIn(hou.node("job2").parm("upload_1").eval(), scanned)
def createBox( self ) :
    """Create a fresh /obj geo (no init scripts) containing a box SOP; return the box."""
    geo = hou.node("/obj").createNode("geo", run_init_scripts=False)
    return geo.createNode( "box" )
def Assenble_bgeo():
    """
    This fuc is Selected Alembic objects
    Convert_HoudiniGeometry& write_bgeoFile
    Samples***
    selcted obj alembicNode...
    Assenble_bgeo()
    """
    import hou
    import dep_system

    root = hou.node("/obj")
    for shapenode in hou.selectedItems():
        alembicNode = hou.node(str(shapenode.path()))
        Child = alembicNode.children()
        Child_Path = str(Child[0].path())
        # ====== Create CahceAssembles ======
        Cache_AssembleNode = root.createNode("geo", None, 0)
        Cache_AssembleNode.setName(str(Child[0]) + '_bgeo')
        Cache_AssembleNode.setColor(hou.Color((0.3, 0.4, 0.3)))
        Cache_AssembleNode.moveToGoodPosition()
        Assemble_root = hou.node(Cache_AssembleNode.path())
        # objectMergeSop: pull the alembic shape in with world transform
        objmergeSop = Assemble_root.createNode('object_merge')
        objmergeSop.setName('Import_' + str(Child[0]) + '_ABC')
        objmergeSop.setParms({'objpath1': Child_Path, 'xformtype': 1})
        objmergeSop.setColor(hou.Color((0.7, 0, 0.4)))
        # PythonSOP: unpack, rename path attr to the shape name, add normals
        PythonSop = objmergeSop.createOutputNode('python', 'Unpacking_process')
        PythonSop.move([0.3, -0.3])
        PythonSop.parm('python').set(
            "node = hou.pwd()\ngeo = node.geometry()\nresultgeo = hou.Geometry()\n\nsops = hou.sopNodeTypeCategory()\n#unpack\n\nunpack_py = sops.nodeVerb('unpack')\nunpack_py.setParms({'transfer_attributes': 'path'})\nunpack_py.execute(geo,[geo])\n\n#reconvert_ShapeName\nfor prim in geo.iterPrims():\n path = prim.attribValue('path')\n shape = path.split('/')[-1]\n\n prim.setAttribValue('path',shape)\n\n#normal\nnormal_py = sops.nodeVerb('normal')\nnormal_py.execute(resultgeo,[geo])\n\n\nnode.geometry().clear()\nnode.geometry().merge(resultgeo)"
        )
        # nullSop: display/render endpoint
        EndnullSop = PythonSop.createOutputNode('null')
        EndnullSop.setName("unpacking_" + str(Child[0]) + "_OUT")
        EndnullSop.move([0, -0.3])
        EndnullSop.setColor(hou.Color((0.5, 0.4, 0.0)))
        EndnullSop.setCurrent(1, 1)
        EndnullSop.setDisplayFlag(1)
        EndnullSop.setRenderFlag(1)
    dep_system.createDepsys("obj")
def expAnyVal(outPath):
    """Export values for every node type (no type filter)."""
    expValByType(outPath, None)


def expNetVal(outPath, folders=None, rootPath="/obj"):
    """Export values for every node found under *rootPath*."""
    nodeLst = xh.NodeList()
    nodeLst.netBuild(rootPath)
    expVal(nodeLst.nodes, outPath, folders)


def expMtlVal(outPath, folders=None, rootPath="/obj", mtlTypes=None):
    """Export values for material nodes of the given types under *rootPath*.

    mtlTypes defaults to the standard look-dev material types; the None
    sentinel replaces the original mutable list default argument.
    """
    if mtlTypes is None:
        mtlTypes = ["GL_lookdev", "v_layered", "mantrasurface", "principledshader"]
    nodeLst = xh.NodeList()
    nodeLst.netBuild(rootPath)
    lst = [node for node in nodeLst.nodes if node.type().name() in mtlTypes]
    if len(lst):
        expVal(lst, outPath, folders)


if __name__ == "__main__":
    outPath = hou.expandString("$HIP/")
    # NOTE(review): exePath is not defined in this file -- presumably set
    # by the embedding environment; confirm before running standalone.
    outPath = exePath
    #outPath = r"D:/tmp/"
    outPath += "/_test.xval"
    #expLitVal(outPath)
    #expAnyVal(outPath)
    #expNetVal(outPath)
    #expNetVal(outPath, rootPath = "/obj/ANIM")
    #expMtlVal(outPath)
    expVal([hou.node("/obj/xvals")], outPath)
def _file_cop(self, path, sg_publish_data):
    """Read the supplied path as a file COP.

    :param str path: The path to the file to import.
    :param dict sg_publish_data: The publish data for the supplied path.
    """
    import hou

    app = self.parent
    publish_name = sg_publish_data.get("name", "published_file")
    # houdini node names must be alphanumeric, so collapse every run of
    # non alpha-numeric characters in the publish name to an underscore
    pattern = re.compile('[\W_]+', re.UNICODE)
    publish_name = pattern.sub('_', publish_name)
    # get the publish path
    path = self.get_publish_path(sg_publish_data)
    # houdini doesn't like UNC paths.
    path = path.replace("\\", "/")
    img_context = _get_current_context("/img")
    try:
        file_cop = img_context.createNode("file", publish_name)
    except hou.OperationFailed:
        # failed to create the node in the current context; fall back to /img
        img_context = hou.node("/img")
        comps = [c for c in img_context.children() if c.type().name() == "img"]
        if comps:
            # if there are comp networks, just pick the first one
            img_network = comps[0]
        else:
            # if not, create one at the /img and then add the file cop
            img_network = img_context.createNode("img", "comp1")
        file_cop = img_network.createNode("file", publish_name)
    # replace any %0#d format string with the corresponding houdini frame
    # env variable. example %04d => $F4
    frame_pattern = re.compile("(%0(\d)d)")
    frame_match = re.search(frame_pattern, path)
    if frame_match:
        full_frame_spec = frame_match.group(1)
        padding = frame_match.group(2)
        path = path.replace(full_frame_spec, "$F%s" % (padding, ))
    file_cop.parm("filename1").set(path)
    app.log_debug("Created file COP: %s\n path: '%s' " % (file_cop.path(), path))
    file_cop.parm("reload").pressButton()
    _show_node(file_cop)
# import the stuff we need to access houdini and parse xml import hou import xml.etree.ElementTree as et # let us parse the xml filepath = hou.getenv("XML") + '/sample_Thu Aug 18 09:40:16 2016.xml' xmldata = et.parse(filepath) skeleton = xmldata.getroot() # let us init imp houdini variables to play around with the node network node = hou.pwd() root = hou.node('/obj') # now we start # first things first - create our geo object that will be the parent of everything skeleton_node = root.createNode('geo') skeleton_node.setName(skeleton.attrib['name']) # delete the default nodes for child in skeleton_node.children(): child.destroy() # edit the parameter interface of the skeleton node # parmTemplate = hou.FloatParmTemplate("ths", "Thickness at Start", 1, default_value=([0]), min=0, max=10, min_is_strict=False) # skeleton_node.addSpareParmTuple(parmTemplate, in_folder([Tapering]), create_missing_folders=True) parm_group = node.parmTemplateGroup() parm_folder = hou.FolderParmTemplate("folder", "Thickness and Tapering") parm_folder.addParmTemplate( hou.FloatParmTemplate("ths", "Thickness Start", 1, default_value=([0.3]))) parm_folder.addParmTemplate(
selection = hou.selectedNodes() names = [n.name() for n in selection] my_list = "" if len(selection) < 1: my_list = "nothing selected, please select at least one object" else: a = 1 for sel in selection: my_list = my_list + "\n" + "selected object " + str( a) + ":" + sel.name() a = a + 1 hou.ui.displayMessage(my_list, buttons=('OK', )) length = len(selection) print "Number of nodes selected: " + str(length) paths = [ n.path() for n in hou.node('/out').allSubChildren() if n.type() == hou.nodeType('Driver/ifd') ] selected = hou.ui.selectFromTree(choices=paths) mantras = [hou.node(path) for path in selected] for m in mantras: m.parm('forceobject').set(' '.join( n.name() for n in hou.selectedNodes())) # CHANGE PARAMETER NAME AS REQURIED m.parm('vobject').set("") print("Selected mantra nodes : ") + m.name() new_name = ' '.join(names) hou.ui.displayMessage("( " + (new_name) + " ) - added to selected mantra(s) matte objects.", buttons=('*Claps*', ))
def testAttributeFilter( self ) :
    """Verify the attributeFilter parms of ieCortexConverter / ieOpHolder.

    Builds a SOP chain (torus -> color -> rest -> attribcreate -> uvunwrap)
    feeding an ieOpHolder and an ieCortexConverter, then checks which
    point/prim/vertex/global attributes survive various filter patterns.
    """
    # Build the test network: a torus with vertex Cd, a rest attribute,
    # a per-point pscale, and vertex uvs.
    torus = hou.node("/obj").createNode("geo", run_init_scripts=False).createNode( "torus" )
    color = torus.createOutputNode( "color" )
    color.parm( "class" ).set( 3 )
    color.parm( "colortype" ).set( 2 )
    rest = color.createOutputNode( "rest" )
    scale = rest.createOutputNode( "attribcreate" )
    scale.parm( "name1" ).set( "pscale" )
    scale.parm( "value1v1" ).setExpression( "$PT" )
    uvunwrap = scale.createOutputNode( "uvunwrap" )
    opHolder = uvunwrap.createOutputNode( "ieOpHolder" )
    fn = IECoreHoudini.FnOpHolder( opHolder )
    fn.setOp( "parameters/primitives/polyParam" )
    out = opHolder.createOutputNode( "ieCortexConverter" )

    # verify input
    inGeo = uvunwrap.geometry()
    self.assertEqual( sorted([ x.name() for x in inGeo.pointAttribs() ]), ['P', 'Pw', 'pscale', 'rest'] )
    self.assertEqual( sorted([ x.name() for x in inGeo.primAttribs() ]), [] )
    self.assertEqual( sorted([ x.name() for x in inGeo.vertexAttribs() ]), ['Cd', 'uv'] )
    self.assertEqual( sorted([ x.name() for x in inGeo.globalAttribs() ]), ['varmap'] )

    # verify output (default filter: everything passes through)
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw', 'pscale', 'rest'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd', 'uv'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # verify intermediate op result
    result = fn.getOp().resultParameter().getValue()
    self.assertEqual( result.keys(), [ "Cs", "P", "Pref", "s", "t", "varmap", "width" ] )
    self.assertTrue( result.arePrimitiveVariablesValid() )

    # make sure P is forced (excluding P from the filter has no effect)
    out.parm( "attributeFilter" ).set( "* ^P" )
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw', 'pscale', 'rest'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd', 'uv'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # have to filter the source attrs s, t and not uv — filtering the
    # Houdini-side names here does nothing:
    out.parm( "attributeFilter" ).set( "* ^uv ^pscale ^rest" )
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw', 'pscale', 'rest'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd', 'uv'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # ...whereas filtering the Cortex-side names removes them on output:
    out.parm( "attributeFilter" ).set( "* ^s ^t ^width ^Pref" )
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # make sure we can filter on both ends: the input filter on the op holder
    # uses Cortex names, so these patterns don't change the op result...
    opHolder.parm( "parm_input_attributeFilter" ).set( "* ^s ^t ^width ^Pref" )
    result = fn.getOp().resultParameter().getValue()
    self.assertEqual( result.keys(), [ "Cs", "P", "Pref", "s", "t", "varmap", "width" ] )
    self.assertTrue( result.arePrimitiveVariablesValid() )
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # ...but filtering the Houdini-side names on input does strip them:
    opHolder.parm( "parm_input_attributeFilter" ).set( "* ^uv ^pscale ^rest" )
    opHolder.cook( True )
    result = fn.getOp().resultParameter().getValue()
    self.assertEqual( result.keys(), [ "Cs", "P", "varmap" ] )
    self.assertTrue( result.arePrimitiveVariablesValid() )
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # since the vars never made it to the op, they never make it out
    out.parm( "attributeFilter" ).set( "*" )
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )
def main():
    """Build a gallery of every V-Ray texture VOP applied to the selected OBJ.

    For each VRayNodeTex* VOP type (minus utility/conversion types), creates a
    material with the texture on diffuse, clones the selected geometry, assigns
    the material, adds a 3D label, and lays the copies out on a grid.
    Requires a Houdini session with the V-Ray for Houdini plugin loaded.
    """
    import hou
    import math

    PREFIX = "VRayNodeTex"
    MAT = hou.node("/mat")
    OBJ = hou.node("/obj")

    # Texture types containing any of these tags are utility/combiner nodes,
    # not renderable textures, and are skipped.
    skipTexTags = (
        "User", "Particle", "Normal", "Combine", "RGB", "Ptex", "Switch",
        "OCIO", "OSL", "Correct", "Condition", "Output", "Mask", "Mix",
        "Color", "Clamp", "Layered", "Range", "Invert", "Float", "Int",
        "UVW", "Sampler", "Sky", "Product", "Distance", "Multi", "ICC",
        "Lut", "Luminance", "Blend", "Bezier", "Remap", "CompMax",
    )
    # Conversion helper suffixes to skip as well.
    skipTexSuffix = ("Op", "ToColor", "ToFloat", "ToInt")

    def vrayVopFilter(vopType):
        # Keep only concrete V-Ray texture VOP type names.
        if not vopType.startswith(PREFIX):
            return False
        if vopType.endswith(skipTexSuffix):
            return False
        if any(skipTag in vopType for skipTag in skipTexTags):
            return False
        return True

    def getCreate(network, nodeType, nodeName):
        # Return the child named nodeName, creating it if missing.
        node = network.node(nodeName)
        if not node:
            node = network.createNode(nodeType, node_name=nodeName)
        return node

    def getCreateEmpty(network, nodeType, nodeName):
        # Like getCreate, but also deletes any existing children.
        node = getCreate(network, nodeType, nodeName)
        node.deleteItems(node.children())
        return node

    def generateTextures(objNode):
        # Build one labeled geo copy per texture type, on a square-ish grid.
        print("OBJ: \"%s\"" % (objNode.path()))
        vopTypes = hou.vopNodeTypeCategory().nodeTypes()
        vrayVopTypes = sorted(
            [vopType for vopType in vopTypes if vrayVopFilter(vopType)])
        # FOR TESTS
        # vrayVopTypes = vrayVopTypes[:2]

        # Grid cell size derives from the source object's bounding box,
        # padded by bboxOffsetPerc on each axis.
        bbox = objNode.renderNode().geometry().boundingBox()
        bboxWidth = bbox.sizevec().x()
        bboxDepth = bbox.sizevec().z()
        col = 0
        row = 0
        bboxOffsetPerc = 2.0
        offsetX = bboxWidth * bboxOffsetPerc
        offsetY = bboxDepth * bboxOffsetPerc
        cellW = bboxWidth + offsetX
        cellD = bboxDepth + offsetY
        maxColCount = int(math.sqrt(len(vrayVopTypes)))

        matNet = getCreateEmpty(MAT, "vray_material", "TEXTURES_ALL_PER_FRAME")
        objNet = getCreateEmpty(OBJ, "subnet", "TEXTURES_ALL_PER_FRAME")

        # Dark material shared by all the text labels.
        fontMat = getCreate(MAT, "VRayNodeBRDFVRayMtl", "font")
        fontMat.setMaterialFlag(True)
        fontMat.parm("diffuser").set(0.05)
        fontMat.parm("diffuseg").set(0.05)
        fontMat.parm("diffuseb").set(0.05)

        # Shared UV placement node wired into every texture that accepts one.
        uvw = matNet.createNode("VRayNodeUVWGenMayaPlace2dTexture",
                                node_name="channel_uv")
        uvw.parm("repeat_u").set(3)
        uvw.parm("repeat_v").set(3)

        for texType in vrayVopTypes:
            tex = None
            texName = texType.replace(PREFIX, "")
            try:
                mtl = matNet.createNode("VRayNodeBRDFVRayMtl",
                                        node_name="mtl%s" % texName)
                # Attach texture to "diffuse".
                tex = mtl.createInputNode(0, texType, node_name=texName)
                uvwGenIndex = tex.inputIndex("uvwgen")
                if uvwGenIndex >= 0:
                    tex.setNamedInput("uvwgen", uvw, 0)
            except:
                # NOTE(review): bare except silently skips texture types that
                # fail to instantiate — intentional best-effort behavior here.
                print("Failed: \"%s\"" % (texType))
            if tex:
                objForTex = getCreateEmpty(objNet, "geo", "obj%s" % texName)
                # Copy source geo.
                hou.copyNodesTo(objNode.children(), objForTex)
                testMtl = objForTex.createNode("material")
                testMtl.setNextInput(objForTex.renderNode())
                # Assign material
                objForTex.parm("shop_materialpath").set("")
                testMtl.parm("shop_materialpath1").set(mtl.path())
                # Add text
                font = objForTex.createNode("font")
                font.parm("file").set("Consolas")
                font.parm("text").set(texName)
                font.parm("fontsize").set(0.2)
                fontDivide = objForTex.createNode("divide")
                fontDivide.setNextInput(font)
                fontExt = objForTex.createNode("polyextrude")
                fontExt.parm("dist").set(0.02)
                fontExt.setNextInput(fontDivide)
                fontTm = objForTex.createNode("xform")
                fontTm.setNextInput(fontExt)
                fontTm.parm("tx").set(0.3)
                fontTm.parm("rx").set(-90.0)
                fontTm.parm("ry").set(90.0)
                fontMtl = objForTex.createNode("material")
                fontMtl.setNextInput(fontTm)
                fontMtl.parm("shop_materialpath1").set(fontMat.path())
                merge = objForTex.createNode("merge")
                merge.setNextInput(testMtl)
                merge.setNextInput(fontMtl)
                merge.setDisplayFlag(True)
                merge.setRenderFlag(True)
                objForTex.layoutChildren()
                # Place this copy on its grid cell.
                pos = (row * cellD, 0, col * cellW)
                tm = hou.hmath.buildTranslate(pos)
                objForTex.setWorldTransform(tm)
                if col == maxColCount - 1:
                    col = 0
                    row += 1
                else:
                    col += 1
        matNet.layoutChildren()
        objNet.layoutChildren()

    selection = hou.selectedNodes()
    if not selection:
        hou.ui.displayMessage("Select OBJ node!")
    else:
        if len(selection) > 1:
            print("Multiple OBJ nodes are selected. Using the first one...")
        objNode = selection[0]
        generateTextures(objNode)
        objNode.setCurrent(True, clear_all_selected=True)
def output_geo(soppath, now, properties=None):
    """Output the geometry by calling the appropriate wrangler

    Geometry is partitioned into subparts based on the shop_materialpath
    and material_override prim attributes.

    Args:
        soppath (str): oppath to SOP
        now (float): evaluation time (presumably SOHO time — TODO confirm
            against callers)
        properties (dict, None): Dictionary of SohoParms
            (Optional, defaults to None)
    Returns:
        None
    """
    # split by material
    # split by material override
    # split by geo type
    # NOTE: We won't be splitting based on medium interior/exterior
    # those will be left as a object level assignment only.
    # Note, that in the case of Houdini Volumes they will look
    # for the appropriate medium parameters as prim vars
    if properties is None:
        properties = {}

    ignore_materials = False
    if 'pbrt_ignorematerials' in properties:
        ignore_materials = properties['pbrt_ignorematerials'].Value[0]

    # PBRT allows setting Material parameters on the Shapes in order to
    # override a material's settings. (Shapes get checked first)
    # This paramset will be for holding those overrides and passing
    # them down to the actual shape api calls.
    material_paramset = ParamSet()

    # We need the soppath to come along and since we are creating new
    # hou.Geometry() we'll lose the original sop connection so we need
    # to stash it here.
    node = hou.node(soppath)
    if node is None or node.type().category() != hou.sopNodeTypeCategory():
        return
    input_gdp = node.geometry()
    if input_gdp is None:
        return
    # Work on a frozen copy so partitioning never touches the live SOP geo.
    gdp = hou.Geometry()
    gdp.merge(input_gdp.freeze())

    # Partition based on materials
    # A detail-level attribute acts as the fallback material for prims
    # without their own shop_materialpath.
    global_material = None
    if not ignore_materials:
        try:
            global_material = gdp.stringAttribValue('shop_materialpath')
        except hou.OperationFailed:
            pass
    attrib_h = gdp.findPrimAttrib('shop_materialpath')
    if attrib_h is not None and not ignore_materials:
        material_gdps = partition_by_attrib(gdp, attrib_h)
    else:
        material_gdps = {global_material: gdp}

    global_override = None
    if not ignore_materials:
        try:
            global_override = gdp.stringAttribValue('material_override')
        except hou.OperationFailed:
            pass

    # Further partition based on material overrides
    has_prim_overrides = bool(
        not ignore_materials
        and gdp.findPrimAttrib('material_override') is not None)

    for material in material_gdps:
        if material:
            api.AttributeBegin()
            api.NamedMaterial(material)

        material_gdp = material_gdps[material]
        #api.Comment('%s %i' % (material_gdp,len(material_gdp.prims())))

        if has_prim_overrides:
            attrib_h = material_gdp.findPrimAttrib('material_override')
            override_gdps = partition_by_attrib(material_gdp, attrib_h)
            # Clean up post partition
            material_gdp.clear()
        else:
            override_gdps = {global_override: material_gdp}

        for override in override_gdps:
            override_gdp = override_gdps[override]
            #api.Comment(' %s %i' % (override_gdp, len(override_gdp.prims())))

            # Split by primitive type so each chunk goes to its own wrangler.
            shape_gdps = partition_by_attrib(override_gdp, 'typename',
                                             intrinsic=True)
            override_gdp.clear()

            for shape in shape_gdps:
                # Fresh paramset per shape chunk; only populated when both a
                # material and an override exist.
                material_paramset = ParamSet()

                if override and material:
                    material_paramset.update(
                        override_to_paramset(material, override))

                shape_gdp = shape_gdps[shape]
                #api.Comment(' %s %i' % (shape_gdp, len(shape_gdp.prims())))

                shape_wrangler = shape_wranglers.get(shape, not_supported)
                if shape_wrangler:
                    shape_wrangler(shape_gdp, material_paramset, properties)
                shape_gdp.clear()

        if material:
            api.AttributeEnd()
    return
def updateAll(self):
    """Run updateLayout() on every cenoteLayoutNet node found under /obj."""
    for child in hou.node("/obj").children():
        if child.type().name() == "cenoteLayoutNet":
            self.updateLayout(child)
''' Export CHARACTERs and CAMERA animation data from animation scene CAMERA should meet naming convention E<sequenceNumber>_S<shotNumber> Currently only builds cache network for ROMA character. Need to run file caching manually ''' import os import hou from EVE.dna import dna reload(dna) # INITIALIZE DATA sceneRoot = hou.node('/obj/') scenePath = hou.hipFile.path() pathMap = dna.analyzeFliePath(scenePath) sequenceNumber = pathMap['sequenceNumber'] shotNumber = pathMap['shotNumber'] shotGenes = dna.getShotGenes(sequenceNumber, shotNumber) characterData = shotGenes['charactersData'] def exportCamera(): ''' Export shot camera Two options avalable, ABC and HIP. Use HIP currently. :return: ''' print '>> Exporting camera...'
def setUp(self):
    """Build the fixture: the 'job1' Houdini node and a 1-10 frame sequence."""
    self.node = hou.node("job1")
    self.seq = Sequence.create("1-10")
def test_exclude_simple_file_if_not_exists(self):
    """A texture path pointing at a missing file must be absent from the scan."""
    scanned = dependency_scan.fetch(self.node, self.seq, 3)
    missing_path = hou.node("shader2").parm("texture_file").eval()
    self.assertNotIn(missing_path, scanned)
def buildFromClip(self, clipPath, fps, nodeInfoFlg = True):
    """Populate this animation container from a Houdini CHOP clip node.

    Reads every track of the clip at *clipPath*, builds FCurve entries for
    each animated channel, and (optionally) a NodeInfo list for the nodes
    the channels belong to.
    NOTE: Python 2 code (uses xrange).

    Args:
        clipPath (str): path to the clip node (e.g. a CHOP with numchannels).
        fps (float|int): frames per second, stored on self.
        nodeInfoFlg (bool): when True, also build self.nodeInfoLst.
    """
    self.fps = fps
    clip = hou.node(clipPath)
    if not clip: return
    self.nameId, self.pathId = self.strLst.addNameAndPath(clipPath)
    ntrk = clip.parm("numchannels").evalAsInt()
    self.minFrame = 0
    self.maxFrame = -1
    nodePath = "/obj"
    infoLst = []
    for i in xrange(ntrk):
        # Track names look like "<node>/<name>:<channel>"; split off the
        # channel base name and the owning node's short name.
        trkName = clip.parm("name"+str(i)).evalAsString()
        trkType = clip.parm("type"+str(i)).evalAsString()
        sep = trkName.rfind(":")
        nodeName = trkName[:sep]
        chanBaseName = trkName[sep+1:]
        sep = nodeName.rfind("/")
        if sep >= 0: nodeName = nodeName[sep+1:]
        # Only euler (3 components) and float (explicit size) tracks are
        # handled; any other type yields trkSize == 0 and is skipped.
        trkSize = 0
        if trkType == "euler":
            trkSize = 3
        elif trkType == "float":
            trkSize = clip.parm("size"+str(i)).evalAsInt()
        for j in xrange(trkSize):
            chanName = chanBaseName
            # Multi-component channels get an x/y/z/w suffix.
            if trkSize > 1: chanName += "xyzw"[j]
            param = clip.parm("value"+str(i)+("xyzw"[j]))
            nameInfo = xhou.FcvNameInfo(chanName, nodeName, nodePath)
            infoLst.append(ClipFcvInfo(param, nameInfo))
            # Track the keyframe range of this channel.
            keys = param.keyframes()
            for n, k in enumerate(keys):
                fno = int(k.frame())
                if n:
                    fnoMin = min(fno, fnoMin)
                    fnoMax = max(fno, fnoMax)
                else:
                    fnoMin = fno
                    fnoMax = fno
            # NOTE(review): if a channel has no keyframes, fnoMax is unbound
            # here on the first channel (NameError) or stale from a previous
            # one — presumably every exported channel is keyed; verify.
            # fnoMin is computed but never folded into self.minFrame, which
            # stays 0 — TODO confirm this is intentional.
            self.maxFrame = max(fnoMax, self.maxFrame)
    #xcore.dbgmsg("Anim clip from " + str(self.minFrame) + " to " + str(self.maxFrame))
    # Build FCurves over the full [minFrame, maxFrame] range and keep only
    # the ones that are actually animated.
    fcvLst = []
    for i, info in enumerate(infoLst):
        fcvLst.append(xhou.FCurve(info.param.path(), self.minFrame, self.maxFrame, info.nameInfo))
    self.fcv = []
    for fcv in fcvLst:
        if fcv.isAnimated():
            fcv.nodePathId = self.strLst.add(fcv.nodePath)
            fcv.nodeNameId = self.strLst.add(fcv.nodeName)
            fcv.chNameId = self.strLst.add(fcv.chName)
            self.fcv.append(fcv)
    self.nodeInfoLst = []
    if nodeInfoFlg:
        # One NodeInfo per unique node path among the animated curves.
        nodeMap = {}
        for fcv in self.fcv:
            nodePath = fcv.nodePath + "/" + fcv.nodeName
            if not nodePath in nodeMap:
                nodeId = len(self.nodeInfoLst)
                typeId = self.strLst.add("null")
                info = NodeInfo(self, hou.node(nodePath), fcv.nodePathId, fcv.nodeNameId, typeId)
                nodeMap[nodePath] = nodeId
                self.nodeInfoLst.append(info)
def reloadUsd(self, layout):
    """Press the 'reload' button on the LOP node referenced by *layout*.

    Args:
        layout (hou.Node): node whose ``loppath`` parm holds the path of the
            LOP node to refresh.

    Fix: the original crashed with AttributeError when ``loppath`` was empty
    or pointed at a deleted node (hou.node() returns None). Both the node and
    its 'reload' parm are now checked before pressing; the call degrades to a
    no-op when either is missing.
    """
    ref_node_path = layout.parm("loppath").eval()
    ref_node = hou.node(ref_node_path)
    if ref_node is None:
        return
    reload_parm = ref_node.parm("reload")
    if reload_parm is not None:
        reload_parm.pressButton()
import hou from EVE.dna import dna reload(dna) # Hardcoded values cacheVersion = '001' characterName = 'ROMA' # Get shot and sequence number from the scene name fileCode = dna.analyzeFliePath(hou.hipFile.path())[2] episodeCode, shotCode = dna.analyzeFileCode(fileCode) # Get scene root OBJ = hou.node('/obj/') def createCharacterCache(characterName): ''' Create CHARACTERS container to hold character caches ''' # Build cache path pathCache = '$JOB/geo/SHOTS/{0}/SHOT_{1}/{2}/GEO/{3}/E{0}_S{1}_{2}_{3}.$F.bgeo.sc'.format(episodeCode, shotCode, characterName, cacheVersion) # Check if CHARACTERS node exists in scene chars = hou.node('/obj/{}'.format('CHARACTERS')) if not chars: # Create CHARACTERS node if its not exists in scene
import hou #create global network instance my_network = hou.node("/obj/").children() #create /mat context instance mat_context = hou.node("/mat") #create exiting material list exiting_shds = [] shaders = mat_context.children() for i in shaders: exiting_shds.append(i.name()) # if exiting_shds: # print exiting_shds # else: # print "Liste de shaders vide" #create glTF loader subnetwork glTF_Loader = hou.node('/obj').createNode('subnet', 'glTF_Loader') glTF_Loader.moveToGoodPosition() #create a list of all glTF hierarchy nodes in /obj gltf_hierarchy_list = [] for nodes in my_network: if nodes.type().name() == 'gltf_hierarchy': gltf_hierarchy_list.append(nodes) #create sub_geo for each glTF hierarchy in glTF_Loader for nodes in gltf_hierarchy_list: #create subnet to host all gltf nodes in the glTF_Loader geo_node = glTF_Loader.createNode('subnet', nodes.name())
def getBlockParameters(afnode, ropnode, subblock, prefix, frame_range):
    """Build the list of Afanasy BlockParameters for one ROP node.

    Three cases, selected in order:
      1. mantra "separate render" (IFD generation / mantra render /
         optional tile render + stitch) when ropnode is an 'ifd' driver and
         the afanasy node has 'sep_enable' on;
      2. distributed simulation when the 'ds_node' parm is set;
      3. otherwise a single plain block.

    Returns:
        list of BlockParameters, or None on a validation failure
        (after calling doPost() on the failing block).
    """
    params = []
    if ropnode is not None and ropnode.type().name() == 'ifd' and afnode.parm(
            'sep_enable').eval():
        # Case mantra separate render:
        run_rop = afnode.parm('sep_run_rop').eval()
        read_rop = afnode.parm('sep_read_rop_params').eval()
        join_render = afnode.parm('sep_join').eval()
        tile_render = afnode.parm('sep_tile').eval()
        tile_divx = afnode.parm('sep_tile_divx').eval()
        tile_divy = afnode.parm('sep_tile_divy').eval()
        tiles_count = tile_divx * tile_divy
        tiles_stitch_service = afnode.parm('tiles_stitch_service').eval()
        tiles_stitch_capacity = afnode.parm('tiles_stitch_capacity').eval()
        del_rop_files = afnode.parm('sep_del_rop_files').eval()
        # Joined render only makes sense when the ROP itself is run.
        if not run_rop:
            join_render = False

        block_generate = BlockParameters(afnode, ropnode, join_render == False,
                                         prefix, frame_range)
        blockname = block_generate.name
        if not block_generate.valid:
            block_generate.doPost()
            return None

        if read_rop or run_rop:
            if not block_generate.ropnode:
                hou.ui.displayMessage('Can`t find ROP for processing "%s"' %
                                      afnode.path())
            if not isinstance(ropnode, hou.RopNode):
                hou.ui.displayMessage('"%s" is not a ROP node' %
                                      block_generate.ropnode.path())

        if join_render:
            block_generate.name += '-Separate'
            # Joined render generates and renders in one block; tiles off.
            tile_render = False
        else:
            block_generate.name += '-GenIFD'
            # Get some generation specific parameters:
            capacity = int(afnode.parm('sep_gen_capacity').eval())
            hosts_mask = str(afnode.parm('sep_gen_hosts_mask').eval())
            hosts_exclude = str(
                afnode.parm('sep_gen_hosts_mask_exclude').eval())
            max_runtasks = int(afnode.parm('sep_gen_max_runtasks').eval())
            maxperhost = int(afnode.parm('sep_gen_maxperhost').eval())
            min_memory = int(afnode.parm('sep_gen_min_memory').eval())
            if capacity > 0:
                block_generate.capacity = capacity
            if hosts_mask != '':
                block_generate.hosts_mask = hosts_mask
            if hosts_exclude != '':
                block_generate.hosts_mask_exclude = hosts_exclude
            if max_runtasks > -1:
                block_generate.max_runtasks = max_runtasks
            if maxperhost > -1:
                block_generate.maxperhost = maxperhost
            if min_memory > -1:
                block_generate.min_memory = min_memory

        if block_generate.ropnode.parm('soho_outputmode').eval() == 0:
            # Set output mode to produce ifd files:
            block_generate.ropnode.parm('soho_outputmode').set(1)
            # Remember the original mode so it can be restored later.
            block_generate.soho_outputmode = 0
            # We use unexpandedString() here to keep $F4 as expression.
            # If we eval() the value,
            # we will 'bake' the frame number when user submit the job.
            block_generate.ropnode.parm('soho_diskfile').set(
                block_generate.ropnode.parm(
                    'vm_picture').unexpandedString() + '.ifd')

        # Images / ifd-files patterns come either from the ROP itself or
        # from explicit parms on the afanasy node.
        if read_rop:
            parm_images = ropnode.parm('vm_picture')
            parm_files = ropnode.parm('soho_diskfile')
        else:
            parm_images = afnode.parm('sep_images')
            parm_files = afnode.parm('sep_files')
        images = afcommon.patternFromPaths(
            parm_images.evalAsStringAtFrame(block_generate.frame_first),
            parm_images.evalAsStringAtFrame(block_generate.frame_last))
        files = afcommon.patternFromPaths(
            parm_files.evalAsStringAtFrame(block_generate.frame_first),
            parm_files.evalAsStringAtFrame(block_generate.frame_last))

        if run_rop:
            if join_render:
                block_generate.preview = images
            if not join_render:
                block_generate.service = 'hbatch'
                block_generate.tickets = {'HYTHON': 1}
            else:
                block_generate.service = 'hbatch_mantra'
                block_generate.tickets = {'HYTHON': 1, 'MANTRA': 1}
                block_generate.cmd = block_generate.cmd.replace(
                    'hrender_af', 'hrender_separate')

        if not join_render:
            # A dedicated mantra render block consuming the generated ifds.
            block_render = BlockParameters(afnode, ropnode, subblock, prefix,
                                           frame_range)
            block_render.cmd = 'mantra'
            block_render.service = 'mantra'
            block_render.tickets = {'MANTRA': 1}
            if run_rop:
                block_render.dependmask = block_generate.name
                if del_rop_files:
                    block_render.delete_files.append(files)
            if tile_render:
                block_render.name = blockname + '-TileRender'
                block_render.numeric = False
                block_render.cmd += \
                    ' -t count=%(tile_divx)dx%(tile_divy)d,index=@#@' % vars()
                # Negative frame_pertask: tiles_count tasks per frame.
                block_render.frame_pertask = -tiles_count
                for frame in range(block_generate.frame_first,
                                   block_generate.frame_last + 1,
                                   block_generate.frame_inc):
                    arguments = afnode.parm(
                        'sep_render_arguments').evalAsStringAtFrame(frame)
                    arguments = arguments.replace(
                        '@FILES@', parm_files.evalAsStringAtFrame(frame))
                    for tile in range(0, tiles_count):
                        block_render.tasks_names.append('frame %d tile %d' %
                                                        (frame, tile))
                        block_render.tasks_cmds.append('%d %s' %
                                                       (tile, arguments))
            else:
                block_render.name = blockname + '-Render'
                block_render.cmd += ' ' + afcommon.patternFromPaths(
                    afnode.parm('sep_render_arguments').evalAsStringAtFrame(
                        block_generate.frame_first),
                    afnode.parm('sep_render_arguments').evalAsStringAtFrame(
                        block_generate.frame_last)).replace('@FILES@', files)
                block_render.preview = images

        if tile_render:
            # Build the itilestitch command that joins the tiles per frame.
            cmd = 'itilestitch "%s"' % images
            tile_suffix = '_tile%02d_'
            timg = os.path.relpath(images)
            # Now we should find a dot before frame number,
            # as matra places tile_suffix there.
            tpos = timg.find('@')
            if tpos > 0:
                # We found Afanasy digits pattern start
                tpos -= 1
            else:
                # We do not have Afanay numeric pattern.
                # May be aftist sent just one frame to render.
                name, ext = os.path.splitext(timg)
                # Shift to name w/o extension
                tpos = len(name) - 1
                # Shift frame digits
                while name[tpos].isdigit() and tpos > 0:
                    tpos -= 1
                # Shift a dot before frame digits
                if name[tpos] == '.':
                    tpos -= 1
                # It will be used as a length to cut, not as index
                tpos += 1
            # Insert tile suffix
            timg = timg[:tpos] + tile_suffix + timg[tpos:]
            # List all tiles in the command
            for i in range(0, tiles_count):
                cmd += ' "%s"' % (timg % i)
            block_join = BlockParameters(afnode, ropnode, subblock, prefix,
                                         frame_range)
            block_join.name = blockname + '-TilesStitch'
            block_join.service = tiles_stitch_service
            block_join.capacity = tiles_stitch_capacity
            block_join.tickets = dict()
            # block render might be referenced before assignment
            # NOTE(review): safe in practice because tile_render is forced
            # False whenever join_render skips creating block_render — verify.
            block_join.dependmask = block_render.name
            block_join.cmd = cmd
            block_join.cmd_useprefix = False
            block_join.preview = images
            # Disable minimum running time as it can be set on Afanasy ROP.
            # As tile stich can be much fast
            block_join.minruntime = 0
            params.append(block_join)
        if not join_render:
            params.append(block_render)
        if run_rop:
            params.append(block_generate)

    elif len(str(afnode.parm('ds_node').eval())):
        # Case distribute simulation:
        ds_node_path = str(afnode.parm('ds_node').eval())
        ds_node = hou.node(ds_node_path)
        if not ds_node:
            hou.ui.displayMessage('No such control node: "%s"' % ds_node_path)
            return
        # The control node must expose these parameters.
        parms = ['address', 'port', 'slice']
        for parm in parms:
            if not ds_node.parm(parm):
                hou.ui.displayMessage(
                    'Control node "%s" does not have "%s" parameter' %
                    (ds_node_path, parm))
                return
        enable_tracker = not afnode.parm('ds_tracker_manual').eval()
        if enable_tracker:
            # Tracker block:
            par_start = getTrackerParameters(afnode, ropnode, subblock, prefix,
                                             frame_range, True)
            params.append(par_start)
        # A block for each slice:
        ds_num_slices = int(afnode.parm('ds_num_slices').eval())
        for s in range(0, ds_num_slices):
            par = BlockParameters(afnode, ropnode, subblock, prefix,
                                  frame_range)
            sim_blocks_mask = par.name + '.*'
            par.name += '-s%d' % s
            # One task covers the whole frame range for a slice.
            par.frame_pertask = par.frame_last - par.frame_first + 1
            if enable_tracker:
                par.addDependMask(par_start.name)
            par.fullrangedepend = True
            par.auxargs = ' --ds_node "%s"' % ds_node_path
            par.auxargs += ' --ds_address "%s"' % str(
                afnode.parm('ds_address').eval())
            par.auxargs += ' --ds_port %d' % int(afnode.parm('ds_port').eval())
            par.auxargs += ' --ds_slice %d' % s
            params.append(par)
        if enable_tracker:
            # Stop tracker block:
            par_stop = getTrackerParameters(afnode, ropnode, subblock, prefix,
                                            frame_range, False)
            par_stop.addDependMask(sim_blocks_mask)
            params.append(par_stop)
            # Set other block names for start tracker block.
            # As start tracker block will set other block environment
            # to specify started tracker and port.
            par_start.cmd += ' --envblocks "%s|%s"' % (sim_blocks_mask,
                                                       par_stop.name)
            # On this block depend mask will be reset on tracker start:
            par_start.cmd += ' --depblocks "%s"' % sim_blocks_mask
    else:
        # Default case: one plain block for this ROP.
        params.append(
            BlockParameters(afnode, ropnode, subblock, prefix, frame_range))
    return params
def cacheWrite(startFrame, endFrame, subFrame, format, mode):
    """Write geometry caches for the currently selected SOP nodes.

    Builds a temporary ropnet next to the selection, creates one output
    driver per selected node, executes it, reports what was written, and
    destroys the ropnet.

    Args:
        startFrame (float): first frame to cache.
        endFrame (float): last frame to cache.
        subFrame (float): samples per frame; frame increment is 1/subFrame.
        format (str): 'abc' (Alembic) or 'bgeo' (Houdini bgeo sequence).
        mode (str): 'version' (new cache version) or 'overwrite' (latest).

    Raises:
        ValueError: on an unrecognized *format* or *mode* (the original code
            crashed later with an opaque NameError instead).

    Fixes over the original: empty selection is reported instead of raising
    IndexError; unknown mode/format raise ValueError up front; redundant
    hou.node(x.path()) round-trips removed; button pressed via the parm.
    """
    selectedNodes = hou.selectedNodes()
    if not selectedNodes:
        hou.ui.displayMessage("Nothing selected: select at least one node to cache.")
        return
    parent = selectedNodes[0].parent()
    ropnet = parent.createNode("ropnet")
    hou.setFrame(startFrame)
    message = ''
    for n in selectedNodes:
        # Resolve the cache destination for this node.
        if mode == 'version':
            cachePath = makeCachePath(n, 'add')
        elif mode == 'overwrite':
            cachePath = makeCachePath(n, 'get')[-1]
        else:
            raise ValueError("unknown mode: %r (expected 'version' or 'overwrite')" % (mode,))
        # [display suffix, ROP type, output parm, file suffix, source parm]
        if format == 'abc':
            cacheProperty = ['abc', 'alembic', 'filename', '.abc', 'objects']
        elif format == 'bgeo':
            cacheProperty = [
                '$F4.bgeo', 'geometry', 'sopoutput', '.$F4.bgeo', 'soppath'
            ]
        else:
            raise ValueError("unknown format: %r (expected 'abc' or 'bgeo')" % (format,))
        message += ('NODE' + ' - ' + n.path() + ' > ' +
                    'CACHE - $DATA/geo/' + cachePath.split('geo/')[1] +
                    '/' + n.name() + '.' + cacheProperty[0] + '\n')
        ropNode = ropnet.createNode(cacheProperty[1])
        ropNode.setName(n.name() + "_ropNode")
        # Frame range: trange=1 enables start/end/inc.
        ropNode.parm("trange").set(1)
        ropNode.parm("f1").set(startFrame)
        ropNode.parm("f2").set(endFrame)
        ropNode.parm("f3").set(1.0 / subFrame)
        ropNode.parm(cacheProperty[2]).set(
            os.path.join(cachePath, n.name() + cacheProperty[3]))
        ropNode.parm(cacheProperty[4]).set(n.path())
        # Kick off the write.
        ropNode.parm("execute").pressButton()
    hou.ui.displayMessage(message)
    ropnet.destroy()
def getJobParameters(afnode, subblock=False, frame_range=None, prefix=''):
    """Collect Afanasy job block parameters from an 'afanasy' ROP node.

    Walks the afanasy node's inputs (plus its optional 'output_driver'),
    recursing into nested afanasy nodes and expanding wedge nodes into one
    block per wedge. Non-independent blocks get chained via depend masks.

    Args:
        afnode (hou.Node): the afanasy ROP node.
        subblock (bool): True when called recursively for a nested node.
        frame_range (tuple|None): (first, last, inc); defaults to the
            current frame, overridden by the node's own trange/f1-f3.
        prefix (str): name prefix accumulated through recursion.

    Returns:
        list of BlockParameters, or None on any validation failure.
    """
    if VERBOSE:
        print('Getting Job Parameters from "%s":' % afnode.path())
    # Process frame range:
    if frame_range is None:
        frame_first = int(hou.frame())
        frame_last = frame_first
        frame_inc = 1
    else:
        frame_first, frame_last, frame_inc = frame_range
    # The node's own frame range parms win when trange is enabled.
    trange = afnode.parm('trange')
    if int(trange.eval()) > 0:
        frame_first = int(afnode.parm('f1').eval())
        frame_last = int(afnode.parm('f2').eval())
        frame_inc = int(afnode.parm('f3').eval())
    frame_range = frame_first, frame_last, frame_inc

    params = []
    # Gather connected input nodes (dropping empty connections).
    connections = []
    connections.extend(afnode.inputs())
    nodes = []
    for node in connections:
        if node is not None:
            nodes.append(node)
    # An explicit output driver is processed first (inserted at the front,
    # then reversed below so it ends up processed last).
    output_driver_path = afnode.parm('output_driver').eval()
    if output_driver_path:
        output_driver = hou.node(output_driver_path)
        if output_driver:
            nodes.insert(0, output_driver)
        else:
            hou.ui.displayMessage('Can`t find output drive node: "%s"' %
                                  output_driver_path)
    # In command mode a None entry produces a command-only block.
    if afnode.parm('cmd_mode').eval():
        nodes.append(None)
    nodes.reverse()

    dependmask = ''
    prevparams = []
    for node in nodes:
        if node and node.isBypassed():
            continue
        newparams = []
        if node and node.type().name() == 'afanasy':
            # Nested afanasy node: recurse with an extended prefix.
            newprefix = node.name()
            if prefix != '':
                newprefix = prefix + '_' + newprefix
            newparams = getJobParameters(node, True, frame_range, newprefix)
            dependmask = newprefix + '_.*'
            if newparams is None:
                return None
        elif node and node.type().name() == "wedge":
            # Wedge node: emit one block per wedge variation.
            newprefix = node.name()
            if prefix != '':
                newprefix = prefix + '_' + newprefix
            wedgednode = None
            if node.inputs():
                wedgednode = node.inputs()[0]
            else:
                wedgednode = node.node(node.parm("driver").eval())
            if wedgednode == None:
                return None
            numWedges = computeWedge(
                node, node.type().name())  # we can remove nodetype check
            names = node.hdaModule().getwedgenames(node)
            for wedge in range(numWedges):
                # switching wedges like houdini do to get valid filenames
                hou.hscript('set WEDGE = ' + names[wedge])
                hou.hscript('set WEDGENUM = ' + str(wedge))
                hou.hscript('varchange')
                # add wedged node to next block
                block = getBlockParameters(
                    afnode, wedgednode, subblock,
                    "{}_{:02}".format(newprefix, wedge), frame_range)[0]
                block.auxargs += " --wedge " + node.path() + \
                    " --wedgenum " + str(wedge)
                newparams.append(block)
            # clear environment
            hou.hscript('set WEDGE = ')
            hou.hscript('set WEDGENUM = ')
            hou.hscript('varchange')
            dependmask = newprefix + '_.*'
        else:
            # Plain ROP (or None for command mode): one set of blocks.
            newparams = \
                getBlockParameters(afnode, node, subblock, prefix, frame_range)
            if newparams is None:
                return None
            dependmask = newparams[0].name
        for param in newparams:
            if not param.valid:
                param.doPost()
                return None
        if len(newparams):
            params.extend(newparams)
        else:
            return None
        # Chain blocks: earlier blocks depend on the ones just added,
        # unless the afanasy node is marked independent.
        if not afnode.parm('independent').eval() and dependmask != '':
            for prevparam in prevparams:
                prevparam.addDependMask(dependmask)
        prevparams = newparams

    if len(params):
        # Get Pre/Post Submit Scrips:
        parm = params[0]
        if afnode.parm('pre_submit_script_enable').eval():
            pre_submit_script = afnode.parm('pre_submit_script').eval()
            if pre_submit_script is not None and len(pre_submit_script):
                parm.pre_submit_script = pre_submit_script
        if afnode.parm('post_submit_script_enable').eval():
            post_submit_script = afnode.parm('post_submit_script').eval()
            if post_submit_script is not None and len(post_submit_script):
                parm.post_submit_script = post_submit_script
        # Last parameter needed to generate a job.
        if not subblock:
            params.append(
                BlockParameters(afnode, None, False, '', frame_range, True))
    return params
def main():
    """SOHO entry point for V-Ray IPR rendering.

    Evaluates camera/ROP state through SOHO, initializes the ``_vfh_ipr``
    extension, and either starts an IPR session ("generate" mode) or pushes
    incremental scene changes ("update" mode).
    """
    import sys
    import hou
    import soho
    import sohoglue
    import SOHOcommon

    import sys  # NOTE(review): duplicate of the `import sys` above -- harmless.
    import ctypes

    # Open subsequent extension modules with globally visible symbols so the
    # _vfh_ipr native module can resolve them (only where setdlopenflags exists).
    if hasattr(sys, 'setdlopenflags'):
        sys.setdlopenflags(sys.getdlopenflags() | ctypes.RTLD_GLOBAL)

    import _vfh_ipr

    from soho import SohoParm

    # Ad-hoc enum of log severities passed to _vfh_ipr.logMessage().
    LogLevel = type('Enum', (), {
        'Msg': 0,
        'Info': 1,
        'Progress': 2,
        'Warning': 3,
        'Error': 4,
        'Debug': 5
    })

    def logMessage(level, fmt, *args):
        # Route formatted log output through the V-Ray IPR module.
        _vfh_ipr.logMessage(level, fmt % args)

    def printDebug(fmt, *args):
        # Convenience wrapper: always logs at Debug severity.
        logMessage(LogLevel.Debug, fmt, *args)

    def dumpObjects(listName):
        # Debug helper: log every object name in a SOHO object list.
        # NOTE(review): not called in the visible code.
        printDebug("Checking \"%s\"" % listName)
        for obj in soho.objectList(listName):
            printDebug(" %s", obj.getName())

    def exportObjects(listName):
        # Export every node in the given SOHO object list to the renderer.
        for obj in soho.objectList(listName):
            _vfh_ipr.exportOpNode(opNode=obj.getName())

    def deleteObjects(listName):
        # Remove every node in the given SOHO object list from the renderer.
        # NOTE(review): not called in the visible code.
        for obj in soho.objectList(listName):
            _vfh_ipr.deleteOpNode(opNode=obj.getName())

    def getViewParams(camera, sohoCam, t):
        # Evaluate camera parameters at time `t` into a plain dict consumed
        # by _vfh_ipr.init() / _vfh_ipr.exportView().
        camParms = {
            'space:world': SohoParm('space:world', 'real', [], False),
            'focal': SohoParm('focal', 'real', [0.050], False),
            'aperture': SohoParm('aperture', 'real', [0.0414214], False),
            'orthowidth': SohoParm('orthowidth', 'real', [2], False),
            'near': SohoParm('near', 'real', [0.001], False),
            'far': SohoParm('far', 'real', [1000], False),
            'res': SohoParm('res', 'int', [640, 480], False),
            'projection': SohoParm('projection', 'string', ["perspective"], False),
            'cropl': SohoParm('cropl', 'real', [-1], False),
            'cropr': SohoParm('cropr', 'real', [-1], False),
            'cropb': SohoParm('cropb', 'real', [-1], False),
            'cropt': SohoParm('cropt', 'real', [-1], False),
            'camera': SohoParm('camera', 'string', ['/obj/cam1'], False),
        }

        camParmsEval = sohoCam.evaluate(camParms, t)
        if not camParmsEval:
            return {}

        # Flatten each evaluated parm to its first value, then patch the
        # entries that need the full list or a derived value.
        viewParams = {}
        for key in camParmsEval:
            viewParams[key] = camParmsEval[key].Value[0]
        viewParams['transform'] = camParmsEval['space:world'].Value
        viewParams['ortho'] = 1 if camParmsEval['projection'].Value[0] in {'ortho'} else 0
        viewParams['res'] = camParmsEval['res'].Value

        # Crop region in pixels -- computed for the debug log only.
        cropX = viewParams['res'][0] * viewParams['cropl']
        cropY = viewParams['res'][1] * (1.0 - viewParams['cropt'])
        cropW = viewParams['res'][0] * (viewParams['cropr'] - viewParams['cropl'])
        cropH = viewParams['res'][1] * (viewParams['cropt'] - viewParams['cropb'])

        printDebug(" Res: %s" % viewParams['res'])
        printDebug(" Crop: %i-%i %i x %i" % (cropX, cropY, cropW, cropH))
        printDebug(" Camera: %s" % camera)

        return viewParams

    def exportView(rop, camera, sohoCam, t):
        # NOTE(review): `rop` is accepted but unused here.
        printDebug("exportView()")
        _vfh_ipr.exportView(viewParams=getViewParams(camera, sohoCam, t))

    mode = soho.getDefaultedString('state:previewmode', ['default'])[0]

    # Evaluate an intrinsic parameter (see HDK_SOHO_API::evaluate())
    # The 'state:time' parameter evaluates the time from the ROP.
    now = soho.getDefaultedFloat('state:time', [0.0])[0]

    # Evaluate the 'camera' parameter as a string.
    # If the 'camera' parameter doesn't exist, use ['/obj/cam1'].
    # SOHO always returns lists of values.
    camera = soho.getDefaultedString('camera', ['/obj/cam1'])[0]

    # MPlay / Render View port.
    port = soho.getDefaultedInt("vm_image_mplay_socketport", [0])[0]

    # ROP node.
    ropPath = soho.getOutputDriver().getName()
    ropNode = hou.node(ropPath)  # NOTE(review): unused in the visible code.

    printDebug("Initialize SOHO...")

    # Initialize SOHO with the camera.
    # XXX: This doesn't work for me, but it should according to the documentation...
    # soho.initialize(now, camera)
    if not sohoglue.initialize(now, camera, None):
        soho.error("Unable to initialize rendering module with given camera")

    # Now, add objects to our scene
    soho.addObjects(now, "*", "*", "*", True)

    # Before we can evaluate the scene from SOHO, we need to lock the object lists.
    soho.lockObjects(now)

    # Grab the first camera from the locked object list; error out when empty.
    for sohoCam in soho.objectList('objlist:camera'):
        break
    else:
        soho.error("Unable to find viewing camera for render")

    # NOTE(review): unused in the visible code.
    sohoOverride = soho.getDefaultedString('soho_overridefile', ['Unknown'])[0]

    printDebug("Processing Mode: \"%s\"" % mode)

    if mode in {"generate"}:
        # generate: Generation phase of IPR rendering
        # In generate mode, SOHO will keep the pipe (soho_pipecmd)
        # command open between invocations of the soho_program.
        #   objlist:all
        #   objlist:camera
        #   objlist:light
        #   objlist:instance
        #   objlist:fog
        #   objlist:space
        #   objlist:mat
        #
        printDebug("IPR Port: %s" % port)
        printDebug("Driver: %s" % ropPath)
        printDebug("Camera: %s" % camera)
        printDebug("Now: %.3f" % now)

        _vfh_ipr.init(rop=ropPath, port=port, now=now,
                      viewParams=getViewParams(camera, sohoCam, now))
    elif mode in {"update"}:
        # update: Send updated changes from previous generation
        #
        # In this rendering mode, the special object list parameters:
        #   objlist:dirtyinstance
        #   objlist:dirtylight
        #   objlist:dirtyspace
        #   objlist:dirtyfog
        # will contain the list of all objects modified since the last render
        # (whether a generate or update).
        #
        # As well, the parameters:
        #   objlist:deletedinstance
        #   objlist:deletedlight
        #   objlist:deletedspace
        #   objlist:deletedfog
        # will list all objects which have been deleted from the scene.
        #
        if not _vfh_ipr.isRopValid():
            _vfh_ipr.init(rop=ropPath, port=port, now=now,
                          viewParams=getViewParams(camera, sohoCam, now))
        else:
            if _vfh_ipr.setTime(now):
                # Have to handle "time" event manually here.
                exportObjects("objlist:dirtyinstance")
                exportObjects("objlist:dirtylight")

            # Update view.
            exportView(ropPath, camera, sohoCam, now)
# Command-line wiring for a farm/scene-build script.  `parser` is created
# earlier in the file (outside this section).  Written for Python 2 -- see
# the `print` statement below.
parser.add_argument('--cfolder', action="store", dest="m_cfolder")
parser.add_argument('--hip', action="store", dest="m_hip")
# NOTE(review): argparse `type=bool` treats any non-empty string as True
# ("--farm False" still yields True); a store_true flag would be safer.
parser.add_argument('--farm', type=bool, dest="m_farm")

filepath = parser.parse_args().m_hip
folderpath = parser.parse_args().m_cfolder
# NOTE(review): m_nodes is read here but no '--nodes' argument is registered
# in this section -- presumably added earlier in the file; verify.
nodespath = parser.parse_args().m_nodes
farm_option = parser.parse_args().m_farm

print "start"

# Build a container geo node and load a saved node network into it.
root = hou.node('/obj')
parent = root.createNode('geo')
parent.loadChildrenFromFile( nodespath ,ignore_load_warnings=True)

# Unlock the loaded HDA instance so its contents can be edited, then sync it
# back to its current definition.
riverCliff = hou.node('/obj/geo1/rivercliff1')
riverCliff.allowEditingOfContents()
riverCliff.matchCurrentDefinition()

#for i in range(10):
#print "###############################################"
#time.sleep(1)
def test_3_checkfoo(self):
    """Render the CSV-exporter ROP and check that the output file exists."""
    exporter = hou.node("/out/rop_csv_exporter1")
    exporter.render()
    expected_csv = os.path.join(local_dir, "hip", "export.csv")
    assert os.path.exists(expected_csv)
import hou

# Create a single empty geometry object under /obj...
obj_context = hou.node('/obj')
obj_context.createNode('geo')

# ...and write the scene out as a hip file.
hou.hipFile.save(
    file_name='/home/gulliver/Desktop/Job/Study/command_line/test_v01.hip')
def testStandardAttributeConversion( self ) :
    """Check ieCortexConverter's standard-attribute name conversion.

    Builds a torus with Houdini-style attributes (Cd, uv, pscale, rest),
    runs it through an ieOpHolder + ieCortexConverter chain, and verifies
    the Houdini <-> Cortex attribute mappings (Cd<->Cs, uv<->s/t with a
    flipped t, pscale<->width, rest<->Pref) for each combination of the
    two convertStandardAttributes toggles.
    """
    # Build the input network: torus with vertex color, rest position,
    # a pscale point attribute, and UVs.
    torus = hou.node("/obj").createNode("geo", run_init_scripts=False).createNode( "torus" )
    color = torus.createOutputNode( "color" )
    color.parm( "class" ).set( 3 )
    color.parm( "colortype" ).set( 2 )
    rest = color.createOutputNode( "rest" )
    scale = rest.createOutputNode( "attribcreate" )
    scale.parm( "name1" ).set( "pscale" )
    scale.parm( "value1v1" ).setExpression( "$PT" )
    uvunwrap = scale.createOutputNode( "uvunwrap" )
    opHolder = uvunwrap.createOutputNode( "ieOpHolder" )
    fn = IECoreHoudini.FnOpHolder( opHolder )
    fn.setOp( "parameters/primitives/polyParam" )
    out = opHolder.createOutputNode( "ieCortexConverter" )

    # verify input
    inGeo = uvunwrap.geometry()
    self.assertEqual( sorted([ x.name() for x in inGeo.pointAttribs() ]), ['P', 'Pw', 'pscale', 'rest'] )
    self.assertEqual( sorted([ x.name() for x in inGeo.primAttribs() ]), [] )
    self.assertEqual( sorted([ x.name() for x in inGeo.vertexAttribs() ]), ['Cd', 'uv'] )
    self.assertEqual( sorted([ x.name() for x in inGeo.globalAttribs() ]), ['varmap'] )

    # verify output
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw', 'pscale', 'rest'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd', 'uv'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # verify intermediate op result: the op sees Cortex-style names.
    result = fn.getOp().resultParameter().getValue()
    self.assertEqual( result.keys(), [ "Cs", "P", "Pref", "s", "t", "varmap", "width" ] )
    self.assertTrue( result.arePrimitiveVariablesValid() )
    self.assertEqual( result["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
    self.assertEqual( result["Pref"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )

    # Compare s/t against the uv vertex attribute.  Houdini vertex order is
    # reversed relative to Cortex, and t is flipped (t == 1 - v).
    sData = result["s"].data
    tData = result["t"].data
    inUvs = inGeo.findVertexAttrib( "uv" )
    outUvs = outGeo.findVertexAttrib( "uv" )
    i = 0
    for prim in inGeo.prims() :
        verts = list(prim.vertices())
        verts.reverse()
        for vert in verts :
            uvValues = vert.attribValue( inUvs )
            self.assertAlmostEqual( sData[i], uvValues[0] )
            self.assertAlmostEqual( tData[i], 1 - uvValues[1] )
            i += 1
    i = 0
    for prim in outGeo.prims() :
        verts = list(prim.vertices())
        verts.reverse()
        for vert in verts :
            uvValues = vert.attribValue( outUvs )
            self.assertAlmostEqual( sData[i], uvValues[0] )
            self.assertAlmostEqual( tData[i], 1 - uvValues[1] )
            i += 1

    # turn off half the conversion
    opHolder.parm( "parm_input_convertStandardAttributes" ).set( False )

    # verify output
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pw', 'pscale', 'rest'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cd', 'uv'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # verify intermediate op result: with input conversion off, the op now
    # sees the Houdini-style names unchanged.
    result = fn.getOp().resultParameter().getValue()
    self.assertEqual( result.keys(), [ "Cd", "P", "pscale", "rest", "uv", "varmap" ] )
    self.assertTrue( result.arePrimitiveVariablesValid() )
    self.assertEqual( result["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
    self.assertEqual( result["rest"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )

    # uv passes through untouched (no t flip in this configuration).
    uvData = result["uv"].data
    inUvs = inGeo.findVertexAttrib( "uv" )
    outUvs = outGeo.findVertexAttrib( "uv" )
    i = 0
    for prim in inGeo.prims() :
        verts = list(prim.vertices())
        verts.reverse()
        for vert in verts :
            uvValues = vert.attribValue( inUvs )
            self.assertAlmostEqual( uvData[i][0], uvValues[0] )
            self.assertAlmostEqual( uvData[i][1], uvValues[1] )
            i += 1
    i = 0
    for prim in outGeo.prims() :
        verts = list(prim.vertices())
        verts.reverse()
        for vert in verts :
            uvValues = vert.attribValue( outUvs )
            self.assertAlmostEqual( uvData[i][0], uvValues[0] )
            self.assertAlmostEqual( uvData[i][1], uvValues[1] )
            i += 1

    # turn off the other half of the conversion
    opHolder.parm( "parm_input_convertStandardAttributes" ).set( True )
    out.parm( "convertStandardAttributes" ).set( False )

    # verify output: Cortex-style names now survive to the output geometry.
    outGeo = out.geometry()
    self.assertEqual( sorted([ x.name() for x in outGeo.pointAttribs() ]), ['P', 'Pref', 'Pw', 'width'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.primAttribs() ]), ["ieMeshInterpolation"] )
    self.assertEqual( sorted([ x.name() for x in outGeo.vertexAttribs() ]), ['Cs', 's', 't'] )
    self.assertEqual( sorted([ x.name() for x in outGeo.globalAttribs() ]), ['varmap'] )

    # verify intermediate op result
    result = fn.getOp().resultParameter().getValue()
    self.assertEqual( result.keys(), [ "Cs", "P", "Pref", "s", "t", "varmap", "width" ] )
    self.assertTrue( result.arePrimitiveVariablesValid() )
    self.assertEqual( result["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
    self.assertEqual( result["Pref"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )

    # s/t reach the output as separate vertex attributes; compare against
    # the original uv (with the t flip) and against the output s/t directly.
    sData = result["s"].data
    tData = result["t"].data
    inUvs = inGeo.findVertexAttrib( "uv" )
    outS = outGeo.findVertexAttrib( "s" )
    outT = outGeo.findVertexAttrib( "t" )
    i = 0
    for prim in inGeo.prims() :
        verts = list(prim.vertices())
        verts.reverse()
        for vert in verts :
            uvValues = vert.attribValue( inUvs )
            self.assertAlmostEqual( sData[i], uvValues[0] )
            self.assertAlmostEqual( tData[i], 1 - uvValues[1] )
            i += 1
    i = 0
    for prim in outGeo.prims() :
        verts = list(prim.vertices())
        verts.reverse()
        for vert in verts :
            self.assertAlmostEqual( sData[i], vert.attribValue( outS ) )
            self.assertAlmostEqual( tData[i], vert.attribValue( outT ) )
            i += 1
# Evaluate an intrinsic parameter (see HDK_SOHO_API::evaluate()) # The 'state:time' parameter evaluates the time from the ROP. now = soho.getDefaultedFloat('state:time', [0.0])[0] # Evaluate the 'camera' parameter as a string. # If the 'camera' parameter doesn't exist, use ['/obj/cam1']. # SOHO always returns lists of values. camera = soho.getDefaultedString('camera', ['/obj/cam1'])[0] # MPlay / Render View port. port = soho.getDefaultedInt("vm_image_mplay_socketport", [0])[0] # ROP node. ropPath = soho.getOutputDriver().getName() ropNode = hou.node(ropPath) # Use callbacks or SOHO render_rt_update_mode = hou.evalParm("render_rt_update_mode") printDebug("Initialize SOHO...") # Initialize SOHO with the camera. # XXX: This doesn't work for me, but it should according to the documentation... # soho.initialize(now, camera) if not sohoglue.initialize(now, camera, None): soho.error("Unable to initialize rendering module with given camera") # Now, add objects to our scene soho.addObjects(now, "*", "*", "*", True)