def connect_place_2d(node_, place=None):
    """Connect a place2dTexture node to the given shading node.

    Args:
        node_ (str): node to apply place 2d texture
        place (str): use an existing place2dTexture node

    Returns:
        (HFnDependencyNode): place2dTexture node
    """
    _node = hom.HFnDependencyNode(str(node_))

    # Get tex place node - reuse the given one or build a fresh utility
    if place:
        _place = hom.HFnDependencyNode(place)
        assert _place.object_type() == "place2dTexture"
    else:
        _place = hom.CMDS.shadingNode("place2dTexture", asUtility=1)

    # Connect attrs with same name (uvCoord/uvFilterSize are deliberately
    # omitted here - they are driven by the dedicated out plugs below, and
    # connecting them here was redundant as the force connect overrode it)
    for _attr in [
            'coverage', 'mirrorU', 'mirrorV', 'noiseUV', 'offset',
            'repeatUV', 'rotateFrame', 'rotateUV', 'stagger',
            'translateFrame', 'vertexCameraOne', 'vertexUvOne',
            'vertexUvThree', 'vertexUvTwo', 'wrapU', 'wrapV']:
        if _node.has_attr(_attr):
            _place.plug(_attr).connect(_node.plug(_attr), force=True)

    # Connect attrs with different names
    for _src, _trg in [
            ("outUV", "uvCoord"),
            ("outUvFilterSize", "uvFilterSize")]:
        _place.plug(_src).connect(_node.plug(_trg), force=True)

    return _place
def read_shd(shp, allow_base=False, verbose=1):
    """Read shader from the given geo shape node.

    Args:
        shp (str): shape node to read
        allow_base (bool): return BaseShader objects for unhandled shaders
        verbose (int): print process data

    Returns:
        (_BaseShader): shader object (or None if no shader found)
    """
    # Map a transform to its shape before reading connections
    _shp = get_shp(shp) if cmds.objectType(shp) == 'transform' else shp

    # Find the shading engine driven by this shape
    _se = get_single(
        cmds.listConnections(_shp, source=False, type='shadingEngine'),
        catch=True)
    if not _se:
        lprint('No shading engine found:', _shp, verbose=verbose)
        return None
    lprint('Shading engine:', _se, verbose=verbose > 1)

    # Find the surface shader feeding the shading engine
    _shd_node = get_single(
        cmds.listConnections(_se + '.surfaceShader', destination=False),
        catch=True)
    if not _shd_node:
        return None

    _shd = find_shd(_shd_node, allow_base=allow_base)
    _shd.set_se(hom.HFnDependencyNode(_se))
    return _shd
def apply_caches_in_root_namespace(caches): """Apply yeti caches in the root namespace. Yeti nodes which don't currently exist will be created with no namespace. Args: caches (TTOutputFileSeq list): caches to apply """ for _cache in caches: print 'READ CACHE', _cache assert _cache.output_name.count('Yeti_') == 1 _, _tfm_name = _cache.output_name.split('Yeti_') _yeti_name = _tfm_name + 'Shape' print ' - NODE NAME', _tfm_name, _yeti_name # Get yeti node load_plugin('pgYetiMaya') if cmds.objExists(_yeti_name): _yeti = hom.HFnDependencyNode(_yeti_name) else: _yeti = hom.CMDS.createNode('pgYetiMaya', name=_yeti_name) print ' - CREATED YETI', _yeti if not _yeti.get_parent() == _tfm_name: cmds.rename(_yeti.get_parent(), _tfm_name) print ' - RENAMED PARENT', _yeti.get_parent() print ' - YETI', _yeti apply_cache(_cache, yeti=_yeti)
def __init__(self, node, verbose=0):
    """Constructor.

    Args:
        node (str): transform node name
        verbose (int): print process data
    """
    from maya_psyhive import open_maya as hom
    super(BaseTransform, self).__init__(node)

    # Get shape (if any)
    _shapes = cmds.listRelatives(
        self.node, shapes=True, path=True, noIntermediate=True) or []
    _shape = get_single([str(_item) for _item in _shapes], catch=True)
    self.shp = hom.HFnDependencyNode(_shape) if _shape else None
    lprint('SHAPE', self.shp, _shapes, verbose=verbose)

    # Create plugs for the per-axis transform channels (tx, ry, sz etc)
    for _param in 'trs':
        for _axis in 'xyz':
            _attr = _param + _axis
            setattr(self, _attr, HPlug(self.node + '.' + _attr))

    # Create plugs for the compound channels
    self.translate = HPlug(self.node + '.translate')
    self.rotate = HPlug(self.node + '.rotate')
    self.scale = HPlug(self.node + '.scale')
    self.visibility = HPlug(self.node + '.visibility')
def __init__(self, shd):
    """Constructor.

    Args:
        shd (str): shader node (eg. lambert1)
    """
    _shd_node = hom.HFnDependencyNode(shd)
    self.shd = _shd_node
    self.out_col_attr = _shd_node.plug('outColor')
def get_node(self):
    """Get the node which this plug belongs to.

    Returns:
        (HFnDependencyNode): node
    """
    from maya_psyhive import open_maya as hom
    _node_name = self.node
    return hom.HFnDependencyNode(_node_name)
def get_shp(self):
    """Find this node's shape.

    Returns:
        (HFnDependencyNode): shape (or None if no shape found)
    """
    from maya_psyhive import open_maya as hom
    _shape = get_shp(self)
    if not _shape:
        return None
    return hom.HFnDependencyNode(_shape)
def get_node(self, name):
    """Get a node matching the given name.

    Args:
        name (str): node name

    Returns:
        (HFnDependencyNode): node
    """
    _node = hom.HFnDependencyNode(name)
    return _node
def get_se(self, verbose=0):
    """Get this shader's shading engine node.

    Args:
        verbose (int): print process data

    Returns:
        (HFnDependencyNode): shading engine node (or None if not found)
    """
    # Use cached value if one has been set
    if self._se:
        return self._se

    # Search outgoing connections for an object set
    _sets = cmds.listConnections(self.shd, type='objectSet', source=False)
    lprint('SETS', _sets, verbose=verbose)
    _se = get_single(_sets, catch=True)
    if not _se:
        return None
    return hom.HFnDependencyNode(_se)
def _build_blast_cam(): """Build blast camera. Returns: (HFnCamera): blast cam """ _cam = hom.CMDS.camera(name='BLAST_CAM') for _node, _name in [(_cam.tfm, 'tfm'), (_cam.shp, 'shp')]: _preset = CAM_SETTINGS_FMT.format('blast', _name) print _preset hom.HFnDependencyNode(str(_node)).load_preset(_preset) _cam = _cam.rename('BLAST_CAM') return _cam
def obtain_curve_info(self):
    """Get a curve info node from this curve, creating if needed.

    If one already exists, the existing node is returned.

    Returns:
        (HFnDependencyNode): curve info
    """
    from maya_psyhive import open_maya as hom

    # Reuse an existing curveInfo if this curve already drives one
    _existing = self.world_space.list_connections(
        type='curveInfo', source=False)
    if _existing:
        return hom.HFnDependencyNode(get_single(_existing))

    # Otherwise build and connect a fresh one
    _info = hom.CMDS.createNode('curveInfo')
    self.world_space.connect(_info.plug('inputCurve'))
    return _info
def _fix_nested_namespace(ref_): """Fix nested namespace issues. If the rig is in a nested namespace, move it into the root namespace. Args: ref_ (FileRef): reference to check Returns: (FileRef): fixed reference """ _ref_node = hom.HFnDependencyNode(ref_.ref_node) if not _ref_node.namespace: print 'NO NAMESPACE ISSUE TO FIX' return ref_ print 'FIXING NESTED NAMESPACE', _ref_node.namespace cmds.namespace(moveNamespace=(_ref_node.namespace, ":"), force=True) return ref.find_ref()
def find_connected(
        self, type_=None, depth=1, filter_=None, source=True,
        destination=True, verbose=0):
    """Recursively traverse connected nodes in graph.

    Args:
        type_ (str): filter by node type
        depth (int): traversal depth
        filter_ (str): filter by node name
        source (bool): traverse upstream nodes
        destination (bool): traverse downstream nodes
        verbose (int): print process data

    Returns:
        (HFnDependencyNode list): list of connected nodes
    """
    from maya_psyhive import open_maya as hom

    # Gather direct connections, recursing into each until depth runs out
    _found = set()
    _direct = set(self.list_connections(
        source=source, destination=destination, shapes=True) or [])
    for _node in _direct:
        _node = hom.HFnDependencyNode(_node)
        _found.add(_node)
        lprint(' '*(5-depth), 'ADDING', _node, verbose=0)
        if depth > 0:
            _found |= set(_node.find_connected(
                depth=depth-1, source=source, destination=destination,
                verbose=verbose))
    _result = sorted(_found)

    # Apply filters (only at the top of the recursion - recursive calls
    # do not pass type_/filter_ through)
    if type_:
        _result = [
            _node for _node in _result if _node.object_type() == type_]
    if filter_:
        _result = apply_filter(_result, filter_, key=str)

    return _result
def _build_shader_overrides(shade, merge, verbose=0):
    """Build shader overrides.

    Each shader has an aiSetParameter node which applies overrides for
    the geometry in the abc which that shader is applied to.

    Args:
        shade (FileRef): shade reference
        merge (HFnDependencyNode): merge node to connect output to
        verbose (int): print process data
    """
    # Read shader assignments, grouping mesh shapes by shader
    _shd_to_shps = collections.defaultdict(list)
    for _mesh in shade.find_meshes():
        if _mesh.clean_name == 'color_switch_Geo':
            continue
        _shd = tex.read_shd(_mesh, allow_base=True)
        if not _shd:
            continue
        _shd_to_shps[_shd].append(_mesh.shp)

    # Set up AIP node for each shader
    for _shd in qt.progress_bar(
            sorted(_shd_to_shps), 'Applying {:d} shader{}'):

        _shps = _shd_to_shps[_shd]
        lprint(' - SHD', _shd, _shps, verbose=verbose)

        # Read SE + arnold shader - prefer the aiSurfaceShader input
        # of the shading engine over the plain surface shader
        lprint(' - SE', _shd.get_se(), verbose=verbose)
        _ai_shd = get_single(
            _shd.get_se().plug('aiSurfaceShader').list_connections(),
            catch=True)
        if _ai_shd:
            _ai_shd = hom.HFnDependencyNode(_ai_shd)
        lprint(' - AI SHD', _ai_shd, verbose=verbose)
        _shd_node = _ai_shd or _shd.shd

        _build_aip_node(shd=_shd_node, meshes=_shps, merge=merge)
def restore_img_plane(time_control, abc): """Restore image plane from preset data. Args: time_control (str): exocortex time control name abc (str): path to output abc """ from psyhive import tk # Ignore non camera caches _abc = tk.get_output(abc) print 'ABC', _abc.path if _abc.output_type != 'camcache': print 'NOT A CAMERA CACHE' return # Make sure there are presets to apply _presets = [] for _type in ['imagePlane', 'camera']: _preset = '{}/{}.preset'.format(_abc.dir, _type) if not os.path.exists(_preset): print 'MISSING PRESET', _preset return _presets.append(_preset) # Find camera node _time_ctrl = hom.HFnDependencyNode(time_control) _cam_shp = get_single(_time_ctrl.find_downstream(type_='camera', filter_=_abc.output_name), catch=True) if not _cam_shp: print 'NO CAM FOUND' return _cam = hom.HFnCamera(get_parent(_cam_shp)) # Create image plane and apply presets _img_plane = hom.CMDS.imagePlane(camera=_cam) for _preset, _shp in safe_zip(_presets, [_img_plane.shp, _cam.shp]): _shp.load_preset(_preset)