def build_loc(name='locator', scale=None, col=None):
    """Build a space locator.

    Args:
        name (str): name for locator
        scale (str): locator scale (falls back to hom.LOC_SCALE)
        col (str): locator colour (falls back to hom.LOC_COL)

    Returns:
        (HFnTransform): locator transform
    """
    from maya_psyhive import open_maya as hom
    from maya_psyhive.utils import set_col

    _loc_name = cmds.spaceLocator(name=get_unique(name))[0]

    # Scale the locator shape - skip if it would be a no-op
    _loc_scale = scale or hom.LOC_SCALE
    if _loc_scale != 1.0:
        _loc_shp = get_single(cmds.listRelatives(_loc_name, shapes=True))
        cmds.setAttr(
            _loc_shp + '.localScale', _loc_scale, _loc_scale, _loc_scale)

    # Colour the locator
    set_col(_loc_name, col or hom.LOC_COL)

    return hom.HFnTransform(_loc_name)
def export_img_plane(camera, abc):
    """Export image plane preset data for the given camera/abc.

    Args:
        camera (str): camera shape node name
        abc (str): path to output abc
    """
    _cam = hom.HFnCamera(get_parent(str(camera)))
    lprint(' - CAM', _cam)

    # Locate the camera's image plane (if there is one)
    _img_plane = get_single(
        _cam.shp.list_connections(type='imagePlane'), catch=True)
    if not _img_plane:
        lprint(' - NO IMAGE PLANE FOUND')
        return
    _img_plane = hom.HFnTransform(_img_plane.split('->')[-1])

    # Write a preset beside the abc for the camera + image plane shapes
    _dir = os.path.dirname(abc)
    for _shp in (_cam.shp, _img_plane.shp):
        _preset = '{}/{}.preset'.format(_dir, _shp.object_type())
        lprint(' - SAVING', _preset)
        try:
            _shp.save_preset(_preset)
        except RuntimeError:
            lprint(' - FAILED TO SAVE')
def add_to_grp(self, grp):
    """Add this node to a group, creating it if required.

    Args:
        grp (str): group to add to

    Returns:
        (HFnTransform): the group
    """
    from maya_psyhive import open_maya as hom
    _grp = add_to_grp(self, grp)
    return hom.HFnTransform(_grp)
def aim_constraint(self, *args, **kwargs):
    """Aim constrain a node to this node.

    Any args/kwargs are passed through to cmds.aimConstraint.

    Returns:
        (HFnTransform): aim constraint
    """
    from maya_psyhive import open_maya as hom
    _result = cmds.aimConstraint(self, *args, **kwargs)
    return hom.HFnTransform(_result[0])
def parent_constraint(self, *args, **kwargs):
    """Parent constrain a node to this node.

    Any args/kwargs are passed through to cmds.parentConstraint.

    Returns:
        (HFnTransform): parent constraint
    """
    from maya_psyhive import open_maya as hom
    _result = cmds.parentConstraint(self, *args, **kwargs)
    return hom.HFnTransform(_result[0])
def find_ctrls(self):
    """Find rig controls.

    A control is the transform parent of any nurbsCurve shape in
    this rig.

    Returns:
        (HFnTransform list): sorted list of controls
    """
    _ctrls = {
        hom.HFnTransform(get_parent(_shp))
        for _shp in self.find_nodes(type_='nurbsCurve')}
    return sorted(_ctrls)
def get_parent(self):
    """Get parent of this node (if any).

    Returns:
        (HFnTransform|None): parent, or None if there isn't one
    """
    from maya_psyhive import open_maya as hom
    _parent = get_parent(self)
    return hom.HFnTransform(_parent) if _parent else None
def motion_path(self, trg, add_u_length=False, follow_axis='z', up_axis='y'):
    """Attach the target mode this curve using a motion path.

    This uses the pathAnimation command, and deletes the default
    animation and placeholder applied to the u value.

    Args:
        trg (str|HFnTransform): transform to attach
        add_u_length (bool): add uLength attr to get abs uValue
        follow_axis (str): front axis
        up_axis (str): up axis

    Returns:
        (MFnDependencyNode): motion path node
    """
    from maya_psyhive import open_maya as hom

    _trg = hom.HFnTransform(trg)
    _mpath = hom.CMDS.pathAnimation(
        trg, self, follow=True, fractionMode=True,
        followAxis=follow_axis, upAxis=up_axis)

    # Free the u value from the default anim/placeholder
    _u_val = _mpath.plug('uValue')
    _u_val.break_connections()

    # Optionally wire a uLength attr onto the target
    if add_u_length is False:
        pass
    elif add_u_length in ('driven', True, 'driving'):
        _arc_len = self.obtain_curve_info().plug('arcLength')
        _u_len = _trg.create_attr('uLength', 0.0)
        if add_u_length == 'driven':
            # uLength is driven by uValue
            multiply_node(_arc_len, _u_val, _u_len)
        else:
            # uLength drives uValue
            divide_node(_u_len, _arc_len, _u_val)
    else:
        raise ValueError(add_u_length)

    return _mpath
def _finalise_standin(node, name, range_, verbose=0):
    """Finalise new aiStandIn node.

    Executes updates to be run after abc has loaded (abc loads using
    deferred evaluation). This includes renaming the transform/shape -
    if they are renamed before abc load the auto generated abc frame
    expression errors. Also the frame expression is regenerated to make
    the abc loop - if this is generated before abc load then the auto
    generated expression also errors.

    Args:
        node (HFnDependencyNode): aiStandIn node (shape)
        name (str): intended node name (of transform)
        range_ (tuple|None): range to loop (if any)
        verbose (int): print process data

    Returns:
        (HFnTransform): renamed standin transform
    """
    # Debug output respects the verbose flag (was bare prints)
    lprint('FINALISE STANDIN', node, verbose=verbose)
    lprint(' - RANGE', range_, verbose=verbose)

    # Fix names - must happen after abc load (see docstring)
    _parent = node.get_parent()
    lprint(' - RENAMING', name, _parent, verbose=verbose)
    _parent = cmds.rename(_parent, name)
    lprint(' - PARENT', _parent, verbose=verbose)
    _node = node.rename(name + "Shape")
    _plug = _node.plug('frameNumber')

    # Apply range expression (single check - the original nested a
    # redundant second "if range_" inside this branch)
    if range_:

        # Clean auto generated frame expression
        lprint(' - PLUG', _plug, _plug.find_driver(), verbose=verbose)
        lprint(' - BREAKING CONNECTIONS', verbose=verbose)
        _plug.break_connections()

        # Build expression making the abc loop over range_
        lprint(' - BUILDING EXPRESSION', verbose=verbose)
        _str = ('{plug} = ((frame - {start}) % ({end} - {start} + 1)) + '
                '{start};').format(
                    start=range_[0], end=range_[1], plug=_plug)
        lprint(_str, verbose=verbose)
        _expr = cmds.expression(string=_str, timeDependent=True)
        lprint(' - CREATED EXPRESSION', _expr, verbose=verbose)

    return hom.HFnTransform(_parent)
def _build_aip_node(shd, merge, meshes, ai_attrs=None, name=None, verbose=0):
    """Build aiSetParameter node.

    Args:
        shd (HFnDependencyNode): shader to apply
        merge (HFnDependencyNode): merge node to connect output to
        meshes (HFnDependencyNode list): meshes to apply set param to
        ai_attrs (dict): override ai attrs to check
        name (str): override name
        verbose (int): print process data

    Returns:
        (HFnDependencyNode): new aiSetParameter node
    """
    # Debug output respects the verbose flag (was bare prints, with
    # "BULID" typo fixed)
    lprint('BUILD AIP', shd, meshes, verbose=verbose)
    _ai_attrs = ai_attrs if ai_attrs is not None else _AI_ATTRS
    lprint(' - AI ATTRS', _ai_attrs, verbose=verbose)

    # Create standin node
    _aip = hom.CMDS.createNode(
        'aiSetParameter', name='{}_AIP'.format(name or shd.name()))
    _aip.plug('out').connect(_get_next_idx(merge.plug('inputs')))
    if shd:
        _aip.plug('assignment[0]').set_val("shader = '{}'".format(shd))
    lprint(' - AIP', _aip, verbose=verbose)

    # Determine AIP settings to apply
    _sels = []
    _ai_attr_vals = collections.defaultdict(set)
    for _mesh in meshes:

        # Gather non-default ai attr values on this mesh
        for _ai_attr in _ai_attrs:
            _plug = _mesh.plug(_ai_attr)
            # Enums are read as strings so the value is usable in the
            # assignment string
            _type = 'string' if _plug.get_type() == 'enum' else None
            _val = _plug.get_val(type_=_type)
            lprint(' - READ', _plug, _val, verbose=verbose > 1)
            if not _type:
                _default = _plug.get_default()
                if _default == _val:
                    lprint(' - REJECTED DEFAULT VAL', verbose=verbose > 1)
                    continue
            _ai_attr_vals[_ai_attr].add(_val)

        # Build selection entry for this mesh
        lprint(' - MESH', _mesh, _mesh.namespace, verbose=verbose)
        _prefix = '*:' if _mesh.namespace else '*/'
        _tfm = hom.HFnTransform(get_parent(_mesh))
        _sels.append('{}{}/*'.format(_prefix, _tfm.clean_name))

    # Apply AIP settings - only attrs with exactly one non-empty value
    # across all meshes are written
    _aip.plug('selection').set_val(' or '.join(_sels))
    for _ai_attr, _attr in _ai_attrs.items():
        _vals = sorted(_ai_attr_vals[_ai_attr])
        lprint(' - AI ATTR', _attr, _ai_attr, _vals, verbose=verbose > 1)
        _val = get_single(_vals, catch=True)
        if len(_vals) == 1 and _val not in [None, '']:
            lprint(' - APPLY', _attr, _val, verbose=verbose > 1)
            if isinstance(_val, six.string_types):
                _val = "{} = '{}'".format(_attr, _val)
            else:
                _val = "{} = {}".format(_attr, _val)
            _get_next_idx(_aip.plug('assignment')).set_val(_val)

    # Read displacement
    if shd:
        _add_displacement_override(shd=shd, aip=_aip)

    return _aip
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached to
    the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (HFnMesh list): list of driven shade geo (returned twice, as
            both the bake geo and the cleanup list)

    Raises:
        (RuntimeError): if no rig is found for the cache set, or no
            shade geo matched the rig bakeSet geo
    """
    # Debug output respects the verbose flag (was bare prints)
    lprint('DRIVE SHADE GEO FROM RIG', verbose=verbose)

    # Get anim rig
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    lprint(' - CACHE SET', _cache_set, verbose=verbose)
    if not _cache_set.referenceFile():
        lprint(' - NO CORRESPONDING RIG', verbose=verbose)
        raise RuntimeError("No rig found for {}".format(_cache_set))
    _rig = ref.find_ref(_cache_set.referenceFile().namespace)
    lprint(' - RIG', _rig, verbose=verbose)
    lprint(' - RIG PATH', _rig.path, verbose=verbose)

    # Find/import tmp shade asset
    _shade_file = get_shade_mb_for_rig(_rig)
    _shade = ref.create_ref(
        _shade_file.path, namespace='psyhive_tmp', force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _tmp_ns = ':tmp_{}'.format(_rig.namespace)
    set_namespace(_tmp_ns, clean=True)
    for _shade_mesh in qt.progress_bar(
            _shade.find_nodes(type_='mesh'), 'Binding {:d} geo{}',
            col='Tomato', show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue
        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm,
               verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint(' - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)

        # Bind to rig
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    _shade.remove(force=True)
    cmds.namespace(set=":")

    if not _bake_geo:
        del_namespace(_tmp_ns)
        raise RuntimeError('No geo was attached - this means none of the '
                           'shade geo matched the rig bakeSet geo.')

    return _bake_geo, _bake_geo
def get_selected(type_=None, class_=None, multi=False, verbose=1):
    """Get selected node.

    Unless the multi flag is used, this will error if there isn't
    exactly one selected node matched.

    Args:
        type_ (str): filter nodes by type
        class_ (class): only return nodes that cast to this class
        multi (bool): return multiple nodes
        verbose (int): print process data

    Returns:
        (HFnDependencyNode): matching node
        (HFnDependencyNode list): matching nodes (if multi flag used)
        (HFnPlug|HPlug list): if class_ is HPlug
    """
    from maya_psyhive import open_maya as hom

    # Build list of selected nodes
    _results = []
    for _node in hom.CMDS.ls(selection=True):

        _result = _node
        _type = _node.object_type()
        lprint('TESTING', _node, verbose=verbose > 1)

        # Map transforms to HFnTransform
        if _type == 'transform':
            _result = hom.HFnTransform(str(_node))

        # Apply type filter - a transform can match via its shape's type
        if type_:
            if type_ != 'transform' and _type == 'transform' and _result.shp:
                _type = _result.shp.object_type()
                lprint(' - SHAPE TYPE', _type, verbose=verbose > 1)
            if _type != type_:
                lprint(' - REJECTED', type_, _type, verbose=verbose > 1)
                continue

        # Apply class filter/cast
        if class_ is hom.HPlug:
            # Collect the channel box selection as plugs on this node
            _attrs = cmds.channelBox(
                'mainChannelBox', query=True,
                selectedMainAttributes=True) or []
            for _attr in _attrs:
                _plug = hom.HPlug('{}.{}'.format(_node, _attr))
                _results.append(_plug)
            continue
        elif class_:
            try:
                _result = class_(str(_node))
            except ValueError:
                lprint(' - CLASS FAIL', class_, verbose=verbose > 1)
                continue

        lprint(' - ADDED', verbose=verbose > 1)
        _results.append(_result)

    # Get result
    if multi:
        return _results
    return get_single(_results, name='selected object', verbose=verbose)
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached to
    the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (tuple): bake geo list and cleanup node list (currently the
            same duplicated meshes appear in both)

    Raises:
        (RuntimeError): if the latest shade output file is missing
    """
    from psyhive import tk2

    # Get anim rig - debug output respects the verbose flag (was bare
    # prints)
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    lprint('CACHE SET', _cache_set, verbose=verbose)
    _rig = ref.find_ref(_cache_set.namespace().strip(':'))
    lprint('RIG', _rig, verbose=verbose)
    lprint('RIG PATH', _rig.path, verbose=verbose)

    # Find/import tmp shade asset via shotgun toolkit template
    _rig_out = tk2.TTOutputName(_rig.path)
    lprint('RIG OUT', _rig_out.path, verbose=verbose)
    _shade_out = _rig_out.map_to(
        Step='shade', output_type='shadegeo', Task='shade').find_latest()
    lprint('SHADE OUT', _shade_out.path, verbose=verbose)
    if not _shade_out.exists():
        raise RuntimeError("Missing shade file " + _shade_out.path)
    _shade_file = _shade_out.find_file(extn='mb', format_='maya')
    lprint('SHADE FILE', _shade_file, verbose=verbose)
    _shade = ref.create_ref(
        _shade_file.path, namespace='psyhive_tmp', force=True)

    # Duplicate geo and bind to rig
    _bake_geo = []
    _cleanup = []
    set_namespace(':tmp_{}'.format(_rig.namespace), clean=True)
    for _shade_mesh in qt.progress_bar(
            _shade.find_nodes(type_='mesh'), 'Binding {:d} geo{}',
            col='Tomato', show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            continue
        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm,
               verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint(' - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)
        _cleanup.append(_dup)

        # Bind to rig
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    _shade.remove(force=True)

    return _bake_geo, _cleanup