def export_img_plane(camera, abc):
    """Export image plane preset data for the given camera/abc.

    Args:
        camera (str): camera shape node name
        abc (str): path to output abc
    """
    _cam_node = hom.HFnCamera(get_parent(str(camera)))
    lprint(' - CAM', _cam_node)

    # Locate the image plane connected to the camera shape
    _plane = get_single(
        _cam_node.shp.list_connections(type='imagePlane'), catch=True)
    if not _plane:
        lprint(' - NO IMAGE PLANE FOUND')
        return
    _plane_tfm = hom.HFnTransform(_plane.split('->')[-1])

    # Write a preset file next to the abc for each shape
    _dir = os.path.dirname(abc)
    for _shape in (_cam_node.shp, _plane_tfm.shp):
        _path = '{}/{}.preset'.format(_dir, _shape.object_type())
        lprint(' - SAVING', _path)
        try:
            _shape.save_preset(_path)
        except RuntimeError:
            lprint(' - FAILED TO SAVE')
def find_ctrls(self):
    """Find rig controls.

    Returns:
        (HFnTransform list): list of controls
    """
    # Map every nurbsCurve shape to its transform; a set removes
    # duplicates where a transform carries more than one curve shape
    _unique = {hom.HFnTransform(get_parent(_shp))
               for _shp in self.find_nodes(type_='nurbsCurve')}
    return sorted(_unique)
def get_parent(self):
    """Get parent of this node (if any).

    Returns:
        (HFnTransform): parent, or None if there is no parent
    """
    # Deferred import avoids a circular dependency with open_maya
    from maya_psyhive import open_maya as hom

    # NOTE: this calls the module-level get_parent helper, not this
    # method - the bare name resolves at module scope
    _result = get_parent(self)
    return hom.HFnTransform(_result) if _result else None
def get_active_cam():
    """Get camera from the current active viewport.

    Returns:
        (HFnCamera): active camera
    """
    _panel = ui.get_active_model_panel()
    _node = cmds.modelPanel(_panel, query=True, camera=True)

    # modelPanel can hand back either the camera shape or its transform
    if cmds.objectType(_node) == 'camera':
        return HFnCamera(get_parent(_node))
    return HFnCamera(_node)
def find_meshes(self):
    """Find meshes in this reference.

    Returns:
        (HFnMesh list): meshes
    """
    from maya_psyhive import open_maya as hom

    # Intermediate shapes (construction-history leftovers) are skipped
    return [hom.HFnMesh(get_parent(_shp))
            for _shp in self.find_nodes(type_='mesh')
            if not _shp.plug('intermediateObject').get_val()]
def find_meshes(self):
    """Find meshes in the current scene.

    Returns:
        (HFnMesh list): meshes
    """
    _results = []
    for _shape in hom.CMDS.ls(type='mesh'):
        # Only un-namespaced, non-intermediate shapes are of interest
        if _shape.namespace or _shape.plug('intermediateObject').get_val():
            continue
        _results.append(hom.HFnMesh(get_parent(_shape)))
    return _results
def restore_img_plane(time_control, abc):
    """Restore image plane from preset data.

    Rebuilds an image plane on the camera driven by the given exocortex
    time control, applying the imagePlane/camera presets saved alongside
    the abc (see export_img_plane). Silently returns if the abc is not a
    camera cache, if either preset file is missing, or if no camera is
    found downstream of the time control.

    Args:
        time_control (str): exocortex time control name
        abc (str): path to output abc
    """
    from psyhive import tk

    # Ignore non camera caches
    _abc = tk.get_output(abc)
    print 'ABC', _abc.path
    if _abc.output_type != 'camcache':
        print 'NOT A CAMERA CACHE'
        return

    # Make sure there are presets to apply - both must exist or we bail
    _presets = []
    for _type in ['imagePlane', 'camera']:
        _preset = '{}/{}.preset'.format(_abc.dir, _type)
        if not os.path.exists(_preset):
            print 'MISSING PRESET', _preset
            return
        _presets.append(_preset)

    # Find camera node - filter by output name to pick the matching cam
    _time_ctrl = hom.HFnDependencyNode(time_control)
    _cam_shp = get_single(
        _time_ctrl.find_downstream(type_='camera', filter_=_abc.output_name),
        catch=True)
    if not _cam_shp:
        print 'NO CAM FOUND'
        return
    _cam = hom.HFnCamera(get_parent(_cam_shp))

    # Create image plane and apply presets - preset order ([imagePlane,
    # camera]) pairs with the shape order here
    _img_plane = hom.CMDS.imagePlane(camera=_cam)
    for _preset, _shp in safe_zip(_presets, [_img_plane.shp, _cam.shp]):
        _shp.load_preset(_preset)
def _build_aip_node(shd, merge, meshes, ai_attrs=None, name=None, verbose=0): """Build aiSetParameter node. Args: shd (HFnDependencyNode): shader to apply merge (HFnDependencyNode): merge node to connect output to meshes (HFnDependencyNode list): meshes to apply set param to ai_attrs (dict): override ai attrs to check name (str): override name verbose (int): print process data """ print 'BULID AIP', shd, meshes _ai_attrs = ai_attrs if ai_attrs is not None else _AI_ATTRS print ' - AI ATTRS', _ai_attrs # Create standin node _aip = hom.CMDS.createNode('aiSetParameter', name='{}_AIP'.format(name or shd.name())) _aip.plug('out').connect(_get_next_idx(merge.plug('inputs'))) if shd: _aip.plug('assignment[0]').set_val("shader = '{}'".format(shd)) lprint(' - AIP', _aip, verbose=verbose) # Determine AIP settings to apply _sels = [] _ai_attr_vals = collections.defaultdict(set) for _mesh in meshes: for _ai_attr in _ai_attrs: _plug = _mesh.plug(_ai_attr) _type = 'string' if _plug.get_type() == 'enum' else None _val = _plug.get_val(type_=_type) lprint(' - READ', _plug, _val, verbose=verbose > 1) if not _type: _default = _plug.get_default() if _default == _val: lprint(' - REJECTED DEFAULT VAL', verbose=verbose > 1) continue _ai_attr_vals[_ai_attr].add(_val) print _mesh, _mesh.namespace _prefix = '*:' if _mesh.namespace else '*/' _tfm = hom.HFnTransform(get_parent(_mesh)) _sels.append('{}{}/*'.format(_prefix, _tfm.clean_name)) # Apply API settings _aip.plug('selection').set_val(' or '.join(_sels)) for _ai_attr, _attr in _ai_attrs.items(): _vals = sorted(_ai_attr_vals[_ai_attr]) lprint(' - AI ATTR', _attr, _ai_attr, _vals, verbose=verbose > 1) _val = get_single(_vals, catch=True) if len(_vals) == 1 and _val not in [None, '']: lprint(' - APPLY', _attr, _val, verbose=verbose > 1) if isinstance(_val, six.string_types): _val = "{} = '{}'".format(_attr, _val) else: _val = "{} = {}".format(_attr, _val) _get_next_idx(_aip.plug('assignment')).set_val(_val) # Read displacement if shd: 
_add_displacement_override(shd=shd, aip=_aip) return _aip
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached to
    the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (tuple): bake geo list and cleanup list (here the same list of
            HFnTransform duplicates, returned twice to match the
            bake/cleanup calling convention)

    Raises:
        (RuntimeError): if the cache set has no reference, or if no
            shade geo matched the rig bakeSet geo
    """
    print 'DRIVE SHADE GEO FROM RIG'

    # Get anim rig - fall back on a hard-coded test set if none passed
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print ' - CACHE SET', _cache_set
    if not _cache_set.referenceFile():
        print ' - NO CORRESPONDING RIG'
        raise RuntimeError("No rig found for {}".format(_cache_set))
    _rig = ref.find_ref(_cache_set.referenceFile().namespace)
    print ' - RIG', _rig
    print ' - RIG PATH', _rig.path

    # Find/import tmp shade asset
    _shade_file = get_shade_mb_for_rig(_rig)
    _shade = ref.create_ref(
        _shade_file.path, namespace='psyhive_tmp', force=True)

    # Duplicate geo and bind to rig - duplicates live in a tmp namespace
    _bake_geo = []
    _tmp_ns = ':tmp_{}'.format(_rig.namespace)
    set_namespace(_tmp_ns, clean=True)
    for _shade_mesh in qt.progress_bar(
            _shade.find_nodes(type_='mesh'), 'Binding {:d} geo{}',
            col='Tomato', show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            # Shade mesh with no rig counterpart - nothing to drive it
            continue
        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm,
               verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint(' - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)

        # Bind to rig - world-origin blendshape so rig motion drives dup
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    # Tmp shade reference is no longer needed once dups are bound
    _shade.remove(force=True)
    cmds.namespace(set=":")
    if not _bake_geo:
        del_namespace(_tmp_ns)
        raise RuntimeError('No geo was attached - this means none of the '
                           'shade geo matched the rig bakeSet geo.')

    return _bake_geo, _bake_geo
def drive_shade_geo_from_rig(cache_set, progress=False, verbose=0):
    """Use a rig to drive tmp geo duplicated from its shade asset.

    The shade asset is referenced into the scene, all meshes with
    corresponding meshes in the rig are duplicated and then attached to
    the rig geo using a blendshape. The shade asset is then removed.

    Args:
        cache_set (pm.ObjectSet): cache set from rig being cached
        progress (bool): show progress on bind
        verbose (int): print process data

    Returns:
        (tuple): bake geo list and cleanup list (both lists receive the
            same HFnTransform duplicates)

    Raises:
        (RuntimeError): if the latest shade output file is missing
    """
    from psyhive import tk2

    # Get anim rig - fall back on a hard-coded test set if none passed
    _cache_set = cache_set or nt.ObjectSet(u'archer_rig2:bakeSet')
    print 'CACHE SET', _cache_set
    _rig = ref.find_ref(_cache_set.namespace().strip(':'))
    print 'RIG', _rig
    print 'RIG PATH', _rig.path

    # Find/import tmp shade asset - map the rig output to the matching
    # shade step output and take the latest version
    _rig_out = tk2.TTOutputName(_rig.path)
    print 'RIG OUT', _rig_out.path
    _shade_out = _rig_out.map_to(
        Step='shade', output_type='shadegeo', Task='shade').find_latest()
    print 'SHADE OUT', _shade_out.path
    if not _shade_out.exists():
        raise RuntimeError("Missing shade file " + _shade_out.path)
    _shade_file = _shade_out.find_file(extn='mb', format_='maya')
    print 'SHADE FILE', _shade_file
    _shade = ref.create_ref(
        _shade_file.path, namespace='psyhive_tmp', force=True)

    # Duplicate geo and bind to rig - duplicates live in a tmp namespace
    _bake_geo = []
    _cleanup = []
    set_namespace(':tmp_{}'.format(_rig.namespace), clean=True)
    for _shade_mesh in qt.progress_bar(
            _shade.find_nodes(type_='mesh'), 'Binding {:d} geo{}',
            col='Tomato', show=progress):

        # Check there is equivalent mesh in rig
        if _shade_mesh.plug('intermediateObject').get_val():
            continue
        _shade_tfm = hom.HFnTransform(get_parent(_shade_mesh))
        try:
            _rig_tfm = _rig.get_node(_shade_tfm, class_=hom.HFnTransform)
        except ValueError:
            # Shade mesh with no rig counterpart - nothing to drive it
            continue
        lprint(' - BINDING MESH', _shade_tfm, '->', _rig_tfm,
               verbose=verbose)

        # Duplicate mesh
        _dup = _shade_tfm.duplicate()
        lprint(' - DUPLICATING', _shade_tfm, verbose=verbose)
        _dup.parent(world=True)
        _clean_unused_uv_sets(_dup)
        _connect_visibility(_rig_tfm, _dup)
        _bake_geo.append(_dup)
        _cleanup.append(_dup)

        # Bind to rig - world-origin blendshape so rig motion drives dup
        _blend = hom.CMDS.blendShape(_rig_tfm, _dup)
        _blend.plug('origin').set_enum('world')
        _blend.plug('weight[0]').set_val(1.0)

    # Tmp shade reference is no longer needed once dups are bound
    _shade.remove(force=True)

    return _bake_geo, _cleanup